From a402b73f42ef9c7b94c2656a0b1b1070fc6693e0 Mon Sep 17 00:00:00 2001 From: samwelkanda Date: Wed, 28 Feb 2024 14:57:00 +0300 Subject: [PATCH 1/8] Project setup --- .github/CODEOWNERS | 1 + .github/ISSUE_TEMPLATE/bug_report.md | 30 ++ .github/ISSUE_TEMPLATE/feature_request.md | 20 + .github/dependabot.yml | 12 + .github/policies/resourceManagement.yml | 101 ++++ .github/pull_request_template.md | 23 + .github/workflows/auto-merge-dependabot.yml | 32 ++ .github/workflows/build.yml | 42 ++ .github/workflows/codeql-analysis.yml | 70 +++ .github/workflows/conflicting-pr-label.yml | 38 ++ .github/workflows/publish.yml | 50 ++ .gitignore | 508 +++++++++++++------ .pylintrc | 519 ++++++++++++++++++++ README.md | 25 +- pyproject.toml | 45 ++ requirements-dev.txt | 19 + 16 files changed, 1367 insertions(+), 168 deletions(-) create mode 100644 .github/CODEOWNERS create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/feature_request.md create mode 100644 .github/dependabot.yml create mode 100644 .github/policies/resourceManagement.yml create mode 100644 .github/pull_request_template.md create mode 100644 .github/workflows/auto-merge-dependabot.yml create mode 100644 .github/workflows/build.yml create mode 100644 .github/workflows/codeql-analysis.yml create mode 100644 .github/workflows/conflicting-pr-label.yml create mode 100644 .github/workflows/publish.yml create mode 100644 .pylintrc create mode 100644 pyproject.toml create mode 100644 requirements-dev.txt diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..d45027e --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ +* @microsoft/kiota-write diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..674b54b --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,30 @@ +--- +name: Bug report +about: Create a report to help us improve +title: "" +labels: bug +--- + +**Environment** + +- Python Version +- kiota-serialization-multipart version: +- OS: + +**Stack trace (if available)** +Screenshot or `formatted` copy and paste of your stack trace. + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Additional context** +Add any other context about the problem here. \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000..6a33282 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Suggest a feature for this project +title: '' +labels: enhancement +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. 
\ No newline at end of file diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..c78c2ed --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,12 @@ +version: 2 +updates: +- package-ecosystem: pip + directory: "/" + schedule: + interval: daily + open-pull-requests-limit: 10 +- package-ecosystem: github-actions + directory: "/" + schedule: + interval: daily + open-pull-requests-limit: 10 \ No newline at end of file diff --git a/.github/policies/resourceManagement.yml b/.github/policies/resourceManagement.yml new file mode 100644 index 0000000..0f7b81e --- /dev/null +++ b/.github/policies/resourceManagement.yml @@ -0,0 +1,101 @@ +id: +name: GitOps.PullRequestIssueManagement +description: GitOps.PullRequestIssueManagement primitive +owner: +resource: repository +disabled: false +where: +configuration: + resourceManagementConfiguration: + scheduledSearches: + - description: + frequencies: + - hourly: + hour: 6 + filters: + - isIssue + - isOpen + - hasLabel: + label: 'Needs: Author Feedback' + - hasLabel: + label: 'Status: No Recent Activity' + - noActivitySince: + days: 3 + actions: + - closeIssue + - description: + frequencies: + - hourly: + hour: 6 + filters: + - isIssue + - isOpen + - hasLabel: + label: 'Needs: Author Feedback' + - noActivitySince: + days: 4 + - isNotLabeledWith: + label: 'Status: No Recent Activity' + actions: + - addLabel: + label: 'Status: No Recent Activity' + - addReply: + reply: This issue has been automatically marked as stale because it has been marked as requiring author feedback but has not had any activity for **4 days**. It will be closed if no further activity occurs **within 3 days of this comment**. + - description: + frequencies: + - hourly: + hour: 6 + filters: + - isIssue + - isOpen + - hasLabel: + label: 'Resolution: Duplicate' + - noActivitySince: + days: 1 + actions: + - addReply: + reply: This issue has been marked as duplicate and has not had any activity for **1 day**. It will be closed for housekeeping purposes. + - closeIssue + eventResponderTasks: + - if: + - payloadType: Issue_Comment + - isAction: + action: Created + - isActivitySender: + issueAuthor: True + - hasLabel: + label: 'Needs: Author Feedback' + - isOpen + then: + - addLabel: + label: 'Needs: Attention :wave:' + - removeLabel: + label: 'Needs: Author Feedback' + description: + - if: + - payloadType: Issues + - not: + isAction: + action: Closed + - hasLabel: + label: 'Status: No Recent Activity' + then: + - removeLabel: + label: 'Status: No Recent Activity' + description: + - if: + - payloadType: Issue_Comment + - hasLabel: + label: 'Status: No Recent Activity' + then: + - removeLabel: + label: 'Status: No Recent Activity' + description: + - if: + - payloadType: Pull_Request + then: + - inPrLabel: + label: WIP + description: +onFailure: +onSuccess: diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..f125a0d --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,23 @@ +## Overview + +Brief description of what this PR does, and why it is needed. + +## Related Issue + +Fixes # (issue) + +### Demo + +Optional. Screenshots, `curl` examples, etc. + +### Notes + +Optional. Ancillary topics, caveats, alternative strategies that didn't work out, anything else. + +## Testing Instructions + +* How to test this PR +* Prefer bulleted description +* Start after checking out this branch +* Include any setup required, such as bundling scripts, restarting services, etc. 
+* Include test case, and expected output \ No newline at end of file diff --git a/.github/workflows/auto-merge-dependabot.yml b/.github/workflows/auto-merge-dependabot.yml new file mode 100644 index 0000000..6e5953f --- /dev/null +++ b/.github/workflows/auto-merge-dependabot.yml @@ -0,0 +1,32 @@ +name: Auto-merge dependabot updates + +on: + pull_request: + branches: [ main ] + +permissions: + pull-requests: write + contents: write + +jobs: + + dependabot-merge: + + runs-on: ubuntu-latest + + if: ${{ github.actor == 'dependabot[bot]' }} + + steps: + - name: Dependabot metadata + id: metadata + uses: dependabot/fetch-metadata@v1.6.0 + with: + github-token: "${{ secrets.GITHUB_TOKEN }}" + + - name: Enable auto-merge for Dependabot PRs + # Only if version bump is not a major version change + if: ${{steps.metadata.outputs.update-type != 'version-update:semver-major'}} + run: gh pr merge --auto --merge "$PR_URL" + env: + PR_URL: ${{github.event.pull_request.html_url}} + GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 0000000..89e78d0 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,42 @@ +name: Python Serialization Multipart + +on: + workflow_dispatch: + workflow_call: + push: + branches: [main] + pull_request: + branches: [main] + +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements-dev.txt + - name: Check code format + run: | + yapf -dr kiota_serialization_multipart + - name: Check import order + run: | + isort kiota_serialization_multipart + - name: Lint with Pylint + run: | + pylint kiota_serialization_multipart --disable=W --rcfile=.pylintrc + - name: Static type checking with Mypy + run: | + mypy kiota_serialization_multipart + - name: Run tests with Pytest + run: | + pytest diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 0000000..9dd3b32 --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,70 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: "CodeQL" + +on: + push: + branches: [ "main" ] + pull_request: + # The branches below must be a subset of the branches above + branches: [ "main" ] + schedule: + - cron: '41 19 * * 0' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ 'python' ] + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. 
+ # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + + # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v3 + + # ℹī¸ Command-line programs to run using the OS shell. + # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + + # If the Autobuild fails above, remove it and uncomment the following three lines. + # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. + + # - run: | + # echo "Run, Build Application using script" + # ./location_of_script_within_repo/buildscript.sh + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/conflicting-pr-label.yml b/.github/workflows/conflicting-pr-label.yml new file mode 100644 index 0000000..fb4a14e --- /dev/null +++ b/.github/workflows/conflicting-pr-label.yml @@ -0,0 +1,38 @@ +# This is a basic workflow to help you get started with Actions + +name: PullRequestConflicting + +# Controls when the action will run. Triggers the workflow on push or pull request +# events but only for the master branch +on: + push: + branches: [ main ] + pull_request: + types: [synchronize] + branches: [ main ] + +permissions: + pull-requests: write + contents: read + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + # This workflow contains a single job called "build" + build: + # The type of runner that the job will run on + runs-on: ubuntu-latest + + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + - name: check if prs are dirty + uses: eps1lon/actions-label-merge-conflict@releases/2.x + if: env.LABELING_TOKEN != '' && env.LABELING_TOKEN != null + id: check + with: + dirtyLabel: "conflicting" + repoToken: "${{ secrets.GITHUB_TOKEN }}" + continueOnMissingPermissions: true + commentOnDirty: 'This pull request has conflicting changes, the author must resolve the conflicts before this pull request can be merged.' + commentOnClean: 'Conflicts have been resolved. A maintainer will take a look shortly.' + env: + LABELING_TOKEN: ${{secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 0000000..af0971d --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,50 @@ +name: Publish package to PyPI and create release + +on: + push: + tags: + - "v*" # Push events to matching v*, i.e. 
v1.0, v20.15.10 + +permissions: + contents: write + +jobs: + build: + uses: ./.github/workflows/build.yml + + publish: + name: Publish distribution to PyPI + runs-on: ubuntu-latest + environment: pypi_prod + needs: [build] + steps: + - name: Checkout code + uses: actions/checkout@v4 + - name: Set up Python 3.12 + uses: actions/setup-python@v5 + with: + python-version: 3.12 + - name: Install flit + run: | + pip install flit + - name: Publish the distibution to PyPI + run: flit publish + env: + FLIT_INDEX_URL: https://upload.pypi.org/legacy/ + FLIT_USERNAME: __token__ + FLIT_PASSWORD: ${{ secrets. PYPI_API_TOKEN }} + + release: + name: Create release + runs-on: ubuntu-latest + needs: [publish] + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Extract release notes + id: extract-release-notes + uses: ffurrer2/extract-release-notes@v2 + - name: Create release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: gh release create --notes '${{ steps.extract-release-notes.outputs.release_notes }}' --title ${{ github.ref_name }} ${{ github.ref_name }} diff --git a/.gitignore b/.gitignore index 68bc17f..dfcfd56 100644 --- a/.gitignore +++ b/.gitignore @@ -1,160 +1,350 @@ -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -share/python-wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.nox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -*.py,cover -.hypothesis/ -.pytest_cache/ -cover/ - -# Translations -*.mo -*.pot - -# Django stuff: +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. 
+## +## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore + +# User-specific files +*.rsuser +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Mono auto generated files +mono_crash.* + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +[Aa][Rr][Mm]/ +[Aa][Rr][Mm]64/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ +[Ll]ogs/ + +# Visual Studio 2015/2017 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# Visual Studio 2017 auto generated files +Generated\ Files/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUnit +*.VisualState.xml +TestResult.xml +nunit-*.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# Benchmark Results +BenchmarkDotNet.Artifacts/ + +# .NET Core +project.lock.json +project.fragment.lock.json +artifacts/ + +# StyleCop +StyleCopReport.xml + +# Files built by Visual Studio +*_i.c +*_p.c +*_h.h +*.ilk +*.meta +*.obj +*.iobj +*.pch +*.pdb +*.ipdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*_wpftmp.csproj *.log -local_settings.py -db.sqlite3 -db.sqlite3-journal - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -.pybuilder/ -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# IPython -profile_default/ -ipython_config.py - -# pyenv -# For a library or package, you might want to ignore these files since the code is -# intended to run in multiple environments; otherwise, check them in: -# .python-version - -# pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. -# However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not -# install all needed dependencies. -#Pipfile.lock - -# poetry -# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. -# This is especially recommended for binary packages to ensure reproducibility, and is more -# commonly ignored for libraries. -# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control -#poetry.lock - -# pdm -# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. -#pdm.lock -# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it -# in version control. -# https://pdm.fming.dev/#use-with-ide -.pdm.toml - -# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm -__pypackages__/ - -# Celery stuff -celerybeat-schedule -celerybeat.pid - -# SageMath parsed files -*.sage.py - -# Environments -.env -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ -.dmypy.json -dmypy.json - -# Pyre type checker -.pyre/ - -# pytype static type analyzer -.pytype/ - -# Cython debug symbols -cython_debug/ - -# PyCharm -# JetBrains specific template is maintained in a separate JetBrains.gitignore that can -# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore -# and can be added to the global gitignore or merged into this file. 
For a more nuclear -# option (not recommended) you can uncomment the following to ignore the entire idea folder. -#.idea/ +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# Visual Studio Trace Files +*.e2e + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# AxoCover is a Code Coverage Tool +.axoCover/* +!.axoCover/settings.json + +# Visual Studio code coverage results +*.coverage +*.coveragexml + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# Note: Comment the next line if you want to checkin your web deploy settings, +# but database connection strings (with potential passwords) will be unencrypted +*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# NuGet Symbol Packages +*.snupkg +# The packages folder can be ignored because of Package Restore +**/[Pp]ackages/* +# except build/, which is used as an MSBuild target. +!**/[Pp]ackages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/[Pp]ackages/repositories.config +# NuGet v3's project.json files produces more ignorable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt +*.appx +*.appxbundle +*.appxupload + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!?*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +orleans.codegen.cs + +# Including strong name files can present a security risk +# (https://github.com/github/gitignore/pull/2483#issue-259490424) +#*.snk + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. 
Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm +ServiceFabricBackup/ +*.rptproj.bak + +# SQL Server files +*.mdf +*.ldf +*.ndf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings +*.rptproj.rsuser +*- [Bb]ackup.rdl +*- [Bb]ackup ([0-9]).rdl +*- [Bb]ackup ([0-9][0-9]).rdl + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat +node_modules/ + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) +*.vbw + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# CodeRush personal settings +.cr/personal + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc + +# Cake - Uncomment if you are using it +# tools/** +# !tools/packages.config + +# Tabs Studio +*.tss + +# Telerik's JustMock configuration file +*.jmconfig + +# BizTalk build output +*.btp.cs +*.btm.cs +*.odx.cs +*.xsd.cs + +# OpenCover UI analysis results +OpenCover/ + +# Azure Stream Analytics local run output +ASALocalRun/ + +# MSBuild Binary and Structured Log +*.binlog + +# NVidia Nsight GPU debugger configuration file +*.nvuser + +# MFractors (Xamarin productivity tool) working folder +.mfractor/ + +# Local History for Visual Studio +.localhistory/ + +# BeatPulse healthcheck temp database +healthchecksdb + +# Backup folder for Package Reference Convert tool in Visual Studio 2017 +MigrationBackup/ + +# Ionide (cross platform F# VS Code tools) working folder +.ionide/ diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 0000000..f8cc4b4 --- /dev/null +++ b/.pylintrc @@ -0,0 +1,519 @@ +[MASTER] + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. +extension-pkg-whitelist= + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS + +# Add files or directories matching the regex patterns to the blacklist. The +# regex matches against base names, not paths. +ignore-patterns= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use. +jobs=1 + +# Control the amount of potential inferred values when inferring a single +# object. This can help the performance when dealing with large functions or +# complex, nested conditions. +limit-inference-results=100 + +# List of plugins (as comma separated values of python module names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# Specify a configuration file. +#rcfile= + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode=yes + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. 
+unsafe-load-any-extension=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. +confidence= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable=long-suffix, + old-ne-operator, + old-octal-literal, + non-ascii-bytes-literal, + raw-checker-failed, + bad-inline-option, + locally-disabled, + file-ignored, + suppressed-message, + useless-suppression, + deprecated-pragma, + use-symbolic-message-instead, + eq-without-hash, + too-few-public-methods, + missing-module-docstring, + missing-class-docstring, + missing-function-docstring, + C0103, + E0110, + R0801, + R0904, + R0911, + R0912, + + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable=c-extension-no-member + + +[REPORTS] + +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'error', 'warning', 'refactor', and 'convention' +# which contain the number of messages in each category, as well as 'statement' +# which is the total number of statements analyzed. This score is used by the +# global evaluation report (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +#msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +output-format=text + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. +score=yes + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=sys.exit + + +[BASIC] + +# Naming style matching correct argument names. +argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style. +#argument-rgx= + +# Naming style matching correct attribute names. +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. +#attr-rgx= + +# Bad variable names which should always be refused, separated by a comma. 
+bad-names=foo, + bar, + baz, + toto, + tutu, + tata + +# Naming style matching correct class attribute names. +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. +#class-attribute-rgx= + +# Naming style matching correct class names. +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming- +# style. +#class-rgx= + +# Naming style matching correct constant names. +const-naming-style=UPPER_CASE + +# Regular expression matching correct constant names. Overrides const-naming- +# style. +#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names. +function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style. +#function-rgx= + +# Good variable names which should always be accepted, separated by a comma. +good-names=i, + j, + k, + ex, + Run, + _ + +# Include a hint for the correct naming format with invalid-name. +include-naming-hint=no + +# Naming style matching correct inline iteration names. +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. +#inlinevar-rgx= + +# Naming style matching correct method names. +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style. +#method-rgx= + +# Naming style matching correct module names. +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming- +# style. +#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +# These decorators are taken in consideration only for invalid-name. +property-classes=abc.abstractproperty + +# Naming style matching correct variable names. +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style. +#variable-rgx= + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=100 + +# Maximum number of lines in a module. +max-module-lines=1000 + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[LOGGING] + +# Format style used to check logging format string. `old` means using % +# formatting, `new` is for `{}` formatting,and `fstr` is for f-strings. 
+logging-format-style=old + +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules=logging + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX, + TODO + + +[SIMILARITIES] + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + +# Minimum lines number of a similarity. +min-similarity-lines=4 + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 + +# Spelling dictionary name. Available dictionaries: none. To make it work, +# install the python-enchant package. +spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains the private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +spelling-store-unknown-words=no + + +[STRING] + +# This flag controls whether the implicit-str-concat-in-sequence should +# generate a warning on implicit string concatenation in sequences defined over +# several lines. +check-str-concat-over-line-jumps=no + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. +ignore-none=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis). It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. 
+missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + +# List of decorators that change the signature of a decorated function. +signature-mutators= + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expected to +# not be used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore. +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp, + __post_init__ + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict, + _fields, + _replace, + _source, + _make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=cls + + +[DESIGN] + +# Maximum number of arguments for function / method. +max-args=7 + +# Maximum number of attributes for a class (see R0902). +max-attributes=12 + +# Maximum number of boolean expressions in an if statement (see R0916). +max-bool-expr=5 + +# Maximum number of branch for function / method body. +max-branches=12 + +# Maximum number of locals for function / method body. +max-locals=15 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body. +max-returns=6 + +# Maximum number of statements in function / method body. +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + + +[IMPORTS] + +# List of modules that can be imported at any level, not just the top level +# one. +allow-any-import-level= + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules=optparse,tkinter.tix + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled). +ext-import-graph= + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled). 
+import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. Defaults to +# "BaseException, Exception". +overgeneral-exceptions=builtins.BaseException, + builtins.Exception \ No newline at end of file diff --git a/README.md b/README.md index 5cd7cec..2bcf94f 100644 --- a/README.md +++ b/README.md @@ -1,14 +1,21 @@ -# Project +# Microsoft Kiota Multipart Serialization Library for Python +[![PyPI version](https://badge.fury.io/py/microsoft-kiota-serialization-multipart.svg)](https://badge.fury.io/py/microsoft-kiota-serialization-multipart) +[![CI Actions Status](https://github.com/microsoft/kiota-serialization-multipart-python/actions/workflows/build.yml/badge.svg?branch=main)](https://github.com/microsoft/kiota-serialization-multipart-python/actions) +[![Downloads](https://pepy.tech/badge/microsoft-kiota-serialization-multipart)](https://pepy.tech/project/microsoft-kiota-serialization-multipart) -> This repo has been populated by an initial template to help get you started. Please -> make sure to update the content to build a great experience for community-building. +The Multipart Serialization Library for Python is the python `multipart/form-data` serialization library implementation. -As the maintainer of this project, please make a few updates: +A [Kiota](https://github.com/microsoft/kiota) generated project will need a reference to a multipart serialization package to handle multipart payloads from a supporting API endpoint. -- Improving this README.MD file to provide a great experience -- Updating SUPPORT.MD with content about this project's support experience -- Understanding the security reporting process in SECURITY.MD -- Remove this section from the README +Read more about Kiota [here](https://github.com/microsoft/kiota/blob/main/README.md). + +## Using the Microsoft Kiota Multipart Serialization Library + +In order to use this library, install the package by running: + +```cmd +pip install microsoft-kiota-serialization-multipart +``` ## Contributing @@ -30,4 +37,4 @@ This project may contain trademarks or logos for projects, products, or services trademarks or logos is subject to and must follow [Microsoft's Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks/usage/general). Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship. -Any use of third-party trademarks or logos are subject to those third-party's policies. +Any use of third-party trademarks or logos are subject to those third-party's policies. 
\ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..356ebc3 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,45 @@ +[build-system] +requires = ["flit_core >=3.2,<4"] +build-backend = "flit_core.buildapi" + +[project] +name = "microsoft-kiota-serialization-multipart" +authors = [{name = "Microsoft", email = "graphtooling+python@microsoft.com"}] +dependencies = [ + "microsoft-kiota_abstractions >=1.0.0,<2.0.0", +] +license = {file = "LICENSE"} +readme = "README.md" +keywords = ["kiota", "openAPI", "Microsoft", "Graph"] +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "License :: OSI Approved :: MIT License", +] +dynamic = ["version", "description"] + +[project.urls] +homepage = "https://github.com/microsoft/kiota#readme" +repository = "https://github.com/microsoft/kiota-serialization-multipart-python" +documentation = "https://microsoft.github.io/kiota/" + +[tool.flit.module] +name = "kiota_serialization_multipart" + +[tool.mypy] +warn_unused_configs = true +files = "kiota_serialization_multipart" +ignore_missing_imports = true + +[tool.yapf] +based_on_style = "pep8" +dedent_closing_brackets = true +each_dict_entry_on_separate_line = true +column_limit = 100 + +[tool.isort] +profile = "hug" \ No newline at end of file diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..3dde511 --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,19 @@ +-i https://pypi.org/simple + +isort==5.13.2 + +mypy==1.8.0 + +pylint==3.1.0 + +pytest==8.0.2 + +pytest-cov==4.1.0 + +toml==0.10.2 + +typing-extensions==4.10.0 + +yapf==0.40.2 + +microsoft-kiota-abstractions==1.2.0 \ No newline at end of file From 10116a5c2285ec20e64d6405615db3b9789e1dda Mon Sep 17 00:00:00 2001 From: samwelkanda Date: Wed, 28 Feb 2024 14:57:31 +0300 Subject: [PATCH 2/8] Add tests --- kiota_serialization_multipart/__init__.py | 6 + kiota_serialization_multipart/_version.py | 1 + .../multipart_serialization_writer.py | 279 ++++++++++++++++++ .../multipart_serialization_writer_factory.py | 30 ++ tests/__init__.py | 0 tests/helpers/__init__.py | 2 + tests/helpers/test_entity.py | 83 ++++++ tests/helpers/test_enum.py | 10 + tests/unit/__init__.py | 0 ..._multipart_serialization_writer_factory.py | 32 ++ 10 files changed, 443 insertions(+) create mode 100644 kiota_serialization_multipart/__init__.py create mode 100644 kiota_serialization_multipart/_version.py create mode 100644 kiota_serialization_multipart/multipart_serialization_writer.py create mode 100644 kiota_serialization_multipart/multipart_serialization_writer_factory.py create mode 100644 tests/__init__.py create mode 100644 tests/helpers/__init__.py create mode 100644 tests/helpers/test_entity.py create mode 100644 tests/helpers/test_enum.py create mode 100644 tests/unit/__init__.py create mode 100644 tests/unit/test_multipart_serialization_writer_factory.py diff --git a/kiota_serialization_multipart/__init__.py b/kiota_serialization_multipart/__init__.py new file mode 100644 index 0000000..2af3c0b --- /dev/null +++ b/kiota_serialization_multipart/__init__.py @@ -0,0 +1,6 @@ +""" +Implementation of Kiota Serialization Interfaces for Multipart serialization +""" +from ._version import VERSION + +__version__ = VERSION diff --git 
a/kiota_serialization_multipart/_version.py b/kiota_serialization_multipart/_version.py new file mode 100644 index 0000000..6b47c14 --- /dev/null +++ b/kiota_serialization_multipart/_version.py @@ -0,0 +1 @@ +VERSION: str = '0.1.0' diff --git a/kiota_serialization_multipart/multipart_serialization_writer.py b/kiota_serialization_multipart/multipart_serialization_writer.py new file mode 100644 index 0000000..0db2e41 --- /dev/null +++ b/kiota_serialization_multipart/multipart_serialization_writer.py @@ -0,0 +1,279 @@ +from __future__ import annotations + +import io +from datetime import date, datetime, time, timedelta +from enum import Enum +from typing import Any, Callable, Dict, List, Optional, TypeVar +from uuid import UUID + +from kiota_abstractions.serialization import Parsable, SerializationWriter +from kiota_abstractions.multipart_body import MultipartBody + +T = TypeVar("T") +U = TypeVar("U", bound=Parsable) + + +class MultipartSerializationWriter(SerializationWriter): + + def __init__(self) -> None: + + self._stream: Optional[io.BytesIO] = io.BytesIO() + self.writer = io.TextIOWrapper( + buffer=self._stream, + encoding='utf-8', + line_buffering=True, # Set AutoFlush to True + newline="\r\n" # Set NewLine to "\r\n" as per HTTP spec + + ) + + self._on_start_object_serialization: Optional[Callable[[Parsable, SerializationWriter], + None]] = None + self._on_before_object_serialization: Optional[Callable[[Parsable], None]] = None + self._on_after_object_serialization: Optional[Callable[[Parsable], None]] = None + + def write_str_value(self, key: Optional[str], value: Optional[str]) -> None: + """Writes the specified string value to the stream with an optional given key. + Args: + key (Optional[str]): The key to be used for the written value. May be null. + value (Optional[str]): The string value to be written. + """ + if key: + self.writer.write(key) + if value: + if key: + self.writer.write(": ") + self.writer.write(value) + self.writer.write("\r\n") + + def write_bool_value(self, key: Optional[str], value: Optional[bool]) -> None: + """Writes the specified boolean value to the stream with an optional given key. + Args: + key (Optional[str]): The key to be used for the written value. May be null. + value (Optional[bool]): The boolean value to be written. + """ + raise NotImplementedError() + + def write_int_value(self, key: Optional[str], value: Optional[int]) -> None: + """Writes the specified integer value to the stream with an optional given key. + Args: + key (Optional[str]): The key to be used for the written value. May be null. + value (Optional[int]): The integer value to be written. + """ + raise NotImplementedError() + + def write_float_value(self, key: Optional[str], value: Optional[float]) -> None: + """Writes the specified float value to the stream with an optional given key. + Args: + key (Optional[str]): The key to be used for the written value. May be null. + value (Optional[float]): The float value to be written. + """ + raise NotImplementedError() + + def write_uuid_value(self, key: Optional[str], value: Optional[UUID]) -> None: + """Writes the specified uuid value to the stream with an optional given key. + Args: + key (Optional[str]): The key to be used for the written value. May be null. + value (Optional[UUId]): The uuid value to be written. + """ + raise NotImplementedError() + + def write_datetime_value(self, key: Optional[str], value: Optional[datetime]) -> None: + """Writes the specified datetime offset value to the stream with an optional given key. 
+ Args: + key (Optional[str]): The key to be used for the written value. May be null. + value (Optional[datetime]): The datetime offset value to be written. + """ + raise NotImplementedError() + + def write_timedelta_value(self, key: Optional[str], value: Optional[timedelta]) -> None: + """Writes the specified timedelta value to the stream with an optional given key. + Args: + key (Optional[str]): The key to be used for the written value. May be null. + value (Optional[timedelta]): The timedelta value to be written. + """ + raise NotImplementedError() + + def write_date_value(self, key: Optional[str], value: Optional[date]) -> None: + """Writes the specified date value to the stream with an optional given key. + Args: + key (Optional[str]): The key to be used for the written value. May be null. + value (Optional[date]): The date value to be written. + """ + raise NotImplementedError() + + def write_time_value(self, key: Optional[str], value: Optional[time]) -> None: + """Writes the specified time value to the stream with an optional given key. + Args: + key (Optional[str]): The key to be used for the written value. May be null. + value (Optional[time]): The time value to be written. + """ + raise NotImplementedError() + + def write_bytes_value(self, key: Optional[str], value: bytes) -> None: + """Writes the specified byte array as a base64 string to the stream with an optional + given key. + Args: + key (Optional[str]): The key to be used for the written value. May be null. + value (bytes): The byte array to be written. + """ + if value and len(value) > 0: + self._stream.write(value) + + def write_collection_of_primitive_values( + self, key: Optional[str], values: Optional[List[T]] + ) -> None: + """Writes the specified collection of primitive values to the stream with an optional + given key. + Args: + key (Optional[str]): The key to be used for the written value. May be null. + values (Optional[List[T]]): The collection of primitive values to be written. + """ + raise NotImplementedError() + + def write_collection_of_enum_values( + self, key: Optional[str], values: Optional[List[Enum]] + ) -> None: + """Writes the specified collection of enum values to the stream with an optional given key. + Args: + key (Optional[str]): The key to be used for the written value. May be null. + values Optional[List[Enum]): The enum values to be written. + """ + raise NotImplementedError() + + def write_enum_value(self, key: Optional[str], value: Optional[Enum]) -> None: + """Writes the specified enum value to the stream with an optional given key. + Args: + key (Optional[str]): The key to be used for the written value. May be null. + value (Optional[Enum]): The enum value to be written. + """ + raise NotImplementedError() + + def write_collection_of_object_values( + self, key: Optional[str], values: Optional[List[U]] + ) -> None: + """Writes the specified collection of model objects to the stream with an optional + given key. + Args: + key (Optional[str]): The key to be used for the written value. May be null. + values (Optional[List[U]]): The collection of model objects to be written. + """ + raise NotImplementedError() + + def write_object_value( + self, key: Optional[str], value: Optional[U], *additional_values_to_merge: U + ) -> None: + """Writes the specified model object to the stream with an optional given key. + Args: + key (Optional[str]): The key to be used for the written value. May be null. + value (Parsable): The model object to be written. 
+ additional_values_to_merge (tuple[Parsable]): The additional values to merge to the + main value when serializing an intersection wrapper. + """ + temp_writer = self._create_new_writer() + + if isinstance(value, MultipartBody): + self._serialize_value(temp_writer, value) + if self._on_after_object_serialization: + self._on_after_object_serialization(value) + + self.writer = temp_writer.writer + else: + raise ValueError(f"Expected a MultipartBody instance but got {type(value)}") + + def write_null_value(self, key: Optional[str]) -> None: + """Writes a null value for the specified key. + Args: + key (Optional[str]): The key to be used for the written value. May be null. + """ + raise NotImplementedError() + + def write_additional_data_value(self, value: Dict[str, Any]) -> None: + """Writes the specified additional data to the stream. + Args: + value (Dict[str, Any]): he additional data to be written. + """ + raise NotImplementedError() + + def get_serialized_content(self) -> bytes: + """Gets the value of the serialized content. + Returns: + bytes: The value of the serialized content. + """ + if self.writer: + self.writer.flush() + self._stream.seek(0) + return self._stream + + @property + def on_before_object_serialization(self) -> Optional[Callable[[Parsable], None]]: + """Gets the callback called before the object gets serialized. + Returns: + Optional[Callable[[Parsable], None]]:the callback called before the object + gets serialized. + """ + return self._on_before_object_serialization + + @on_before_object_serialization.setter + def on_before_object_serialization(self, value: Optional[Callable[[Parsable], None]]) -> None: + """Sets the callback called before the objects gets serialized. + Args: + value (Optional[Callable[[Parsable], None]]): the callback called before the objects + gets serialized. + """ + self._on_before_object_serialization = value + + @property + def on_after_object_serialization(self) -> Optional[Callable[[Parsable], None]]: + """Gets the callback called after the object gets serialized. + Returns: + Optional[Optional[Callable[[Parsable], None]]]: the callback called after the object + gets serialized. + """ + return self._on_after_object_serialization + + @on_after_object_serialization.setter + def on_after_object_serialization(self, value: Optional[Callable[[Parsable], None]]) -> None: + """Sets the callback called after the objects gets serialized. + Args: + value (Optional[Callable[[Parsable], None]]): the callback called after the objects + gets serialized. + """ + self._on_after_object_serialization = value + + @property + def on_start_object_serialization( + self + ) -> Optional[Callable[[Parsable, SerializationWriter], None]]: + """Gets the callback called right after the serialization process starts. + Returns: + Optional[Callable[[Parsable, SerializationWriter], None]]: the callback called + right after the serialization process starts. + """ + return self._on_start_object_serialization + + @on_start_object_serialization.setter + def on_start_object_serialization( + self, value: Optional[Callable[[Parsable, SerializationWriter], None]] + ) -> None: + """Sets the callback called right after the serialization process starts. + Args: + value (Optional[Callable[[Parsable, SerializationWriter], None]]): the callback + called right after the serialization process starts. 
+ """ + self._on_start_object_serialization = value + + + def _serialize_value(self, temp_writer: MultipartSerializationWriter, value: U): + if on_before := self.on_before_object_serialization: + on_before(value) + if on_start := self.on_start_object_serialization: + on_start(value, self) + + value.serialize(temp_writer) + + def _create_new_writer(self) -> SerializationWriter: + writer = MultipartSerializationWriter() + writer.on_before_object_serialization = self.on_before_object_serialization + writer.on_after_object_serialization = self.on_after_object_serialization + writer.on_start_object_serialization = self.on_start_object_serialization + return writer diff --git a/kiota_serialization_multipart/multipart_serialization_writer_factory.py b/kiota_serialization_multipart/multipart_serialization_writer_factory.py new file mode 100644 index 0000000..08bafc2 --- /dev/null +++ b/kiota_serialization_multipart/multipart_serialization_writer_factory.py @@ -0,0 +1,30 @@ +from kiota_abstractions.serialization import SerializationWriter, SerializationWriterFactory + +from .multipart_serialization_writer import MultipartSerializationWriter + + +class MultipartSerializationWriterFactory(SerializationWriterFactory): + """A factory that creates MultipartSerializationWriter instances. + """ + + def get_valid_content_type(self) -> str: + """Gets the content type this factory creates serialization writers for. + Returns: + str: the content type this factory creates serialization writers for. + """ + return "multipart/form-data" + + def get_serialization_writer(self, content_type: str) -> SerializationWriter: + """Creates a new SerializationWriter instance for the given content type. + Args: + content_type (str): the content type to create a serialization writer for. + Returns: + SerializationWriter: A new SerializationWriter instance for the given content type. 
+ """ + if not content_type: + raise TypeError("Content Type cannot be null") + valid_content_type = self.get_valid_content_type() + if valid_content_type.casefold() != content_type.casefold(): + raise TypeError(f"Expected {valid_content_type} as content type") + + return MultipartSerializationWriter() diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/helpers/__init__.py b/tests/helpers/__init__.py new file mode 100644 index 0000000..34ef0c8 --- /dev/null +++ b/tests/helpers/__init__.py @@ -0,0 +1,2 @@ +from .test_entity import TestEntity +from .test_enum import TestEnum \ No newline at end of file diff --git a/tests/helpers/test_entity.py b/tests/helpers/test_entity.py new file mode 100644 index 0000000..a714b2e --- /dev/null +++ b/tests/helpers/test_entity.py @@ -0,0 +1,83 @@ +from __future__ import annotations + +from dataclasses import dataclass, field +from datetime import date, datetime, timedelta, time +from typing import Any, Callable, Dict, List, Optional, TypeVar +from uuid import UUID + +from kiota_abstractions.serialization import ( + AdditionalDataHolder, + Parsable, + ParseNode, + SerializationWriter, +) + +from .test_enum import TestEnum + +T = TypeVar('T') + +@dataclass +class TestEntity(Parsable, AdditionalDataHolder): + additional_data: Dict[str, Any] = field(default_factory=dict) + id: Optional[UUID] = None + device_names: Optional[List[str]] = None + numbers: Optional[TestEnum] = None + work_duration: Optional[timedelta] = None + birthday: Optional[date] = None + start_work_time: Optional[time] = None + end_work_time: Optional[time] = None + created_date_time: Optional[datetime] = None + office_location: Optional[str] = None + + @staticmethod + def create_from_discriminator_value(parse_node: Optional[ParseNode] = None) -> TestEntity: + """ + Creates a new instance of the appropriate class based on discriminator value + Args: + parseNode: The parse node to use to read the discriminator value and create the object + Returns: Attachment + """ + if not parse_node: + raise TypeError("parse_node cannot be null") + return TestEntity() + + def get_field_deserializers(self) -> Dict[str, Callable[[ParseNode], None]]: + """Gets the deserialization information for this object. + + Returns: + Dict[str, Callable[[ParseNode], None]]: The deserialization information for this + object where each entry is a property key with its deserialization callback. + """ + return { + "id": lambda x: setattr(self, "id", x.get_uuid_value()), + "deviceNames": lambda x: setattr(self, "device_names", x.get_collection_of_primitive_values(str)), + "numbers": lambda x: setattr(self, "numbers", x.get_enum_value(TestEnum)), + "workDuration": lambda x: setattr(self, "work_duration", x.get_timedelta_value()), + "birthDay": lambda x: setattr(self, "birthday", x.get_date_value()), + "startWorkTime": lambda x: setattr(self, "start_work_time", x.get_time_value()), + "endWorkTime": lambda x: setattr(self, "end_work_time", x.get_time_value()), + "createdDateTime": lambda x: setattr(self, "created_date_time", x.get_datetime_value()), + "officeLocation": lambda x: setattr(self, "office_location", x.get_str_value()), + } + + def serialize(self, writer: SerializationWriter) -> None: + """Writes the objects properties to the current writer. + + Args: + writer (SerializationWriter): The writer to write to. 
+ """ + if not writer: + raise TypeError("Writer cannot be null") + writer.write_uuid_value("id", self.id) + writer.write_collection_of_primitive_values("deviceNames", self.device_names) + writer.write_enum_value("numbers", self.numbers) + writer.write_timedelta_value("workDuration", self.work_duration) + writer.write_date_value("birthDay", self.birthday) + writer.write_time_value("startWorkTime", self.start_work_time) + writer.write_time_value("endWorkTime", self.end_work_time) + writer.write_datetime_value("createdDateTime", self.created_date_time) + writer.write_str_value("officeLocation", self.office_location) + writer.write_additional_data_value(self.additional_data) + + __test__ = False + \ No newline at end of file diff --git a/tests/helpers/test_enum.py b/tests/helpers/test_enum.py new file mode 100644 index 0000000..43fe680 --- /dev/null +++ b/tests/helpers/test_enum.py @@ -0,0 +1,10 @@ +from enum import Enum + +class TestEnum(Enum): + One ="one" + Two = "two" + Three = "three" + Four = "four" + Eight = "eight" + + __test__ = False \ No newline at end of file diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/test_multipart_serialization_writer_factory.py b/tests/unit/test_multipart_serialization_writer_factory.py new file mode 100644 index 0000000..277e6da --- /dev/null +++ b/tests/unit/test_multipart_serialization_writer_factory.py @@ -0,0 +1,32 @@ +import pytest + +from kiota_serialization_multipart.multipart_serialization_writer import MultipartSerializationWriter +from kiota_serialization_multipart.multipart_serialization_writer_factory import ( + MultipartSerializationWriterFactory, +) + +MULTIPART_CONTENT_TYPE = 'multipart/form-data' +def test_get_serialization_writer(): + factory = MultipartSerializationWriterFactory() + writer = factory.get_serialization_writer(MULTIPART_CONTENT_TYPE) + assert isinstance(writer, MultipartSerializationWriter) + + +def test_get_serialization_writer_no_content_type(): + with pytest.raises(TypeError) as e_info: + factory = MultipartSerializationWriterFactory() + factory.get_serialization_writer('') + assert str(e_info.value) == 'Content Type cannot be null' + + +def test_get_serialization_writer_unsupported_content_type(): + with pytest.raises(Exception) as e_info: + factory = MultipartSerializationWriterFactory() + factory.get_serialization_writer('application/xml') + assert str(e_info.value) == f'Expected {MULTIPART_CONTENT_TYPE} as content type' + + +def test_get_valid_content_type(): + factory = MultipartSerializationWriterFactory() + content_type = factory.get_valid_content_type() + assert content_type == MULTIPART_CONTENT_TYPE \ No newline at end of file From d5020fb331219064e753464b0e145069976252e5 Mon Sep 17 00:00:00 2001 From: samwelkanda Date: Wed, 28 Feb 2024 14:58:44 +0300 Subject: [PATCH 3/8] Add CHANGELOG entry --- CHANGELOG.md | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 CHANGELOG.md diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..f41fdf6 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,19 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +## Unreleased + +### Added + +### Changed + +## [0.1.0] - 2024-02-27 + +### Added +- Added support for multipart serialization for Python.[microsoft/kiota#3030](https://github.com/microsoft/kiota/issues/3030) + +### Changed \ No newline at end of file From 74b0f6b37726894652ff80a02837835878df282f Mon Sep 17 00:00:00 2001 From: samwelkanda Date: Wed, 28 Feb 2024 15:17:46 +0300 Subject: [PATCH 4/8] Fix static checks issues --- .../multipart_serialization_writer.py | 14 +- .../test_multipart_serialization_writer.py | 193 ++++++++++++++++++ 2 files changed, 199 insertions(+), 8 deletions(-) create mode 100644 tests/unit/test_multipart_serialization_writer.py diff --git a/kiota_serialization_multipart/multipart_serialization_writer.py b/kiota_serialization_multipart/multipart_serialization_writer.py index 0db2e41..1f4a78b 100644 --- a/kiota_serialization_multipart/multipart_serialization_writer.py +++ b/kiota_serialization_multipart/multipart_serialization_writer.py @@ -6,8 +6,8 @@ from typing import Any, Callable, Dict, List, Optional, TypeVar from uuid import UUID -from kiota_abstractions.serialization import Parsable, SerializationWriter from kiota_abstractions.multipart_body import MultipartBody +from kiota_abstractions.serialization import Parsable, SerializationWriter T = TypeVar("T") U = TypeVar("U", bound=Parsable) @@ -16,14 +16,13 @@ class MultipartSerializationWriter(SerializationWriter): def __init__(self) -> None: - - self._stream: Optional[io.BytesIO] = io.BytesIO() + + self._stream: io.BytesIO = io.BytesIO() self.writer = io.TextIOWrapper( buffer=self._stream, encoding='utf-8', - line_buffering=True, # Set AutoFlush to True - newline="\r\n" # Set NewLine to "\r\n" as per HTTP spec - + line_buffering=True, # Set AutoFlush to True + newline="\r\n" # Set NewLine to "\r\n" as per HTTP spec ) self._on_start_object_serialization: Optional[Callable[[Parsable, SerializationWriter], @@ -194,7 +193,7 @@ def write_additional_data_value(self, value: Dict[str, Any]) -> None: """ raise NotImplementedError() - def get_serialized_content(self) -> bytes: + def get_serialized_content(self) -> io.BytesIO: """Gets the value of the serialized content. Returns: bytes: The value of the serialized content. 
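The hunk above only adjusts the return annotation; the body still returns the writer's underlying buffer. A minimal, hypothetical sketch (not part of this patch; the helper name to_bytes is illustrative only) of how a caller could normalize the serialized content to raw bytes under either annotation used in this series:

    import io

    def to_bytes(content) -> bytes:
        # Accept either an io.BytesIO, as annotated in the hunk above,
        # or raw bytes, as returned by later revisions in this series.
        if isinstance(content, io.BytesIO):
            return content.getvalue()
        return content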
@@ -262,7 +261,6 @@ def on_start_object_serialization( """ self._on_start_object_serialization = value - def _serialize_value(self, temp_writer: MultipartSerializationWriter, value: U): if on_before := self.on_before_object_serialization: on_before(value) diff --git a/tests/unit/test_multipart_serialization_writer.py b/tests/unit/test_multipart_serialization_writer.py new file mode 100644 index 0000000..1652645 --- /dev/null +++ b/tests/unit/test_multipart_serialization_writer.py @@ -0,0 +1,193 @@ +# from uuid import UUID +# from urllib.parse import unquote_plus +# import pytest + +# import pendulum +# from datetime import datetime, timedelta, date, time +# from kiota_serialization_multipart.multipart_serialization_writer import FormSerializationWriter +# from ..helpers import TestEntity, TestEnum + + +# @pytest.fixture +# def user_1(): +# user = TestEntity() +# user.created_date_time = pendulum.parse("2022-01-27T12:59:45.596117") +# user.work_duration = timedelta(seconds=7200) +# user.birthday = date(year=2000,month=9,day=4) +# user.start_work_time = time(hour=8, minute=0, second=0) +# user.id = UUID("8f841f30-e6e3-439a-a812-ebd369559c36") +# user.numbers = [TestEnum.One, TestEnum.Eight] +# user.device_names = ["device1", "device2"] +# user.additional_data = { +# "otherPhones": ["123456789","987654321"], +# "mobilePhone": None, +# "accountEnabled": False, +# "jobTitle": "Auditor", +# "intValue": 1, +# "floatValue": 3.14, +# } +# return user + + +# def test_write_str_value(): +# form_serialization_writer = FormSerializationWriter() +# form_serialization_writer.write_str_value("displayName", "Adele Vance") +# content = form_serialization_writer.get_serialized_content() +# content_string = content.decode('utf-8') +# assert content_string == "displayName=Adele+Vance" + + +# def test_write_bool_value(): +# form_serialization_writer = FormSerializationWriter() +# form_serialization_writer.write_bool_value("isActive", False) +# content = form_serialization_writer.get_serialized_content() +# content_string = content.decode('utf-8') +# assert content_string == "isActive=false" + + +# def test_write_int_value(): +# form_serialization_writer = FormSerializationWriter() +# form_serialization_writer.write_int_value("count", 0) +# content = form_serialization_writer.get_serialized_content() +# content_string = content.decode('utf-8') +# assert content_string == "count=0" + + +# def test_write_float_value(): +# form_serialization_writer = FormSerializationWriter() +# form_serialization_writer.write_float_value("gpa", 0.0) +# content = form_serialization_writer.get_serialized_content() +# content_string = content.decode('utf-8') +# assert content_string == "gpa=0.0" + + +# def test_write_uuid_value(): +# form_serialization_writer = FormSerializationWriter() +# form_serialization_writer.write_uuid_value("id", UUID("8f841f30-e6e3-439a-a812-ebd369559c36")) +# content = form_serialization_writer.get_serialized_content() +# content_string = content.decode('utf-8') +# assert content_string == "id=8f841f30-e6e3-439a-a812-ebd369559c36" + +# def test_write_uuid_value_with_valid_string(): +# form_serialization_writer = FormSerializationWriter() +# form_serialization_writer.write_uuid_value("id", "8f841f30-e6e3-439a-a812-ebd369559c36") +# content = form_serialization_writer.get_serialized_content() +# content_string = content.decode('utf-8') +# assert content_string == "id=8f841f30-e6e3-439a-a812-ebd369559c36" + +# def test_write_datetime_value(): +# form_serialization_writer = FormSerializationWriter() +# 
form_serialization_writer.write_datetime_value("updatedAt", datetime(2022, 1, 27, 12, 59, 45, 596117)) +# content = form_serialization_writer.get_serialized_content() +# content_string = content.decode('utf-8') +# assert content_string == "updatedAt=2022-01-27T12%3A59%3A45.596117" + +# def test_write_datetime_value_valid_string(): +# form_serialization_writer = FormSerializationWriter() +# form_serialization_writer.write_datetime_value( +# "updatedAt", "2022-01-27T12:59:45.596117" +# ) +# content = form_serialization_writer.get_serialized_content() +# content_string = content.decode('utf-8') +# assert content_string == "updatedAt=2022-01-27T12%3A59%3A45.596117" + + +# def test_write_timedelta_value(): +# form_serialization_writer = FormSerializationWriter() +# form_serialization_writer.write_timedelta_value( +# "diff", timedelta(seconds=7200)) +# content = form_serialization_writer.get_serialized_content() +# content_string = content.decode('utf-8') +# assert content_string == "diff=2%3A00%3A00" + + +# def test_write_date_value(): +# form_serialization_writer = FormSerializationWriter() +# form_serialization_writer.write_date_value("birthday", date(2000, 9, 4)) +# content = form_serialization_writer.get_serialized_content() +# content_string = content.decode('utf-8') +# assert content_string == "birthday=2000-09-04" + +# def test_write_time_value(): +# form_serialization_writer = FormSerializationWriter() +# form_serialization_writer.write_time_value( +# "time", +# pendulum.parse('2022-01-27T12:59:45.596117').time() +# ) +# content = form_serialization_writer.get_serialized_content() +# content_string = content.decode('utf-8') +# assert content_string == "time=12%3A59%3A45.596117" + +# def test_write_bytes_value(): +# form_serialization_writer = FormSerializationWriter() +# form_serialization_writer.write_bytes_value( +# "message", b"Hello world" +# ) +# content = form_serialization_writer.get_serialized_content() +# content_string = content.decode('utf-8') +# assert content_string == "message=SGVsbG8gd29ybGQ%3D" + +# def test_write_collection_of_primitive_values(): +# form_serialization_writer = FormSerializationWriter() +# form_serialization_writer.write_collection_of_primitive_values( +# "numbers", [1, 2.0, "3"] +# ) +# content = form_serialization_writer.get_serialized_content() +# content_string = content.decode('utf-8') +# assert content_string == "numbers=1&numbers=2.0&numbers=3" + +# def test_write_collection_of_enum_values(): +# form_serialization_writer = FormSerializationWriter() +# form_serialization_writer.write_collection_of_enum_values( +# "numbers", [TestEnum.Four, TestEnum.Eight] +# ) +# content = form_serialization_writer.get_serialized_content() +# content_string = content.decode('utf-8') +# assert content_string == "numbers=four&numbers=eight" + +# def test_write_enum_value(): +# form_serialization_writer = FormSerializationWriter() +# form_serialization_writer.write_enum_value( +# "number", TestEnum.Four +# ) +# content = form_serialization_writer.get_serialized_content() +# content_string = content.decode('utf-8') +# assert content_string == "number=four" + +# def test_write_enum_value_multiple(): +# form_serialization_writer = FormSerializationWriter() +# form_serialization_writer.write_enum_value( +# "number", [TestEnum.Four, TestEnum.Eight] +# ) +# content = form_serialization_writer.get_serialized_content() +# content_string = content.decode('utf-8') +# assert content_string == "number=four%2Ceight" + +# def test_write_collection_of_object_values(): +# 
form_serialization_writer = FormSerializationWriter() +# with pytest.raises(Exception) as excinfo: +# form_serialization_writer.write_collection_of_object_values( +# "users", [user_1] +# ) +# assert "Form serialization does not support collections." in str(excinfo.value) + + +# def test_write_object_value(user_1): +# form_serialization_writer = FormSerializationWriter() +# form_serialization_writer.write_object_value("user", user_1) +# content = form_serialization_writer.get_serialized_content() +# content_string = content.decode('utf-8') +# assert content_string == ( +# "user=id=8f841f30-e6e3-439a-a812-ebd369559c36&" +# "deviceNames=device1&deviceNames=device2&" +# "numbers=one%2Ceight&" +# "workDuration=2%3A00%3A00&" +# "birthDay=2000-09-04&" +# "startWorkTime=08%3A00%3A00&" +# "createdDateTime=2022-01-27T12%3A59%3A45.596117%2B00%3A00&" +# "otherPhones=123456789&otherPhones=987654321&" +# "jobTitle=Auditor&" +# "intValue=1&" +# "floatValue=3.14" +# ) + From fc4310b465eb32bf469190cccbc240776da2f728 Mon Sep 17 00:00:00 2001 From: samwelkanda Date: Wed, 28 Feb 2024 15:30:02 +0300 Subject: [PATCH 5/8] Remove obsolete tests --- .../test_multipart_serialization_writer.py | 193 ------------------ 1 file changed, 193 deletions(-) diff --git a/tests/unit/test_multipart_serialization_writer.py b/tests/unit/test_multipart_serialization_writer.py index 1652645..e69de29 100644 --- a/tests/unit/test_multipart_serialization_writer.py +++ b/tests/unit/test_multipart_serialization_writer.py @@ -1,193 +0,0 @@ -# from uuid import UUID -# from urllib.parse import unquote_plus -# import pytest - -# import pendulum -# from datetime import datetime, timedelta, date, time -# from kiota_serialization_multipart.multipart_serialization_writer import FormSerializationWriter -# from ..helpers import TestEntity, TestEnum - - -# @pytest.fixture -# def user_1(): -# user = TestEntity() -# user.created_date_time = pendulum.parse("2022-01-27T12:59:45.596117") -# user.work_duration = timedelta(seconds=7200) -# user.birthday = date(year=2000,month=9,day=4) -# user.start_work_time = time(hour=8, minute=0, second=0) -# user.id = UUID("8f841f30-e6e3-439a-a812-ebd369559c36") -# user.numbers = [TestEnum.One, TestEnum.Eight] -# user.device_names = ["device1", "device2"] -# user.additional_data = { -# "otherPhones": ["123456789","987654321"], -# "mobilePhone": None, -# "accountEnabled": False, -# "jobTitle": "Auditor", -# "intValue": 1, -# "floatValue": 3.14, -# } -# return user - - -# def test_write_str_value(): -# form_serialization_writer = FormSerializationWriter() -# form_serialization_writer.write_str_value("displayName", "Adele Vance") -# content = form_serialization_writer.get_serialized_content() -# content_string = content.decode('utf-8') -# assert content_string == "displayName=Adele+Vance" - - -# def test_write_bool_value(): -# form_serialization_writer = FormSerializationWriter() -# form_serialization_writer.write_bool_value("isActive", False) -# content = form_serialization_writer.get_serialized_content() -# content_string = content.decode('utf-8') -# assert content_string == "isActive=false" - - -# def test_write_int_value(): -# form_serialization_writer = FormSerializationWriter() -# form_serialization_writer.write_int_value("count", 0) -# content = form_serialization_writer.get_serialized_content() -# content_string = content.decode('utf-8') -# assert content_string == "count=0" - - -# def test_write_float_value(): -# form_serialization_writer = FormSerializationWriter() -# 
form_serialization_writer.write_float_value("gpa", 0.0) -# content = form_serialization_writer.get_serialized_content() -# content_string = content.decode('utf-8') -# assert content_string == "gpa=0.0" - - -# def test_write_uuid_value(): -# form_serialization_writer = FormSerializationWriter() -# form_serialization_writer.write_uuid_value("id", UUID("8f841f30-e6e3-439a-a812-ebd369559c36")) -# content = form_serialization_writer.get_serialized_content() -# content_string = content.decode('utf-8') -# assert content_string == "id=8f841f30-e6e3-439a-a812-ebd369559c36" - -# def test_write_uuid_value_with_valid_string(): -# form_serialization_writer = FormSerializationWriter() -# form_serialization_writer.write_uuid_value("id", "8f841f30-e6e3-439a-a812-ebd369559c36") -# content = form_serialization_writer.get_serialized_content() -# content_string = content.decode('utf-8') -# assert content_string == "id=8f841f30-e6e3-439a-a812-ebd369559c36" - -# def test_write_datetime_value(): -# form_serialization_writer = FormSerializationWriter() -# form_serialization_writer.write_datetime_value("updatedAt", datetime(2022, 1, 27, 12, 59, 45, 596117)) -# content = form_serialization_writer.get_serialized_content() -# content_string = content.decode('utf-8') -# assert content_string == "updatedAt=2022-01-27T12%3A59%3A45.596117" - -# def test_write_datetime_value_valid_string(): -# form_serialization_writer = FormSerializationWriter() -# form_serialization_writer.write_datetime_value( -# "updatedAt", "2022-01-27T12:59:45.596117" -# ) -# content = form_serialization_writer.get_serialized_content() -# content_string = content.decode('utf-8') -# assert content_string == "updatedAt=2022-01-27T12%3A59%3A45.596117" - - -# def test_write_timedelta_value(): -# form_serialization_writer = FormSerializationWriter() -# form_serialization_writer.write_timedelta_value( -# "diff", timedelta(seconds=7200)) -# content = form_serialization_writer.get_serialized_content() -# content_string = content.decode('utf-8') -# assert content_string == "diff=2%3A00%3A00" - - -# def test_write_date_value(): -# form_serialization_writer = FormSerializationWriter() -# form_serialization_writer.write_date_value("birthday", date(2000, 9, 4)) -# content = form_serialization_writer.get_serialized_content() -# content_string = content.decode('utf-8') -# assert content_string == "birthday=2000-09-04" - -# def test_write_time_value(): -# form_serialization_writer = FormSerializationWriter() -# form_serialization_writer.write_time_value( -# "time", -# pendulum.parse('2022-01-27T12:59:45.596117').time() -# ) -# content = form_serialization_writer.get_serialized_content() -# content_string = content.decode('utf-8') -# assert content_string == "time=12%3A59%3A45.596117" - -# def test_write_bytes_value(): -# form_serialization_writer = FormSerializationWriter() -# form_serialization_writer.write_bytes_value( -# "message", b"Hello world" -# ) -# content = form_serialization_writer.get_serialized_content() -# content_string = content.decode('utf-8') -# assert content_string == "message=SGVsbG8gd29ybGQ%3D" - -# def test_write_collection_of_primitive_values(): -# form_serialization_writer = FormSerializationWriter() -# form_serialization_writer.write_collection_of_primitive_values( -# "numbers", [1, 2.0, "3"] -# ) -# content = form_serialization_writer.get_serialized_content() -# content_string = content.decode('utf-8') -# assert content_string == "numbers=1&numbers=2.0&numbers=3" - -# def test_write_collection_of_enum_values(): -# 
form_serialization_writer = FormSerializationWriter() -# form_serialization_writer.write_collection_of_enum_values( -# "numbers", [TestEnum.Four, TestEnum.Eight] -# ) -# content = form_serialization_writer.get_serialized_content() -# content_string = content.decode('utf-8') -# assert content_string == "numbers=four&numbers=eight" - -# def test_write_enum_value(): -# form_serialization_writer = FormSerializationWriter() -# form_serialization_writer.write_enum_value( -# "number", TestEnum.Four -# ) -# content = form_serialization_writer.get_serialized_content() -# content_string = content.decode('utf-8') -# assert content_string == "number=four" - -# def test_write_enum_value_multiple(): -# form_serialization_writer = FormSerializationWriter() -# form_serialization_writer.write_enum_value( -# "number", [TestEnum.Four, TestEnum.Eight] -# ) -# content = form_serialization_writer.get_serialized_content() -# content_string = content.decode('utf-8') -# assert content_string == "number=four%2Ceight" - -# def test_write_collection_of_object_values(): -# form_serialization_writer = FormSerializationWriter() -# with pytest.raises(Exception) as excinfo: -# form_serialization_writer.write_collection_of_object_values( -# "users", [user_1] -# ) -# assert "Form serialization does not support collections." in str(excinfo.value) - - -# def test_write_object_value(user_1): -# form_serialization_writer = FormSerializationWriter() -# form_serialization_writer.write_object_value("user", user_1) -# content = form_serialization_writer.get_serialized_content() -# content_string = content.decode('utf-8') -# assert content_string == ( -# "user=id=8f841f30-e6e3-439a-a812-ebd369559c36&" -# "deviceNames=device1&deviceNames=device2&" -# "numbers=one%2Ceight&" -# "workDuration=2%3A00%3A00&" -# "birthDay=2000-09-04&" -# "startWorkTime=08%3A00%3A00&" -# "createdDateTime=2022-01-27T12%3A59%3A45.596117%2B00%3A00&" -# "otherPhones=123456789&otherPhones=987654321&" -# "jobTitle=Auditor&" -# "intValue=1&" -# "floatValue=3.14" -# ) - From 0e705cfc269005498d96c4f9039e093713e3e36a Mon Sep 17 00:00:00 2001 From: samwelkanda Date: Wed, 28 Feb 2024 19:35:16 +0300 Subject: [PATCH 6/8] Add tests --- .../multipart_serialization_writer.py | 12 +-- requirements-dev.txt | 4 +- tests/conftest.py | 41 ++++++++++ .../test_multipart_serialization_writer.py | 79 +++++++++++++++++++ 4 files changed, 129 insertions(+), 7 deletions(-) create mode 100644 tests/conftest.py diff --git a/kiota_serialization_multipart/multipart_serialization_writer.py b/kiota_serialization_multipart/multipart_serialization_writer.py index 1f4a78b..0fe8fc7 100644 --- a/kiota_serialization_multipart/multipart_serialization_writer.py +++ b/kiota_serialization_multipart/multipart_serialization_writer.py @@ -42,7 +42,7 @@ def write_str_value(self, key: Optional[str], value: Optional[str]) -> None: if key: self.writer.write(": ") self.writer.write(value) - self.writer.write("\r\n") + self.writer.write("\n") def write_bool_value(self, key: Optional[str], value: Optional[bool]) -> None: """Writes the specified boolean value to the stream with an optional given key. 
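The one-line change above relies on how io.TextIOWrapper performs newline translation: because the wrapper created in __init__ uses newline="\r\n", every "\n" written through it is converted to CRLF, so writing a literal "\r\n" would have emitted "\r\r\n" into the underlying buffer. A minimal, self-contained sketch of that standard-library behaviour (not part of this patch; the sample strings are illustrative only):

    import io

    buffer = io.BytesIO()
    writer = io.TextIOWrapper(buffer, encoding="utf-8", newline="\r\n")

    writer.write("key: value\n")    # the "\n" is translated to "\r\n" on write
    writer.write("next: line\r\n")  # the "\r" passes through and the "\n" is still translated
    writer.flush()

    print(buffer.getvalue())  # b'key: value\r\nnext: line\r\r\n'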
@@ -171,11 +171,9 @@ def write_object_value( temp_writer = self._create_new_writer() if isinstance(value, MultipartBody): - self._serialize_value(temp_writer, value) + self._serialize_value(self, value) if self._on_after_object_serialization: self._on_after_object_serialization(value) - - self.writer = temp_writer.writer else: raise ValueError(f"Expected a MultipartBody instance but got {type(value)}") @@ -193,7 +191,7 @@ def write_additional_data_value(self, value: Dict[str, Any]) -> None: """ raise NotImplementedError() - def get_serialized_content(self) -> io.BytesIO: + def get_serialized_content(self) -> bytes: """Gets the value of the serialized content. Returns: bytes: The value of the serialized content. @@ -201,7 +199,9 @@ def get_serialized_content(self) -> io.BytesIO: if self.writer: self.writer.flush() self._stream.seek(0) - return self._stream + x = self._stream.read() + print(x) + return x @property def on_before_object_serialization(self) -> Optional[Callable[[Parsable], None]]: diff --git a/requirements-dev.txt b/requirements-dev.txt index 3dde511..9712a4c 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -16,4 +16,6 @@ typing-extensions==4.10.0 yapf==0.40.2 -microsoft-kiota-abstractions==1.2.0 \ No newline at end of file +microsoft-kiota-abstractions==1.2.0 + +microsoft-kiota-serialization-json==1.0.0 \ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..7ade594 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,41 @@ +from uuid import UUID +from unittest.mock import Mock +import pytest + +from datetime import datetime, timedelta, date, time +from kiota_abstractions.multipart_body import MultipartBody +from kiota_abstractions.request_adapter import RequestAdapter +from kiota_serialization_json.json_serialization_writer_factory import JsonSerializationWriterFactory +from kiota_serialization_multipart.multipart_serialization_writer import MultipartSerializationWriter +from .helpers import TestEntity, TestEnum + + +@pytest.fixture +def user_1(): + user = TestEntity() + user.created_date_time = datetime(2022, 1, 27, 12, 59, 45) + user.work_duration = timedelta(seconds=7200) + user.birthday = date(year=2017,month=9,day=4) + user.start_work_time = time(hour=0, minute=0, second=0) + user.id = UUID("eac79bd3-fd08-4abf-9df2-2565cf3a3845") + user.additional_data = { + "businessPhones": ["+1 412 555 0109"], + "mobilePhone": None, + "accountEnabled": False, + "jobTitle": "Auditor", + "manager": TestEntity(id=UUID("eac79bd3-fd08-4abf-9df2-2565cf3a3845")), + } + return user + +@pytest.fixture +def mock_request_adapter(): + request_adapter = Mock(spec=RequestAdapter) + return request_adapter + +@pytest.fixture +def mock_serialization_writer_factory(): + return JsonSerializationWriterFactory() + +@pytest.fixture +def mock_multipart_body(): + return MultipartBody() \ No newline at end of file diff --git a/tests/unit/test_multipart_serialization_writer.py b/tests/unit/test_multipart_serialization_writer.py index e69de29..88d9fab 100644 --- a/tests/unit/test_multipart_serialization_writer.py +++ b/tests/unit/test_multipart_serialization_writer.py @@ -0,0 +1,79 @@ +from uuid import UUID +from unittest.mock import Mock +import pytest + +from datetime import datetime, timedelta, date, time +from kiota_serialization_multipart.multipart_serialization_writer import MultipartSerializationWriter +from ..helpers import TestEntity, TestEnum + + +def test_not_implemented(): + writer = MultipartSerializationWriter() + with 
pytest.raises(NotImplementedError): + writer.write_bool_value("isActive", False) + with pytest.raises(NotImplementedError): + writer.write_date_value("birthday", date(2000, 9, 4)) + with pytest.raises(NotImplementedError): + writer.write_datetime_value("updatedAt", datetime(2022, 1, 27, 12, 59, 45, 596117)) + with pytest.raises(NotImplementedError): + writer.write_time_value("time", time(hour=12, minute=59, second=45, microsecond=596117)) + with pytest.raises(NotImplementedError): + writer.write_timedelta_value("diff", timedelta(seconds=7200)) + with pytest.raises(NotImplementedError): + writer.write_enum_value("number", TestEnum.Four) + with pytest.raises(NotImplementedError): + writer.write_float_value("gpa", 0.0) + with pytest.raises(NotImplementedError): + writer.write_int_value("count", 0) + with pytest.raises(NotImplementedError): + writer.write_uuid_value("id", UUID("8f841f30-e6e3-439a-a812-ebd369559c36")) + with pytest.raises(NotImplementedError): + writer.write_collection_of_enum_values("numbers", [TestEnum.Four, TestEnum.Eight]) + with pytest.raises(NotImplementedError): + writer.write_collection_of_object_values("users", [TestEntity()]) + with pytest.raises(NotImplementedError): + writer.write_collection_of_primitive_values("numbers", [1, 2.0, "3"]) + +def test_write_string_value(): + serialization_writer = MultipartSerializationWriter() + serialization_writer.write_str_value("message", "Hello world") + content = serialization_writer.get_serialized_content() + content_string = content.decode('utf-8') + assert content_string == "message: Hello world\r\n" + +def test_write_bytes_value_bytes(): + serialization_writer = MultipartSerializationWriter() + serialization_writer.write_bytes_value("", b"Hello world") + content = serialization_writer.get_serialized_content() + content_string = content.decode('utf-8') + assert content_string == "Hello world" + +def test_write_object_value_raises_exception_on_parseable_object(): + serialization_writer = MultipartSerializationWriter() + with pytest.raises(ValueError) as excinfo: + serialization_writer.write_object_value("user", TestEntity()) + assert "Expected a MultipartBody instance but got" in str(excinfo.value) + +def test_write_object_value(user_1, mock_request_adapter, mock_serialization_writer_factory, mock_multipart_body): + mock_request_adapter.get_serialization_writer_factory = Mock(return_value=mock_serialization_writer_factory) + mock_multipart_body.request_adapter = mock_request_adapter + mock_multipart_body.add_or_replace_part("test user", "application/json", user_1) + mock_multipart_body.add_or_replace_part("img", "application/octet-stream", b"Hello world") + + serialization_writer = MultipartSerializationWriter() + serialization_writer.write_object_value("", mock_multipart_body, None) + content = serialization_writer.get_serialized_content() + content_string = content.decode('utf-8') + assert content_string == f'--{mock_multipart_body.boundary}'+'\r\nContent-Type: application/json\r\nContent-Disposition: form-data; name="test user"\r\n\r\n{"id": "eac79bd3-fd08-4abf-9df2-2565cf3a3845", "workDuration": "2:00:00", "birthDay": "2017-09-04", "startWorkTime": "00:00:00", "createdDateTime": "2022-01-27T12:59:45", "businessPhones": ["+1 412 555 0109"], "mobilePhone": null, "accountEnabled": false, "jobTitle": "Auditor", "manager": {"id": "eac79bd3-fd08-4abf-9df2-2565cf3a3845"}}\r\n'+f'--{mock_multipart_body.boundary}'+'\r\nContent-Type: application/octet-stream\r\nContent-Disposition: form-data; name="img"\r\n\r\nHello 
world\r\n'+f'--{mock_multipart_body.boundary}--\r\n' + +def test_write_object_value_inverted(user_1, mock_request_adapter, mock_serialization_writer_factory, mock_multipart_body): + mock_request_adapter.get_serialization_writer_factory = Mock(return_value=mock_serialization_writer_factory) + mock_multipart_body.request_adapter = mock_request_adapter + mock_multipart_body.add_or_replace_part("img", "application/octet-stream", b"Hello world") + mock_multipart_body.add_or_replace_part("test user", "application/json", user_1) + + serialization_writer = MultipartSerializationWriter() + serialization_writer.write_object_value("", mock_multipart_body, None) + content = serialization_writer.get_serialized_content() + content_string = content.decode('utf-8') + assert content_string == f'--{mock_multipart_body.boundary}'+'\r\nContent-Type: application/octet-stream\r\nContent-Disposition: form-data; name="img"\r\n\r\nHello world\r\n'+f'--{mock_multipart_body.boundary}'+'\r\nContent-Type: application/json\r\nContent-Disposition: form-data; name="test user"\r\n\r\n{"id": "eac79bd3-fd08-4abf-9df2-2565cf3a3845", "workDuration": "2:00:00", "birthDay": "2017-09-04", "startWorkTime": "00:00:00", "createdDateTime": "2022-01-27T12:59:45", "businessPhones": ["+1 412 555 0109"], "mobilePhone": null, "accountEnabled": false, "jobTitle": "Auditor", "manager": {"id": "eac79bd3-fd08-4abf-9df2-2565cf3a3845"}}\r\n'+f'--{mock_multipart_body.boundary}--\r\n' \ No newline at end of file From dc1479efeb36d6c49e2f6341b4d059babf9d528f Mon Sep 17 00:00:00 2001 From: samwelkanda Date: Wed, 28 Feb 2024 19:37:11 +0300 Subject: [PATCH 7/8] Remove debug statements --- .../multipart_serialization_writer.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/kiota_serialization_multipart/multipart_serialization_writer.py b/kiota_serialization_multipart/multipart_serialization_writer.py index 0fe8fc7..4b0916a 100644 --- a/kiota_serialization_multipart/multipart_serialization_writer.py +++ b/kiota_serialization_multipart/multipart_serialization_writer.py @@ -199,9 +199,7 @@ def get_serialized_content(self) -> bytes: if self.writer: self.writer.flush() self._stream.seek(0) - x = self._stream.read() - print(x) - return x + return self._stream.read() @property def on_before_object_serialization(self) -> Optional[Callable[[Parsable], None]]: From f1c08046c02e1c43b62ce4cbc828b591ab87ec91 Mon Sep 17 00:00:00 2001 From: samwelkanda Date: Fri, 1 Mar 2024 03:36:51 +0300 Subject: [PATCH 8/8] Use defensive --- .../multipart_serialization_writer.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/kiota_serialization_multipart/multipart_serialization_writer.py b/kiota_serialization_multipart/multipart_serialization_writer.py index 4b0916a..e4c7986 100644 --- a/kiota_serialization_multipart/multipart_serialization_writer.py +++ b/kiota_serialization_multipart/multipart_serialization_writer.py @@ -170,12 +170,11 @@ def write_object_value( """ temp_writer = self._create_new_writer() - if isinstance(value, MultipartBody): - self._serialize_value(self, value) - if self._on_after_object_serialization: - self._on_after_object_serialization(value) - else: + if not isinstance(value, MultipartBody): raise ValueError(f"Expected a MultipartBody instance but got {type(value)}") + self._serialize_value(self, value) + if self._on_after_object_serialization: + self._on_after_object_serialization(value) def write_null_value(self, key: Optional[str]) -> None: """Writes a null value for the specified key.
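Taken together, conftest.py and test_write_object_value above imply the following end-to-end usage. This is a condensed, hypothetical sketch derived from those tests rather than an official sample: the mocked request adapter exists only so that Parsable parts can obtain a writer for their own content type (JSON here, as in the fixtures), while raw bytes parts are written as-is.

    from unittest.mock import Mock

    from kiota_abstractions.multipart_body import MultipartBody
    from kiota_abstractions.request_adapter import RequestAdapter
    from kiota_serialization_json.json_serialization_writer_factory import (
        JsonSerializationWriterFactory,
    )
    from kiota_serialization_multipart.multipart_serialization_writer import (
        MultipartSerializationWriter,
    )

    # Adapter mock mirroring the conftest fixtures: its serialization writer factory
    # is consulted when a part's value is a Parsable rather than raw bytes.
    adapter = Mock(spec=RequestAdapter)
    adapter.get_serialization_writer_factory = Mock(return_value=JsonSerializationWriterFactory())

    body = MultipartBody()
    body.request_adapter = adapter
    body.add_or_replace_part("img", "application/octet-stream", b"Hello world")

    writer = MultipartSerializationWriter()
    writer.write_object_value("", body, None)

    # Bytes suitable for a multipart/form-data request body, delimited by body.boundary.
    print(writer.get_serialized_content().decode("utf-8"))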