Fix pep8 errors and warnings 57/98957/1
author Lianhao Lu <lianhao.lu@intel.com>
Fri, 29 Nov 2019 06:49:50 +0000 (14:49 +0800)
committer Lianhao Lu <lianhao.lu@intel.com>
Fri, 29 Nov 2019 06:50:21 +0000 (14:50 +0800)
Change-Id: Ic32eefcfd4be4d57c9cf30eff0b3fa1aa730d2c0
Issue-ID: VNFSDK-532
Signed-off-by: Lianhao Lu <lianhao.lu@intel.com>
21 files changed:
tests/cli/test_cli.py
tests/packager/test_csar.py
tests/packager/test_manifest.py
tests/packager/test_toscameta.py
tests/packager/test_utils.py
tests/validator/test_toscaparser_validator.py
tests/validator/test_validate_utils.py
tests/vnfreq/test_pkg_reqs.py
tests/vnfreq/test_vnfreq.py
tox.ini
vnfsdk_pkgtools/cli/__main__.py
vnfsdk_pkgtools/packager/csar.py
vnfsdk_pkgtools/packager/manifest.py
vnfsdk_pkgtools/packager/toscameta.py
vnfsdk_pkgtools/packager/utils.py
vnfsdk_pkgtools/validator/__init__.py
vnfsdk_pkgtools/validator/toscaparser_validator.py
vnfsdk_pkgtools/validator/utils.py
vnfsdk_pkgtools/version.py
vnfsdk_pkgtools/vnfreq/__init__.py
vnfsdk_pkgtools/vnfreq/pkg_reqs.py

diff --git a/tests/cli/test_cli.py b/tests/cli/test_cli.py
index b3fb8f0..5c830d3 100644 (file)
--- a/tests/cli/test_cli.py
+++ b/tests/cli/test_cli.py
@@ -1,26 +1,27 @@
-#\r
-# Copyright (c) 2017 GigaSpaces Technologies Ltd. All rights reserved.\r
-#\r
-# Licensed under the Apache License, Version 2.0 (the "License"); you may\r
-# not use this file except in compliance with the License. You may obtain\r
-# a copy of the License at\r
-#\r
-#      http://www.apache.org/licenses/LICENSE-2.0\r
-#\r
-# Unless required by applicable law or agreed to in writing, software\r
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT\r
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\r
-# License for the specific language governing permissions and limitations\r
-# under the License.\r
-#\r
-\r
-import pytest\r
-from vnfsdk_pkgtools.cli import  __main__\r
-\r
-def test_main(capsys):\r
-    with pytest.raises(SystemExit):\r
-        args = __main__.parse_args(['csar-create', '-h'])\r
-        args.func(args)\r
-    out, err = capsys.readouterr()\r
-    assert out.startswith('usage:')\r
-\r
+#
+# Copyright (c) 2017 GigaSpaces Technologies Ltd. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+
+import pytest
+
+from vnfsdk_pkgtools.cli import __main__
+
+
+def test_main(capsys):
+    with pytest.raises(SystemExit):
+        args = __main__.parse_args(['csar-create', '-h'])
+        args.func(args)
+    out, err = capsys.readouterr()
+    assert out.startswith('usage:')
diff --git a/tests/packager/test_csar.py b/tests/packager/test_csar.py
index b63a117..767a86c 100644 (file)
--- a/tests/packager/test_csar.py
+++ b/tests/packager/test_csar.py
@@ -31,87 +31,86 @@ CSAR_ENTRY_FILE = 'test_entry.yaml'
 CSAR_OUTPUT_FILE = 'output.csar'
 
 Args = collections.namedtuple('Args',
-           ['source', 'entry', 'manifest', 'history', 'tests',
-            'licenses', 'digest', 'certificate', 'privkey', 'sol241'])
-
-
-ARGS_MANIFEST = {
-            'source': CSAR_RESOURCE_DIR,
-            'entry': CSAR_ENTRY_FILE,
-            'manifest': 'test_entry.mf',
-            'history': 'ChangeLog.txt',
-            'tests': 'Tests',
-            'licenses': 'Licenses',
-            'digest': None,
-            'certificate': None,
-            'privkey': None,
-            'sol241': False,
-        }
-
-ARGS_MANIFEST_DIGEST = {
-            'source': CSAR_RESOURCE_DIR,
-            'entry': CSAR_ENTRY_FILE,
-            'manifest': 'test_entry.mf',
-            'history': 'ChangeLog.txt',
-            'tests': 'Tests',
-            'licenses': 'Licenses',
-            'digest': 'sha-256',
-            'certificate': None,
-            'privkey': None,
-            'sol241': False,
-        }
-
-ARGS_MANIFEST_DIGEST_CERT = {
-            'source': CSAR_RESOURCE_DIR,
-            'entry': CSAR_ENTRY_FILE,
-            'manifest': 'test_entry.mf',
-            'history': 'ChangeLog.txt',
-            'tests': 'Tests',
-            'licenses': 'Licenses',
-            'digest': 'sha-256',
-            'certificate': 'test.crt',
-            'privkey': os.path.join(ROOT_DIR, 'tests', 'resources', 'signature', 'test.key'),
-            'sol241': False,
-        }
-
-ARGS_NO_MANIFEST = {
-            'source': CSAR_RESOURCE_DIR,
-            'entry': CSAR_ENTRY_FILE,
-            'manifest': None,
-            'history': None,
-            'tests': None,
-            'licenses': None,
-            'digest': None,
-            'certificate': None,
-            'privkey': None,
-            'sol241': True,
-        }
-
-INVALID_ARGS_NO_MANIFEST = {
-            'source': CSAR_RESOURCE_DIR,
-            'entry': CSAR_ENTRY_FILE,
-            'manifest': None,
-            'history': None,
-            'tests': None,
-            'licenses': None,
-            'digest': 'sha-256',
-            'certificate': None,
-            'privkey': None,
-            'sol241': True,
-        }
-
-INVALID_ARGS_NO_PRIVKEY = {
-            'source': CSAR_RESOURCE_DIR,
-            'entry': CSAR_ENTRY_FILE,
-            'manifest': 'test_entry.mf',
-            'history': None,
-            'tests': None,
-            'licenses': None,
-            'digest': None,
-            'certificate': 'test.crt',
-            'privkey': None,
-            'sol241': True,
-        }
+                              ['source', 'entry', 'manifest',
+                               'history', 'tests', 'licenses',
+                               'digest', 'certificate', 'privkey',
+                               'sol241'])
+
+
+ARGS_MANIFEST = {'source': CSAR_RESOURCE_DIR,
+                 'entry': CSAR_ENTRY_FILE,
+                 'manifest': 'test_entry.mf',
+                 'history': 'ChangeLog.txt',
+                 'tests': 'Tests',
+                 'licenses': 'Licenses',
+                 'digest': None,
+                 'certificate': None,
+                 'privkey': None,
+                 'sol241': False,
+                 }
+
+ARGS_MANIFEST_DIGEST = {'source': CSAR_RESOURCE_DIR,
+                        'entry': CSAR_ENTRY_FILE,
+                        'manifest': 'test_entry.mf',
+                        'history': 'ChangeLog.txt',
+                        'tests': 'Tests',
+                        'licenses': 'Licenses',
+                        'digest': 'sha-256',
+                        'certificate': None,
+                        'privkey': None,
+                        'sol241': False,
+                        }
+
+ARGS_MANIFEST_DIGEST_CERT = {'source': CSAR_RESOURCE_DIR,
+                             'entry': CSAR_ENTRY_FILE,
+                             'manifest': 'test_entry.mf',
+                             'history': 'ChangeLog.txt',
+                             'tests': 'Tests',
+                             'licenses': 'Licenses',
+                             'digest': 'sha-256',
+                             'certificate': 'test.crt',
+                             'privkey': os.path.join(ROOT_DIR, 'tests',
+                                                     'resources',
+                                                     'signature',
+                                                     'test.key'),
+                             'sol241': False,
+                             }
+
+ARGS_NO_MANIFEST = {'source': CSAR_RESOURCE_DIR,
+                    'entry': CSAR_ENTRY_FILE,
+                    'manifest': None,
+                    'history': None,
+                    'tests': None,
+                    'licenses': None,
+                    'digest': None,
+                    'certificate': None,
+                    'privkey': None,
+                    'sol241': True,
+                    }
+
+INVALID_ARGS_NO_MANIFEST = {'source': CSAR_RESOURCE_DIR,
+                            'entry': CSAR_ENTRY_FILE,
+                            'manifest': None,
+                            'history': None,
+                            'tests': None,
+                            'licenses': None,
+                            'digest': 'sha-256',
+                            'certificate': None,
+                            'privkey': None,
+                            'sol241': True,
+                            }
+
+INVALID_ARGS_NO_PRIVKEY = {'source': CSAR_RESOURCE_DIR,
+                           'entry': CSAR_ENTRY_FILE,
+                           'manifest': 'test_entry.mf',
+                           'history': None,
+                           'tests': None,
+                           'licenses': None,
+                           'digest': None,
+                           'certificate': 'test.crt',
+                           'privkey': None,
+                           'sol241': True,
+                           }
 
 
 def csar_write_test(args):
diff --git a/tests/packager/test_manifest.py b/tests/packager/test_manifest.py
index 7fa3474..816d256 100644 (file)
--- a/tests/packager/test_manifest.py
+++ b/tests/packager/test_manifest.py
@@ -32,13 +32,13 @@ METADATA_241 = '\n'.join(["metadata:",
                           "vnf_provider_id: test",
                           "vnf_package_version: 1.0",
                           "vnf_release_data_time: 2017-09-15T15:00:10+08:00",
-                        ])
+                          ])
 
 METADATA_MISSING_KEY = '\n'.join(["metadata:",
-                                   "vnf_product_name: test",
-                                   "vnf_provider_id: test",
-                                   "vnf_package_version:1.0",
-                                ])
+                                  "vnf_product_name: test",
+                                  "vnf_provider_id: test",
+                                  "vnf_package_version:1.0",
+                                  ])
 
 METADATA_MISSING = "vnf_product_name: test"
 
@@ -46,7 +46,7 @@ FILE_CONTENT = "needToBeHashed"
 FILE_DIGEST = '\n'.join(['Source: digest',
                          'Algorithm: SHA-256',
                          'Hash: 20a480339aa4371099f9503511dcc5a8051ce3884846678ced5611ec64bbfc9c',
-                       ])
+                         ])
 
 CMS = '\n'.join(['-----BEGIN CMS-----',
                  'MIICmAYJKoZIhvcNAQcCoIICiTCCAoUCAQExDTALBglghkgBZQMEAgEwCwYJKoZI',
@@ -64,11 +64,11 @@ CMS = '\n'.join(['-----BEGIN CMS-----',
                  'pqE+DUlSFyilc9CQWnSLubkHmM4dZnU7qnNoTBqplDYpOYH3WSNN9Cv322JusAzt',
                  'SzFEv182phI2C5pmjUnf7VG1WMKCH2WNtkYwMUCDcGvbHrh8n+kR8hL/BAs=',
                  '-----END CMS-----',
-                ])
+                 ])
 
 FILE_SOURCE_ONLY = '\n'.join(['Source: source1',
                               'Source: source2',
-                            ])
+                              ])
 
 NON_MANO_ARTIFACTS = '\n'.join(['non_mano_artifact_sets:',
                                 'foo_bar:',
@@ -76,7 +76,7 @@ NON_MANO_ARTIFACTS = '\n'.join(['non_mano_artifact_sets:',
                                 'prv.happy-nfv.cool:',
                                 'Source: happy/cool/123.html',
                                 'Source: happy/cool/cool.json',
-                              ])
+                                ])
 
 
 def test_metadata(tmpdir):
@@ -122,6 +122,7 @@ def test_missing_metadata(tmpdir):
         manifest.Manifest(p.dirname, 'test.mf')
     excinfo.match(r"Unrecognized file digest line vnf_product_name: test:")
 
+
 def test_digest(tmpdir):
     root = tmpdir.mkdir('csar')
     mf = root.join('test.mf')
@@ -133,6 +134,7 @@ def test_digest(tmpdir):
     assert m.digests['digest'][0] == "SHA-256"
     assert m.digests['digest'][1] == "20a480339aa4371099f9503511dcc5a8051ce3884846678ced5611ec64bbfc9c"
 
+
 def test_add_file(tmpdir):
     root = tmpdir.mkdir('csar')
     mf = root.join('test.mf')
@@ -145,6 +147,7 @@ def test_add_file(tmpdir):
     assert m.digests['digest'][0] == "SHA-256"
     assert m.digests['digest'][1] == "20a480339aa4371099f9503511dcc5a8051ce3884846678ced5611ec64bbfc9c"
 
+
 def test_update_to_file(tmpdir):
     root = tmpdir.mkdir('csar')
     mf = root.join('test.mf')
@@ -164,12 +167,14 @@ def test_update_to_file(tmpdir):
     assert len(list(m2.digests.keys())) == 2
     assert m2.signature == CMS
 
+
 def test_signature(tmpdir):
     p = tmpdir.mkdir('csar').join('test.mf')
     p.write(METADATA + "\n\n" + CMS)
     m = manifest.Manifest(p.dirname, 'test.mf')
     assert m.signature == CMS
 
+
 def test_illegal_signature(tmpdir):
     p = tmpdir.mkdir('csar').join('test.mf')
     p.write(METADATA + "\n\n" + CMS[:-17])
@@ -177,6 +182,7 @@ def test_illegal_signature(tmpdir):
         manifest.Manifest(p.dirname, 'test.mf')
     excinfo.match(r"Can NOT find end of sigature block")
 
+
 def test_signature_strip(tmpdir):
     p = tmpdir.mkdir('csar').join('test.mf')
     p.write(METADATA + "\n\n" + CMS)
@@ -188,6 +194,7 @@ def test_signature_strip(tmpdir):
     assert m2.signature is None
     os.unlink(newfile)
 
+
 def test_source_only(tmpdir):
     p = tmpdir.mkdir('csar').join('test.mf')
     p.write(METADATA + "\n\n" + FILE_SOURCE_ONLY)
@@ -195,6 +202,7 @@ def test_source_only(tmpdir):
     assert 'source1' in m.digests.keys()
     assert 'source2' in m.digests.keys()
 
+
 def test_non_mano_artifacts(tmpdir, mocker):
     mocker.patch('vnfsdk_pkgtools.packager.utils.check_file_dir')
     p = tmpdir.mkdir('csar').join('test.mf')
diff --git a/tests/packager/test_toscameta.py b/tests/packager/test_toscameta.py
index be6a173..9c5a13b 100644 (file)
--- a/tests/packager/test_toscameta.py
+++ b/tests/packager/test_toscameta.py
@@ -29,35 +29,32 @@ CSAR_RESOURCE_DIR = os.path.join(ROOT_DIR, 'tests', 'resources', 'csar')
 CSAR_ENTRY_FILE = 'test_entry.yaml'
 CSAR_OUTPUT_FILE = 'output.csar'
 
-ARGS_MANIFEST = {
-            'base_dir': CSAR_RESOURCE_DIR,
-            'entry': CSAR_ENTRY_FILE,
-            'manifest': 'test_entry.mf',
-            'changelog': 'ChangeLog.txt',
-            'licenses': 'Licenses',
-            'tests': 'Tests',
-            'certificate': None,
-        }
-
-ARGS_MANIFEST_CERTIFICATE = {
-            'base_dir': CSAR_RESOURCE_DIR,
-            'entry': CSAR_ENTRY_FILE,
-            'manifest': 'test_entry.mf',
-            'changelog': 'ChangeLog.txt',
-            'licenses': 'Licenses',
-            'tests': 'Tests',
-            'certificate': 'test.crt',
-        }
-
-ARGS_NO_MANIFEST = {
-            'base_dir': CSAR_RESOURCE_DIR,
-            'entry': CSAR_ENTRY_FILE,
-            'manifest': None,
-            'changelog': None,
-            'licenses': None,
-            'tests': None,
-            'certificate': None,
-        }
+ARGS_MANIFEST = {'base_dir': CSAR_RESOURCE_DIR,
+                 'entry': CSAR_ENTRY_FILE,
+                 'manifest': 'test_entry.mf',
+                 'changelog': 'ChangeLog.txt',
+                 'licenses': 'Licenses',
+                 'tests': 'Tests',
+                 'certificate': None,
+                 }
+
+ARGS_MANIFEST_CERTIFICATE = {'base_dir': CSAR_RESOURCE_DIR,
+                             'entry': CSAR_ENTRY_FILE,
+                             'manifest': 'test_entry.mf',
+                             'changelog': 'ChangeLog.txt',
+                             'licenses': 'Licenses',
+                             'tests': 'Tests',
+                             'certificate': 'test.crt',
+                             }
+
+ARGS_NO_MANIFEST = {'base_dir': CSAR_RESOURCE_DIR,
+                    'entry': CSAR_ENTRY_FILE,
+                    'manifest': None,
+                    'changelog': None,
+                    'licenses': None,
+                    'tests': None,
+                    'certificate': None,
+                    }
 
 
 def _validate_metadata(cls, expected):
@@ -110,6 +107,7 @@ def test_invalid_csar_version():
 
 FROM_FILE_CASES = ['TOSCA.meta.sol261', 'TOSCA.meta.sol241']
 
+
 def _prepare(target, metafile_path):
     shutil.copytree(CSAR_RESOURCE_DIR, target)
     os.mkdir(os.path.join(target, 'TOSCA-Metadata'))
@@ -117,6 +115,7 @@ def _prepare(target, metafile_path):
                                             'TOSCA-Metadata',
                                             'TOSCA.meta'))
 
+
 def test_create_from_file():
     for case in FROM_FILE_CASES:
         target = tempfile.mkdtemp()
diff --git a/tests/packager/test_utils.py b/tests/packager/test_utils.py
index 450a526..8c504e4 100644 (file)
--- a/tests/packager/test_utils.py
+++ b/tests/packager/test_utils.py
@@ -22,22 +22,24 @@ from vnfsdk_pkgtools.packager import utils
 from vnfsdk_pkgtools import util
 
 ROOT_DIR = util.get_project_root()
-RESOURCES_DIR= os.path.join(ROOT_DIR, 'tests', 'resources', 'signature')
+RESOURCES_DIR = os.path.join(ROOT_DIR, 'tests', 'resources', 'signature')
 
-MSG_FILE  = os.path.join(RESOURCES_DIR, 'manifest.mf')
+MSG_FILE = os.path.join(RESOURCES_DIR, 'manifest.mf')
 CERT_FILE = os.path.join(RESOURCES_DIR, 'test.crt')
-KEY_FILE  = os.path.join(RESOURCES_DIR, 'test.key')
+KEY_FILE = os.path.join(RESOURCES_DIR, 'test.key')
 
 CONTENT = b"needToBeHashed"
 SHA256 = "20a480339aa4371099f9503511dcc5a8051ce3884846678ced5611ec64bbfc9c"
 SHA512 = "dbed8672e752d51d0c7ca42050f67faf1534e58470bba96e787df5c4cf6a4f8ecf7ad45fb9307adbc5b9dec8432627d86b3eb1d3d43ee9c5e93f754ff2825320"
 
+
 def test_cal_file_hash(tmpdir):
     p = tmpdir.join("file_to_hash.txt")
     p.write(CONTENT)
     assert SHA512 == utils.cal_file_hash("", str(p), 'SHA512')
     assert SHA256 == utils.cal_file_hash(p.dirname, p.basename, 'sha256')
 
+
 def test_cal_file_hash_remote(mocker):
     class FakeRequest(object):
         def __init__(self, *args):
@@ -46,6 +48,7 @@ def test_cal_file_hash_remote(mocker):
     mocker.patch('requests.get', new=FakeRequest)
     assert SHA256 == utils.cal_file_hash("", "http://fake", 'sha256')
 
+
 def test_sign_verify_pairwise():
     cms = utils.sign(MSG_FILE, CERT_FILE, KEY_FILE)
     # We can't examine the exact content of cms because it contains timestamp
@@ -56,70 +59,61 @@ def test_sign_verify_pairwise():
 
 def test_verify_bad(tmpdir):
     cms = utils.sign(MSG_FILE, CERT_FILE, KEY_FILE)
-    
     p = tmpdir.join("file_msg.txt")
     p.write("BAD")
-    
     with pytest.raises(subprocess.CalledProcessError):
         utils.verify(str(p), CERT_FILE, cms, no_verify_cert=True)
 
 
-CHECK_FILE_CASES = [
-        {
-            'negative': False,
-            'params': {'root': RESOURCES_DIR,
-                       'entry': 'test.key',
-                       'msg': '',
-                       'check_for_non': False,
-                       'check_dir': False,
-                      }
-        },
-        {
-            'negative': False,
-            'params': {'root': RESOURCES_DIR,
-                       'entry': 'non-existing-file',
-                       'msg': '',
-                       'check_for_non': True,
-                       'check_dir': False,
-                      }
-        },
-        {
-            'negative': True,
-            'params': {'root': RESOURCES_DIR,
-                       'entry': 'non-existing-file',
-                       'msg': '',
-                       'check_for_non': False,
-                       'check_dir': False,
-                      }
-        },
-        {
-            'negative': False,
-            'params': {'root': ROOT_DIR,
-                       'entry': 'tests',
-                       'msg': '',
-                       'check_for_non': False,
-                       'check_dir': True,
-                      }
-        },
-        {
-            'negative': False,
-            'params': {'root': ROOT_DIR,
-                       'entry': 'non-existing-dir',
-                       'msg': '',
-                       'check_for_non': True,
-                       'check_dir': True,
-                      }
-        },
-        {
-            'negative': True,
-            'params': {'root': ROOT_DIR,
-                       'entry': 'non-existing-dir',
-                       'msg': '',
-                       'check_for_non': False,
-                       'check_dir': True,
-                      }
-        },
-        ]
+CHECK_FILE_CASES = [{'negative': False,
+                     'params': {'root': RESOURCES_DIR,
+                                'entry': 'test.key',
+                                'msg': '',
+                                'check_for_non': False,
+                                'check_dir': False,
+                                },
+                     },
+                    {'negative': False,
+                     'params': {'root': RESOURCES_DIR,
+                                'entry': 'non-existing-file',
+                                'msg': '',
+                                'check_for_non': True,
+                                'check_dir': False,
+                                }
+                     },
+                    {'negative': True,
+                     'params': {'root': RESOURCES_DIR,
+                                'entry': 'non-existing-file',
+                                'msg': '',
+                                'check_for_non': False,
+                                'check_dir': False,
+                                }
+                     },
+                    {'negative': False,
+                     'params': {'root': ROOT_DIR,
+                                'entry': 'tests',
+                                'msg': '',
+                                'check_for_non': False,
+                                'check_dir': True,
+                                }
+                     },
+                    {'negative': False,
+                     'params': {'root': ROOT_DIR,
+                                'entry': 'non-existing-dir',
+                                'msg': '',
+                                'check_for_non': True,
+                                'check_dir': True,
+                                }
+                     },
+                    {'negative': True,
+                     'params': {'root': ROOT_DIR,
+                                'entry': 'non-existing-dir',
+                                'msg': '',
+                                'check_for_non': False,
+                                'check_dir': True,
+                                }
+                     },
+                    ]
 
 
 def test_check_file_dir():
diff --git a/tests/validator/test_toscaparser_validator.py b/tests/validator/test_toscaparser_validator.py
index 12a293b..55dfa61 100644 (file)
--- a/tests/validator/test_toscaparser_validator.py
+++ b/tests/validator/test_toscaparser_validator.py
@@ -28,18 +28,21 @@ CSAR_PATH = os.path.join(RESOURCES_DIR, 'test_import.csar')
 HPA_PATH = os.path.join(RESOURCES_DIR, 'hpa.csar')
 BAD_HPA_PATH = os.path.join(RESOURCES_DIR, 'hpa_bad.csar')
 
+
 def test_validate(tmpdir):
     reader = csar._CSARReader(CSAR_PATH, str(tmpdir.mkdir('validate')))
     validator = toscaparser_validator.ToscaparserValidator()
     validator.validate(reader)
     assert hasattr(validator, 'tosca')
 
+
 def test_validate_hpa(tmpdir):
     reader = csar._CSARReader(HPA_PATH, str(tmpdir.mkdir('validate')))
     validator = toscaparser_validator.ToscaparserValidator()
     validator.validate(reader)
     assert hasattr(validator, 'tosca')
 
+
 def test_validate_hpa_bad(tmpdir):
     reader = csar._CSARReader(BAD_HPA_PATH, str(tmpdir.mkdir('validate')))
     validator = toscaparser_validator.ToscaparserValidator()
diff --git a/tests/validator/test_validate_utils.py b/tests/validator/test_validate_utils.py
index 1d663e5..18da03d 100644 (file)
--- a/tests/validator/test_validate_utils.py
+++ b/tests/validator/test_validate_utils.py
@@ -13,8 +13,6 @@
 # under the License.
 #
 
-import os
-
 from ruamel import yaml
 import pytest
 
@@ -24,6 +22,7 @@ CONTENT = "---\n1"
 
 BAD_CONTENT = "---\na: b\n- d"
 
+
 def test_load_definition(tmpdir):
     p = tmpdir.join("test_definition.yaml")
     p.write(CONTENT)
diff --git a/tests/vnfreq/test_pkg_reqs.py b/tests/vnfreq/test_pkg_reqs.py
index 109e85c..42f812a 100644 (file)
--- a/tests/vnfreq/test_pkg_reqs.py
+++ b/tests/vnfreq/test_pkg_reqs.py
@@ -13,8 +13,6 @@
 # under the License.
 #
 
-import pytest
-
 from vnfsdk_pkgtools import vnfreq
 
 
@@ -63,6 +61,7 @@ def test_R04298(mocker, tmpdir):
     check_result('R-04298', reader, None,
                  'No testing scripts found')
 
+
 def test_R26881(mocker, tmpdir):
     p1 = tmpdir.join('entry.yaml')
     p1.write("")
@@ -78,9 +77,9 @@ def test_R26881(mocker, tmpdir):
     validator.tosca.nodetemplates = [node]
     node.entity_tpl = {'artifacts': {'sw_image': {'file': 'image',
                                                   'type': 'tosca.artifacts.nfv.SwImage',
-                                                 }
-                                    }
-                      }
+                                                  }
+                                     }
+                       }
     check_result('R-26881', reader, validator, None)
 
 
@@ -89,4 +88,3 @@ def test_R35851(mocker):
     node = mocker.Mock()
     validator.tosca.nodetemplates = [node]
     check_result('R-35851', None, validator, None)
-
diff --git a/tests/vnfreq/test_vnfreq.py b/tests/vnfreq/test_vnfreq.py
index 03404fc..30cca1b 100644 (file)
--- a/tests/vnfreq/test_vnfreq.py
+++ b/tests/vnfreq/test_vnfreq.py
 # under the License.
 #
 
-import pytest
-
 from vnfsdk_pkgtools import vnfreq
 
+
 class FakeTester(vnfreq.TesterBase):
     ID = 'fake'
     DESC = 'fake'
diff --git a/tox.ini b/tox.ini
index 3fa6109..1d46a4a 100644 (file)
--- a/tox.ini
+++ b/tox.ini
 #
 
 [tox]
-envlist=py27,py3
+envlist=py27,py3,pep8
 
 [testenv]
+basepython = python3
 passenv =
     TRAVIS
     PYTHON
@@ -39,3 +40,10 @@ basepython = python2
 
 [testenv:py3]
 basepython = python3
+
+[testenv:pep8]
+deps = flake8<3.8,>=3.7.0
+commands = flake8 {toxinidir}/vnfsdk_pkgtools {toxinidir}/tests
+
+[flake8]
+ignore = E501
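
Note: with the new [testenv:pep8] environment above, the flake8 checks enforced by this change can be reproduced locally before pushing. A usage sketch (assuming tox is installed and a python3 interpreter is available on the path):

    tox -e pep8    # runs flake8 over vnfsdk_pkgtools/ and tests/, with E501 (long lines) ignored per the [flake8] section
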
diff --git a/vnfsdk_pkgtools/cli/__main__.py b/vnfsdk_pkgtools/cli/__main__.py
index c5653d7..d51c029 100644 (file)
--- a/vnfsdk_pkgtools/cli/__main__.py
+++ b/vnfsdk_pkgtools/cli/__main__.py
@@ -17,7 +17,6 @@
 import sys
 import logging
 import argparse
-import os
 import shutil
 import tempfile
 
@@ -32,15 +31,17 @@ from vnfsdk_pkgtools import vnfreq
 def csar_create_func(namespace):
 
     csar.write(namespace.source,
-                        namespace.entry,
-                        namespace.destination,
-                        args=namespace)
+               namespace.entry,
+               namespace.destination,
+               args=namespace)
+
 
 def csar_open_func(namespace):
     csar.read(namespace.source,
               namespace.destination,
               namespace.no_verify_cert)
 
+
 def csar_validate_func(namespace):
     workdir = tempfile.mkdtemp()
     try:
@@ -68,10 +69,10 @@ def parse_args(args_list):
     """
     parser = argparse.ArgumentParser(description='VNF SDK CSAR manipulation tool')
     parser.add_argument('-v', '--verbose',
-            dest='verbosity',
-            action='count',
-            default=0,
-            help='Set verbosity level (can be passed multiple times)')
+                        dest='verbosity',
+                        action='count',
+                        default=0,
+                        help='Set verbosity level (can be passed multiple times)')
 
     subparsers = parser.add_subparsers(help='csar-create')
     csar_create = subparsers.add_parser('csar-create')
@@ -116,7 +117,6 @@ def parse_args(args_list):
         action='store_true',
         help='Generate SOL004 v2.4.1 csar for backward compatilibity')
 
-
     csar_open = subparsers.add_parser('csar-open')
     csar_open.set_defaults(func=csar_open_func)
     csar_open.add_argument(
@@ -131,7 +131,6 @@ def parse_args(args_list):
         action='store_true',
         help="Do NOT verify the signer's certificate")
 
-
     csar_validate = subparsers.add_parser('csar-validate')
     csar_validate.set_defaults(func=csar_validate_func)
     csar_validate.add_argument(
diff --git a/vnfsdk_pkgtools/packager/csar.py b/vnfsdk_pkgtools/packager/csar.py
index 3b4cec6..f4dc7a5 100644 (file)
--- a/vnfsdk_pkgtools/packager/csar.py
+++ b/vnfsdk_pkgtools/packager/csar.py
 
 import logging
 import os
-import pprint
 import tempfile
 import zipfile
 
 import requests
+from ruamel import yaml
 
 from vnfsdk_pkgtools.packager import manifest
 from vnfsdk_pkgtools.packager import toscameta
diff --git a/vnfsdk_pkgtools/packager/manifest.py b/vnfsdk_pkgtools/packager/manifest.py
index a518f31..b6d01f6 100644 (file)
--- a/vnfsdk_pkgtools/packager/manifest.py
+++ b/vnfsdk_pkgtools/packager/manifest.py
@@ -22,18 +22,20 @@ import udatetime
 
 from vnfsdk_pkgtools.packager import utils
 
-METADATA_KEYS = [ 'vnf_provider_id',
-                  'vnf_product_name',
-                  'vnf_release_date_time',
-                  'vnf_package_version']
-DIGEST_KEYS = [ 'Source', 'Algorithm', 'Hash' ]
+METADATA_KEYS = ['vnf_provider_id',
+                 'vnf_product_name',
+                 'vnf_release_date_time',
+                 'vnf_package_version']
+DIGEST_KEYS = ['Source', 'Algorithm', 'Hash']
 SUPPORTED_HASH_ALGO = ['SHA-256', 'SHA-512']
 
 NON_MANO_ARTIFACT_RE = re.compile(r'^[0-9a-z_-]+(\.[0-9a-z_-]+)*:$')
 
+
 class ManifestException(Exception):
     pass
 
+
 class Manifest(object):
     ' Manifest file in CSAR package'
     def __init__(self, root_path, manifest_path, sol241=False):
@@ -49,17 +51,17 @@ class Manifest(object):
         # non_mano_artifact dict
         #   :key = set identifier
         #   :value = list of files
-        self.sol241=sol241
+        self.sol241 = sol241
         self.non_mano_artifacts = {}
-        self.blocks = [ ]
+        self.blocks = []
         self._split_blocks()
         self._parse_all_blocks()
 
     @staticmethod
     def __split_line(s):
-        remain=s
+        remain = s
         try:
-            (key, value)=s.split(':', 1)
+            (key, value) = s.split(':', 1)
             value = value.strip()
             remain = None
         except ValueError:
@@ -72,7 +74,7 @@ class Manifest(object):
         Split manifest file into blocks, each block is seperated by a empty
         line or a line with only spaces and tabs.
         '''
-        block_content = [ ]
+        block_content = []
         with open(os.path.join(self.root, self.path), 'rU') as fp:
             for line in fp:
                 line = line.strip(' \t\n')
@@ -106,11 +108,11 @@ class Manifest(object):
             if key in METADATA_KEYS:
                 self.metadata[key] = value
             elif key == 'vnf_release_data_time':
-                #sol004 v2.4.1 compatibility
+                # sol004 v2.4.1 compatibility
                 self.metadata['vnf_release_date_time'] = value
             else:
                 raise ManifestException("Unrecognized metadata %s:" % line)
-        #validate metadata keys
+        # validate metadata keys
         missing_keys = set(METADATA_KEYS) - set(self.metadata.keys())
         if missing_keys:
             raise ManifestException("Missing metadata keys: %s" % ','.join(missing_keys))
@@ -119,7 +121,7 @@ class Manifest(object):
             udatetime.from_string(self.metadata['vnf_release_date_time'])
         except ValueError:
             raise ManifestException("Incorrect IETF RFC 3339 vnf_release_date_time: %s"
-                            % self.metadata['vnf_release_date_time'])
+                                    % self.metadata['vnf_release_date_time'])
 
     def parse_cms(self, lines):
         if '--END CMS--' not in lines[-1]:
@@ -138,12 +140,12 @@ class Manifest(object):
             if key == 'Source':
                 self.digests[value] = (None, None)
             elif key == 'Algorithm':
-                #validate algorithm
+                # validate algorithm
                 desc['Algorithm'] = desc['Algorithm'].upper()
                 if desc['Algorithm'] not in SUPPORTED_HASH_ALGO:
                     raise ManifestException("Unsupported hash algorithm: %s" % desc['Algorithm'])
 
-            #validate hash
+            # validate hash
             if desc.get('Algorithm') and desc.get('Hash') and desc.get('Source'):
                 hash = utils.cal_file_hash(self.root, desc['Source'], desc['Algorithm'])
                 if hash != desc['Hash']:
@@ -213,7 +215,7 @@ class Manifest(object):
             # empty line between digest and signature section
             ret += "\n"
         # signature
-        if  self.signature:
+        if self.signature:
             ret += self.signature
         return ret
 
@@ -242,7 +244,7 @@ class Manifest(object):
                 elif not skip:
                     lines.append(line)
         content = ''.join(lines)
-        tmpfile = tempfile.NamedTemporaryFile(mode='w',delete=False)
+        tmpfile = tempfile.NamedTemporaryFile(mode='w', delete=False)
         tmpfile.write(content)
         tmpfile.close()
         return tmpfile.name
diff --git a/vnfsdk_pkgtools/packager/toscameta.py b/vnfsdk_pkgtools/packager/toscameta.py
index fc51f3c..9e304b0 100644 (file)
--- a/vnfsdk_pkgtools/packager/toscameta.py
+++ b/vnfsdk_pkgtools/packager/toscameta.py
@@ -19,7 +19,7 @@ import logging
 import os
 import pprint
 
-from ruamel import yaml # @UnresolvedImport
+from ruamel import yaml
 import six
 
 from vnfsdk_pkgtools.packager import utils
@@ -49,11 +49,11 @@ class ToscaMeta(object):
     META_ENTRY_TESTS_DIR_KEY = 'ETSI-Entry-Tests'
     META_ENTRY_LICENSES_DIR_KEY = 'ETSI-Entry-Licenses'
     META_ENTRY_CERT_FILE_KEY = 'ETSI-Entry-Certificate'
-    REQUIRED_KEYS = [ META_FILE_VERSION_KEY, META_CSAR_VERSION_KEY,
-                      META_CREATED_BY_KEY, META_ENTRY_DEFINITIONS_KEY,
-                      META_ENTRY_MANIFEST_FILE_KEY, META_ENTRY_HISTORY_FILE_KEY,
-                      META_ENTRY_LICENSES_DIR_KEY,
-                    ]
+    REQUIRED_KEYS = [
+        META_FILE_VERSION_KEY, META_CSAR_VERSION_KEY,
+        META_CREATED_BY_KEY, META_ENTRY_DEFINITIONS_KEY,
+        META_ENTRY_MANIFEST_FILE_KEY, META_ENTRY_HISTORY_FILE_KEY,
+        META_ENTRY_LICENSES_DIR_KEY, ]
     OPTIONAL_KEYS = [META_ENTRY_TESTS_DIR_KEY, META_ENTRY_CERT_FILE_KEY]
 
     def __init__(self, base_dir, entry, manifest=None, changelog=None,
@@ -95,8 +95,8 @@ class ToscaMeta(object):
                                   metadata.get(META_ENTRY_DEFINITIONS_KEY))
         try:
             with open(entry_file) as f:
-                v = yaml.safe_load(f)['tosca_definitions_version']
-        except:
+                yaml.safe_load(f)['tosca_definitions_version']
+        except Exception:
             raise ValueError('Entry file {} is not a valid tosca simple yaml file'.format(entry_file))
 
         if metadata.get(self.META_ENTRY_MANIFEST_FILE_KEY):
@@ -180,13 +180,13 @@ class ToscaMeta241(ToscaMeta):
     META_ENTRY_TESTS_DIR_KEY = 'Entry-Tests'
     META_ENTRY_LICENSES_DIR_KEY = 'Entry-Licenses'
     META_ENTRY_CERT_FILE_KEY = 'Entry-Certificate'
-    REQUIRED_KEYS = [ META_FILE_VERSION_KEY, META_CSAR_VERSION_KEY,
-                      META_CREATED_BY_KEY, META_ENTRY_DEFINITIONS_KEY,
-                    ]
-    OPTIONAL_KEYS = [ META_ENTRY_MANIFEST_FILE_KEY, META_ENTRY_HISTORY_FILE_KEY,
-                      META_ENTRY_LICENSES_DIR_KEY,  META_ENTRY_TESTS_DIR_KEY,
-                      META_ENTRY_CERT_FILE_KEY,
-                    ]
+    REQUIRED_KEYS = [
+        META_FILE_VERSION_KEY, META_CSAR_VERSION_KEY,
+        META_CREATED_BY_KEY, META_ENTRY_DEFINITIONS_KEY, ]
+    OPTIONAL_KEYS = [
+        META_ENTRY_MANIFEST_FILE_KEY, META_ENTRY_HISTORY_FILE_KEY,
+        META_ENTRY_LICENSES_DIR_KEY, META_ENTRY_TESTS_DIR_KEY,
+        META_ENTRY_CERT_FILE_KEY, ]
 
 
 class ToscaMeta261(ToscaMeta):
@@ -219,4 +219,3 @@ def create_from_file(base_dir):
                meta_file_version=metadata.get(META_FILE_VERSION_KEY),
                meta_csar_version=metadata.get(META_CSAR_VERSION_KEY),
                meta_created_by=metadata.get(META_CREATED_BY_KEY))
-
diff --git a/vnfsdk_pkgtools/packager/utils.py b/vnfsdk_pkgtools/packager/utils.py
index 539a242..5624287 100644 (file)
--- a/vnfsdk_pkgtools/packager/utils.py
+++ b/vnfsdk_pkgtools/packager/utils.py
@@ -75,7 +75,7 @@ def cal_file_hash(root, path, algo):
 def _run_cmd(cmd, **kwargs):
     if isinstance(cmd, list):
         args = cmd
-    elif isinstance(cmd, string):
+    elif isinstance(cmd, str):
         args = [cmd]
     else:
         raise RuntimeError("cmd must be string or list")
@@ -96,11 +96,10 @@ def _run_cmd(cmd, **kwargs):
 def sign(msg_file, cert_file, key_file):
     args = ["openssl", "cms", "-sign", "-binary"]
     kwargs = {
-              '-in': os.path.abspath(msg_file),
-              '-signer': os.path.abspath(cert_file),
-              '-inkey': os.path.abspath(key_file),
-              '-outform': 'PEM',
-             }
+        '-in': os.path.abspath(msg_file),
+        '-signer': os.path.abspath(cert_file),
+        '-inkey': os.path.abspath(key_file),
+        '-outform': 'PEM', }
 
     return _run_cmd(args, **kwargs)
 
@@ -114,9 +113,8 @@ def verify(msg_file, cert_file, cms, no_verify_cert=False):
         f.write(cms)
         f.flush()
         kwargs = {
-                  '-in': f.name,
-                  '-inform': 'PEM',
-                  '-content': os.path.abspath(msg_file),
-                  '-certfile': os.path.abspath(cert_file),
-                 }
+            '-in': f.name,
+            '-inform': 'PEM',
+            '-content': os.path.abspath(msg_file),
+            '-certfile': os.path.abspath(cert_file), }
         return _run_cmd(args, **kwargs)
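
For reference, the sign() and verify() helpers reindented above wrap the openssl CLI; the argument dictionary in the signing hunk corresponds roughly to an invocation such as the following (a sketch with placeholder paths only — the exact command line depends on how _run_cmd expands its kwargs, which is not shown in these hunks):

    openssl cms -sign -binary -in <msg_file> -signer <cert_file> -inkey <key_file> -outform PEM
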
diff --git a/vnfsdk_pkgtools/validator/__init__.py b/vnfsdk_pkgtools/validator/__init__.py
index c072543..269627a 100644 (file)
--- a/vnfsdk_pkgtools/validator/__init__.py
+++ b/vnfsdk_pkgtools/validator/__init__.py
@@ -18,9 +18,9 @@ import abc
 import six
 from stevedore import driver
 
-
 NS = "vnfsdk.pkgtools.validator"
 
+
 def get_validator(params):
     """Get validate driver and load it.
 
@@ -40,7 +40,6 @@ class ValidatorBase(object):
     def __init__(self):
         self.tosca = None
 
-
     @abc.abstractmethod
     def validate(self, reader):
         """Validate the csar package.
diff --git a/vnfsdk_pkgtools/validator/toscaparser_validator.py b/vnfsdk_pkgtools/validator/toscaparser_validator.py
index d1ccd9a..851effb 100644 (file)
--- a/vnfsdk_pkgtools/validator/toscaparser_validator.py
+++ b/vnfsdk_pkgtools/validator/toscaparser_validator.py
@@ -49,7 +49,7 @@ class ToscaparserValidator(validator.ValidatorBase):
         self.hpa_schema_version = defs.get('metadata', {}).get('version')
         self.hpa_schemas = defs.get('schemas', {})
         self.hpa_mappings = defs.get('mappings', [])
-        #validate schema defined in hpa.yaml is correct
+        # validate schema defined in hpa.yaml is correct
         if not self.hpa_schema_version:
             msg = "No defined version in metadata"
             raise HpaSchemaDefError(msg)
@@ -64,9 +64,9 @@ class ToscaparserValidator(validator.ValidatorBase):
 
     def validate(self, reader):
         entry_path = os.path.join(reader.destination,
-                                       reader.entry_definitions)
+                                  reader.entry_definitions)
         try:
-            #TODO set debug_mode due to upstream bug
+            # TODO set debug_mode due to upstream bug
             #     https://jira.opnfv.org/browse/PARSER-181
             self.tosca = ToscaTemplate(path=entry_path,
                                        no_required_paras_check=True,
@@ -126,10 +126,10 @@ class ToscaparserValidator(validator.ValidatorBase):
         for (key, hpa_value) in six.iteritems(value):
             if key not in hpa_schema:
                 msg = "node %s: %s is NOT a valid HPA key"
-                raise HpaValueError(msg  % (refkey, key))
+                raise HpaValueError(msg % (refkey, key))
             try:
                 hpa_dict = json.loads(hpa_value)
-            except:
+            except Exception:
                 msg = "node %s, HPA key %s: %s is NOT a valid json encoded string"
                 raise HpaValueError(msg % (refkey, key, hpa_value.encode('ascii', 'replace')))
             if not isinstance(hpa_dict, dict):
@@ -145,7 +145,7 @@ class ToscaparserValidator(validator.ValidatorBase):
                 attr_schema = hpa_schema[key][attr]
                 if not re.match(attr_schema, str(val)):
                     msg = ("node %s, HPA key %s, attr %s: %s is not a valid HPA "
-                          "attr value, expected re pattern is %s")
+                           "attr value, expected re pattern is %s")
                     raise HpaValueError(msg % (refkey, key, attr, val.encode('ascii', 'replace'), attr_schema))
 
     def validate_hpa_value(self, refkey, hpa_schema, values):
@@ -167,5 +167,3 @@ class ToscaparserValidator(validator.ValidatorBase):
                         self.validate_hpa_value(refkey,
                                                 self.hpa_schemas[mapping['schema']],
                                                 value)
-
-
diff --git a/vnfsdk_pkgtools/validator/utils.py b/vnfsdk_pkgtools/validator/utils.py
index b26a290..32575a2 100644 (file)
--- a/vnfsdk_pkgtools/validator/utils.py
+++ b/vnfsdk_pkgtools/validator/utils.py
@@ -36,10 +36,10 @@ def load_definitions(config_file, defaults=None):
             if hasattr(err, 'problem_mark'):
                 mark = err.problem_mark
                 errmsg = ("Invalid YAML syntax in Definitions file %(file)s "
-                           "at line: %(line)s, column: %(column)s." %
-                           dict(file=config_file,
-                                line=mark.line + 1,
-                                column=mark.column + 1))
+                          "at line: %(line)s, column: %(column)s." %
+                          dict(file=config_file,
+                               line=mark.line + 1,
+                               column=mark.column + 1))
             else:
                 errmsg = ("YAML error reading Definitions file %s" % config_file)
             LOG.error(errmsg)
diff --git a/vnfsdk_pkgtools/version.py b/vnfsdk_pkgtools/version.py
index 00126b7..a2b63ea 100644 (file)
--- a/vnfsdk_pkgtools/version.py
+++ b/vnfsdk_pkgtools/version.py
@@ -1,3 +1,3 @@
 global __version__
 
-__version__='1.4.0pre'
+__version__ = '1.4.0pre'
diff --git a/vnfsdk_pkgtools/vnfreq/__init__.py b/vnfsdk_pkgtools/vnfreq/__init__.py
index 4c27495..70e3294 100644 (file)
--- a/vnfsdk_pkgtools/vnfreq/__init__.py
+++ b/vnfsdk_pkgtools/vnfreq/__init__.py
@@ -20,9 +20,9 @@ import prettytable
 import six
 from stevedore import driver
 
-
 NS = "vnfsdk.pkgtools.vnfreq"
 
+
 def get_vnfreq_tester(name):
     """Get vnf requirement tester.
 
@@ -87,4 +87,3 @@ class TesterBase(object):
             self.err = self._do_check(reader, tosca)
         except Exception as e:
             self.err = e
-
diff --git a/vnfsdk_pkgtools/vnfreq/pkg_reqs.py b/vnfsdk_pkgtools/vnfreq/pkg_reqs.py
index 4744eac..cf880e4 100644 (file)
--- a/vnfsdk_pkgtools/vnfreq/pkg_reqs.py
+++ b/vnfsdk_pkgtools/vnfreq/pkg_reqs.py
 # under the License.
 #
 
-import abc
 import os
 
 import six
-from stevedore import driver
 
 from vnfsdk_pkgtools.packager import toscameta
-from vnfsdk_pkgtools.validator import toscaparser_validator as tv
 from vnfsdk_pkgtools import vnfreq
 
 
@@ -90,7 +87,7 @@ class R26881(vnfreq.TesterBase):
                     if file and \
                        os.path.isfile(os.path.join(entry_path, file)) or \
                        os.path.isfile(os.path.join(reader.destination, file)):
-                           valid_artifacts.append(file)
+                        valid_artifacts.append(file)
         if not valid_artifacts:
             raise vnfreq.VnfRequirementError("No valid binaries or images for VNF instantion found")
         return 0
@@ -111,9 +108,8 @@ class R35851(vnfreq.TesterBase):
         for node in getattr(tosca.tosca, 'nodetemplates', []):
             if tosca.is_type(node, 'tosca.nodes.nfv.VduCp') or \
                tosca.is_type(node, 'tosca.nodes.nfv.VnfVirtualLink'):
-                   found = True
-                   break
+                found = True
+                break
         if not found:
             raise vnfreq.VnfRequirementError("No basic network or application connectivity found")
         return 0
-