Merge pull request #149 from dbmi-bgm/kmp_no_toplevel_imports
Get rid of the rest of the non-top-level imports
netsettler authored Jul 14, 2020
2 parents 468eb48 + 2526894 commit b14714a
Showing 27 changed files with 187 additions and 200 deletions.
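
Apart from the version bump in pyproject.toml and small build-script adjustments, the change in the Python files is the same everywhere: imports that previously sat inside function bodies ("non-top-level imports") are hoisted to the module's import block, so each file declares its dependencies in one place and any import error surfaces at startup rather than at call time. A minimal before/after sketch of the pattern; the checksum function is illustrative only, not taken from this repository.

# Before: the dependency is hidden inside the function body.
def checksum(path):
    import hashlib
    with open(path, 'rb') as f:
        return hashlib.sha1(f.read()).hexdigest()

# After: the import is declared once, at the top of the module.
import hashlib

def checksum(path):
    with open(path, 'rb') as f:
        return hashlib.sha1(f.read()).hexdigest()
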
5 changes: 1 addition & 4 deletions .travis.yml
@@ -48,10 +48,7 @@ before_install:
- node --version
- npm config set python /usr/bin/python2.7
install:
- pip install --upgrade pip
- pip install poetry
- poetry install
- make npm-setup
- make build
before_script:
- configure-kibana-index --es-endpoint search-fourfront-builds-uhevxdzfcv7mkm5pj5svcri3aq.us-east-1.es.amazonaws.com:80
script:
1 change: 1 addition & 0 deletions Makefile
@@ -24,6 +24,7 @@ macpoetry-install: # Same as 'poetry install' except that on OSX Catalina, an e
	bin/macpoetry-install

configure: # does any pre-requisite installs
	pip install --upgrade pip
	pip install poetry

macbuild: # builds for Catalina
11 changes: 3 additions & 8 deletions deploy/travis_after_all.py
@@ -3,16 +3,10 @@
import json
import time
import logging
import urllib.request as urllib2

try:
    from functools import reduce
except ImportError:
    pass
from functools import reduce

try:
    import urllib.request as urllib2
except ImportError:
    import urllib2

log = logging.getLogger("travis.leader")
log.addHandler(logging.StreamHandler())
@@ -37,6 +31,7 @@
def is_leader(job_number):
    return job_number.endswith('.1')


job_number = os.getenv(TRAVIS_JOB_NUMBER)

if not job_number:
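
The two try/except blocks removed above were Python 2 fallbacks; on Python 3 both functools.reduce and urllib.request always exist, so the except ImportError branches can never run and the plain top-level imports are equivalent. Keeping the old alias name means the rest of the script is untouched; a small illustrative sketch (the URL is just an example, not from this script):

# Python 3 module bound to the legacy name, so existing call sites still work.
import urllib.request as urllib2

request = urllib2.Request('https://example.org/')  # same spelling as the Python 2 code used
# urllib2.urlopen(request) would perform the request exactly as before.
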
4 changes: 2 additions & 2 deletions pyproject.toml
@@ -1,7 +1,7 @@
[tool.poetry]
# Note: Various modules refer to this system as "encoded", not "cgap-portal".
name = "encoded"
version = "2.2.0"
version = "2.2.1"
description = "Clinical Genomics Analysis Platform"
authors = ["4DN-DCIC Team <support@4dnucleome.org>"]
license = "MIT"
@@ -91,7 +91,7 @@ rutter = ">=0.2,<1"
s3transfer = "^0.2.0"
simplejson = "^3.17.0"
SPARQLWrapper = "1.7.6"
SQLAlchemy = "1.3.16"
SQLAlchemy = "1.3.16" # Pinned because >=1.3.17 is a problem
structlog = ">=18.1.0,<20"
submit4dn = "0.9.7"
subprocess-middleware = ">=0.3,<1"
18 changes: 9 additions & 9 deletions src/encoded/__init__.py
@@ -1,7 +1,10 @@
import json
import hashlib
# import json
import logging # not used in Fourfront, but used in CGAP? -kmp 8-Apr-2020
import mimetypes
import netaddr
import os
import pkg_resources
import subprocess
import sys

@@ -10,9 +13,13 @@
from dcicutils.env_utils import get_mirror_env_from_context
from dcicutils.ff_utils import get_health_page
from pyramid.config import Configurator
from pyramid_localroles import LocalRolesAuthorizationPolicy
from pyramid.settings import asbool
from snovault.app import STATIC_MAX_AGE, session, json_from_path, configure_dbsession, changelogs, json_asset
from snovault.elasticsearch import APP_FACTORY
from webtest import TestApp
from .ingestion_listener import INGESTION_QUEUE
from .loadxl import load_all


if sys.version_info.major < 3:
@@ -24,10 +31,8 @@


def static_resources(config):
    from pkg_resources import resource_filename
    import mimetypes
    mimetypes.init()
    mimetypes.init([resource_filename('encoded', 'static/mime.types')])
    mimetypes.init([pkg_resources.resource_filename('encoded', 'static/mime.types')])
    config.add_static_view('static', 'static', cache_max_age=STATIC_MAX_AGE)
    config.add_static_view('profiles', 'schemas', cache_max_age=STATIC_MAX_AGE)

@@ -67,8 +72,6 @@ def robots_txt(request):


def load_workbook(app, workbook_filename, docsdir):
    from .loadxl import load_all
    from webtest import TestApp
    environ = {
        'HTTP_ACCEPT': 'application/json',
        'REMOTE_USER': 'IMPORT',
@@ -84,7 +87,6 @@ def load_workbook(app, workbook_filename, docsdir):


def app_version(config):
    import hashlib
    if not config.registry.settings.get(APP_VERSION_REGISTRY_KEY):
        # we update version as part of deployment process `deploy_beanstalk.py`
        # but if we didn't check env then git
@@ -142,12 +144,10 @@ def main(global_config, **local_config):
settings['mirror_health'] = get_health_page(ff_env=mirror)
config = Configurator(settings=settings)

from snovault.elasticsearch import APP_FACTORY
config.registry[APP_FACTORY] = main # used by mp_indexer
config.include(app_version)

config.include('pyramid_multiauth') # must be before calling set_authorization_policy
from pyramid_localroles import LocalRolesAuthorizationPolicy
# Override default authz policy set by pyramid_multiauth
config.set_authorization_policy(LocalRolesAuthorizationPolicy())
config.include(session)
16 changes: 10 additions & 6 deletions src/encoded/commands/export_data.py
@@ -1,10 +1,15 @@
from pyramid import paster
from urllib.parse import urlparse
import argparse
import json
import logging
import requests
import time

from multiprocessing.pool import ThreadPool
from pyramid import paster
from simplejson.scanner import JSONDecodeError
from socket import gaierror
import time
import requests, logging, json
from urllib.parse import urlparse


EPILOG = __doc__

@@ -96,7 +101,6 @@ def perform_request(uri, attempt = 1):


def main():
    import argparse
    parser = argparse.ArgumentParser(  # noqa - PyCharm wrongly thinks the formatter_class is specified wrong here.
        description="Export Data", epilog=EPILOG,
        formatter_class=argparse.RawDescriptionHelpFormatter,
@@ -122,4 +126,4 @@ def main():


if __name__ == '__main__':
main()
main()
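
The command modules below (migrate_attachments_aws, migrate_dataset_type, migrate_files_aws, profiler, spreadsheet_to_json, and dev_servers further down) repeat the same edit made here in export_data: the import argparse that used to open each main() moves up into the module's import block. A simplified sketch of the resulting shape; the real scripts define many more arguments than shown:

import argparse

EPILOG = __doc__


def main():
    # argparse is now resolved at import time, like every other dependency.
    parser = argparse.ArgumentParser(
        description="Export Data", epilog=EPILOG,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.parse_args()  # the real script adds its options before parsing


if __name__ == '__main__':
    main()
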
2 changes: 1 addition & 1 deletion src/encoded/commands/migrate_attachments_aws.py
@@ -2,6 +2,7 @@
Move attachment blobs to S3.
"""
import argparse
import copy
import logging
import transaction
@@ -47,7 +48,6 @@ def run(app):


def main():
    import argparse
    parser = argparse.ArgumentParser(  # noqa - PyCharm wrongly thinks the formatter_class is specified wrong here.
        description="Move attachment blobs to S3", epilog=EPILOG,
        formatter_class=argparse.RawDescriptionHelpFormatter,
2 changes: 1 addition & 1 deletion src/encoded/commands/migrate_dataset_type.py
@@ -2,6 +2,7 @@
Migrate dataset type
"""
import argparse
import logging
import transaction
from pyramid.paster import get_app
@@ -36,7 +37,6 @@ def run(app):


def main():
    import argparse
    parser = argparse.ArgumentParser(  # noqa - PyCharm wrongly thinks the formatter_class is specified wrong here.
        description="Migrate dataset type", epilog=EPILOG,
        formatter_class=argparse.RawDescriptionHelpFormatter,
2 changes: 1 addition & 1 deletion src/encoded/commands/migrate_files_aws.py
@@ -2,6 +2,7 @@
Update files with AWS metadata
"""
import argparse
import json
import logging
import transaction
@@ -40,7 +41,6 @@ def run(app, files):


def main():
    import argparse
    parser = argparse.ArgumentParser(  # noqa - PyCharm wrongly thinks the formatter_class is specified wrong here.
        description="Migrate files to AWS", epilog=EPILOG,
        formatter_class=argparse.RawDescriptionHelpFormatter,
9 changes: 6 additions & 3 deletions src/encoded/commands/profiler.py
@@ -12,18 +12,22 @@
%(prog)s development.ini --app-name app "/experiments/ENCSR000ADI/?format=json&datastore=database"
"""

import argparse
import logging
import cProfile
import pstats

from webtest import TestApp
from pyramid import paster


EPILOG = __doc__

logger = logging.getLogger(__name__)


def internal_app(configfile, app_name=None, username=None, accept_json=True):
    from webtest import TestApp
    from pyramid import paster
    app = paster.get_app(configfile, app_name)
    if not username:
        username = 'IMPORT'
@@ -76,7 +80,6 @@ def run(testapp, method, path, data, warm_ups, filename, sortby, stats, callers,


def main():
    import argparse
    parser = argparse.ArgumentParser(  # noqa - PyCharm wrongly thinks the formatter_class is invalid
        description="Update links and keys", epilog=EPILOG,
        formatter_class=argparse.RawDescriptionHelpFormatter,
5 changes: 3 additions & 2 deletions src/encoded/commands/spreadsheet_to_json.py
@@ -5,10 +5,12 @@
"""

from .. import loadxl
import argparse
import json
import os.path

from .. import loadxl

EPILOG = __doc__


@@ -45,7 +47,6 @@ def convert(filename, sheetname=None, outputdir=None, skip_blanks=False):


def main():
    import argparse
    parser = argparse.ArgumentParser(  # noqa - PyCharm wrongly thinks the formatter_class is specified wrong here.
        description="Convert spreadsheet to json list", epilog=EPILOG,
        formatter_class=argparse.RawDescriptionHelpFormatter,
18 changes: 8 additions & 10 deletions src/encoded/dev_servers.py
@@ -5,19 +5,21 @@
%(prog)s development.ini --app-name app --init --clear
"""
from pkg_resources import resource_filename
from pyramid.paster import get_app, get_appsettings

import argparse
import atexit
import logging
import os.path
import select
import shutil
import subprocess
import sys
try:
    import subprocess32 as subprocess
except ImportError:
    import subprocess

from pkg_resources import resource_filename
from pyramid.paster import get_app, get_appsettings
from pyramid.path import DottedNameResolver
from snovault.elasticsearch import create_mapping
from snovault.tests import elasticsearch_fixture, postgresql_fixture


EPILOG = __doc__
@@ -67,7 +69,6 @@ def ingestion_listener_process(config_uri, app_name, echo=True):


def main():
    import argparse
    parser = argparse.ArgumentParser(
        description="Run development servers", epilog=EPILOG,
        formatter_class=argparse.RawDescriptionHelpFormatter,
@@ -87,8 +88,6 @@ def main():
    # get the config and see if we want to connect to non-local servers
    config = get_appsettings(args.config_uri, args.app_name)

    from snovault.tests import elasticsearch_fixture, postgresql_fixture
    from snovault.elasticsearch import create_mapping
    datadir = os.path.abspath(args.datadir)
    pgdata = os.path.join(datadir, 'pgdata')
    esdata = os.path.join(datadir, 'esdata')
@@ -131,7 +130,6 @@ def cleanup_process():
        create_mapping.run(app, skip_indexing=True, purge_queue=False)

    if args.init and args.load:
        from pyramid.path import DottedNameResolver
        load_test_data = app.registry.settings.get('load_test_data')
        load_test_data = DottedNameResolver().resolve(load_test_data)
        load_res = load_test_data(app)
34 changes: 13 additions & 21 deletions src/encoded/renderers.py
@@ -1,10 +1,12 @@
from pkg_resources import resource_filename
from urllib.parse import urlencode
import json
import logging
import os
import psutil
import time

from functools import lru_cache
from pyramid.events import (
    BeforeRender,
    subscriber,
)
from pkg_resources import resource_filename
from pyramid.events import BeforeRender, subscriber
from pyramid.httpexceptions import (
    HTTPMovedPermanently,
    HTTPPreconditionFailed,
@@ -14,25 +16,16 @@
    HTTPNotAcceptable,
    HTTPServerError
)
from pyramid.response import Response
from pyramid.security import forget
from pyramid.settings import asbool
from pyramid.threadlocal import (
    manager,
)
from pyramid.response import Response
from pyramid.traversal import (
    split_path_info,
    _join_path_tuple,
)

from pyramid.threadlocal import manager
from pyramid.traversal import split_path_info, _join_path_tuple
from snovault.validation import CSRFTokenError
from subprocess_middleware.tween import SubprocessTween
from subprocess_middleware.worker import TransformWorker
import logging
import os
import psutil
import time
import json
from urllib.parse import urlencode
from webob.cookies import Cookie


log = logging.getLogger(__name__)
@@ -222,7 +215,6 @@ def remove_expired_session_cookies_tween_factory(handler, registry):
    their removal in security_tween_factory & authentication.py as well as client-side
    (upon "Logout" action). If needed for some reason, can re-enable.
    '''
    from webob.cookies import Cookie

    ignore = {
        '/favicon.ico',

