Merge pull request #13 from NathanWorkman/release/v0.3.0
Release/v0.3.0
NathanWorkman authored Apr 9, 2018
2 parents fe9856d + 6f20c0d commit 614210f
Showing 60 changed files with 5,329 additions and 169 deletions.
30 changes: 26 additions & 4 deletions .gitignore
@@ -1,12 +1,34 @@

# general things to ignore
build/
dist/
*.egg-info/
*.egg
*.py[cod]
__pycache__/
*.so
*~
/venv
__pycache__
*.pyc
*.sqlite3
.DS_Store

# due to using tox and pytest
.tox
.cache
.pytest_cache

# node modules
/node_modules

# project dist static files
seeker/static
seeker/media

# project secrets
.env

# due to ansible logs
*.log
*.retry
htmlcov
12 changes: 12 additions & 0 deletions .travis.yml
@@ -0,0 +1,12 @@
language: python

python:
- "3.6"

install:
- pip install setuptools --upgrade
- pip install tox-travis
- pip install -r requirements.txt

script:
- tox
39 changes: 31 additions & 8 deletions Makefile
@@ -32,27 +32,32 @@ ECHO_GREEN = @echo "\033[33;32m $1\033[0m"
HOST ?= localhost:8000

reset: delete_sqlite migrate user run
setup: virtualenv requirements migrate user yarn build collectstatic

virtualenv:
# Create virtualenv
$(call ECHO_GREEN, Creating virtualenv... )
virtualenv -p python3 $(VIRTUALENV_NAME)

requirements:
# Install project requirements
$(call ECHO_GREEN, Installing requirements... )
( \
source venv/bin/activate;\
$(PIP_INSTALL_CMD) -r requirements.txt; \
)

migrate:
# Run django migrations
$(call ECHO_GREEN, Running migrations... )
( \
cd seeker; \
$(MANAGE_CMD) migrate; \
)

user:
# Create user account
$(call ECHO_GREEN, Creating super user... )
( \
cd seeker; \
echo "from django.contrib.auth.models import User; User.objects.create_superuser('admin', 'admin@email.com', 'pass')" | ./manage.py shell; \
@@ -90,15 +95,9 @@ migrations:
$(MANAGE_CMD) makemigrations; \
)

migrate:
# Run database migrations
( \
cd seeker; \
$(MANAGE_CMD) migrate; \
)

collectstatic:
# Collect static assets
$(call ECHO_GREEN, Collecting static assets...)
( \
cd seeker; \
$(MANAGE_CMD) collectstatic; \
@@ -109,10 +108,19 @@ run:
$(call ECHO_GREEN, Starting Django Server...)
( \
cd seeker; \
$(MANAGE_CMD) runserver; \
gulp; \
)


crawl:
# Run ALL scrapy spiders
$(call ECHO_GREEN, Running spiders... )
(\
cd seeker; \
python crawl.py; \
)

crawl_spider:
# Run scrapy spider
$(call ECHO_GREEN, Running $(spider) spider... )
(\
@@ -126,3 +134,18 @@ delete_sqlite:
cd seeker; \
rm -rf db.sqlite3;\
)

yarn:
# install npm modules
$(call ECHO_GREEN, Installing npm modules... )
( \
yarn; \
)

build:
# build static assets
$(call ECHO_GREEN, Compiling static assets... )
( \
cd seeker; \
gulp build; \
)
43 changes: 31 additions & 12 deletions README.md
@@ -1,5 +1,7 @@
# :sunglasses: Seeker [WIP]

[![Build Status](https://travis-ci.org/NathanWorkman/seeker.svg?branch=master)](https://travis-ci.org/NathanWorkman/seeker)

## What is Seeker?
Seeker aims not to be a job board for everyone, but a job board for you.

@@ -11,7 +13,19 @@ To change the search terms edit the query inside each spider.

## Setup

Some quick setup instructions:

You will need `yarn` and `virtualenv` installed on your machine.

Install Yarn
```
brew install yarn
```

Install virtualenv
```
pip install virtualenv
```

I would recommend installing [virtualenv](https://virtualenv.readthedocs.io/).

@@ -22,11 +36,12 @@ cd seeker/
virtualenv venv -p python3
source venv/bin/activate
pip install -r requirements.txt
yarn
cd seeker/
python manage.py migrate
python manage.py createsuperuser
python manage.py runserver
make build
make run
```

### To run the spiders
@@ -67,20 +82,24 @@ Navigate to the django admin to view your results.
- [ ] Celery Beat - run spiders on a schedule (see the sketch below).
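
As a sketch for that unchecked Celery Beat item: the entry below is an assumption, not code from this commit. The `seeker.tasks.crawl_all` task path is hypothetical; only the `Celery`, `crontab`, and `beat_schedule` usage is standard Celery 4 API (Celery 4.1.0 is already pinned in requirements.txt).

```
# Hypothetical Celery beat schedule; the task path is an assumption,
# only the Celery 4 configuration API shown here is standard.
from celery import Celery
from celery.schedules import crontab

app = Celery("seeker")

app.conf.beat_schedule = {
    "crawl-all-spiders-nightly": {
        "task": "seeker.tasks.crawl_all",  # hypothetical task wrapping crawl.py
        "schedule": crontab(hour=2, minute=0),  # every day at 02:00
    },
}
```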

#### Spiders
Want a spider not listed here? Feel free to open a pull request and add it to the list or implement the spider yourself.

- [x] [Stack Overflow](https://www.stackoverflow.com/jobs)
- [ ] [Indeed](https://www.indeed.com)
- [ ] [Dice](http://dice.com)
- [x] [Indeed](https://www.indeed.com)
- [ ] [Angel.co](https://angel.co/)
- [ ] [RemotePython](https://www.remotepython.com)
- [x] [RemotePython](https://www.remotepython.com)
- [ ] [DjangoJobs](https://djangojobs.net/jobs/)
- [ ] [DjangoGigs](https://djangogigs.com)
- [x] [DjangoGigs](https://djangogigs.com)
- [ ] [Jobspresso](http://jobspresso.co)
- [ ] [Authentic Jobs](http://authenticjobs.com/)
- [ ] [We Work Remotely](https://weworkremotely.com/)
- [ ] [Remotive](https://remotive.io)
- [ ] [Python.org](https://www.python.org/jobs/)

- [x] [Python.org](https://www.python.org/jobs/)
- [ ] [Working Nomads](https://www.workingnomads.co/jobs)
- [ ] [Remote Work Hub](https://remoteworkhub.com)
- [ ] [Telecommunity](http://remotejobs.telecommunity.net/#s=1)
- [ ] [Remote Base](https://remotebase.io/)
- [ ] [WFH](https://www.wfh.io)
- [ ] [Remote Ok](https://remoteok.io)
- [ ] [Remotely Awesome Job](https://www.remotelyawesomejobs.com/remote-django-jobs)



27 changes: 27 additions & 0 deletions package.json
@@ -0,0 +1,27 @@
{
"name": "seeker",
"version": "1.0.0",
"description": "Job Board Aggregator",
"main": "index.js",
"repository": "git@github.com:NathanWorkman/seeker.git",
"author": "Nathan Workman <nathancworkman@gmail.com>",
"license": "MIT",
"private": false,
"dependencies": {
"browser-sync": "^2.18.13",
"gulp": "^3.9.1",
"gulp-autoprefixer": "^4.0.0",
"gulp-concat": "^2.6.1",
"gulp-cssnano": "^2.1.2",
"gulp-load-plugins": "^1.5.0",
"gulp-notify": "^3.0.0",
"gulp-plumber": "^1.1.0",
"gulp-sass": "^3.1.0",
"gulp-sourcemaps": "^2.6.1",
"gulp-spawn": "^0.4.0",
"gulp-uglify": "^3.0.0",
"gulp-util": "^3.0.8",
"gulp-watch": "^4.3.11",
"node-sass": "^4.5.3"
}
}
7 changes: 6 additions & 1 deletion requirements.in
@@ -9,4 +9,9 @@ pip-tools==1.11.0
psycopg2==2.7.4
redis==2.10.6
Scrapy==1.5.0
scrapy-djangoitem==1.1.1
pytest==3.5.0
pytest-django==3.1.2
flake8==3.5.0
tox==3.0.0
pytest-factoryboy==2.0.1
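
The additions to requirements.in bring in a pytest-based test stack (pytest, pytest-django, pytest-factoryboy, flake8, tox — the same tox run Travis invokes above). A hypothetical example of the kind of test this enables; the test body is an assumption, only the `pytest.mark.django_db` marker is actual pytest-django API:

```
# Hypothetical pytest-django test, not from this commit; pytest-django
# supplies the django_db marker, which gives the test database access.
import pytest
from django.contrib.auth.models import User


@pytest.mark.django_db
def test_superuser_creation():
    # Same credentials the Makefile's `user` target creates.
    User.objects.create_superuser("admin", "admin@email.com", "pass")
    assert User.objects.filter(is_superuser=True).count() == 1
```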
23 changes: 20 additions & 3 deletions requirements.txt
@@ -6,7 +6,7 @@
#
amqp==2.2.2 # via kombu
asn1crypto==0.24.0 # via cryptography
attrs==17.4.0 # via automat, service-identity
attrs==17.4.0 # via automat, pytest, service-identity
automat==0.6.0 # via twisted
billiard==3.5.0.3 # via celery
celery==4.1.0
@@ -15,39 +15,56 @@ cffi==1.11.5 # via cryptography
chardet==3.0.4 # via requests
click==6.7 # via pip-tools
constantly==15.1.0 # via twisted
cryptography==2.2.1 # via pyopenssl
cryptography==2.2.2 # via pyopenssl
cssselect==1.0.3 # via parsel, scrapy
django-anymail==2.0
django-debug-toolbar==1.9.1
django-dotenv==1.4.2
django-widget-tweaks==1.4.1
django==2.0.3
factory-boy==2.10.0 # via pytest-factoryboy
faker==0.8.12 # via factory-boy
first==2.0.1 # via pip-tools
flake8==3.5.0
gunicorn==19.7.1
hyperlink==18.0.0 # via twisted
idna==2.6 # via cryptography, hyperlink, requests
incremental==17.5.0 # via twisted
inflection==0.3.1 # via pytest-factoryboy
kombu==4.1.0 # via celery
lxml==4.2.1 # via parsel, scrapy
mccabe==0.6.1 # via flake8
more-itertools==4.1.0 # via pytest
parsel==1.4.0 # via scrapy
pip-tools==1.11.0
pluggy==0.6.0 # via pytest, tox
psycopg2==2.7.4
py==1.5.3 # via pytest, tox
pyasn1-modules==0.2.1 # via service-identity
pyasn1==0.4.2 # via pyasn1-modules, service-identity
pycodestyle==2.3.1 # via flake8
pycparser==2.18 # via cffi
pydispatcher==2.0.5 # via scrapy
pyflakes==1.6.0 # via flake8
pyopenssl==17.5.0 # via scrapy, service-identity
pytest-django==3.1.2
pytest-factoryboy==2.0.1
pytest==3.5.0
python-dateutil==2.7.2 # via faker
pytz==2018.3 # via celery, django
queuelib==1.5.0 # via scrapy
redis==2.10.6
requests==2.18.4 # via django-anymail
scrapy-djangoitem==1.1.1
scrapy==1.5.0
service-identity==17.0.0 # via scrapy
six==1.11.0 # via automat, cryptography, django-anymail, parsel, pip-tools, pyopenssl, scrapy, scrapy-djangoitem, w3lib
six==1.11.0 # via automat, cryptography, django-anymail, faker, more-itertools, parsel, pip-tools, pyopenssl, pytest, python-dateutil, scrapy, scrapy-djangoitem, tox, w3lib
sqlparse==0.2.4 # via django-debug-toolbar
text-unidecode==1.2 # via faker
tox==3.0.0
twisted==17.9.0 # via scrapy
urllib3==1.22 # via requests
vine==1.1.4 # via amqp
virtualenv==15.2.0 # via tox
w3lib==1.19.0 # via parsel, scrapy
zope.interface==4.4.3 # via twisted
5 changes: 0 additions & 5 deletions seeker/companies/apps.py

This file was deleted.

3 changes: 0 additions & 3 deletions seeker/companies/tests.py

This file was deleted.

File renamed without changes.
File renamed without changes.
5 changes: 5 additions & 0 deletions seeker/company/apps.py
@@ -0,0 +1,5 @@
from django.apps import AppConfig


class CompanyConfig(AppConfig):
name = 'company'
@@ -1,4 +1,4 @@
# Generated by Django 2.0.3 on 2018-03-27 00:40
# Generated by Django 2.0.3 on 2018-04-06 04:39

from django.db import migrations, models

File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
12 changes: 12 additions & 0 deletions seeker/crawl.py
@@ -0,0 +1,12 @@
from scrapy.utils.project import get_project_settings
from scrapy.crawler import CrawlerProcess

setting = get_project_settings()
process = CrawlerProcess(setting)
# https://doc.scrapy.org/en/latest/topics/api.html#scrapy.crawler.CrawlerProcess

for spider in process.spiders.list():
print("Running spider %s" % (spider))
process.crawl(spider)

process.start()
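
One note on the new crawl.py: `CrawlerProcess.spiders` is a deprecated alias for `spider_loader` in Scrapy (deprecated since 1.0), so the loop above emits a deprecation warning under Scrapy 1.5. A minimal sketch of the same loop against the non-deprecated attribute — `spider_loader.list()` and the string-name form of `crawl()` are standard Scrapy API, nothing project-specific:

```
# Sketch, not part of this commit: the same crawl-all loop using
# spider_loader, the non-deprecated counterpart of CrawlerProcess.spiders.
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings

process = CrawlerProcess(get_project_settings())

# spider_loader.list() returns the name of every spider in the project
for name in process.spider_loader.list():
    print("Running spider %s" % name)
    process.crawl(name)  # crawl() also accepts a spider name string

process.start()  # blocks until every scheduled crawl has finished
```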
