Skip to content

Commit

Permalink
release 1.8.6
Browse files Browse the repository at this point in the history
  • Loading branch information
Canux CHENG authored and Canux CHENG committed Nov 25, 2019
1 parent 45918cb commit 7944b4e
Show file tree
Hide file tree
Showing 11 changed files with 269 additions and 6 deletions.
5 changes: 4 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -57,13 +57,16 @@ New Features:

docker
virtualbox
python3(WIP)
python3

## 1.8

python3 support.

New Features:

gitlab
firewall

## 1.9

Expand Down
10 changes: 10 additions & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,31 +1,41 @@
asn1crypto==0.24.0
bcrypt==3.1.7
bleach==3.1.0
certifi==2019.6.16
cffi==1.12.3
chardet==3.0.4
colorama==0.4.1
cryptography==2.7
docker==4.0.2
docutils==0.15.2
enum34==1.1.6
idna==2.8
libvirt-python==5.5.0
ntlm-auth==1.3.0
paramiko==2.6.0
pika==1.1.0
pkginfo==1.5.0.1
pycparser==2.19
Pygments==2.4.2
pymongo==3.8.0
pymssql==2.1.4
PyMySQL==0.9.3
PyNaCl==1.3.0
python-iptables==0.14.0
pywinrm==0.3.0
PyYAML==5.1.1
readme-renderer==24.0
redis==3.2.1
requests==2.22.0
requests-ntlm==1.1.0
requests-toolbelt==0.9.1
robotframework==3.1.2
selenium==3.141.0
six==1.12.0
SQLAlchemy==1.3.5
tqdm==4.36.1
twine==2.0.0
urllib3==1.25.3
webencodings==0.5.1
websocket-client==0.56.0
xmltodict==0.12.0
2 changes: 1 addition & 1 deletion super_devops/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,5 +15,5 @@
DESCRIPTION:
"""

__version__ = "1.8.4"
__version__ = "1.8.6"
__author__ = "Canux CHENG"
Empty file.
112 changes: 112 additions & 0 deletions super_devops/firewall/iptables_wrapper.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,112 @@
import sys
import logging

logger = logging.getLogger(__name__)

import iptc

class BaseIptables(object):
    """Thin convenience wrapper around python-iptables (iptc).

    Binds to one iptables table and exposes chain/rule helpers that take
    plain dicts in the ``iptc.easy`` rule format.

    NOTE(review): manipulating iptables requires root privileges — confirm
    the calling context before use.
    """

    def __init__(self, table='filter'):
        """Bind to one iptables table.

        :param table: table name, case-insensitive; one of
            filter / nat / mangle / raw / security.
        :raises ValueError: if the table name is not supported.
        """
        supported = {
            "FILTER": iptc.Table.FILTER,
            "NAT": iptc.Table.NAT,
            "MANGLE": iptc.Table.MANGLE,
            "RAW": iptc.Table.RAW,
            "SECURITY": iptc.Table.SECURITY,
        }
        try:
            self.table_name = supported[table.upper()]
        except KeyError:
            raise ValueError("table not support!")
        self.table = iptc.Table(self.table_name)

    def delete_user_define_chain(self):
        """Flush the whole table.

        NOTE(review): relies on ``iptc.Table.flush()`` removing user-defined
        chains as a side effect — confirm against the python-iptables docs
        for the installed version.
        """
        logger.debug("delete all user define chain.")
        self.table.flush()

    def clean_builtin_chain(self):
        """Remove every rule from each builtin chain of this table."""
        logger.debug("delete all rules from builtin chain.")
        for chain in self.table.chains:
            if chain.is_builtin():
                chain.flush()

    def set_policy_for_builtin_chain(self, policy="ACCEPT"):
        """Set *policy* (e.g. ACCEPT/DROP, case-insensitive) on all builtin chains."""
        logger.debug("set policy for all builtin chain.")
        for chain in self.table.chains:
            if chain.is_builtin():
                chain.set_policy(policy.upper())

    def check_rule_exist_on_chain(self, chain, rule_dict):
        """Return True if *rule_dict* matches an existing rule on *chain*.

        :param chain: chain name (upper-cased before lookup).
        :param rule_dict: rule in ``iptc.easy`` dict format.
        """
        target = iptc.Chain(self.table, chain.upper())
        for rule in target.rules:
            if iptc.easy.decode_iptc_rule(rule) == rule_dict:
                logger.debug("rule exist.")
                return True
        logger.debug("rule not exist.")
        return False

    def delete_rule_from_chain(self, chain, rule_dict):
        """Delete the rule described by *rule_dict* from *chain*."""
        logger.debug("delete rule from chain.")
        target = iptc.Chain(self.table, chain.upper())
        target.delete_rule(iptc.easy.encode_iptc_rule(rule_dict))

    def replace_rule_from_chain(self, chain, rule_dict):
        """Replace a rule on *chain* with the one described by *rule_dict*."""
        logger.debug("replace rule from chain.")
        target = iptc.Chain(self.table, chain.upper())
        target.replace_rule(iptc.easy.encode_iptc_rule(rule_dict))

    def append_rule_to_chain(self, chain, rule_dict):
        """Append the rule described by *rule_dict* to the end of *chain*."""
        logger.debug("append rule to chain.")
        target = iptc.Chain(self.table, chain.upper())
        target.append_rule(iptc.easy.encode_iptc_rule(rule_dict))

    def insert_rule_to_chain(self, chain, rule_dict):
        """Insert the rule described by *rule_dict* at the top of *chain*."""
        logger.debug("insert rule to chain.")
        target = iptc.Chain(self.table, chain.upper())
        target.insert_rule(iptc.easy.encode_iptc_rule(rule_dict))


if __name__ == "__main__":
    # Demo: remove the MASQUERADE rule for 172.20.0.0/16 from the nat table
    # if present, then wipe the filter table back to open defaults.
    masquerade = {
        'src': '172.20.0.0/16',
        'dst': '!172.20.0.0/16',
        'target': 'MASQUERADE'
    }
    nat = BaseIptables('nat')
    if nat.check_rule_exist_on_chain('POSTROUTING', masquerade):
        nat.delete_rule_from_chain('postrouting', masquerade)

    fw = BaseIptables('filter')
    fw.delete_user_define_chain()
    fw.clean_builtin_chain()
    fw.set_policy_for_builtin_chain()


7 changes: 5 additions & 2 deletions super_devops/grafana/grafana_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,15 +13,18 @@

class BaseGrafana(object):
def __init__(
self, grafana_url="http://localhost:3000/",
self, grafana_url="http://localhost:3000/", key=None,
username=None, password=None, domain=None
):
self.grafana_url = grafana_url
self.username = username
self.password = password
self.domain = domain

self.header = {'Content-Type': 'application/json'}
self.header = {
'Content-Type': 'application/json',
'Authorization': 'Bearer {}'.format(key)
}

def check_data_source_exist_by_name(self, name):
try:
Expand Down
57 changes: 57 additions & 0 deletions super_devops/tick_stack/influxdb_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,3 +131,60 @@ def create_retention_policy(
return False
except Exception:
raise

def show(self, ifql):
    """Run a metadata statement (e.g. SHOW ...) against the query endpoint.

    :param ifql: the InfluxQL statement to execute.
    :return: decoded JSON response on HTTP 200, otherwise None.
    """
    request_options = {
        'headers': self.header,
        'timeout': 60,
        'verify': False,
    }
    with BaseRequests(
        username=self.username,
        password=self.password,
        domain=self.domain
    ) as req:
        res = req.get(self.query_url, params={"q": ifql}, **request_options)
        logger.debug("show res: {}".format(res.content))
        if res.status_code != 200:
            logger.error("show failed")
            return None
        return json.loads(str(res.content, "utf-8"))

def query(self, db, ifql):
    """Execute *ifql* against database *db* on the /query endpoint.

    :param db: target database name (sent as the ``db`` query parameter).
    :param ifql: the InfluxQL statement to execute.
    :return: decoded JSON response on HTTP 200, otherwise None.
    """
    endpoint = urlparse.urljoin(
        self.influxdb_url, "/query?db={}".format(db)
    )
    with BaseRequests(
        username=self.username,
        password=self.password,
        domain=self.domain
    ) as req:
        res = req.get(
            endpoint,
            params={"q": ifql},
            headers=self.header,
            timeout=60,
            verify=False
        )
        logger.debug("select res: {}".format(res.content))
        if res.status_code != 200:
            logger.error("select failed.")
            return None
        return json.loads(str(res.content, "utf-8"))
30 changes: 29 additions & 1 deletion super_devops/tick_stack/kapacitor_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,35 @@ def disable_smtp(self):
except Exception:
raise

# TODO
def update_task(self, id, data):
    """Update an existing Kapacitor task via ``PATCH /tasks/<id>``.

    :param id: the task id to update.
    :param data: request body with the fields to change.
    :return: True on HTTP 200, False otherwise.
    """
    url = self.base_url + "/tasks/{}".format(id)
    logger.debug("url: {}".format(url))
    with BaseRequests(
        username=self.username, password=self.password,
        domain=self.domain
    ) as req:
        res = req.patch(url, data)
        logger.debug(
            "update task res: {}".format(res.content)
        )
        # Fixed typo: was "udpate task status_code".
        logger.debug(
            "update task status_code: {}".format(
                res.status_code)
        )
        if res.status_code == 200:
            logger.debug("update task succeed.")
            return True
        logger.debug("update task failed.")
        return False

def get_all_tasks(self):
"""Only enabled tasks can be listed"""
try:
Expand Down
24 changes: 24 additions & 0 deletions tests/test_grafana_wrapper.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
import unittest

from super_devops.grafana.grafana_wrapper import BaseGrafana


class GrafanaTestCase(unittest.TestCase):
    """Smoke tests against a live Grafana at 10.103.64.207 (needs network)."""

    @unittest.skip('ignore')
    def test_check_datasource_exist_with_credentials(self):
        # Renamed: previously shared its name with the API-key test below,
        # so Python silently shadowed it and it could never run.
        # BaseGrafana's second positional parameter is `key`, so username
        # and password must be passed by keyword here.
        grafana = BaseGrafana(
            "http://10.103.64.207:3000",
            username="sandbox", password="password"
        )
        result = grafana.check_data_source_exist_by_name("MAF")
        self.assertEqual(True, result, msg="check datasource exist failed.")

    def test_check_datasource_exist(self):
        # API-key (Bearer token) variant.
        grafana = BaseGrafana("http://10.103.64.207:3000",
            'eyJrIjoiSU84dnphRGlOUTlaeGNCdTQ3clJlZjBxZG9IUFQ3cGoiLCJuIjoidGVzdCIsImlkIjoxfQ=='
        )
        result = grafana.check_data_source_exist_by_name("MAF")
        self.assertEqual(True, result, msg="check datasource exist failed.")



if __name__ == "__main__":
unittest.main()
18 changes: 17 additions & 1 deletion tests/test_influxdb_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,20 +4,36 @@


class InfluxdbTestCase(unittest.TestCase):
    """Smoke tests against a live InfluxDB instance (needs network)."""

    @unittest.skip('ignore')
    def test_check_database_exist(self):
        # Removed a stale duplicate call against "sandboxav" that was left
        # behind by a rename; only the "devops" check belongs here.
        result = BaseInfluxdb().check_database_exist("devops")
        self.assertEqual(True, result, msg="check database exist failed.")

    @unittest.skip('ignore')
    def test_create_database(self):
        result = BaseInfluxdb().create_database("sandboxav")
        self.assertEqual(True, result, msg="create database failed.")

    @unittest.skip('ignore')
    def test_create_rp(self):
        result = BaseInfluxdb().create_retention_policy(
            "sandboxav", "sandboxav", "30d", 2, True
        )
        self.assertEqual(True, result, msg="create rp failed.")

    def test_query(self):
        ifql = """
        SELECT 100 - mean("usage_idle") AS "idle"
        FROM "cpu"
        WHERE "cpu" = 'cpu-total' AND time > now() - 5m
        GROUP BY time(1m), "host" fill(none)
        """
        result = BaseInfluxdb().query("devops", ifql)
        self.assertIsNotNone(result, 'select failed.')
        # Sample response captured from a real run, kept for reference:
        """
{'results': [{'statement_id': 0, 'series': [{'name': 'cpu', 'tags': {'host': 'AT-MySQL'}, 'columns': ['time', 'idle'], 'values': [['2019-11-19T09:39:00Z', 0.03125586093945287], ['2019-11-19T09:40:00Z', 0.03958997499701411], ['2019-11-19T09:41:00Z', 0.04375117218303615], ['2019-11-19T09:42:00Z', 0.04583723990482724], ['2019-11-19T09:43:00Z', 0.04167330867234398], ['2019-11-19T09:44:00Z', 0.04688125038921953]]}, {'name': 'cpu', 'tags': {'host': 'AT-Rabbitmq-Haproxy'}, 'columns': ['time', 'idle'], 'values': [['2019-11-19T09:39:00Z', 0.2501250627643259], ['2019-11-19T09:40:00Z', 0.2669670173403347], ['2019-11-19T09:41:00Z', 0.35020860447122004], ['2019-11-19T09:42:00Z', 0.300250225104719], ['2019-11-19T09:43:00Z', 0.31704212966837986], ['2019-11-19T09:44:00Z', 0.37507522549957173]]}, {'name': 'cpu', 'tags': {'host': 'AT-Rabbitmq01'}, 'columns': ['time', 'idle'], 'values': [['2019-11-19T09:39:00Z', 0.4509018040227346], ['2019-11-19T09:40:00Z', 0.48381715054661356], ['2019-11-19T09:41:00Z', 0.583450116553351], ['2019-11-19T09:42:00Z', 0.5669502336797478], ['2019-11-19T09:43:00Z', 0.5671756715035485], ['2019-11-19T09:44:00Z', 0.47571357079544896]]}, {'name': 'cpu', 'tags': {'host': 'AT-Rabbitmq02'}, 'columns': ['time', 'idle'], 'values': [['2019-11-19T09:39:00Z', 0.15022533814712347], ['2019-11-19T09:40:00Z', 0.33358354216329644], ['2019-11-19T09:41:00Z', 0.38348345841518494], ['2019-11-19T09:42:00Z', 0.36691691687042294], ['2019-11-19T09:43:00Z', 0.4001001000847708], ['2019-11-19T09:44:00Z', 0.40028773124370787]]}, {'name': 'cpu', 'tags': {'host': 'AT-Redis'}, 'columns': ['time', 'idle'], 'values': [['2019-11-19T09:39:00Z', 0.05003752819244767], ['2019-11-19T09:40:00Z', 0.141675032254156], ['2019-11-19T09:41:00Z', 0.11671252617077243], ['2019-11-19T09:42:00Z', 0.14165005411926757], ['2019-11-19T09:43:00Z', 0.10837710878057294], ['2019-11-19T09:44:00Z', 0.16255001763602195]]}, {'name': 'cpu', 'tags': {'host': 'backup-server'}, 'columns': ['time', 'idle'], 'values': 
[['2019-11-19T09:39:00Z', 1.587996248276184], ['2019-11-19T09:40:00Z', 1.1777606965329852], ['2019-11-19T09:41:00Z', 1.2629336351638472], ['2019-11-19T09:42:00Z', 1.240201263942751], ['2019-11-19T09:43:00Z', 1.127428122710569], ['2019-11-19T09:44:00Z', 1.22249467921921]]}, {'name': 'cpu', 'tags': {'host': 'gitlab'}, 'columns': ['time', 'idle'], 'values': [['2019-11-19T09:39:00Z', 3.8281103402784424], ['2019-11-19T09:40:00Z', 3.311290865158], ['2019-11-19T09:41:00Z', 3.5475162263316946], ['2019-11-19T09:42:00Z', 3.353584195154781], ['2019-11-19T09:43:00Z', 3.336089292237105], ['2019-11-19T09:44:00Z', 2.692893340444357]]}, {'name': 'cpu', 'tags': {'host': 'jenkins'}, 'columns': ['time', 'idle'], 'values': [['2019-11-19T09:39:00Z', 0.1626524865668415], ['2019-11-19T09:40:00Z', 0.11885217371116141], ['2019-11-19T09:41:00Z', 0.1063542732930074], ['2019-11-19T09:42:00Z', 0.1125980023593911], ['2019-11-19T09:43:00Z', 0.10843003414701968], ['2019-11-19T09:44:00Z', 0.07194264214393797]]}, {'name': 'cpu', 'tags': {'host': 'repo-ubuntu16'}, 'columns': ['time', 'idle'], 'values': [['2019-11-19T09:39:00Z', 16.326416465400754], ['2019-11-19T09:40:00Z', 16.987985865078528], ['2019-11-19T09:41:00Z', 16.027362414904218], ['2019-11-19T09:42:00Z', 15.930524600712772], ['2019-11-19T09:43:00Z', 16.177572516879493], ['2019-11-19T09:44:00Z', 18.010139417063854]]}, {'name': 'cpu', 'tags': {'host': 'research'}, 'columns': ['time', 'idle'], 'values': [['2019-11-19T09:39:00Z', 30.47511595803742], ['2019-11-19T09:40:00Z', 36.42952133947917], ['2019-11-19T09:41:00Z', 24.29001018455432], ['2019-11-19T09:42:00Z', 3.689162605377419], ['2019-11-19T09:43:00Z', 9.95440521676737], ['2019-11-19T09:44:00Z', 12.720892342859742]]}, {'name': 'cpu', 'tags': {'host': 'samples'}, 'columns': ['time', 'idle'], 'values': [['2019-11-19T09:39:00Z', 1.1638092845018377], ['2019-11-19T09:40:00Z', 1.5474272887059897], ['2019-11-19T09:41:00Z', 1.5473625546416798], ['2019-11-19T09:42:00Z', 1.3978749994601003], 
['2019-11-19T09:43:00Z', 1.2135650349546694], ['2019-11-19T09:44:00Z', 1.4077570875834908]]}, {'name': 'cpu', 'tags': {'host': 'soniclinux-repo'}, 'columns': ['time', 'idle'], 'values': [['2019-11-19T09:39:00Z', 0.5005004986481225], ['2019-11-19T09:40:00Z', 0.4839175766902031], ['2019-11-19T09:41:00Z', 0.8690839841601843], ['2019-11-19T09:42:00Z', 0.6685232929676204], ['2019-11-19T09:43:00Z', 1.0696469107433728], ['2019-11-19T09:44:00Z', 0.5005004986481225]]}, {'name': 'cpu', 'tags': {'host': 'workstation'}, 'columns': ['time', 'idle'], 'values': [['2019-11-19T09:39:00Z', 2.6376720900224626], ['2019-11-19T09:40:00Z', 2.71707167977236], ['2019-11-19T09:41:00Z', 7.531748826701985], ['2019-11-19T09:42:00Z', 9.060651847100033], ['2019-11-19T09:43:00Z', 7.538644905142789], ['2019-11-19T09:44:00Z', 2.6209794663899117]]}]}]}
        """


if __name__ == "__main__":
unittest.main()
10 changes: 10 additions & 0 deletions tests/test_redis_wrapper.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
import unittest

from super_devops.nosql.redis_wrapper import BaseRedis


class RedisTestCase(unittest.TestCase):
    """Smoke test against a live Redis at 10.103.64.188 (needs network)."""

    def test_ping(self):
        redis = BaseRedis('10.103.64.188')
        # Assert instead of print so the test can actually fail;
        # assumes BaseRedis.ping() returns a truthy value on success
        # (redis-py returns True) — TODO confirm against the wrapper.
        self.assertTrue(redis.ping())

0 comments on commit 7944b4e

Please sign in to comment.