Compare commits


6 Commits

Author    SHA1        Date                        Message
andrewso  2a2acbef03  2020-10-12 15:05:32 +01:00  [11.10.20] Passing SynId through AMQ and logging, removed graphql query
andrewso  d39996da0a  2020-10-12 14:48:52 +01:00  [11.10.20] Passing SynId through AMQ and logging, removed graphql query
andrewso  abd53bce27  2020-10-12 14:01:51 +01:00  [11.10.20] syncId generation
andrewso  aeb332c15a  2020-10-11 14:07:09 +01:00  [10.10.20] Reduced CPU limit
andrewso  d44ae664e8  2020-10-11 14:03:03 +01:00  [10.10.20] Reduced CPU limit
andrewso  25a01025b8  2020-10-07 10:03:53 +01:00  [07.10.20] Log timestamp with @
5 changed files with 43 additions and 42 deletions

View File

@@ -99,10 +99,10 @@ spec:
         imagePullPolicy: Always
         resources:
           requests:
-            cpu: 32m
+            cpu: 25m
             memory: 32Mi
           limits:
-            cpu: 75m
+            cpu: 25m
             memory: 64Mi
         securityContext:
           capabilities:

View File

@@ -12,16 +12,13 @@ from pricing.exchanges.gemini import geminiPublicTicker, geminiVolAskBid, gemini
 from pricing.exchanges.bitstamp import bitstampVolAskBid, bitstampHighLow, bitstampOpenClose
 from pricing.exchanges.kraken import krakenVolAskBid, krakenHighLow, krakenOpenClose
-from src.utils.databaseConnect import send
+# from src.utils.databaseConnect import send
 from src.utils.activemqConnect import activeMQSender
-import json
+import json, uuid
 from src.utils.jsonLogger import log
-btc_usd="/home/price-collector/src/resources/queries/V1_INSERT_NEW_PRICE_RECORD_BTC.graphql"
-xmr_usd="/home/price-collector/src/resources/queries/V1_INSERT_NEW_PRICE_RECORD_XMR.graphql"
 def averager(type):
     timestamp = datetime.datetime.now() + datetime.timedelta(hours=1)
@@ -77,23 +74,23 @@ def getOpenClose(type):
     return open, close
-def sendToGateway(c_type, timestamp, av_price, high, low, vol, o_price, c_price):
-    with open(btc_usd, 'r') as file:
-        data = file.read()
-    strippedTimestamp = timestamp.replace(minute=0, second=0, microsecond=0)
-    timestamp = '"'+strippedTimestamp.strftime('%Y-%m-%dT%H:%M:%S')+'"'
-    type = '"'+c_type+'"'
-    query = data % (timestamp, type, round(av_price, 2),
-                    round(high, 2),
-                    round(low, 2),
-                    round(o_price, 2),
-                    round(c_price, 2),
-                    round(vol, 2))
-    log("Query sending down to db-gateway -- ({})".format(query), 'INFO')
+# def sendToGateway(c_type, timestamp, av_price, high, low, vol, o_price, c_price):
+#
+#     with open(btc_usd, 'r') as file:
+#         data = file.read()
+#
+#     strippedTimestamp = timestamp.replace(minute=0, second=0, microsecond=0)
+#     timestamp = '"'+strippedTimestamp.strftime('%Y-%m-%dT%H:%M:%S')+'"'
+#     type = '"'+c_type+'"'
+#
+#     query = data % (timestamp, type, round(av_price, 2),
+#                     round(high, 2),
+#                     round(low, 2),
+#                     round(o_price, 2),
+#                     round(c_price, 2),
+#                     round(vol, 2))
+#
+#     log("Query sending down to db-gateway -- ({})".format(query), 'INFO')
 # status, response = send(query, log)
@@ -122,11 +119,12 @@ def sentToArtemis(c_type, timestamp, av_price, high, low, vol, o_price, c_price)
     messageJson = json.dumps(message, indent = 4)
-    log("Sending message to PricingSave queue", 'INFO')
-    log("Message: {}".format(message), 'INFO')
-    activeMQSender(messageJson)
+    syncId = uuid.uuid4()
+    log("Sending message to PricingSave queue", 'INFO', syncId)
+    log("Message: {}".format(message), 'INFO', syncId)
+    activeMQSender(messageJson, syncId)
 def timeFunction():
     global time
@@ -147,8 +145,6 @@ def collector(c_type):
     global time
     time = timeFunction()
-    print(time)
     av_price, timestamp = averager(c_type)
     vol = getVol(c_type)
     high, low = getHighLow(c_type)
@@ -163,7 +159,7 @@ def collector(c_type):
 # Dynamically Spin up Child process for each type wanting to track
 def collectorMain(c_type):
-    log("== Historical Price Collector ==", 'INFO')
+    log("Starting Historical Price Collector", 'INFO')
     collector(c_type)
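
Taken together, the changes to this file generate one sync ID per published message and thread it through both the JSON logs and the ActiveMQ send. Below is a minimal, self-contained sketch of that pattern; the log and send_to_queue helpers are stand-ins for the repo's src.utils modules, and the UUID is stringified up front since STOMP header values and JSON log fields end up as plain text anyway.

    # Stand-alone sketch of the sync-ID flow; log/send_to_queue are placeholders,
    # not the project's real src.utils.jsonLogger / src.utils.activemqConnect.
    import json
    import logging
    import uuid

    logging.basicConfig(level=logging.INFO, format="%(message)s")
    logger = logging.getLogger("price-collector-sketch")

    def log(message, level, syncId=""):
        # Placeholder logger: tag every line with the correlation ID.
        logger.info("[%s] %s (X-CRYPTO-Sync-ID=%s)", level, message, syncId)

    def send_to_queue(body, syncId):
        # Placeholder sender: the real one sets the ID as a STOMP header.
        log("Would send {} bytes to PricingSave".format(len(body)), "INFO", syncId)

    def publish_price(message):
        syncId = str(uuid.uuid4())            # one ID per outgoing message
        body = json.dumps(message, indent=4)
        log("Sending message to PricingSave queue", "INFO", syncId)
        log("Message: {}".format(message), "INFO", syncId)
        send_to_queue(body, syncId)

    publish_price({"type": "BTC", "average_price": 10000.0})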

View File

@@ -1 +0,0 @@
-mutation { createBtc(createdDate: %s, type: %s, average_price: %f, high_price: %f, low_price: %f, open_price: %f, close_price: %f, volume: %f){ id } }

View File

@@ -17,14 +17,20 @@ class keys():
     def returnKeys(self):
         return self.addr, self.port, self.amqU, self.amqP
-def activeMQSender(message):
+def activeMQSender(message, syncId):
     addr, port, mqUser, mqPass = keys().returnKeys()
-    log("Attempting Connection to Artemis...", 'INFO')
+    log("Attempting Connection to Artemis...", 'INFO', syncId)
     con = stomp.Connection([(addr, port)], auto_content_length=False)
     con.connect( mqUser, mqPass, wait=True)
-    con.send("PricingSave", message, content_type="application/json", headers={"Content-Type":"application/json"})
+    con.send("PricingSave",
+             message,
+             content_type="application/json",
+             headers={
+                 "Content-Type":"application/json",
+                 "X-CRYPTO-Sync-ID":syncId
+             })
     con.disconnect()
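
The sender now stamps every frame published to PricingSave with an X-CRYPTO-Sync-ID header. For reference, a consumer can read that header back out of incoming frames; the sketch below assumes a recent stomp.py (8.x, where listener callbacks receive a Frame object; older releases pass headers and body as separate arguments) and uses placeholder host, port, and credentials.

    # Hypothetical consumer-side sketch: print the X-CRYPTO-Sync-ID header of each
    # message arriving on the PricingSave queue. Connection details are placeholders.
    import time
    import stomp

    class SyncIdListener(stomp.ConnectionListener):
        def on_message(self, frame):
            # frame.headers is a dict of STOMP headers, frame.body the JSON payload
            sync_id = frame.headers.get("X-CRYPTO-Sync-ID", "<missing>")
            print("PricingSave message, sync id:", sync_id)
            print("body:", frame.body)

    con = stomp.Connection([("localhost", 61613)], auto_content_length=False)
    con.set_listener("sync-id-listener", SyncIdListener())
    con.connect("mqUser", "mqPass", wait=True)
    con.subscribe(destination="PricingSave", id="1", ack="auto")  # mirrors the sender's destination
    time.sleep(10)   # give the broker a moment to deliver before disconnecting
    con.disconnect()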

View File

@@ -8,10 +8,10 @@ import datetime
 class CustomJsonFormatter(jsonlogger.JsonFormatter):
     def add_fields(self, log_record, record, message_dict):
         super(CustomJsonFormatter, self).add_fields(log_record, record, message_dict)
-        if not log_record.get('timestamp'):
+        if not log_record.get('@timestamp'):
             # this doesn't use record.created, so it is slightly off
             now = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
-            log_record['timestamp'] = now
+            log_record['@timestamp'] = now
         if log_record.get('level'):
             log_record['level'] = log_record['level'].upper()
         else:
@@ -23,18 +23,18 @@ def setup_logging(log_level='INFO'):
     logger.setLevel(log_level)
     logHandler = logging.StreamHandler()
-    formatter = CustomJsonFormatter('%(timestamp)s %(level)s %(name)s %(message)s')
+    formatter = CustomJsonFormatter('%(@timestamp)s %(level)s %(name)s %(message)s')
     logHandler.setFormatter(formatter)
     logger.addHandler(logHandler)
-def log(message, level):
+def log(message, level, syncId=""):
     logger = logging.getLogger(__name__)
     if level == 'INFO':
-        logger.info(message)
+        logger.info(message, extra={"X-CRYPTO-Sync-ID" : syncId})
     elif level == 'WARN':
-        logger.warn(message)
+        logger.warn(message, extra={"X-CRYPTO-Sync-ID" : syncId})
     elif level == 'ERR':
-        logger.error(message)
+        logger.error(message, extra={"X-CRYPTO-Sync-ID" : syncId})
     elif level == 'DEBUG':
-        logger.debug(message)
+        logger.debug(message, extra={"X-CRYPTO-Sync-ID" : syncId})
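
With python-json-logger, fields passed through extra= are merged into the emitted JSON object, so the syncId surfaces as an X-CRYPTO-Sync-ID field alongside the renamed @timestamp key. A condensed, self-contained sketch of the same formatter and one example call follows; the logger name and the sample output are illustrative, not taken from the repo.

    # Sketch of the changed logger: "@timestamp" plus a sync ID via extra=.
    # Requires python-json-logger (imported as jsonlogger, as in the diff).
    import datetime
    import logging
    from pythonjsonlogger import jsonlogger

    class CustomJsonFormatter(jsonlogger.JsonFormatter):
        def add_fields(self, log_record, record, message_dict):
            super(CustomJsonFormatter, self).add_fields(log_record, record, message_dict)
            if not log_record.get('@timestamp'):
                log_record['@timestamp'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
            log_record['level'] = (log_record.get('level') or record.levelname).upper()

    logger = logging.getLogger("jsonLoggerSketch")
    handler = logging.StreamHandler()
    handler.setFormatter(CustomJsonFormatter('%(@timestamp)s %(level)s %(name)s %(message)s'))
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)

    logger.info("Sending message to PricingSave queue",
                extra={"X-CRYPTO-Sync-ID": "3f0c2a9e-0000-0000-0000-000000000000"})
    # Emits a single JSON line along the lines of:
    # {"@timestamp": "2020-10-12T14:05:32.123456Z", "level": "INFO",
    #  "name": "jsonLoggerSketch", "message": "Sending message to PricingSave queue",
    #  "X-CRYPTO-Sync-ID": "3f0c2a9e-0000-0000-0000-000000000000"}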