From a3d1ba631033fae3dc270bb67b85bfd950a02877 Mon Sep 17 00:00:00 2001
From: Jeffrey Paul
Date: Mon, 11 Mar 2013 23:22:36 +0100
Subject: [PATCH] new stuff

---
 checkcert/checkcert     | 12 +++++++--
 fetchtweets/fetchtweets | 18 +++++++++++++
 scrapers/amexscraper.py | 59 +++++++++++++++++++++++++++++++++++++++++
 3 files changed, 87 insertions(+), 2 deletions(-)
 create mode 100755 fetchtweets/fetchtweets
 create mode 100755 scrapers/amexscraper.py

diff --git a/checkcert/checkcert b/checkcert/checkcert
index 5704db5..2d3ffd7 100755
--- a/checkcert/checkcert
+++ b/checkcert/checkcert
@@ -103,9 +103,17 @@ class Website(object):
             raise ReachabilityProblem("can't access: '%s'" % self.urlstring())
         if self.is_tls():
             self._get_cert()
-            if self.cert.expiresSoon() or not self.cert.validTime():
+            if not self.cert.validTime():
                 raise CertificateProblem(
-                    "cert for %s is invalid or expires soon: %s" % (
+                    "cert for %s is invalid: %s to %s" % (
+                        self.urlstring(),
+                        self.cert.notBefore(),
+                        self.cert.notAfter()
+                    )
+                )
+            if self.cert.expiresSoon():
+                raise CertificateProblem(
+                    "cert for %s expires soon: %s" % (
                         self.urlstring(),
                         self.cert.notAfter()
                     )
diff --git a/fetchtweets/fetchtweets b/fetchtweets/fetchtweets
new file mode 100755
index 0000000..e83fc38
--- /dev/null
+++ b/fetchtweets/fetchtweets
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+
+from pprint import pformat
+
+# lol https://gist.github.com/re4k/3878505
+APIKEY = '3rJOl1ODzm9yZy63FACdg'
+APISECRET = '5jPoQ5kQvMJFDYRNE8bQ4rHuds4xJqhvgNJM4awaE8'
+
+import twitter
+api = twitter.Api()
+# consumer_key = APIKEY,
+# consumer_secret = APISECRET
+#)
+
+statuses = api.GetUserTimeline('sneakatdatavibe')
+print pformat([s.__dict__ for s in statuses])
+
+#print api.VerifyCredentials()
diff --git a/scrapers/amexscraper.py b/scrapers/amexscraper.py
new file mode 100755
index 0000000..f2bcb26
--- /dev/null
+++ b/scrapers/amexscraper.py
@@ -0,0 +1,59 @@
+#!/Users/sneak/dev/venv-2.7/bin/python
+from pprint import pformat
+import os
+import re
+import json
+import logging
+logging.basicConfig(level=logging.ERROR)
+log = logging.getLogger()
+from ofxclient.request import Builder
+
+url = 'https://online.americanexpress.com/myca/ofxdl/desktop/' + \
+    'desktopDownload.do?request_type=nl_ofxdownload'
+
+# this exists because ofxclient is tightly coupled with their "Institution"
+# class which shits all over my home directory with caching and
+# credential storage that I don't want
+class MockAmexInstitution(object):
+    def __init__(self,user=None,password=None):
+        self.username = user
+        self.password = password
+        self.dsn = {
+            'url': url,
+            'org': 'AMEX',
+            'fid': '3101',
+        }
+
+class AmexScraper(object):
+    def __init__(self,*args,**kwargs):
+        self.user = kwargs.pop('user')
+        self.password = kwargs.pop('password')
+
+    def scrape(self):
+        i = MockAmexInstitution(
+            user=self.user,
+            password=self.password
+        )
+        b = Builder(i)
+        r = b.doQuery(b.acctQuery())
+
+        # i could parse the sgml. or i could do this.
+        c = re.compile(r'<ACCTID>(\d+)', re.MULTILINE)
+        out = {}
+        for acctnum in re.findall(c,r):
+            out[acctnum] = {}
+        c = re.compile(r'<BALAMT>([\d\.\-]+)', re.MULTILINE)
+        for acctnum in out.keys():
+            r = b.doQuery(b.ccQuery(acctnum,'19700101000000'))
+            out[acctnum]['balance'] = re.findall(c,r)[0]
+        return out
+
+def main():
+    s = AmexScraper(
+        user=os.environ['AMEXUSERNAME'],
+        password=os.environ['AMEXPASSWORD']
+    )
+    print json.dumps(s.scrape())
+
+if __name__=="__main__":
+    main()