I'm just trying to get modules like lxml to install with easy_install and I can't get it to work on Windows or Linux.
pip > easy_install
On Windows, a prebuilt exe worked for me.
On Ubuntu, apt-get works.
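Whichever route you take, a quick import from the interpreter is usually enough to sanity-check the install (this just assumes lxml ended up in the Python you're actually running):
>>> import lxml.etree
>>> lxml.etree.LXML_VERSION        # echoes the installed version as a tuple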
import urllib
import urllib2

class Api(object):
    def __init__(self):
        # Default spin settings; fill in your own WordAI credentials
        self.wai_slider = 50
        self.wai_user = 'XXXXXXXXXXXXXXXXXXXXXXXXX'
        self.wai_pass = 'XXXXXXXXXXXXXXXXXXXXXXXXX'
        self.wai_speed = False
        self.wai_protected = False
        self.wai_nooriginal = False

    def wai_spin(self, body):
        # Build the POST payload for the WordAI spin endpoint
        api_url = "http://beta.wordai.com/spinit.php"
        data = {}
        data['s'] = body
        data['slider'] = self.wai_slider
        data['api'] = "true"
        data['email'] = self.wai_user
        data['pass'] = self.wai_pass
        # Optional flags are only sent when enabled
        if self.wai_speed:
            data['speed'] = self.wai_speed
        if self.wai_protected:
            data['protected'] = self.wai_protected
        if self.wai_nooriginal:
            data['nooriginal'] = self.wai_nooriginal
        spin_req = urllib2.Request(api_url, urllib.urlencode(data))
        return urllib2.urlopen(spin_req).read()
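A minimal usage sketch for the class above; the email and password are obviously placeholders you'd swap for your own WordAI login:
api = Api()
api.wai_user = 'you@example.com'   # placeholder credentials
api.wai_pass = 'secret'
print(api.wai_spin('hello world today is a nice day'))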
How do you pass options to it right now?
I don't, as I just wanted something simple to replace the Best Spinner code I was using previously.
I will (or won't) add relevant functions for getting and setting variables if I need more flexibility.
I see what you're getting at anyway, and appreciate the link, which I'll have a look at now.
import urllib
import urllib2

def spin(content, username, password, slider=50, speed=False, protected=False, nooriginal=False):
    # Booleans are sent to the API as 'true'/'false' strings
    data = {'s': content, 'slider': slider, 'api': 'true', 'email': username, 'pass': password,
            'speed': str(speed).lower(), 'protected': str(protected).lower(), 'nooriginal': str(nooriginal).lower()}
    return urllib2.urlopen(urllib2.Request('http://beta.wordai.com/spinit.php', urllib.urlencode(data))).read()
>>> wordai.spin('hello world today is a nice day', username, password)
'{hello|hi} world today is {a nice|a good|a great|a wonderful} {day|time}'
Very much so! Cheers for the pointers.
You pretty much never need setters and getters in Python (@property FTW in complex cases). Hope that helps.
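For anyone who hasn't used it, here's a small illustrative sketch of @property replacing a getter/setter pair; the Spinner class and its clamping rule are invented for the example:
class Spinner(object):
    def __init__(self):
        self._slider = 50

    @property
    def slider(self):
        # Reads look like plain attribute access
        return self._slider

    @slider.setter
    def slider(self, value):
        # Validation happens transparently on assignment
        self._slider = max(0, min(100, value))

s = Spinner()
s.slider = 150     # goes through the setter
print(s.slider)    # 100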
###################################
# A iterable proxy handler class. #
# Author: crackp0t #
# Filename: proxyhandler.py #
###################################
from csv import reader

class ProxyHandler(object):
    # proxyFile: the full path to the file containing the proxies
    # shouldRewind: controls the file looping. Set to False to raise StopIteration
    #               after the first pass; set to True (the default) to go back to
    #               the start of the file and loop again.
    def __init__(self, proxyFile, shouldRewind=True):
        self._shouldRewind = shouldRewind
        self._proxyFile = proxyFile
        self.__LoadProxies()

    def next(self):
        try:
            return self.proxies.next()
        except StopIteration:
            if self._shouldRewind:
                # Reload the file and start over from the first proxy
                self.__LoadProxies()
                return self.proxies.next()
            else:
                raise

    def __iter__(self):
        return self

    def __LoadProxies(self):
        # Each line is expected to look like host:port (colon-delimited)
        fileContents = open(self._proxyFile).read().splitlines()
        self.proxies = reader(fileContents, delimiter=':')
Usage:
proxies = ProxyHandler('myproxies.txt')
while True:
    print proxies.next()
Also, why does the __LoadProxies method exist when it is only called by __init__?
crackp0t, some code to consider; it has proper delays for proxies, randomisation etc. (and no pseudo-private methods):
https://github.com/mattseh/magicrequests/blob/master/magicrequests.py#L97
If it's a delay for proxies, it's probably so they can "sleep" or not be used within X seconds to prevent an IP ban (like with Google or another site that notices a certain IP hitting it within the last 3 seconds).
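As a rough sketch of that idea (not the actual magicrequests implementation), you can track when each proxy was last handed out and skip any that are still "sleeping"; all names here are made up:
import time
import random

class CooldownProxies(object):
    # Hypothetical helper: only hand out a proxy that hasn't been
    # used within the last `delay` seconds.
    def __init__(self, proxies, delay=3):
        self.delay = delay
        self.last_used = dict((p, 0) for p in proxies)

    def get(self):
        while True:
            now = time.time()
            ready = [p for p, t in self.last_used.items() if now - t >= self.delay]
            if ready:
                proxy = random.choice(ready)
                self.last_used[proxy] = now
                return proxy
            time.sleep(0.1)  # everything is still cooling down, wait a bit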
def domain(url):
    """Strip a given URL to get just the domain (removing any
    sub-domains, the path, query, and fragments)."""
    from urlparse import urlparse
    # Extract the netloc
    urlo = urlparse(url)
    # Split by period and join based on number of segments
    domain = urlo.netloc.split('.')
    domain = '.'.join(len(domain[-2]) < 4 and domain[-3:] or domain[-2:])
    return '%s://%s' % (urlo.scheme, domain)
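A couple of examples of what that heuristic gives back; the length check on the second-to-last label is what keeps the extra segment for short second-level domains like co.uk:
>>> domain('http://www.example.com/some/path?q=1')
'http://example.com'
>>> domain('http://news.example.co.uk/article')
'http://example.co.uk'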
import datetime
import hashlib
import os
import pickle
import re

def function_cache(function, cache_days=5, call_function=True, **kwargs):
    # Cache a function's result on disk, keyed by the function name and a hash
    # of its kwargs; assumes a 'function_cache' directory already exists.
    function_name = function.func_name
    recent_days = [str(datetime.date.today() - datetime.timedelta(days=i)) for i in range(cache_days)]
    kwargs_str = re.sub(' at 0x[0-9a-z]+', '', str(kwargs)) # get rid of memory references
    kwargs_hash = hashlib.sha1(kwargs_str).hexdigest()
    # Return the newest cached result from the last `cache_days` days, if any
    for recent_day in recent_days:
        filename = 'function_cache/{day}-{function_name}-{hash}.pickle'.format(day=recent_day, function_name=function_name, hash=kwargs_hash)
        if os.path.isfile(filename):
            return pickle.load(open(filename, 'rb'))
    if call_function:
        # No cache hit: call the function and store today's result
        result = function(**kwargs)
        pickle.dump(result, open('function_cache/{day}-{function_name}-{hash}.pickle'.format(day=recent_days[0], function_name=function_name, hash=kwargs_hash), 'wb'))
        return result
    else:
        return False
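A hedged usage sketch, assuming you have a function_cache/ directory next to the script; fetch_page is just an invented example function:
import urllib2

def fetch_page(url):
    return urllib2.urlopen(url).read()

# The first call hits the network and pickles the result; later calls
# within cache_days reuse the pickle instead of refetching.
html = function_cache(fetch_page, cache_days=5, url='http://example.com/')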
https://github.com/mattseh/python-web
Nice to see there is Python code in this forum; usually I just find PHP stuff...
I want to ask @mattseh about this code:
Code:import web
What module is that? It's definitely not web.py, is it?