app.py
from flask import Flask, jsonify, request, render_template
import pickle

# NOTE(review): Flask's default template folder is 'templates'; this app
# overrides it to 'template' — confirm the directory really has that name.
app = Flask(__name__, template_folder='template')

# Load the pickled recommender model once at startup. A context manager is
# used so the file handle is closed (the original left open(...) unclosed).
# SECURITY: pickle.load executes arbitrary code — only load trusted files.
with open("model.pkl", 'rb') as _model_file:
    model = pickle.load(_model_file)
# app
# BUG FIX: Flask route decorators start with '@'. The original '#app.route'
# (a markdown-mangled paste) is a comment, so the route was never registered.
@app.route('/')
def home():
    """Render the recommendation input form on GET /."""
    return render_template('recommendation.html')
# routes
# BUG FIX: '@app.route', not '#app.route' — the POST endpoint was never registered.
@app.route('/api', methods=['POST'])
def predict():
    """Handle the form POST: look up recommendations for the submitted user.

    Reads the 'user name' form field, calls model() on it, and returns a
    minimal HTML page showing the result with a back button.
    """
    result = request.form
    query_user_name = result["user name"]
    # model() must *return* its result — a function that only prints returns
    # None, which is why nothing was shown in the browser.
    output_data = model(query_user_name)
    print(output_data)
    # send back to browser
    # (removed dead code: the original built an unused dict `user_input` and
    # an unused set literal `output = {output_data}`)
    return (
        f'<html><body><h1>{output_data}</h1>'
        f'<form action="/"><button type="submit">back </button> </form>'
        f'</body></html>'
    )
# Run the Flask development server. debug=True enables the reloader and the
# interactive debugger — do not use in production.
if __name__ == '__main__':
    app.run(debug=True)
I am deploying it using a model built from the following function, not a pre-existing model that has a `predict` attribute.
def model(user):
    """Recommend products for *user* via user-user collaborative filtering.

    Finds users whose rating rows score above 0.6 with the external
    weight_factor() similarity, prints the top neighbours and the products
    they liked that *user* has not, and — unlike the original, which only
    printed — RETURNS the recommendations so a caller (e.g. the Flask view)
    can display them.

    Depends on module-level names: `data` (a DataFrame whose column 0 is
    Users and columns 2..N are 0/1 product flags — TODO confirm), `np`,
    and `weight_factor`.
    """
    recommended_list = []
    top_list = []
    # Rating row of the query user (columns 2.. are the product flags).
    x = data.iloc[data.loc[data.Users == user].index[0], 2:]
    # (user_name, similarity) pair for every user in the table.
    similar = np.array([(data.iloc[i, 0], weight_factor(x, data.iloc[i, 2:]))
                        for i in range(data.shape[0])])
    # Sort by similarity, descending.
    order = np.argsort(similar[:, 1])[::-1]
    similar = similar[order]
    neighbours = similar[similar[:, 1].astype(float) > 0.6]  # Taking threshold as 0.6
    # Collect products a neighbour liked (==1) that the query user has not (==0).
    for i in range(len(neighbours)):
        for j in range(2, len(data.columns)):
            if (data.iloc[data.loc[data.Users == neighbours[i][0]].index[0], j] == 1
                    and data.iloc[data.loc[data.Users == user].index[0], j] == 0):
                recommended_list.append(data.columns[j])
    # Keep at most the top 10 neighbours.
    if len(neighbours) > 10:
        for i in range(10):
            top_list.append(neighbours[i][0])
    else:
        for i in range(len(neighbours)):
            top_list.append(neighbours[i][0])
    # Each user is always maximally similar to itself — remove it.
    if user in top_list:
        top_list.remove(user)
    print(" ")
    print("Top users similar to this user are:")
    print(" ")
    for name in top_list:
        print(name)
    print(" ")
    print("Users similar to this user liked these products too:")
    print(" ")
    recommended_array = np.unique(np.array(recommended_list))
    for item in recommended_array:
        print(item)
    # THE FIX: return the result instead of falling off the end (None).
    return recommended_array
How do I deploy it using Flask? My output is not being shown in the window, although after deployment the home page is displayed and the input is accepted.
Your function `model(user)` must return something.
example:
def model(user):
# code
return somethings
Related
This is my first attempt to extract tweets using the Twitter API and tweepy. When I execute my code, it keeps printing 401, each time on a new line. I am not able to figure out what I am doing wrong. Any help is appreciated.
import tweepy
import json

# Twitter API credentials (blank placeholders). A stale/regenerated token or
# a system clock skewed from Twitter's servers is the classic cause of the
# repeated HTTP 401 this question describes.
access_token = ""
access_token_secret = ""
consumer_key = ""
consumer_secret = ""

auth = tweepy.OAuthHandler(consumer_key,consumer_secret)
auth.set_access_token(access_token,access_token_secret)
class MyStreamListener(tweepy.StreamListener):
    """Stream listener that writes the first 100 tweets to tweets.txt."""

    def __init__(self, api=None):
        super(MyStreamListener, self).__init__()
        self.num_tweets = 0                      # tweets written so far
        self.file = open("tweets.txt", "w")      # output sink for raw JSON

    def on_status(self, status):
        """Append one tweet as a JSON line; return False after 100 to stop."""
        tweet = status._json
        self.file.write( json.dumps(tweet) + '\n' )
        self.num_tweets += 1
        if self.num_tweets < 100:
            return True
        # BUG FIX: in the original, `self.file.close()` came *after* the
        # return statements, so it was unreachable and the file never closed
        # (risking buffered lines being lost).
        self.file.close()
        return False

    def on_error(self, status):
        # Print the HTTP error code (401 = bad credentials / clock skew).
        print(status)
# Start the stream with the listener defined above.
l = MyStreamListener()
stream=tweepy.Stream(auth,l)
# NOTE(review): Stream.filter() normally requires at least one predicate
# (track=, follow=, or locations=); calling it with no arguments may raise —
# confirm against the installed tweepy version.
stream.filter()

# Read the captured tweets back in: one JSON document per line.
tweets_data_path = 'tweets.txt'
tweets_file = open(tweets_data_path, "r")
tweets_data = []
for line in tweets_file:
    tweet = json.loads(line)
    tweets_data.append(tweet)
tweets_file.close()

# Show the keys of the first captured tweet object.
print(tweets_data[0].keys())
Go to your twitter account settings and change timezone to that as of your computer. Then, go to twitter app settings and generate new consumer key and new access token. These newly generated keys and tokens you should use to avoid 401 error.
I tried to render a variable ('predictions') produced by the URL (/predict) on the URL ('/hello'). I am a beginner in web development. If someone knows how, please help.
app = Flask(__name__)


# BUG FIX: route decorators start with '@'; the original '#app.route(...)'
# is a comment, so the /predict endpoint was never registered.
@app.route('/predict', methods=['POST'])
def apicall(responses=None):
    """POST /predict: run the pickled model on the JSON payload.

    Expects a JSON body of records, loads kmeans__model.pkl, predicts,
    and returns jsonify({"predictions": ...}) with status 200.
    """
    test_json = request.get_json()
    # NOTE(review): pd.read_json expects a JSON *string*; request.get_json()
    # already parses the body into Python objects — confirm the client sends
    # a string payload, otherwise use pd.DataFrame(test_json) directly.
    test = pd.read_json(test_json, orient='records')
    # (removed dead code: `query_df = pd.DataFrame(test)` was never used)

    clf = 'kmeans__model.pkl'
    print("Loading the model...")
    lin_reg_model = None
    with open(clf, 'rb') as f:
        lin_reg_model = joblib.load(f)
    # lin_reg_model = joblib.load('/home/q/new_project/models/kmeans_model.pkl')

    print("The model has been loaded...doing predictions now...")
    predictions = lin_reg_model.predict(test)
    print(predictions)

    prediction_series = list(pd.Series(predictions))
    final_predictions = pd.DataFrame(list(zip(prediction_series)))
    responses = jsonify(predictions=final_predictions.to_json(orient="records"))
    responses.status_code = 200
    return responses
# BUG FIX: '@', not '#', registers the route. The asker's markdown question
# line has been turned into a comment so the module parses.
@app.route('/hello')
def hello():
    # What should be used here to render *predictions*?
    return 'Hello, World '
You can return to template and show in html if you like. Create templates folder and create html file.
import requests

def hello():
    # Call the /predict endpoint over HTTP. NOTE(review): url_for('apicall')
    # needs an active app/request context and `from flask import url_for` —
    # confirm both exist in the real module.
    response = requests.get(url_for('apicall'))
    # Pass the JSON predictions into the template; replace the placeholder
    # with the actual template filename under the templates/ folder.
    return render_template('<yourtemplate.html>', predictions=response.json())
HTML
{{ predictions }}
Note: Please follow this link for any problem with thread and deployment Handling multiple requests in Flask
Soooo I have been working on a script I took from ArcGIS Blueprints:
http://file.allitebooks.com/20151230/ArcGIS%20Blueprints.pdf
It should convert geolocated tweets into a geodatabase. I have the Twitter Streaming API already operational, and been playing with different ways to extract x/y, but keep coming back to this script, every so often, hoping I can get it running with no luck. I am stuck on a "List Index Out of Range" error. If anyone is gracious enough to offer some ideas on how I can get by this error I will be forever grateful. If nothing else this endeavor has exploited my shortcomings with Python and Arcpy, and hopefully it will round me out in the long run. For right now, I sure would like to get some mileage out of this script and the work Ive invested into it. Thank you!
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import arcpy
import sys
import time

# Twitter API credentials (placeholders).
consumer_key = 'xxx'
consumer_secret = 'xxx'
token_key = 'xxx'
token_secret = 'xxx'
class StdOutListener(StreamListener):
    """Tweepy listener that inserts geotagged tweets into an ArcGIS feature
    class until *time_limit* seconds have passed since *start_time*."""

    def __init__(self, start_time, featureClass, time_limit):
        super(StdOutListener, self).__init__()
        self.time = start_time          # stream start timestamp
        self.limit = time_limit         # seconds to keep collecting
        self.featureClass = featureClass

    def on_status(self, status):
        # Returning nothing (falsy) once the time budget is spent stops the stream.
        while (time.time() - self.time) < self.limit:
            if status.geo is not None:
                dictCoords = status.geo
                # status.geo holds [lat, lon]. "List index out of range" here
                # means 'coordinates' was empty — guard before indexing if it recurs.
                listCoords = dictCoords['coordinates']
                latitude = listCoords[0]
                longitude = listCoords[1]
                # BUG FIX: the original line had an unbalanced extra ')', and
                # "SHAPE#XY" is the same '#'-for-'@' paste mangling seen in the
                # Flask decorators — the arcpy geometry token is "SHAPE@XY".
                cursor = arcpy.da.InsertCursor(self.featureClass, "SHAPE@XY")
                cursor.insertRow([(longitude, latitude)])
                print(str(listCoords[0]) + "," + str(listCoords[1]))
                return True
            else:
                # BUG FIX: parenthesized print — the original used the
                # Python 2-only statement form.
                print("No coordinates found")
                return True
start_time = time.time()
# BUG FIX: the original line ended with a stray `" "` (an extra, unterminated
# string literal). A raw string also keeps the backslashes from forming escapes.
arcpy.env.workspace = r"c:\ArcGIS_Blueprint_Python\data\Twitter\TweetInformation.gdb"


def main():
    """Create the output feature class and stream tweets into it.

    argv[1] = feature class name, argv[2] = monitoring time in hours.
    """
    try:
        featureClass = sys.argv[1]
        # BUG FIX: sys.argv values are strings — 'str * 3600' repeats the
        # string 3600 times instead of computing seconds. Convert first.
        monitorTime = int(sys.argv[2])
        monitorTime = monitorTime * 3600
        sr = arcpy.SpatialReference(4326)  # WGS 84
        arcpy.env.overwriteOutput = True
        arcpy.CreateFeatureclass_management(arcpy.env.workspace,
                                            featureClass, "POINT",
                                            spatial_reference=sr)
        auth = OAuthHandler(consumer_key, consumer_secret)
        auth.set_access_token(token_key, token_secret)
        stream = Stream(auth, StdOutListener(start_time, featureClass,
                                             time_limit=monitorTime))  # 172800
        stream.filter(track=['car'])
    except Exception as e:
        # BUG FIX: e.message does not exist in Python 3; print the exception.
        print(e)


if __name__ == '__main__':
    main()
I am new to programming applications, although I have a rudimentary handle on it.
I would like to get user input for # of columns and # of rows, then create a table of images (identical) scaled to fit on the screen.
I would then like to be able to cycle through these and change their color one at a time.
I can program this in python (see below) but I have no idea how to do this graphically.
Thoughts?
print("Welcome to Well Tracker!")

# Prompt until the user types a non-negative integer for the row count.
r=input('number of rows? ')
while r.isdigit()!=True:
    print('invalid try again')
    r=input('number of rows? ')
r=int(r)

# Same validation loop for the column count.
# NOTE(review): the re-prompt text says "rows" — presumably a copy/paste slip.
c=input('number of collumns? ')
while c.isdigit()!=True:
    print('invalid try again')
    c=input('number of rows? ')
c=int(c)

print('\nTap enter to cross of a well, \nenter anything else to end\n')

# Template row: one zero per column; plate rows are built from copies of it.
wellC=[0]*c
def showWell(well):
    """Print a header followed by each row of the well grid."""
    print('The Well')
    for row in well:
        print(row)
def fillNumbers(matrix):
    """Overwrite every row in place with 1..len(row); return the matrix."""
    for row in matrix:
        for col in range(len(row)):
            row[col] = col + 1
    return matrix
def makeWell(rows, collumns):
    """Build a grid of *rows* copies of *collumns*, numbered via fillNumbers."""
    well = []
    count = 0
    while count < rows:
        well.append(collumns[:])   # independent copy per row
        count += 1
    return fillNumbers(well)
# Build and display the initial plate from the validated user input above.
wellPlate=makeWell(r,wellC)
showWell(wellPlate)
def crossOff(well):
    # Walk the plate column-by-column; each Enter keypress marks the next
    # well with 'x'. Any non-empty input aborts and returns False; finishing
    # the whole plate falls off the end (returns None).
    # NOTE(review): indentation reconstructed from a flattened paste —
    # showWell is assumed to run after every mark; confirm against the original.
    end='';
    for col in range(len(well[0])):
        row=0
        while row < len(well):
            end=input();
            if end != '':
                return False
            well[row][col]='x'
            row+=1
            showWell(well)
def checkForX(well):
    """Count how many cells in the grid hold the marker 'x'."""
    total = 0
    for row in well:
        for cell in row:
            if cell == 'x':
                total += 1
    return total
def main():
    # Cross off whole plates until the user aborts (crossOff returns False).
    # Relies on module-level r, c, wellC from the input section.
    platesComplete=0
    while True:
        wellPlate=makeWell(r,wellC)
        if crossOff(wellPlate) == False:
            break
        platesComplete+=1
        # NOTE(review): computed but never printed or returned — confirm intent.
        wellsComplete=checkForX(wellPlate)+platesComplete*r*c

main()
Updated:
okay, my recommendation is to look to use a UICollectionView. Example here:
https://github.com/bluedome/DragToReorderCollectionView
This code was working fine few moments ago but now its not working?
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import time
import random

# Twitter credentials (blank placeholders).
consumer_key = ""
consumer_secret = ""
access_token_key = ""
access_token_secret = ""

# Module-level scratch structures the listener uses to derive an X/Y point
# from either exact coordinates or a 'place' bounding box.
Coords = dict()
Place = dict()
PlaceCoords = dict()
XY = []
class StdOutListener(StreamListener):
    """ A listener handles tweets that are the received from the stream.
    This is a basic listener that inserts tweets into MySQLdb.
    """
    # NOTE(review): Python 2-only syntax (print statements) in this class.
    def on_status(self, status):
        #print "Tweet Text: ",status.text
        text = status.text
        #print "Time Stamp: ",status.created_at
        try:
            # Exact point location, when the user shared one.
            Coords.update(status.coordinates)
            XY = (Coords.get('coordinates')) #Place the coordinates values into a list 'XY'
            #print "X: ", XY[0]
            #print "Y: ", XY[1]
        except:
            # WARNING(review): bare except — it swallows *every* error here
            # (e.g. status.place being None), not just missing coordinates,
            # and can mask the real reason the stream "stopped working".
            #Often times users opt into 'place' which is neighborhood size polygon
            #Calculate center of polygon
            Place.update(status.place)
            PlaceCoords.update(Place['bounding_box'])
            Box = PlaceCoords['coordinates'][0]
            XY = [(Box[0][0] + Box[2][0])/2, (Box[0][1] + Box[2][1])/2]
            #print "X: ", XY[0]
            #print "Y: ", XY[1]
            pass
        # Comment out next 4 lines to avoid MySQLdb to simply read stream at console
        #print {"status_id":status.id_str,"timestamp":status.created_at,"location X":XY[0],"location Y":XY[1],"text":text}
        print status.id_str,status.created_at,XY[0],XY[1],text
def main():
    # Wire up auth and stream worldwide geotagged tweets, reconnecting on error.
    l = StdOutListener()
    auth = OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token_key, access_token_secret)
    stream = Stream(auth, l, timeout=30)
    #sleep
    nsecs = 2
    #Only records 'locations' OR 'tracks', NOT 'tracks (keywords) with locations'
    while True:
        try:
            # Call tweepy's userstream method
            # NOTE(review): `async` became a reserved keyword in Python 3.7,
            # and `except Exception, e` below is Python 2-only — this script
            # cannot run unmodified on Python 3.
            #stream.filter(locations=[-122.75,36.8,-121.75,37.8,-74,40,-73,41],languages=['es','tr','ko','fr','ru','de','ja','it','pt'], async=False)##These coordinates are approximate bounding box around USA
            stream.filter(locations=[-180,-90,180,90],async=False)
            #stream.filter()
            #stream.filter(track=['obama'])## This will feed the stream all mentions of 'keyword'
            break
        except Exception, e:
            print Exception , e
            # Abnormal exit: Reconnect after a short sleep.
            #nsecs=random.randint(30)
            print "Reconnecting ",nsecs
            time.sleep(nsecs)

if __name__ == '__main__':
    main()
Is there any other way to collect streaming data based on locations?
No, using the locations parameter is the only way. However, I would strongly advise against your catch-all except statement in the StdOutListener. It is most likely catching a different type of error than the one you expect, masking a problem.