Commit a1a0e1c6 authored by Janez K's avatar Janez K

added new streaming widgets

parent c40f4c76
@@ -198,8 +198,8 @@ INSTALLED_APPS_WORKFLOWS_SUB = (
     'workflows.cforange',
     'workflows.perfeval',
     'workflows.mysql',
-    'workflows.lemmagen',
-    'workflows.crossbee',
+    #'workflows.lemmagen',
+    #'workflows.crossbee',
     'workflows.scikitAlgorithms',
     #'workflows.lemmagen',
     #'workflows.crossbee',
...
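Commenting out the lemmagen and crossbee entries removes them from the tuple, so those widget packages are no longer registered while the remaining entries stay active. A minimal sketch of how a sub-app tuple like this is typically folded into Django's INSTALLED_APPS; the actual merge in this settings module is not shown in the diff, so treat the final line as an assumption:

INSTALLED_APPS_WORKFLOWS_SUB = (
    'workflows.cforange',
    'workflows.perfeval',
    'workflows.mysql',
    #'workflows.lemmagen',      # commented entries are simply absent from the tuple
    #'workflows.crossbee',
    'workflows.scikitAlgorithms',
)

# Assumed merge step: Django only ever sees the uncommented packages.
INSTALLED_APPS = ('django.contrib.contenttypes', 'workflows') + INSTALLED_APPS_WORKFLOWS_SUB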
@@ -36,6 +36,11 @@ def dynamic_import_globals(name, package, localSetAttrFunc):
     try:
         m = __import__(name, globals(), locals(), ['*'])
     except:
+        import sys, traceback
+        print "Exception in user code:"
+        print '-'*60
+        traceback.print_exc(file=sys.stdout)
+        print '-'*60
         return
     all_names = [name for name in dir(m) if name[0]!='_']
     g = globals()
...
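The five added lines make dynamic_import_globals report the full traceback of a failed import instead of returning silently; they use Python 2 print statements, consistent with the rest of the code base. A minimal, version-agnostic sketch of the same idea using traceback.format_exc() (the helper name try_import is illustrative, not part of workflows):

import traceback

def try_import(name):
    # Mirror the behaviour added above: show why the import failed, then give up.
    try:
        return __import__(name, globals(), locals(), ['*'])
    except Exception:
        print('-' * 60)
        print(traceback.format_exc())
        print('-' * 60)
        return None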
@@ -11,13 +11,13 @@ from django.shortcuts import render
 def scikitAlgorithms_displayDS(request,input_dict,output_dict,widget):
     data = input_dict['data']
     return render(request, 'visualizations/scikitAlgorithms_displayDS.html',{'widget':widget,'input_dict':input_dict,'output_dict':helperDisplayDS(output_dict)})
 
 def scikitDataset_table_to_dict(data):
     attrs, metas, data_new = [], [], []
     # try:
     # class_var = data.domain.class_var.name
     #except:
     class_var = ''
     for m in data.domain.get_metas():
         metas.append(data.domain.get_meta(m).name)
     for a in data.domain.attributes:
@@ -34,4 +34,4 @@ def scikitDataset_table_to_dict(data):
             inst_new.append((a.name, pretty_float(value, a)))
         data_new.append(inst_new)
     return {'attrs':attrs, 'metas':metas, 'data':data_new, 'class_var':class_var}
\ No newline at end of file
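scikitDataset_table_to_dict flattens an Orange-style data table into plain Python lists that the scikitAlgorithms_displayDS.html template can iterate over. A sketch of the structure it returns, with invented example values (the class_var slot is currently always the empty string because the lookup above is commented out):

# Invented example of the dict this helper returns:
{
    'attrs': ['sepal length', 'sepal width'],                      # attribute (column) names
    'metas': ['id'],                                               # meta attribute names
    'data': [[('sepal length', '5.1'), ('sepal width', '3.5')]],   # one list of (name, value) pairs per instance
    'class_var': '',                                               # class variable name, left empty for now
}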
This diff is collapsed.
@@ -5,6 +5,134 @@ Streaming widgets librarby
 @author: Janez Kranjc <janez.kranjc@ijs.si>
 '''
+from workflows.security import safeOpen
+
+def streaming_add_neutral_zone(input_dict):
+    import copy
+    tweets = copy.deepcopy(input_dict['ltw'])
+    neutral_zone = float(input_dict['zone'])
+    ltw = []
+    for tweet in tweets:
+        if tweet['reliability']!=-1.0 and tweet['reliability']<neutral_zone:
+            tweet['sentiment']="Neutral"
+        ltw.append(tweet)
+    output_dict = {}
+    output_dict['ltw']=ltw
+    return output_dict
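streaming_add_neutral_zone relabels as Neutral every tweet whose reliability score is below the chosen threshold, leaving tweets with a missing score (-1.0) untouched; every tweet is passed through to the output. A small usage sketch with invented data:

# Invented example; 'ltw' (list of tweets) and 'zone' are the widget inputs used above.
input_dict = {
    'zone': '0.3',
    'ltw': [
        {'id': 1, 'text': 'great stuff', 'sentiment': 'Positive', 'reliability': 0.9},
        {'id': 2, 'text': 'hard to say', 'sentiment': 'Positive', 'reliability': 0.1},
        {'id': 3, 'text': 'no score',    'sentiment': 'Negative', 'reliability': -1.0},
    ],
}
result = streaming_add_neutral_zone(input_dict)
# result['ltw'][1]['sentiment'] == 'Neutral'; tweets 1 and 3 keep their original labels.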
+
+def streaming_remove_words_from_tweets(input_dict):
+    import copy
+    tweets = copy.deepcopy(input_dict['ltw'])
+    words = input_dict['words'].encode("utf-8")
+    words = words.split("\n")
+    ltw = []
+    import re
+    for tweet in tweets:
+        for word in words:
+            pattern = re.compile(word, re.IGNORECASE)
+            tweet['text']=pattern.sub('',tweet['text'])
+        ltw.append(tweet)
+    output_dict = {}
+    output_dict['ltw']=ltw
+    return output_dict
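streaming_remove_words_from_tweets treats every newline-separated entry of the 'words' input as a case-insensitive regular expression and deletes all matches from each tweet's text; entries containing regex metacharacters would need re.escape first if they are meant literally. A usage sketch with invented data:

# Invented example; each line of 'words' becomes one pattern.
input_dict = {
    'words': u'rt\nvia',
    'ltw': [{'id': 1, 'text': 'RT interesting paper via @someone'}],
}
result = streaming_remove_words_from_tweets(input_dict)
# result['ltw'][0]['text'] == ' interesting paper  @someone'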
+
+def streaming_simulate_stream_from_text_file(input_dict,widget,stream=None):
+    import datetime
+    csvfile = safeOpen(input_dict['file'])
+    tweet_data = csvfile.read()
+    tweet_data = tweet_data.strip()
+    tweets = tweet_data.split("\n")
+    ltw = []
+    i=1
+    for tw in tweets:
+        tweet = {}
+        tweet['id']=i
+        tweet['created_at']=datetime.datetime.now()
+        tweet['text']=tw
+        tweet['user']="dragi"
+        tweet['lang']="bg"
+        i=i+1
+        ltw.append(tweet)
+    output_dict = {}
+    output_dict['ltw']=ltw
+    return output_dict
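streaming_simulate_stream_from_text_file turns each line of the uploaded file into a tweet-like dict with a sequential id and the current timestamp; the user ("dragi") and language ("bg") values are hard-coded placeholders. The per-line construction can be restated compactly (the helper below is an editorial re-statement, not project code):

import datetime

def lines_to_tweets(lines):
    # Same mapping as the loop above, applied to a plain list of strings.
    return {'ltw': [{'id': i, 'created_at': datetime.datetime.now(),
                     'text': line, 'user': 'dragi', 'lang': 'bg'}
                    for i, line in enumerate(lines, start=1)]}

# lines_to_tweets(['first line', 'second line'])['ltw'][0]['id'] == 1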
+
+def streaming_simulate_stream_from_csv(input_dict,widget,stream=None):
+    from streams.models import StreamWidgetData
+    import datetime
+    import csv
+    csvfile = safeOpen(input_dict['csv'])
+    csvreader = csv.reader(csvfile,delimiter=";",quotechar='"')
+    rows = []
+    ltw = []
+    i=0
+    counter = 0
+    started = False
+    last_id = "not-started-yet"
+    if not stream is None:
+        try:
+            swd = StreamWidgetData.objects.get(stream=stream,widget=widget)
+            last_id = swd.value
+        except:
+            started = True
+    else:
+        started = True
+    for row in csvreader:
+        rows.append(row)
+        if i!=0:
+            rows[i][1] = datetime.datetime.strptime(rows[i][1],"%m/%d/%Y %I:%M:%S %p")
+            tweet = {}
+            tweet['id'] = rows[i][0]
+            tweet['created_at'] = rows[i][1]
+            tweet['text'] = rows[i][3].encode('utf-8')
+            tweet['user'] = rows[i][5].encode('utf-8')
+            tweet['lang'] = rows[i][11]
+            if started:
+                counter = counter + 1
+                ltw.append(tweet)
+            if counter == 50 and started:
+                started = False
+                if not stream is None:
+                    try:
+                        swd = StreamWidgetData.objects.get(stream=stream,widget=widget)
+                        swd.value = tweet['id']
+                        swd.save()
+                    except:
+                        swd = StreamWidgetData()
+                        swd.stream = stream
+                        swd.widget = widget
+                        data = tweet['id']
+                        swd.value = data
+                        swd.save()
+            if tweet['id']==last_id:
+                started = True
+        i = i + 1
+    if counter < 51 and not stream is None and started == True:
+        try:
+            swd = StreamWidgetData.objects.get(stream=stream,widget=widget)
+            swd.value = "done"
+            swd.save()
+        except:
+            swd = StreamWidgetData()
+            swd.stream = stream
+            swd.widget = widget
+            data = "done"
+            swd.value = data
+            swd.save()
+    output_dict = {}
+    #print ltw
+    #print len(ltw)
+    output_dict['ltw']=ltw
+    return output_dict
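streaming_simulate_stream_from_csv emits the CSV rows in batches of 50 per stream execution. It keeps its position in a StreamWidgetData record: the first run starts at the top and stores the id of the last tweet it emitted; later runs skip rows until that id reappears and continue from the following row; when the end of the file is reached while a batch is still open, the marker "done" is stored instead. The column layout it expects can be read directly off the indices used above (semicolon-separated, '"' as quote character, row 0 treated as a header and skipped):

# Column indices the loop above reads from each row; all other columns are ignored.
CSV_COLUMNS = {
    0:  'id',
    1:  'created_at, formatted as "%m/%d/%Y %I:%M:%S %p"',
    3:  'text',
    5:  'user',
    11: 'lang',
}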
 def streaming_split_pos_neg(input_dict):
     tweets = input_dict['ltw']
@@ -86,8 +214,10 @@ def streaming_sentiment_graph(input_dict,widget,stream=None):
 def streaming_tweet_sentiment_service(input_dict,widget,stream=None):
     import pickle
     from pysimplesoap.client import SoapClient, SoapFault
+    import pysimplesoap
     client = SoapClient(location = "http://batman.ijs.si:8008/",action = 'http://batman.ijs.si:8008/',namespace = "http://example.com/tweetsentiment.wsdl",soap_ns='soap',trace = False,ns = False)
+    pysimplesoap.client.TIMEOUT = 60
     list_of_tweets = input_dict['ltw']
...
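The two added lines raise the SOAP timeout for the sentiment service call to 60 seconds. TIMEOUT appears to be a module-level setting in pysimplesoap.client, so it would affect connections made by any SoapClient in the process, not just this one; what matters is that it is set before the requests are issued. A sketch of the same setup in isolation (endpoint values copied from the diff, the timeout behaviour is an assumption about pysimplesoap internals):

import pysimplesoap
from pysimplesoap.client import SoapClient

pysimplesoap.client.TIMEOUT = 60   # assumed to be picked up by the HTTP transport at call time
client = SoapClient(location="http://batman.ijs.si:8008/",
                    action='http://batman.ijs.si:8008/',
                    namespace="http://example.com/tweetsentiment.wsdl",
                    soap_ns='soap', trace=False, ns=False)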
@@ -53,8 +53,16 @@ def streaming_display_tweets_visualization(request,widget,stream):
         tweet_data = []
     tweets_unsorted = tweet_data
     tweets = sorted(tweets_unsorted, key=operator.itemgetter('created_at'))
-    tweets.reverse()
-    paginator = Paginator(tweets,20)
+    if request.GET.get('reverse')=="true":
+        pass
+    else:
+        tweets.reverse()
+    rpp=20
+    if request.GET.has_key('rpp'):
+        rpp = int(request.GET.get('rpp'))
+        if rpp<1:
+            rpp = 20
+    paginator = Paginator(tweets,rpp)
     page=request.GET.get('page')
     try:
         tweets = paginator.page(page)
...
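The tweet-list view now honours two optional query-string parameters: reverse=true keeps ascending order (oldest first) instead of the default newest-first listing, and rpp sets the number of tweets per page, falling back to 20 when it is missing or below 1. request.GET.has_key is Python 2 / old-Django spelling; 'rpp' in request.GET is the equivalent modern form. Illustrative requests (the URL path is made up, only the query string matters):

# /widget/<id>/tweets/                 -> newest first, 20 tweets per page
# /widget/<id>/tweets/?reverse=true    -> oldest first
# /widget/<id>/tweets/?rpp=50&page=2   -> 50 per page, second page
# /widget/<id>/tweets/?rpp=0           -> rpp below 1 falls back to 20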