PFQueryTableViewController Swift – load next page

I was having some issues using PFQueryTableViewController with Parse when loading the next page from didSelectRowAtIndexPath. The error was an index-out-of-range exception when selecting the load-next-page cell: with pagination enabled, the table appends an extra load-more cell after the last object, so its row equals objects.count and has no backing PFObject.

Here was the fix:


// Initialize the PFQueryTable tableview
override init(style: UITableViewStyle, className: String?) {
    super.init(style: style, className: className)
}

required init(coder aDecoder: NSCoder) {
    super.init(coder: aDecoder)!
    parseClassName = "yourClass"
    pullToRefreshEnabled = true
    paginationEnabled = true
    objectsPerPage = 11
}
    
override func tableView(tableView: UITableView, cellForRowAtIndexPath indexPath: NSIndexPath, object: PFObject?) -> PFTableViewCell {
    let cellIdentifier = "statusCell"
    var cell = tableView.dequeueReusableCellWithIdentifier(cellIdentifier) as? statusCellClass
    if cell == nil {
        cell = statusCellClass(style: UITableViewCellStyle.Default, reuseIdentifier: cellIdentifier)
    }

    // Extract values from the PFObject to display in the table cell
    if let object = object {
        cell!.status.text = (object["status"] as! String)

        // Date for cell subtitle
        let dateFormatter = NSDateFormatter()
        dateFormatter.dateFormat = "yyyy-MM-dd"
        if let dateForText = object.createdAt {
            cell!.created.text = dateFormatter.stringFromDate(dateForText)
        }
    }
    return cell!
}
    
    

override func tableView(tableView: UITableView, didSelectRowAtIndexPath indexPath: NSIndexPath) {
    // objects holds up to objectsPerPage items (rows 0-10 here); the extra
    // load-more cell sits at row == objects.count, so make it load the next page
    if indexPath.row == self.objects!.count {
        loadNextPage()
    } else {
        let targetObject = objectAtIndexPath(indexPath)!
        rowObjectID = targetObject.objectId!
        NSLog("Row Pressed")
        NSLog("%@", targetObject)
        NSLog("%@", rowObjectID)
        performSegueWithIdentifier("editStatus", sender: self)
    }
}

Python Save Facebook Graph data to MySQL database

In this script I wanted to save public Facebook Page post data into my local database. See the source on GitHub.

The main.py script sets up the database structure, retrieves the data from the Facebook Graph API, then stores it in your database.

If you would like only today's posts from the Facebook pages, use the -t argument; without it, the script gets all available public posts. You can also use -n (number of posts) to limit how many posts are saved.

You will also need to include the page names with -p. For example, to save all the available public post data for the page TacoBell:

main.py -p tacobell

To get only today's posts, add -t:

main.py -p tacobell -t

To fetch multiple pages at once, no comma is needed; just separate the page names with a space:

main.py -p tacobell mcdonalds -t

main.py:


from time import sleep
from datetime import datetime, timedelta
import calendar
import argparse, sys
from urlparse import parse_qs, urlparse

from dateutil import parser
from facebook import GraphAPI
import mysql.connector
from config import config, access_token

# Connect without selecting a schema first, so the database can be
# created below if it does not exist yet
db_config = {k: v for k, v in config.items() if k != 'database'}
cnx = mysql.connector.connect(**db_config)
cursor = cnx.cursor()

table_name = 'pages'
table = ("CREATE TABLE `pages` ("
         "  `id` int(11) NOT NULL AUTO_INCREMENT,"
         "  `page_name` varchar(40) NOT NULL,"
         "  `post_id` varchar(40) NOT NULL,"
         "  `created_time` datetime NOT NULL,"
         "  `message` TEXT NOT NULL,"
         "  PRIMARY KEY (`id`)"
         ") ENGINE=InnoDB")


def create_db_table():
    # Create the database and the pages table on first run
    cursor.execute('SHOW DATABASES;')
    all_dbs = cursor.fetchall()
    if all_dbs.count((config['database'],)) == 0:
        print("Creating db")
        cursor.execute(
            "CREATE DATABASE {} DEFAULT CHARACTER SET 'utf8'".format(config['database']))

    cursor.execute('USE %s;' % config['database'])
    cursor.execute("SHOW TABLES;")
    all_tables = cursor.fetchall()
    if all_tables.count(('pages',)) == 0:
        print("creating table")
        cursor.execute(table)


add_message = ("INSERT INTO pages "
               "(post_id, page_name, created_time, message) "
               "VALUES (%s, %s, %s, %s)")


def insert_post(post_id, page_name, created_time, message):
    # Skip posts that were already saved on a previous run
    cursor.execute("SELECT * FROM pages WHERE post_id=%s;", (post_id,))
    if cursor.fetchall():
        print("already saved this post")
        return False
    print('saving this post')
    cursor.execute(add_message, (post_id, page_name, created_time, message))
    cnx.commit()


def make_it_good(s):
    # Drop any characters that cannot be decoded as UTF-8
    res = ""
    for ch in s:
        try:
            res += ch.decode('utf8')
        except Exception:
            pass
    return res


def utc_to_local(utc_dt):
    # get integer timestamp to avoid precision loss
    timestamp = calendar.timegm(utc_dt.timetuple())
    local_dt = datetime.fromtimestamp(timestamp)
    assert utc_dt.resolution >= timedelta(microseconds=1)
    return local_dt.replace(microsecond=utc_dt.microsecond)


def save_posts(page, num_post=None, only_today=False):
    if not num_post:
        num_post = 1000000000
    graph = GraphAPI(access_token)
    arguments = {}
    total = 0
    num_saved = 0
    while 1:
        try:
            posts = graph.get_connections(page, 'posts', **arguments)
        except Exception:
            # Retry once after a short pause, then give up
            try:
                sleep(3)
                posts = graph.get_connections(page, 'posts', **arguments)
            except Exception as e:
                print("Retrieved a total of %d posts" % total)
                print("But some error happened after that")
                print("This is the error")
                print(e)
                print("Breaking...")
                break
        next_url = None
        if 'paging' in posts:
            next_url = posts['paging']['next']
        posts = posts['data']
        total += len(posts)
        found_today = False
        for post in posts:
            if 'message' in post:
                created_time = parser.parse(post['created_time'])
                created_time = utc_to_local(created_time)
                today = datetime.now()
                if only_today:
                    if today.year == created_time.year and today.month == created_time.month and \
                            today.day == created_time.day:
                        found_today = True
                    else:
                        continue
                num_saved += 1
                insert_post(post['id'], page, created_time, make_it_good(post['message']))
                if num_saved >= num_post:
                    # Honor the -n limit
                    print('Saved total of %d posts' % num_saved)
                    return
        if only_today and not found_today:
            break
        if next_url:
            # Follow the Graph API paging link to fetch the next batch
            qs = parse_qs(urlparse(next_url).query)
            arguments = {'limit': qs['limit'][0], 'until': qs['until'][0]}
        else:
            break
    print('done')
    print('Saved total of %d posts' % num_saved)


create_db_table()


def main():
    arg_parser = argparse.ArgumentParser(
        description='Saving facebook pages posts to a mysql db')
    arg_parser.add_argument("-p", nargs='+', help="Please specify the name of the page(s)")
    arg_parser.add_argument('-n',
                            help="Number of posts to save to the db (if no argument is supplied, "
                                 "all posts will be saved; note this might take some time)")
    arg_parser.add_argument('-t', action='store_true', help="Only save today's posts")
    args = arg_parser.parse_args()
    only_today = args.t
    pages = args.p
    n = args.n
    if not pages:
        arg_parser.print_usage()
        sys.exit(1)
    if n:
        n = int(n)
    if only_today:
        print("Saving today's posts")
    for page in pages:
        print("Saving posts for %s" % page)
        save_posts(page, n, only_today)


if __name__ == '__main__':
    main()


Config file:


config = {
    'user': 'root',
    'host': '127.0.0.1',
    'password': 'mypass',
    'database': 'analytics'
}
access_token = 'token'
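
Once the script has run, you can sanity-check what landed in the table. Here's a minimal sketch (a hypothetical check_posts.py; it assumes the pages table created by main.py above and the same config file):

# check_posts.py - quick look at the most recently saved posts
import mysql.connector
from config import config

cnx = mysql.connector.connect(**config)
cursor = cnx.cursor()
# Pull the five newest rows saved by main.py
cursor.execute("SELECT page_name, created_time, LEFT(message, 60) "
               "FROM pages ORDER BY created_time DESC LIMIT 5")
for page_name, created_time, message in cursor.fetchall():
    print("%s | %s | %s" % (page_name, created_time, message))
cnx.close()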

Python Tweepy Twitter – MySQL query

I wanted to do some research on public tweets using tweepy. Basically, I wanted to save to my local MySQL database instance every tweet that met certain criteria. The criteria/filter was a table of keywords within my database.

I decided to use tweepy; find it on GitHub here.

Check out Brandon Wood's article here, which this is based on.
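
For reference, the script below assumes two tables already exist: tweets, which it inserts into, and mytable, which holds the filter keywords. Here's a minimal sketch of how you might create them (a hypothetical setup_tables.py; the column names come from the queries below, but the column types are my assumption):

# setup_tables.py - hypothetical helper to create the tables the stream script expects
import mysql.connector

conn = mysql.connector.connect(user='root', password='mypass', host='127.0.0.1', database='analytics')
curs = conn.cursor()

# Holds the captured tweets (columns match the INSERT in the stream handler below)
curs.execute("CREATE TABLE IF NOT EXISTS tweets ("
             "  id INT NOT NULL AUTO_INCREMENT,"
             "  username VARCHAR(60),"
             "  created_at DATETIME,"
             "  content TEXT,"
             "  reply_to VARCHAR(40),"
             "  source VARCHAR(255),"
             "  PRIMARY KEY (id)) ENGINE=InnoDB")

# Holds the keywords the stream filters on
curs.execute("CREATE TABLE IF NOT EXISTS mytable ("
             "  name VARCHAR(60) NOT NULL)")

# Seed a few example keywords
for kw in ('test', 'test2', 'test3'):
    curs.execute("INSERT INTO mytable (name) VALUES (%s)", (kw,))
conn.commit()
conn.close()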


# twitter client
import tweepy

# database interface
import mysql.connector

conn = mysql.connector.connect(user='root', password='mypass', host='127.0.0.1', database='analytics')
curs = conn.cursor()


class StreamWatcherHandler(tweepy.StreamListener):

    def on_status(self, status):
        try:
            usr = status.author.screen_name.strip()
            txt = status.text.strip()
            in_reply_to = status.in_reply_to_status_id
            src = status.source.strip()
            cat = status.created_at

            # Now that we have our tweet information, let's stow it away in our sql database
            curs.execute("insert into tweets (username, created_at, content, reply_to, source) "
                         "values (%s, %s, %s, %s, %s)",
                         (usr, cat, txt, in_reply_to, src))
            conn.commit()
        except Exception as e:
            # Most errors we're going to see relate to the handling of UTF-8 messages (sorry)
            print(e)

    def on_error(self, status_code):
        print('An error has occurred! Status code = %s' % status_code)
        return True


def main():
    # establish stream
    consumer_key = "key"
    consumer_secret = "secret"
    auth1 = tweepy.auth.OAuthHandler(consumer_key, consumer_secret)

    access_token = "token"
    access_token_secret = "token_secret"
    auth1.set_access_token(access_token, access_token_secret)

    # This is where we define our filter from our database. Each row holds a
    # keyword that I want the stream to look for
    curs.execute('SELECT name FROM mytable')
    row = [item[0] for item in curs.fetchall()]

    print "Establishing stream...",
    stream = tweepy.Stream(auth1, StreamWatcherHandler(), timeout=None)
    stream.filter(track=row)
    print "Done"


if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        print "Disconnecting from database... ",
        conn.commit()
        conn.close()
        print "Done"

Here is what the database table looks like (mytable_data screenshot): the name column of mytable holds the keywords test, test2, and test3.

So now the stream is only looking for tweets that have “test”, “test2”, or “test3” keywords.


Parse.com PHP GeoPoint – Laravel

I recently had a project that required an app to collect coordinates and display the location via an online dashboard.

I decided to use Parse.com as the backend for the app and the site. For the front-end web portion I went with the Laravel framework.

I came across the following errors when trying to echo the coordinates from my Parse table:

Object of class Parse\ParseGeoPoint could not be converted to string

Cannot use object of type Parse\ParseGeoPoint as array

The issue appeared to be related to a JSON conversion in Laravel, as discussed here.

The fix was to encode the GeoPoint, which returns its coordinate values as an array if there are any.

Error:


$query2 = new ParseQuery("table");
$query2->select("coordinates");
$query2->select("username");
$query2->select("comments");
$query2->descending("createdAt");
$query2->limit(5);
$results2 = $query2->find();
if ($results2) {
    // Display coordinates, limited to 5 rows
    foreach ($results2 as $row2) {
        echo $row2->username;
        echo $row2->coordinates; // throws: ParseGeoPoint could not be converted to string
        echo $row2->comments;
        echo date_format($row2->getCreatedAt(), 'Y-m-d');
    }
}

Encoding fix:

$query2 = new ParseQuery("table");
$query2->select("coordinates");
$query2->select("username");
$query2->select("comments");
$query2->descending("createdAt");
$query2->limit(5);
$results2 = $query2->find();
if ($results2) {
    // Display coordinates, limited to 5 rows
    foreach ($results2 as $row2) {
        // Encode the GeoPoint to get its coordinate values back as an array
        $values = $row2->coordinates->_encode();
        echo $row2->username;
        echo $values['latitude'] . ',' . $values['longitude'];
        echo $row2->comments;
        echo date_format($row2->getCreatedAt(), 'Y-m-d');
    }
}


PowerShell Unlock Account – loop

PowerShell script to view a user's account details and unlock the account:

[Note: you must run as a domain admin to view the account, and the ActiveDirectory module must be installed]

Screenshot: ADuserinfo

Code:


Import-Module ActiveDirectory
$val = 0
while ($val -lt 1)
{
    $username = Read-Host -Prompt 'Input the user name'
    Get-ADUser -Identity $username -Properties passwordlastset, passwordexpired, lockedout, lastlogon, scriptpath

    Write-Host -ForegroundColor Yellow 'Do you need to unlock?'
    $confirmation = Read-Host "y/n:"
    if ($confirmation -eq 'y') {
        Unlock-ADAccount -Identity $username
        Write-Host -ForegroundColor Yellow 'Unlocked :)'
    } else {
        Write-Host -ForegroundColor Yellow 'Okay'
        Write-Host '------------------------------'
        Write-Host '------------------------------'
    }
}


Get Computer Serial Number, Model, BIOS WMIC

The following is an easy script that queries a computer on your domain by hostname or IP address and retrieves its serial number, manufacturer, model, and BIOS version. Note: you must be a domain admin.

Screenshot: Get Computer Serial Number

Code:

:: This bat-file shows serial number, manufacturer and model for a computer
:: Written by JD
:: 12/5/2012
@ECHO --------------------------------------------------------------------
@ECHO This shows serial, manufacturer, model, and bios for a computer
@ECHO --------------------------------------------------------------------
@ECHO OFF 
:TOP
SET /P computername=Enter computername:
echo.
wmic /node:"%computername%" bios get serialnumber
WMIC /node:"%computername%" BIOS Get Manufacturer
WMIC /node:"%computername%" computersystem Get Model
WMIC /node:"%computername%" BIOS Get version
@ECHO --------------------------------------------------------------------
@ECHO Press any key to do new search...
@ECHO --------------------------------------------------------------------
pause >nul
Goto Top

Documentum DQL Queries

Custom DQL query to show users' login and logout times.

  1. To run a DQL query, first log in to your content server.
  2. Select IDQL for the repository.
  3. Enter your username and password with DQL access.
  4. Run the query:
    • select event_name,user_name,time_stamp,session_id,user_id from dm_audittrail where event_name = 'dm_connect' and user_name not like 'dm%' and time_stamp between date('06/30/2014') and date('07/08/2014')
      union all
      select event_name,user_name,time_stamp,session_id,user_id from dm_audittrail where event_name = 'dm_disconnect' and user_name not like 'dm%' and time_stamp between date('06/30/2014') and date('07/08/2014') order by 4,1
      GO
  5. You will get the output in the cmd window.
  6. If you wish to output the values to an Excel/CSV file, you can run the query via the command prompt with the output redirected to a CSV file. To do this, run idql32.exe from the command prompt:
    1. Open the command prompt and type:
      • idql32.exe -u(username) -p(password) (docbase) >>c:\users\(username)\desktop\dqloutput.csv
      • idql32.exe -ujose -pMYPASS MYREPOSITORY >>c:\users\jderas\desktop\dqloutput.csv
    2. Run the query.
    3. All values will be saved in your dqloutput.csv file.

[DFC_CONT_CANT_ACCESS_FILE]

Issue: When trying to load a file in Documentum EPFM, you get the following error:

[DFC_CONT_CANT_ACCESS_FILE] Cannot access file //filserver/file.pdf

Unexpected critical error occurred whilst attempting to process this load

Screenshot: dfc_critical_error

Solution:

  1. Make sure the file exists and the location is correct.
  2. Check the extension and make sure it is correct.
    1. For example, if the file on the file server is test.pdf but your loading document lists it as .PDF, change the file name to match.
  3. Retry loading the document.

Unexpected Critical Error occurred whilst attempting to process this load

When trying to load a document after validation in Documentum EPFM, you might come across the following error:

Unexpected Critical Error occurred whilst attempting to process this load

An error occurred during temporary file import

Screenshot: critical_error

  1. First check the load spreadsheet and make sure there are no validation errors that might make Documentum throw this error.
  2. If all documents are validated without any issues, the issue is between the app server and the file server.
  3. If you know where the file server is located, make sure the folder and all subfolders have the correct permissions. If you don't know how to check, see this post to find out.
  4. Specifically, the folder should give the dmadmin account and the user uploading the file full permissions.
  5. If they do have those rights, the quickest way to test is to replace all child permissions from the top-level folder.
    • This will correct all permissions from the top-level folder down.
  6. Once you adjust the permissions, try loading the document again.