import oauth2client
import oauth2client.file
import oauth2client.client
+import googleapiclient.errors
#TODO: decide how these OAuth client credentials should be distributed/shipped — they are currently hard-coded below
gcal_client_id = '805127902516-ptbbtgpq9o8pjr6r3k6hsm60j589o85u.apps.googleusercontent.com'
gcal_acct.events().delete(calendarId=gcal_id,
eventId=events[ev_id].gcal_link,
sendUpdates="none").execute()
+
def update_ex_to_gcal(ex_acct, gcal_acct, gcal_tz, events, changed,
                      gcal_id="primary"):
    """Push changed Exchange events to their linked Google Calendar entries.

    For each item id in *changed*, the Exchange event is re-fetched and, if
    it is not recurring, converted to a Google Calendar body and written over
    the existing linked gcal event (no attendee notifications are sent).
    Recurring events are not yet supported and only produce a warning.

    Args:
        ex_acct: Exchange account whose ``calendar`` folder holds the events.
        gcal_acct: Google Calendar API service object.
        gcal_tz: timezone passed through to the gcal body builder.
        events: event cache (unused here; kept for a uniform call signature).
        changed: iterable of Exchange item ids to push.
        gcal_id: target Google calendar id (defaults to the primary calendar).
    """
    for item_id in changed:
        ex_event = get_ex_event_by_itemid(ex_acct.calendar, item_id)
        if ex_event.is_recurring:
            logger.warning("recurring events not yet supported")
            continue
        body = build_gcal_event_from_ex(ex_event, gcal_tz)
        # Overwrite the linked event in place; sendUpdates="none" suppresses
        # attendee notification emails.
        body = gcal_acct.events().update(calendarId=gcal_id,
                                         eventId=ex_event.gcal_link,
                                         body=body,
                                         sendUpdates="none").execute()
+
def match_ex_to_gcal(ex_acct, gcal_acct, gcal_tz, events, gcal_id="primary"):
    """Link not-yet-linked Exchange events to existing Google Calendar events.

    For every non-recurring Exchange event that has no ``gcal_link`` yet,
    search the Google calendar for events in the same start/end window and
    match on (stripped) summary == subject.  On a match, the gcal event id is
    stored on the Exchange event (and mirrored into the *events* cache), and
    the gcal event is tagged with the Exchange item id via a shared extended
    property so the link survives on the Google side.

    Args:
        ex_acct: Exchange account whose ``calendar`` folder holds the events.
        gcal_acct: Google Calendar API service object.
        gcal_tz: timezone (unused here; kept for a uniform call signature).
        events: cache mapping item_id -> cache record; matched entries are
            updated in place with the new changekey and gcal_link.
        gcal_id: Google calendar id to search (defaults to the primary one).
    """
    recur = 0
    matched = 0
    skipped = 0
    for ev_id in events:
        event = get_ex_event_by_itemid(ex_acct.calendar, ev_id)
        if event.is_recurring:
            recur += 1
            continue
        elif event.gcal_link is not None:
            # Already linked on a previous run.
            skipped += 1
            continue
        matches = gcal_acct.events().list(calendarId=gcal_id,
                                          timeMin=event.start.isoformat(),
                                          timeMax=event.end.isoformat()).execute()
        for ge in matches['items']:
            # Google events may lack a summary entirely; treat that as "".
            if ge.get('summary', '').strip() == event.subject.strip():
                logger.info("Matching '%s' starting at %s" % (event.subject,
                                                             event.start.isoformat()))
                event.gcal_link = ge['id']
                # NOTE(review): cleared so the partial save below does not
                # trip on this field — confirm against exchangelib behavior.
                event.is_response_requested = None
                event.save(update_fields=["gcal_link"])
                events[event.item_id] = events[event.item_id]._replace(changekey=event.changekey,
                                                                       gcal_link=event.gcal_link)
                # Tag the gcal event with the Exchange id so the link is
                # also discoverable from the Google side.
                gevent = {}
                gevent["start"] = ge["start"]
                gevent["end"] = ge["end"]
                gevent["extendedProperties"] = {"shared": {"ex_id": event.item_id}}
                try:
                    gcal_acct.events().update(calendarId=gcal_id,
                                              eventId=event.gcal_link,
                                              body=gevent,
                                              sendUpdates="none").execute()
                except googleapiclient.errors.HttpError as err:
                    # 403 means we don't own the event and cannot tag it;
                    # the local link is still recorded, so carry on.  Any
                    # other HTTP error is a real failure — previously it was
                    # silently swallowed here; now it propagates.
                    if err.resp.status != 403:
                        raise
                matched += 1
                break
    logger.info("Matched %d events, skipped %d with existing link, and %d recurring ones" % (matched, skipped, recur))
def get_gcal_cred():
#each such file can only store a single credential
#delete op needs the "cache" set, as that has the link ids in
#for events that are now deleted
del_ex_to_gcal(ex_account,gcal_account,cache,deleted)
+ update_ex_to_gcal(ex_account,gcal_account,gcal_tz,current,changed)
+ else:
+ match_ex_to_gcal(ex_account,gcal_account,gcal_tz,current)
with open(cachepath,"wb") as f:
pickle.dump(current,f)