import json
from datetime import datetime, timedelta
from time import sleep

import requests
class TheTie:
    """Minimal client for The TIE terminal API (news feeds, coins, sources, tags)."""

    def __init__(self):
        self.base_url = 'https://terminal.thetie.io/v1'
        self.api_key = 'api_key_here'  # Fetch from env/credentials

    def _auth_headers(self):
        # Every endpoint authenticates with the same bearer token.
        return {'Authorization': 'Bearer ' + self.api_key}

    # Call this on initial setup and every so often in order to stay synced
    # with our additions/deletions
    def sync_entities(self):
        self.sync_sources()
        self.sync_collections()
        self.sync_coins_to_my_coinset()
        self.sync_tags()

    # GET /news_feed_subscriptions
    def get_news_feed_ids(self):
        """Print the name and id of every news feed you are subscribed to."""
        response = requests.get(self.base_url + '/news_feed_subscriptions',
                                headers=self._auth_headers())
        news_feed_subscriptions = response.json()['data']
        # Print out the IDs or store them, so you know which IDs to pass into the /news call
        for nfs in news_feed_subscriptions:
            print(nfs['news_feed']['name'], ':', nfs['news_feed_id'])

    # GET /news
    def get_news(self, news_feed_id, start_datetime=None, end_datetime=None, limit=50):
        """Fetch one page of news items for a feed (datetime descending).

        start_datetime / end_datetime are naive datetime objects or None.
        """
        # BUG FIX: requests.get has no `query` keyword; URL query parameters
        # must be passed via `params`, otherwise all filters were ignored.
        response = requests.get(self.base_url + '/news',
                                params={
                                    'news_feed_id': news_feed_id,
                                    'start_datetime': start_datetime.strftime("%Y-%m-%d %H:%M:%S") if start_datetime else None,
                                    'end_datetime': end_datetime.strftime("%Y-%m-%d %H:%M:%S") if end_datetime else None,
                                    'limit': limit
                                },
                                headers=self._auth_headers())
        return response.json()['data']

    # To paginate historical news
    # Response is datetime descending, so paginate backwards
    # Datetime params assume datetime objects
    def get_news_with_pagination(self, news_feed_id, start_datetime=None, end_datetime=None, limit=500):
        """Walk a feed backwards from end_datetime to start_datetime,
        persisting each page via store_news().

        Raises ValueError if start_datetime is missing or not before end_datetime.
        """
        # BUG FIX: the old `end_datetime=datetime.utcnow()` default was
        # evaluated once at import time; resolve it per call instead.
        if end_datetime is None:
            end_datetime = datetime.utcnow()
        # BUG FIX: raising a bare string is a TypeError in Python 3;
        # raise a real exception class.
        if start_datetime is None or start_datetime >= end_datetime:
            raise ValueError('start_datetime needs to be before end_datetime')
        # BUG FIX: the old condition (`while end_datetime <= start_datetime`)
        # was inverted, so the loop body never ran.
        while end_datetime > start_datetime:
            data = self.get_news(news_feed_id, start_datetime=start_datetime,
                                 end_datetime=end_datetime, limit=limit)
            if len(data) == 0:
                break
            self.store_news(data)  # Store data along the way in case a request fails
            min_datetime = data[-1]['timestamp']
            # Step back one second past the oldest item we received so the
            # next page doesn't repeat it.
            end_datetime = datetime.strptime(min_datetime, '%Y-%m-%dT%H:%M:%S.%fZ') - timedelta(seconds=1)
            sleep(0.5)  # avoid rate limit (BUG FIX: `sleep` was never imported)

    def store_news(self, data):
        """Persist a page of news items.

        Up to you!
        Tip: make sure to handle a "find or create by" for any tags, sources,
        or collections associated with a news item that you haven't yet synced.
        """
        pass

    def sync_sources(self):
        """Mirror The TIE's source list into your datastore."""
        print('Syncing sources...')
        response = requests.get(self.base_url + '/sources',
                                headers=self._auth_headers())
        sources = response.json()['data']
        for source in sources:
            # Store source["id"] and source["name"]
            pass
        print('Done!')

    def sync_collections(self):
        """Mirror The TIE's collection list into your datastore."""
        print('Syncing collections...')
        response = requests.get(self.base_url + '/collections',
                                headers=self._auth_headers())
        collections = response.json()['data']
        for collection in collections:
            # Store collection["id"] and collection["name"]
            pass
        print('Done!')

    # GET /coins
    def get_coins(self):
        """Return the full list of coins known to The TIE."""
        response = requests.get(self.base_url + '/coins',
                                headers=self._auth_headers())
        return response.json()['data']

    def sync_coins_to_my_coinset(self):
        """Match The TIE's coins against your own coin set and store the UIDs."""
        print('Syncing coins...')
        # Insert code: Retrieve full set of your coins from your database
        coins = self.get_coins()
        for coin in coins:
            # Match up your database's coin with The TIE's coin
            # Techniques for matching:
            # 1. Based on an existing map you have for CMC or Coingecko
            cmc_id = coin['aliases']['coin_market_cap_id']
            coingecko_id = coin['aliases']['coingecko_id']
            # 2. Based on ticker (at risk for overlap with different coins)
            ticker = coin['ticker']
            # 3. Based on name (not perfect, but can help if needed)
            name = coin['name']
            # Then store our UID, which you'll use for future API calls and responses.
            # Our UID will never change for a coin, while name and ticker may.
            uid = coin['uid']
        print('Done!')

    def sync_tags(self):
        """Mirror The TIE's tag groups and tags into your datastore."""
        print('Syncing tags...')
        response = requests.get(self.base_url + '/tags',
                                headers=self._auth_headers())
        tag_groups = response.json()['data']
        for tag_group in tag_groups:
            # Optional whether you want to store anything about the tag_group
            # Ex: Coins, Companies, General Topics, SigDevs, Low Quality
            for tag in tag_group['tags']:
                # Store tag["id"] and tag["name"]
                # BUG FIX: tag_group is a dict, so comparing it to the string
                # 'Coins' was always False; compare its name instead.
                # NOTE(review): assumes each tag_group has a 'name' key —
                # confirm against the /tags response.
                if tag_group.get('name') == 'Coins':
                    # You may want to store the tag["id"] as a tag_id column on your Coins table/object
                    pass
        print('Done')
require 'httparty'
require 'json'
require 'date'
class TheTie
  # Minimal client for The TIE terminal API (news feeds, coins, sources, tags).
  def initialize
    @base_url = 'https://terminal.thetie.io/v1'
    @api_key = 'api_key_here' # Fetch from env/credentials
  end

  # Call this on initial setup and every so often in order to stay synced
  # with our additions/deletions
  def sync_entities
    sync_sources
    sync_collections
    sync_coins_to_my_coinset
    sync_tags
  end

  # GET /news_feed_subscriptions
  # Prints the name and id of every news feed you are subscribed to.
  def get_news_feed_ids
    response = HTTParty.get(@base_url + '/news_feed_subscriptions',
                            headers: auth_headers)
    news_feed_subscriptions = JSON.parse(response.body)['data']
    # Print out the IDs or store them, so you know which IDs to pass into the /news call
    news_feed_subscriptions.each do |nfs|
      puts "#{nfs.dig('news_feed', 'name')}: #{nfs['news_feed_id']}"
    end
  end

  # GET /news
  # Fetches one page of news items for a feed (datetime descending).
  # start_datetime / end_datetime are DateTime objects or nil.
  def get_news(news_feed_id, start_datetime: nil, end_datetime: nil, limit: 50)
    response = HTTParty.get(@base_url + '/news',
                            query: {
                              news_feed_id: news_feed_id,
                              start_datetime: start_datetime&.to_s,
                              end_datetime: end_datetime&.to_s,
                              limit: limit
                            },
                            headers: auth_headers)
    JSON.parse(response.body)['data']
  end

  # To paginate historical news
  # Response is datetime descending, so paginate backwards
  # Datetime params assume DateTime object
  # Raises a RuntimeError unless start_datetime is present and before end_datetime.
  def get_news_with_pagination(news_feed_id, start_datetime: nil, end_datetime: DateTime.now.new_offset(0), limit: 500)
    raise 'start_datetime needs to be before end_datetime' if start_datetime.nil? || start_datetime >= end_datetime

    until end_datetime <= start_datetime
      data = get_news(news_feed_id, start_datetime: start_datetime, end_datetime: end_datetime, limit: limit)
      break if data.empty?

      store_news(data) # Store data along the way in case a request fails
      min_datetime = data.last['timestamp']
      # BUG FIX: DateTime arithmetic counts in days, so `- 1` jumped back a
      # whole DAY and skipped news. Step back one second (1/86400 of a day)
      # past the oldest item so the next page doesn't repeat it — this also
      # matches the Python client's `timedelta(seconds=1)`.
      end_datetime = DateTime.parse(min_datetime) - Rational(1, 86_400)
      sleep(0.5) # avoid rate limit
    end
  end

  # Persist a page of news items.
  # Up to you!
  # Tip: make sure to handle a "find or create by" for any tags, sources, or collections associated...
  # ...with a news item that you haven't yet synced
  def store_news(_data); end

  # Mirror The TIE's source list into your datastore.
  def sync_sources
    puts 'Syncing sources...'
    response = HTTParty.get(@base_url + '/sources',
                            headers: auth_headers)
    sources = JSON.parse(response.body)['data']
    sources.each do |source|
      # Store source["id"] and source["name"]
    end
    puts 'Done!'
  end

  # Mirror The TIE's collection list into your datastore.
  def sync_collections
    puts 'Syncing collections...'
    response = HTTParty.get(@base_url + '/collections',
                            headers: auth_headers)
    collections = JSON.parse(response.body)['data']
    collections.each do |collection|
      # Store collection["id"] and collection["name"]
    end
    puts 'Done!'
  end

  # GET /coins
  # Returns the full list of coins known to The TIE.
  def get_coins
    response = HTTParty.get(@base_url + '/coins',
                            headers: auth_headers)
    JSON.parse(response.body)['data']
  end

  # Match The TIE's coins against your own coin set and store the UIDs.
  def sync_coins_to_my_coinset
    puts 'Syncing coins...'
    # Insert code: Retrieve full set of your coins from your database
    get_coins.each do |coin|
      # Match up your database's coin with The TIE's coin
      # Techniques for matching:
      # 1. Based on an existing map you have for CMC or Coingecko
      cmc_id = coin.dig('aliases', 'coin_market_cap_id')
      coingecko_id = coin.dig('aliases', 'coingecko_id')
      # 2. Based on ticker (at risk for overlap with different coins)
      ticker = coin['ticker']
      # 3. Based on name (not perfect, but can help if needed)
      name = coin['name']
      # Then store our UID, which you'll use for future API calls and responses.
      # Our UID will never change for a coin, while name and ticker may.
      uid = coin['uid']
    end
    puts 'Done!'
  end

  # Mirror The TIE's tag groups and tags into your datastore.
  def sync_tags
    puts 'Syncing tags...'
    response = HTTParty.get(@base_url + '/tags',
                            headers: auth_headers)
    tag_groups = JSON.parse(response.body)['data']
    tag_groups.each do |tag_group|
      # Optional whether you want to store anything about the tag_group
      # Ex: Coins, Companies, General Topics, SigDevs, Low Quality
      tag_group['tags'].each do |_tag|
        # Store tag["id"] and tag["name"]
        # BUG FIX: tag_group is a Hash, so comparing it with the string
        # 'Coins' was always false; compare its name instead.
        # NOTE(review): assumes each tag_group has a 'name' key — confirm
        # against the /tags response.
        if tag_group['name'] == 'Coins'
          # You may want to store the tag["id"] as a tag_id column on your Coins table/object
        end
      end
    end
    puts 'Done'
  end

  private

  # Every endpoint authenticates with the same bearer token.
  def auth_headers
    { 'Authorization' => "Bearer #{@api_key}" }
  end
end