import requests
import json
import time
from datetime import datetime

class TheTie:
    """Minimal client for The TIE terminal API (https://terminal.thetie.io)."""

    def __init__(self):
        self.base_url = 'https://terminal.thetie.io/v1'
        self.api_key = 'api_key_here' # Fetch from env/credentials

    def _auth_headers(self):
        # Shared Authorization header used by every endpoint.
        return { 'Authorization': "Bearer " + self.api_key }

    # GET /coins
    def get_coins(self):
        """Return the full list of coins known to The TIE (the 'data' payload)."""
        response = requests.get(self.base_url + '/coins', headers = self._auth_headers())
        return response.json()['data']

    def sync_coins_to_my_coinset(self):
        """Template for mapping The TIE's coin list onto your own coin set."""
        # Insert code: Retrieve full set of your coins from your database
        for coin in self.get_coins():
            # Match up your database's coin with The TIE's coin
            # Techniques for matching:
            # 1. Based on an existing map you have for CMC or Coingecko
            cmc_id = coin.get('aliases', {}).get('coin_market_cap_id')
            coingecko_id = coin.get('aliases', {}).get('coingecko_id')
            # 2. Based on ticker (at risk for overlap with different coins)
            ticker = coin.get('ticker')
            # 3. Based on name (not perfect, but can help if needed)
            name = coin.get('name')
            # Then store our UID, which you'll use for future API calls and responses.
            # Our UID will never change for a coin, while name and ticker may.
            uid = coin.get('uid')

    # GET /recent
    def get_latest_data(self, coin_uids, items = None):
        """Return the latest metric values for one or more coins.

        coin_uids: a list of UIDs or an already comma-separated string.
        items: comma-separated metric names to return, or None for all.
               Ex: 'daily_sentiment,tweet_volume,price'
        """
        if isinstance(coin_uids, list):
            coin_uids = ','.join(coin_uids)
        response = requests.get(self.base_url + '/recent',
                            params = {
                                'coin_uids' : coin_uids,
                                'items' : items # requests omits None-valued params
                            },
                            headers = self._auth_headers())
        return response.json()['data']

    # GET /historical
    # Datetime params assume datetime objects
    def get_historical_data(self, coin_uid, items = None, start_datetime = None, end_datetime = None, frequency = '1min'):
        """Return historical metric values for a single coin (datetime descending)."""
        response = requests.get(self.base_url + '/historical',
                            params = {
                                'coin_uid' : coin_uid,
                                'items' : items, # optional
                                'start_datetime' : start_datetime.strftime('%Y-%m-%d %H:%M') if start_datetime else None, # optional
                                'end_datetime' : end_datetime.strftime('%Y-%m-%d %H:%M') if end_datetime else None, # optional
                                'frequency' : frequency # optional
                            },
                            headers = self._auth_headers())
        return response.json()['data']

    # To paginate historical data
    # Response is datetime descending, so paginate backwards
    # Datetime params assume datetime objects
    def get_historical_data_with_pagination(self, coin_uid, items = None, start_datetime = None, end_datetime = None, frequency = '1min'):
        """Fetch historical data page by page (newest first), storing each page.

        Raises:
            ValueError: if start_datetime is missing or not before end_datetime.
        """
        if end_datetime is None:
            # Resolve "now" per call. A `datetime.utcnow()` default in the
            # signature would be evaluated once at import time and go stale.
            end_datetime = datetime.utcnow()
        if start_datetime is None or start_datetime >= end_datetime:
            # Raising a bare string is a TypeError in Python 3; raise a real exception.
            raise ValueError('start_datetime needs to be before end_datetime')
        while end_datetime > start_datetime:
            data = self.get_historical_data(coin_uid, items=items, start_datetime=start_datetime, end_datetime=end_datetime, frequency=frequency)
            if not data:
                break
            self.store_historical_data(data) # Store data along the way in case a request fails
            # Last row is the oldest; its timestamp becomes the next page's upper bound.
            min_datetime = data[-1]['datetime']
            # strptime is a classmethod — no throwaway utcnow() instance needed.
            end_datetime = datetime.strptime(min_datetime, '%Y-%m-%dT%H:%M:%S.%fZ')
            time.sleep(0.5) # avoid rate limit

    def store_historical_data(self, data):
        """Persist one page of historical data. Up to you!"""
        print(data)

    # GET /universe
    def get_latest_universe_data(self):
        """Return the latest data across The TIE's full coin universe."""
        response = requests.get(self.base_url + '/universe',
                            headers = self._auth_headers())
        return response.json()['data']
require 'httparty'
require 'json'
require 'date'

# Minimal client for The TIE terminal API (https://terminal.thetie.io).
class TheTie
  def initialize
    @base_url = 'https://terminal.thetie.io/v1'
    @api_key = 'api_key_here' # Fetch from env/credentials
  end

  # GET /coins — full list of coins known to The TIE (the 'data' payload).
  def get_coins
    response = HTTParty.get("#{@base_url}/coins", headers: auth_headers)
    JSON.parse(response.body)['data']
  end

  # Template for mapping The TIE's coin list onto your own coin set.
  def sync_coins_to_my_coinset
    # Insert code: Retrieve full set of your coins from your database
    get_coins.each do |coin|
      # Match up your database's coin with The TIE's coin
      # Techniques for matching:
      # 1. Based on an existing map you have for CMC or Coingecko
      cmc_id = coin.dig('aliases', 'coin_market_cap_id')
      coingecko_id = coin.dig('aliases', 'coingecko_id')
      # 2. Based on ticker (at risk for overlap with different coins)
      ticker = coin['ticker']
      # 3. Based on name (not perfect, but can help if needed)
      name = coin['name']
      # Then store our UID, which you'll use for future API calls and responses.
      # Our UID will never change for a coin, while name and ticker may.
      uid = coin['uid']
    end
  end

  # GET /recent — latest metric values for one or more coins.
  # coin_uids: Array of UIDs or an already comma-separated String.
  # items: comma-separated metric names to return, or nil for all.
  #        Ex: 'daily_sentiment,tweet_volume,price'
  def get_latest_data(coin_uids, items = nil)
    coin_uids = coin_uids.join(',') if coin_uids.is_a? Array
    # BUG FIX: HTTParty takes query-string parameters via `query:`, not
    # `params:` — the previous `params:` option was silently ignored, so
    # coin_uids/items never reached the API (compare get_historical_data).
    response = HTTParty.get("#{@base_url}/recent",
                            query: {
                              coin_uids: coin_uids,
                              items: items
                            },
                            headers: auth_headers)
    JSON.parse(response.body)['data']
  end

  # GET /historical — historical metric values for a single coin
  # (datetime descending). Datetime params assume DateTime objects.
  def get_historical_data(coin_uid, items = nil, start_datetime = nil, end_datetime = nil, frequency = '1min')
    response = HTTParty.get("#{@base_url}/historical",
                            query: {
                              coin_uid: coin_uid,
                              items: items, # optional
                              start_datetime: start_datetime&.strftime('%Y-%m-%d %H:%M'), # optional
                              end_datetime: end_datetime&.strftime('%Y-%m-%d %H:%M'), # optional
                              frequency: frequency # optional
                            },
                            headers: auth_headers)
    JSON.parse(response.body)['data']
  end

  # To paginate historical data.
  # Response is datetime descending, so paginate backwards.
  # Datetime params assume DateTime objects.
  def get_historical_data_with_pagination(coin_uid, items = nil, start_datetime = nil, end_datetime = DateTime.now.new_offset(0), frequency = '1min')
    # ArgumentError is the idiomatic class for a bad argument; a bare string
    # raise only produces an anonymous RuntimeError.
    raise ArgumentError, 'start_datetime needs to be before end_datetime' if start_datetime.nil? || start_datetime >= end_datetime

    until end_datetime <= start_datetime
      data = get_historical_data(coin_uid, items, start_datetime, end_datetime, frequency)
      break if data.empty?

      store_historical_data(data) # Store data along the way in case a request fails
      # Last row is the oldest; its timestamp becomes the next page's upper bound.
      min_datetime = data.last['datetime']
      end_datetime = DateTime.parse(min_datetime)
      sleep(0.5) # avoid rate limit
    end
  end

  # Persist one page of historical data. Up to you!
  def store_historical_data(_data); end

  # GET /universe — latest data across The TIE's full coin universe.
  def get_latest_universe_data
    response = HTTParty.get("#{@base_url}/universe", headers: auth_headers)
    JSON.parse(response.body)['data']
  end

  private

  # Shared Authorization header used by every endpoint.
  def auth_headers
    { 'Authorization' => "Bearer #{@api_key}" }
  end
end