import functools
import json
import sys

import spacy

@functools.lru_cache(maxsize=None)
def _load_model():
    """Load the spaCy English model exactly once and cache it.

    spacy.load() is expensive (disk I/O + model deserialization), so the
    original per-call load made every invocation pay that cost; caching
    makes repeated calls to process_sentence() fast.
    """
    return spacy.load('en_core_web_sm')


def process_sentence(sentence):
    """Analyze *sentence* with spaCy and return token annotations as JSON.

    Args:
        sentence: Raw text to tokenize and tag.

    Returns:
        A JSON string containing three parallel arrays, one entry per
        token: "textarray" (token text), "posarray" (coarse POS tags),
        and "deparray" (dependency relation labels).
    """
    doc = _load_model()(sentence)

    # Parallel per-token annotation arrays (same length, same order).
    textarray = [token.text for token in doc]
    posarray = [token.pos_ for token in doc]
    deparray = [token.dep_ for token in doc]

    return json.dumps({
        "textarray": textarray,
        "posarray": posarray,
        "deparray": deparray
    })

if __name__ == "__main__":
    # Require exactly one argument: the sentence to analyze
    # (e.g. "this tiny frog is a jumping master").
    if len(sys.argv) < 2:
        # Exit with a usage message instead of an opaque IndexError.
        sys.exit(f"usage: {sys.argv[0]} <sentence>")

    # Process the sentence and print the resulting JSON to stdout.
    print(process_sentence(sys.argv[1]))
