path: root/src/main/Analyzor.java
package main;

import analysis.BrandChecker;
import database.NamedPreparedStatement;
import database.QueryUtils;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import java.util.HashMap;
import java.util.Map.Entry;
import java.util.Scanner;

/**
 * The sentiment analysis class that rates tweets based on a unigram and bigram
 * set of weights.
 */
public class Analyzor {

    /**
     * The map that matches single words to their weights.
     */
    private final HashMap<String, Double> unimap = new HashMap<>();

    /**
     * The map that matches word pairs to their weights.
     */
    private final HashMap<String, Double> bimap = new HashMap<>();

    /**
     * The result set of the most recent query, set by query().
     */
    private ResultSet data;

    /**
     * The persistent connection to the database.
     */
    private final Connection connection;

    /**
     * @param connection An open connection to the database.
     */
    public Analyzor(Connection connection) {
        this.connection = connection;
    }
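
    /*
     * A minimal usage sketch (the JDBC URL and credentials are placeholders,
     * not taken from this project):
     *
     *   Connection connection = DriverManager.getConnection(
     *           "jdbc:postgresql://localhost/tweets", "user", "password");
     *   Analyzor analyzor = new Analyzor(connection);
     *   //the query must return tweetid, brand and text columns
     *   analyzor.sentimentAnalysis("...");
     */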

    /**
     * Read the unigram and bigram lexica.
     *
     * @throws FileNotFoundException
     */
    public void readLexicon() throws FileNotFoundException {
        if (!unimap.isEmpty()) {
            // data is already read.
            return;
        }
        System.err.println("Trying to read lexicons...");
        // A unigram line has the format (WS = whitespace):
        // word <WS> rating <WS> ??? <WS> ??
        // A bigram line has two WS-separated words instead of one.
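        // An illustrative unigram line (the word and weight are invented for
        // this example; only the first two fields are read, the rest of the
        // line is skipped):
        //   good    1.9    ...    ...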
        try (Scanner uniScanner = new Scanner(new File("unigrams-pmilexicon.txt"));
                Scanner biScanner = new Scanner(new File("bigrams-pmilexicon.txt"))) {
            //Fill the map of unigrams
            while (uniScanner.hasNext()) {
                String words = uniScanner.next();
                Double d = Double.valueOf(uniScanner.next());
                unimap.put(words.toLowerCase(), d);
                //only the word and its weight are used; skip the rest of the line
                if (uniScanner.hasNextLine()) {
                    uniScanner.nextLine();
                }
            }

            //fill the map of bigrams
            while (biScanner.hasNext()) {
                String words = biScanner.next() + " " + biScanner.next();
                bimap.put(words.toLowerCase(), Double.valueOf(biScanner.next()));
                if (biScanner.hasNextLine()) {
                    biScanner.nextLine();
                }
            }
        }
        System.err.println("Lexicons are read.");
    }

    /**
     * Executes a query that the analyzer can analyze.
     *
     * @param query The query string to execute.
     * @throws SQLException If the query fails or the database connection is unavailable.
     */
    public void query(String query) throws SQLException {
        PreparedStatement statement;
        //make a connection to the database and execute the query
        statement = connection.prepareStatement(query);
        data = statement.executeQuery();
    }

    /**
     * Run a sentiment analysis and fill the database with the output.
     *
     * @param query The sql text for the query.
     * @throws SQLException
     * @throws IOException
     */
    public void sentimentAnalysis(String query) throws SQLException, IOException {
        query(query);

        //read the lexicons
        readLexicon();

        //make sure a result set is available
        if (data == null) {
            System.err.println("data is empty, try querying first");
            return;
        }

        Double value;
        String text;

        //prepare the insert statement once and reuse it for every tweet
        NamedPreparedStatement m_insertRating
                = new NamedPreparedStatement(connection, QueryUtils.insertRating);

        //for all tuples
        while (data.next()) {
            //get the text
            text = data.getString("text");
            text = splitPunctToWords(text);
            String[] words = text.split("\\s+"); //the tweet text split into separate words
            double positiverate = 0; //the sentiment rating of this tweet

            // Rate the text with unigrams
            for (String word : words) {
                value = unimap.get(word);
                if (value != null) {
                    positiverate += value;
                }
            }
            // Rate the text with bigrams
            for (int i = 0; i < words.length - 1; i++) {
                String pair = words[i] + " " + words[i + 1];
                value = bimap.get(pair);
                if (value != null) {
                    positiverate += value;
                }
            }
            //insert the rating into the database
            QueryUtils.setInsertParams(m_insertRating, data.getLong("tweetid"), data.getString("brand"), (int) (positiverate * 10));
            m_insertRating.executeUpdate();
            //debug output, disabled:
            //System.out.println(text + ": " + (int) (positiverate * 10));
        }
    }
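
    /*
     * Scoring example with invented weights: if unimap maps "good" to 1.9 and
     * bimap maps "good morning" to 2.5, the text "good morning" rates
     * 1.9 + 2.5 = 4.4 and is stored as the integer rating 44.
     */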

    /**
     * Make a wordcloud of the results of some query.
     *
     * @param query The sql text for a query.
     * @throws SQLException
     * @throws FileNotFoundException
     * @throws UnsupportedEncodingException
     */
    public void makeWordCloud(String query) throws SQLException, FileNotFoundException, UnsupportedEncodingException {

        query(query);
        //make sure a result set is available
        if (data == null) {
            System.err.println("data is empty, try querying first");
            return;
        }

        String text;
        String[] words;
        HashMap<String, Integer> wordcloud = new HashMap<>();

        while (data.next()) {
            //get the text
            text = data.getString("text");
            //remove punctuation, convert to lowercase and split on words
            text = removePunct(text);
            text = text.toLowerCase();
            words = text.split("\\s+");
            
            for (String word : words) {
                if (wordcloud.containsKey(word)) {
                    //increment the count of a word that was seen before
                    wordcloud.put(word, wordcloud.get(word) + 1);
                } else {
                    wordcloud.put(word, 1);
                }
            }
        }
        //print the words and their frequency in a csv file
        try (PrintWriter writer = new PrintWriter("wordcloud.csv", "UTF-8")) {
            writer.println("word,count");
            for (Entry<String, Integer> e : wordcloud.entrySet()) {
                writer.println(e.getKey() + "," + e.getValue());
            }
        }
        System.out.println("wordcloud.csv created, place it next to wordcloud.html and open that page");
    }
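
    /*
     * The resulting wordcloud.csv looks like this (the counts are invented):
     *
     *   word,count
     *   phone,12
     *   battery,7
     */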

    /**
     * Generate a CSV file of the query results for Disco.
     *
     * @param query The sql text for the query.
     * @throws SQLException
     * @throws FileNotFoundException
     * @throws UnsupportedEncodingException
     */
    public void disco(String query) throws SQLException, FileNotFoundException, UnsupportedEncodingException {
        //do the query
        query(query);
        if (data == null) {
            System.err.println("data is empty, try querying first");
            return;
        }
        int columnCount = data.getMetaData().getColumnCount();
        PrintWriter writer = new PrintWriter("output.csv", "UTF-8");
        //print the header row
        for (int i = 1; i < columnCount; i++) {
            writer.print(data.getMetaData().getColumnLabel(i) + ", ");
        }
        writer.println(data.getMetaData().getColumnLabel(columnCount));
        //print the values; commas and newlines inside a value are replaced by
        //spaces so that every tuple stays on a single csv row
        while (data.next()) {
            for (int i = 1; i < columnCount; i++) {
                if (data.getObject(i) == null) {
                    writer.print(", ");
                } else {
                    writer.print(data.getObject(i).toString().replaceAll("[,\n]", " ") + ", ");
                }
            }
            //the last column has no trailing separator; a null there is written as "0"
            if (data.getObject(columnCount) == null) {
                writer.println("0");
            } else {
                writer.println(data.getObject(columnCount).toString().replace(",", " "));
            }
        }
        writer.close();
    }

    /**
     * Determine which brands each tweet mentions and store them in the
     * mentionsbrand table, replacing that table's previous contents.
     *
     * @throws SQLException
     */
    public void getBrands() throws SQLException {
        PreparedStatement statement;
        //make a connection to the database and execute the query
        statement = connection.prepareStatement("delete from mentionsbrand");
        statement.executeUpdate();
        BrandChecker checker = new BrandChecker("brandrules.txt");
        query("select * from tweet");
        NamedPreparedStatement m_insertBrand = new NamedPreparedStatement(connection, QueryUtils.insertBrand);
        while (data.next()) {
            List<String> brands = checker.getBrands(data.getString("text"));
            if (brands.isEmpty()) {
                QueryUtils.setInsertBrandParams(m_insertBrand, data.getLong("tweetid"), "no");
                m_insertBrand.executeUpdate();
            } else {
                for (String brand : brands) {
                    QueryUtils.setInsertBrandParams(m_insertBrand, data.getLong("tweetid"), brand);
                    m_insertBrand.executeUpdate();
                }
            }

        }
    }

    //inserts spaces around punctuation so the text gets split into separate tokens
    //also removes urls
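    //for example (an invented input, not from the data set):
    //  "Great phone, love it!" becomes "Great phone , love it !"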
    private String splitPunctToWords(String text) {
        text = text.replaceAll("https?://\\S*", "");
        text = text.replaceAll("[!?):;\"']", " $0");
        text = text.replaceAll("[.,-](\\s|$)", " $0");
        text = text.replaceAll("\\s[(\"']", "$0 ");
        return text;
    }

    //removes punctuation and non-ASCII characters
    //also removes urls
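    //for example (an invented input, not from the data set):
    //  "Don't worry!" becomes "Don t worry "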
    private String removePunct(String text) {
        text = text.replaceAll("https?://\\S*", "");
        text = text.replaceAll("[.,!?();\"'-]", " ");
        text = text.replaceAll("[^\\x00-\\x7F]", " ");
        return text;
    }
}