|
| 1 | +#!/usr/bin/env -S vala workbench.vala --pkg libadwaita-1 --pkg libsoup-3.0 --pkg json-glib-1.0 |
| 2 | + |
// Recoverable failures raised while fetching or decoding the featured article.
public errordomain FetchError {
    FAILED_REQUEST,   // the HTTP request did not complete with status 200
    FAILED_TO_PARSE   // the JSON response was missing an expected member
}
| 7 | + |
// Widgets looked up from the Workbench UI definition in main ();
// assigned once at startup and written to by parse_json_response ().
private Gtk.TextView article_text_view;
private Gtk.Label article_title;
| 10 | + |
/**
 * Entry point: resolves the UI widgets from the Workbench builder and
 * kicks off the fetch of today's featured Wikipedia article.
 *
 * Any error from the fetch is logged rather than propagated, since there
 * is no caller above main () to recover from it.
 */
public async void main () {
    article_text_view = (Gtk.TextView) workbench.builder.get_object ("article_text_view");
    article_title = (Gtk.Label) workbench.builder.get_object ("article_title");
    try {
        yield fetch_wikipedia_todays_featured_article ();
    } catch (Error e) {
        // critical () is printf-style: pass the message as a "%s" argument so
        // a stray '%' in the error text cannot be misread as a format specifier.
        critical ("%s", e.message);
    }
}
| 20 | + |
/**
 * Fetches today's featured article from the Wikimedia Feed API and hands
 * the JSON body to parse_json_response () for display.
 *
 * @param cancellable optional handle to abort the in-flight HTTP request
 * @throws FetchError.FAILED_REQUEST when the server does not answer 200 OK
 * @throws Error on network, stream, or JSON failures
 */
private async void fetch_wikipedia_todays_featured_article (Cancellable? cancellable = null) throws Error {
    var date = new DateTime.now_local ();
    var http_session = new Soup.Session ();

    // https://api.wikimedia.org/wiki/Feed_API/Reference/Featured_content
    string language = "en";
    string url =
        @"https://api.wikimedia.org/feed/v1/wikipedia/$language/featured/$(date.format ("%Y/%m/%d"))";

    var message = new Soup.Message ("GET", url);

    Bytes message_bytes = yield http_session.send_and_read_async (message, Priority.DEFAULT, cancellable);
    if (message.status_code != Soup.Status.OK) {
        throw new FetchError.FAILED_REQUEST (@"Failed Request. HTTP Status: $(message.status_code)");
    }

    // The response body is not guaranteed to be NUL-terminated, so copy
    // exactly data.length bytes instead of relying on a terminator
    // (substring with an explicit length duplicates only that range).
    unowned uint8[] data = message_bytes.get_data ();
    string json_data = ((string) data).substring (0, data.length);
    parse_json_response (json_data);
}
| 41 | + |
| 42 | + |
/**
 * Extracts the featured article's title and body text from the Feed API
 * response and writes them into the UI widgets.
 *
 * The JSON response must have the following structure:
 * {
 *   "tfa": {
 *     "extract": "Content of the Article",
 *     "titles": {
 *       "normalized": "Normalized Title",
 *       ...
 *     },
 *     ...
 *   },
 *   ...
 * }
 *
 * @param json raw JSON document returned by the Feed API
 * @throws FetchError.FAILED_TO_PARSE when an expected member is missing
 * @throws Error when the input is not valid JSON
 */
private void parse_json_response (string json) throws Error {
    // https://valadoc.org/json-glib-1.0/Json.Parser.html
    var parser = new Json.Parser ();
    parser.load_from_data (json);

    // get_root () may yield null (or a non-object node) for degenerate input;
    // fail cleanly instead of crashing on get_object ().
    Json.Node? root_node = parser.get_root ();
    if (root_node == null || root_node.get_node_type () != Json.NodeType.OBJECT) {
        throw new FetchError.FAILED_TO_PARSE ("`tfa` not found");
    }
    // https://valadoc.org/json-glib-1.0/Json.Object.html
    Json.Object root_object = root_node.get_object ();

    // Guard with has_member () before every get_*_member () call: json-glib
    // logs a runtime critical when asked for an absent member, even though
    // it also returns null.
    if (!root_object.has_member ("tfa")) {
        throw new FetchError.FAILED_TO_PARSE ("`tfa` not found");
    }
    Json.Object tfa = root_object.get_object_member ("tfa");

    if (!tfa.has_member ("extract")) {
        throw new FetchError.FAILED_TO_PARSE ("Article's `extract` not found");
    }

    if (!tfa.has_member ("titles")) {
        throw new FetchError.FAILED_TO_PARSE ("Article's `titles` not found");
    }
    Json.Object titles = tfa.get_object_member ("titles");
    if (!titles.has_member ("normalized")) {
        throw new FetchError.FAILED_TO_PARSE ("Article's `titles` not found");
    }

    // Validate everything above before touching the UI so a malformed
    // response cannot leave the widgets half-updated.
    article_text_view.buffer.text = tfa.get_string_member ("extract");
    article_title.label = titles.get_string_member ("normalized");
}
0 commit comments